Version 1.18.0
diff --git a/.gitignore b/.gitignore
index e66367d..f839f5f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,15 +1,10 @@
-# These ignores are auto-generated from the canonical svn ignore metadata
-# using:
-#
-#  $ git svn create-ignore
-#
-# Whenever possible, prefer creating ignores in svn and then generating them
-# to keep git in sync.
+# Build artifacts and dependencies.
 /.children
 /.project
 /Makefile
+/base
 /benchmarks
-/build
+/buildtools
 /ipch
 /out
 /xcodebuild
@@ -27,9 +22,6 @@
 *.vcxproj.filters
 /*.vcxproj.user
 
-# End generated ignores. The following are hand-added because the svn:ignore
-# stuff doesn't handle them well.
-
 # Gyp generated files
 *.xcodeproj
 *.intermediate
diff --git a/CHANGELOG.md b/CHANGELOG.md
index a649682..26425b5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,11 @@
+## 1.18.0 - 2016-07-27
+
+### Core library changes
+
+* `dart:io`
+  * Adds file locking modes `FileLock.BLOCKING_SHARED` and
+    `FileLock.BLOCKING_EXCLUSIVE`.
+
 ## 1.17.1 - 2016-06-10
 
 Patch release, resolves two issues:
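The `dart:io` change noted in the CHANGELOG above adds blocking variants of the
existing file-lock modes. A minimal sketch, assuming the usual
`RandomAccessFile.lockSync` API; the file name and the contents of the critical
section are illustrative, not taken from the SDK:

  import 'dart:io';

  void main() {
    // Open (or create) a hypothetical lock file for writing.
    RandomAccessFile raf = new File('app.lock').openSync(mode: FileMode.WRITE);
    // FileLock.EXCLUSIVE throws if the lock cannot be acquired immediately;
    // the new BLOCKING_EXCLUSIVE mode instead waits until it is available.
    raf.lockSync(FileLock.BLOCKING_EXCLUSIVE);
    try {
      // ... work that requires exclusive access to the file ...
    } finally {
      raf.unlockSync();
      raf.closeSync();
    }
  }

FileLock.BLOCKING_SHARED behaves the same way for shared (read) locks.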
diff --git a/DEPS b/DEPS
index 1b74da6..cbcfa64 100644
--- a/DEPS
+++ b/DEPS
@@ -21,12 +21,18 @@
   "github_mirror":
       "https://chromium.googlesource.com/external/github.com/dart-lang/%s.git",
 
+  # Chromium git
+  "chromium_git": "https://chromium.googlesource.com",
+
   # Only use this temporarily while waiting for a mirror for a new package.
   "github_dartlang": "https://github.com/dart-lang/%s.git",
 
   "gyp_rev": "@6ee91ad8659871916f9aa840d42e1513befdf638",
-  "co19_rev": "@3ed795ea02e022ef19c77cf1b6095b7c8f5584d0",
-  "chromium_git": "https://chromium.googlesource.com",
+  "co19_rev": "@3f0a4bc9a080a792cdf5f093147a900f99ea301f",
+
+  # Revisions of GN/Mojo/Flutter related dependencies.
+  "base_revision": "@672b04e54b937ec899429a6bd5409c5a6300d151",
+  "buildtools_revision": "@565d04e8741429fb1b4f26d102f2c6c3b849edeb",
 
   # Revisions of /third_party/* dependencies.
   "args_tag": "@0.13.4",
@@ -41,28 +47,30 @@
   "charcode_tag": "@1.1.0",
   "chrome_rev" : "@19997",
   "cli_util_tag" : "@0.0.1+2",
-  "collection_tag": "@1.6.0",
-  "convert_tag": "@1.0.0",
-  "crypto_tag" : "@1.1.0",
+  "code_transformers_rev": "@bfe9799e88d9c231747435e1c1d2495ef5ecd966",
+  "collection_tag": "@1.8.0",
+  "convert_tag": "@2.0.0",
+  "crypto_tag" : "@2.0.1",
   "csslib_tag" : "@0.12.0",
   "dart2js_info_rev" : "@0a221eaf16aec3879c45719de656680ccb80d8a1",
   "dart_services_rev" : "@7aea2574e6f3924bf409a80afb8ad52aa2be4f97",
   "dart_style_tag": "@0.2.4",
-  "dartdoc_tag" : "@v0.9.0",
-  "dev_compiler_rev": "@adda29b31dc8fe2f8d1a5c1304384988503384c8",
+  "dartdoc_tag" : "@v0.9.6+2",
+  "dev_compiler_rev": "@7e9708eb5e9f3fcdc68b9af039d78cf39ce502b7",
   "fixnum_tag": "@0.10.5",
   "func_rev": "@8d4aea75c21be2179cb00dc2b94a71414653094e",
   "glob_rev": "@704cf75e4f26b417505c5c611bdaacd8808467dd",
   "html_tag" : "@0.12.1+1",
   "http_multi_server_tag" : "@2.0.0",
-  "http_parser_tag" : "@1.1.0",
+  "http_parser_tag" : "@3.0.2",
   "http_tag" : "@0.11.3+3",
   "http_throttle_rev" : "@a81f08be942cdd608883c7b67795c12226abc235",
   "idl_parser_rev": "@7fbe68cab90c38147dee4f48c30ad0d496c17915",
-  "intl_rev": "@a8b480b9c436f6c0ec16730804c914bdb4e30d53",
+  "initialize_rev": "@595d501a92c3716395ad2d81f9aabdb9f90879b6",
+  "intl_tag": "@0.13.0",
   "jinja2_rev": "@2222b31554f03e62600cd7e383376a7c187967a1",
   "json_rpc_2_tag": "@2.0.0",
-  "linter_rev": "@ccd8dbf7562b7645dc8c54a578b78b38970c71d6",
+  "linter_rev": "@7ca3aab6ca45b988440e425c187993a533fbe27e",
   "logging_rev": "@85d83e002670545e9039ad3985f0018ab640e597",
   "markdown_rev": "@4aaadf3d940bb172e1f6285af4d2b1710d309982",
   "matcher_tag": "@0.12.0",
@@ -70,9 +78,9 @@
   "mime_rev": "@75890811d4af5af080351ba8a2853ad4c8df98dd",
   "mustache4dart_rev" : "@5724cfd85151e5b6b53ddcd3380daf188fe47f92",
   "oauth2_tag": "@1.0.0",
-  "observatory_pub_packages_rev": "@cf90eb9077177d3d6b3fd5e8289477c2385c026a",
+  "observatory_pub_packages_rev": "@e5e1e543bea10d4bed95b22ad3e7aa2b20a23584",
   "observe_rev": "@eee2b8ec34236fa46982575fbccff84f61202ac6",
-  "package_config_rev": "@0.1.3",
+  "package_config_rev": "@0.1.5",
   "path_tag": "@1.3.6",
   "plugin_tag": "@0.2.0",
   "ply_rev": "@604b32590ffad5cbb82e4afef1d305512d06ae93",
@@ -85,8 +93,8 @@
   "resource_rev":"@a49101ba2deb29c728acba6fb86000a8f730f4b1",
   "root_certificates_rev": "@aed07942ce98507d2be28cbd29e879525410c7fc",
   "scheduled_test_tag": "@0.12.5+2",
-  "shelf_static_tag": "@0.2.3+1",
-  "shelf_tag": "@0.6.5",
+  "shelf_static_tag": "@0.2.3+4",
+  "shelf_tag": "@0.6.5+2",
   "shelf_web_socket_tag": "@0.2.0",
   "smoke_rev" : "@f3361191cc2a85ebc1e4d4c33aec672d7915aba9",
   "source_map_stack_trace_tag": "@1.0.4",
@@ -98,13 +106,13 @@
   "string_scanner_tag": "@0.1.4",
   "sunflower_rev": "@879b704933413414679396b129f5dfa96f7a0b1e",
   "test_reflective_loader_tag": "@0.0.3",
-  "test_tag": "@0.12.12",
+  "test_tag": "@0.12.13+5",
   "typed_data_tag": "@1.1.2",
   "usage_rev": "@b5080dac0d26a5609b266f8fdb0d053bc4c1c638",
   "utf_rev": "@1f55027068759e2d52f2c12de6a57cce5f3c5ee6",
   "watcher_tag": "@0.9.7+2",
   "web_components_rev": "@6349e09f9118dce7ae1b309af5763745e25a9d61",
-  "web_socket_channel_tag": "@1.0.0",
+  "web_socket_channel_tag": "@1.0.4",
   "WebCore_rev": "@a86fe28efadcfc781f836037a80f27e22a5dad17",
   "when_tag": "@0.2.0+2",
   "which_tag": "@0.1.3+1",
@@ -117,6 +125,14 @@
   Var("dart_root") + "/third_party/gyp":
       Var('chromium_git') + '/external/gyp.git' + Var("gyp_rev"),
 
+  # Stuff needed for GN/Mojo/Flutter.
+  Var("dart_root") + "/base":
+     Var('chromium_git') + '/external/github.com/domokit/base' +  Var('base_revision'),
+
+  Var("dart_root") + "/buildtools":
+     Var('chromium_git') + '/chromium/buildtools.git' +
+     Var('buildtools_revision'),
+
   Var("dart_root") + "/tests/co19/src":
       (Var("github_mirror") % "co19") + Var("co19_rev"),
 
@@ -170,6 +186,9 @@
       (Var("github_mirror") % "crypto") + Var("crypto_tag"),
   Var("dart_root") + "/third_party/pkg/csslib":
       (Var("github_mirror") % "csslib") + Var("csslib_tag"),
+  Var("dart_root") + "/third_party/pkg/code_transformers":
+      (Var("github_dartlang") % "code_transformers") +
+      Var("code_transformers_rev"),
   Var("dart_root") + "/third_party/dart-services":
       (Var("github_mirror") % "dart-services") +
       Var("dart_services_rev"),
@@ -199,8 +218,10 @@
   Var("dart_root") + "/third_party/pkg/http_throttle":
       (Var("github_mirror") % "http_throttle") +
       Var("http_throttle_rev"),
+  Var("dart_root") + "/third_party/pkg/initialize":
+      (Var("github_dartlang") % "initialize") + Var("initialize_rev"),
   Var("dart_root") + "/third_party/pkg/intl":
-      (Var("github_mirror") % "intl") + Var("intl_rev"),
+      (Var("github_mirror") % "intl") + Var("intl_tag"),
   Var("dart_root") + "/third_party/pkg/json_rpc_2":
       (Var("github_mirror") % "json_rpc_2") + Var("json_rpc_2_tag"),
   Var("dart_root") + "/third_party/pkg/linter":
diff --git a/README.fuchsia b/README.fuchsia
new file mode 100644
index 0000000..f09ff26
--- /dev/null
+++ b/README.fuchsia
@@ -0,0 +1,41 @@
+This README describes how to build Dart for Fuchsia. It assumes that you have
+built the magenta kernel under //magenta, that its toolchains are under
+//toolchains, and that you have a Dart checkout under //dart. It is early days
+and this process is still crufty; it will improve from here.
+
+1. First, set up some symlinks in your Dart checkout:
+
+  //dart/third_party/fuchsia_tools/toolchains
+      -> symlinked to //toolchains
+  //dart/third_party/fuchsia_tools/sysroot/x86_64/usr
+      -> symlinked to //magenta/build-magenta-qemu-x86-64/sysroot/
+
+  Also, copy the linker script:
+
+  //magenta$ cp kernel/arch/x86/64/user.ld build-magenta-qemu-x86-64/sysroot/
+
+  and similarly for arm64.
+
+2. Build:
+
+  //dart$ tools/build.py -m product -a x64 --os=fuchsia fuchsia_test
+
+  This will produce //dart/out/ProductFuchsiaX64/fuchsia_test
+
+3. Strip it:
+
+  //dart$ third_party/fuchsia_tools/toolchains/x86_64-elf-5.3.0-Linux-x86_64/bin/x86_64-elf-strip out/ProductFuchsiaX64/fuchsia_test -o out/ProductFuchsiaX64/fuchsia_test.stripped
+
+4. Make a file //magenta/fuchsia_test.manifest containing:
+
+  bin/fuchsia_test=//dart/out/ProductFuchsiaX64/fuchsia_test.stripped
+
+  Where //dart is the actual path to your Dart checkout.
+
+5. Make an extra bootfs:
+
+  //magenta$ build-magenta-qemu-x86-64/tools/mkbootfs -o fuchsia_test.bootfs fuchsia_test.manifest
+
+6. Run:
+
+  //magenta$ ./scripts/run-magenta-x86-64 -x fuchsia_test.bootfs
diff --git a/build/OWNERS b/build/OWNERS
new file mode 100644
index 0000000..17d067c
--- /dev/null
+++ b/build/OWNERS
@@ -0,0 +1,5 @@
+cjhopman@chromium.org
+dpranke@chromium.org
+jochen@chromium.org
+scottmg@chromium.org
+thakis@chromium.org
diff --git a/build/PRESUBMIT.py b/build/PRESUBMIT.py
new file mode 100644
index 0000000..fca962f
--- /dev/null
+++ b/build/PRESUBMIT.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+def _RunTests(input_api, output_api):
+  return (input_api.canned_checks.RunUnitTestsInDirectory(
+          input_api, output_api, '.', whitelist=[r'.+_test.py$']))
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return _RunTests(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return _RunTests(input_api, output_api)
diff --git a/build/README.chromium b/build/README.chromium
new file mode 100644
index 0000000..012df35
--- /dev/null
+++ b/build/README.chromium
@@ -0,0 +1,15 @@
+List of property sheets to be included by projects:
+  common.vsprops
+    Not used anymore. No-op. Kept for compatibility with current projects.
+
+  debug.vsprops
+    Enables debug settings. Must be included directly in Debug configuration. Includes internal\essential.vsprops.
+
+  external_code.vsprops
+    Contains settings made to simplify usage of external (non-Google) code. It relaxes the warning levels. Should be included after debug.vsprops or release.vsprops to override their settings.
+
+  output_dll_copy.rules
+    Run to enable automatic copying of DLLs when they are used as input files in a vcproj project.
+
+  release.vsprops
+    Enables release settings. Must be included directly in Release configuration. Includes internal\essential.vsprops. Also includes "internal\release_impl$(CHROME_BUILD_TYPE).vsprops". So the behavior is dependent on the CHROME_BUILD_TYPE environment variable.
diff --git a/build/README.dart b/build/README.dart
new file mode 100644
index 0000000..ac57fbe
--- /dev/null
+++ b/build/README.dart
@@ -0,0 +1,7 @@
+This directory was taken from a snapshot of flutter/engine/src/build/.
+
+The snapshot was taken with a recursive copy `cp -R` of the directory from
+the flutter repository.
+
+The contents are used to support the GN build system.
+
diff --git a/build/all.gyp b/build/all.gyp
new file mode 100644
index 0000000..b36fae6
--- /dev/null
+++ b/build/all.gyp
@@ -0,0 +1,1442 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # A hook that can be overridden in other repositories to add additional
+    # compilation targets to 'All'.
+    'app_targets%': [],
+    # For Android-specific targets.
+    'android_app_targets%': [],
+  },
+  'targets': [
+    {
+      'target_name': 'All',
+      'type': 'none',
+      'xcode_create_dependents_test_runner': 1,
+      'dependencies': [
+        '<@(app_targets)',
+        'some.gyp:*',
+        '../base/base.gyp:*',
+        '../components/components.gyp:*',
+        '../components/components_tests.gyp:*',
+        '../content/content.gyp:*',
+        '../crypto/crypto.gyp:*',
+        '../net/net.gyp:*',
+        '../sdch/sdch.gyp:*',
+        '../sql/sql.gyp:*',
+        '../testing/gmock.gyp:*',
+        '../testing/gtest.gyp:*',
+        '../third_party/icu/icu.gyp:*',
+        '../third_party/libxml/libxml.gyp:*',
+        '../third_party/sqlite/sqlite.gyp:*',
+        '../third_party/zlib/zlib.gyp:*',
+        '../ui/accessibility/accessibility.gyp:*',
+        '../ui/base/ui_base.gyp:*',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/snapshot/snapshot.gyp:*',
+        '../url/url.gyp:*',
+      ],
+      'conditions': [
+        ['OS!="ios" and OS!="mac"', {
+          'dependencies': [
+            '../ui/touch_selection/ui_touch_selection.gyp:*',
+          ],
+        }],
+        ['OS=="ios"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:browser',
+            '../chrome/chrome.gyp:browser_ui',
+            '../ios/ios.gyp:*',
+            # NOTE: This list of targets is present because
+            # mojo_base.gyp:mojo_base cannot be built on iOS, as
+            # javascript-related targets cause v8 to be built.
+            '../mojo/mojo_base.gyp:mojo_common_lib',
+            '../mojo/mojo_base.gyp:mojo_common_unittests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../third_party/mojo/mojo_edk.gyp:mojo_system_impl',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_environment_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_utility_unittests',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_system_unittests',
+            '../third_party/mojo/mojo_public.gyp:mojo_cpp_bindings',
+            '../third_party/mojo/mojo_public.gyp:mojo_public_test_utils',
+            '../third_party/mojo/mojo_public.gyp:mojo_system',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+          ],
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:content_shell_apk',
+            '<@(android_app_targets)',
+            'android_builder_tests',
+            '../tools/telemetry/telemetry.gyp:*#host',
+            # TODO(nyquist) This should instead be a target for sync when all of
+            # the sync-related code for Android has been upstreamed.
+            # See http://crbug.com/159203
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_javalib',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../android_webview/android_webview.gyp:android_webview_apk',
+                '../android_webview/android_webview.gyp:system_webview_apk',
+                '../android_webview/android_webview_shell.gyp:android_webview_shell_apk',
+                '../chrome/android/chrome_apk.gyp:chrome_public_apk',
+                '../chrome/chrome.gyp:chrome_shell_apk',
+                '../chrome/chrome.gyp:chrome_sync_shell_apk',
+                '../remoting/remoting.gyp:remoting_apk',
+              ],
+            }],
+            # TODO: Enable packed relocations for x64. See: b/20532404
+            ['target_arch != "x64"', {
+              'dependencies': [
+                '../third_party/android_platform/relocation_packer.gyp:android_relocation_packer_unittests#host',
+              ],
+            }],
+          ],
+        }, {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:*',
+            # TODO: This should build on Android and the target should move to the list above.
+            '../sync/sync.gyp:*',
+          ],
+        }],
+        ['OS!="ios" and OS!="android" and chromecast==0', {
+          'dependencies': [
+            '../third_party/re2/re2.gyp:re2',
+            '../chrome/chrome.gyp:*',
+            '../chrome/tools/profile_reset/jtl_compiler.gyp:*',
+            '../cc/blink/cc_blink_tests.gyp:*',
+            '../cc/cc_tests.gyp:*',
+            '../device/usb/usb.gyp:*',
+            '../extensions/extensions.gyp:*',
+            '../extensions/extensions_tests.gyp:*',
+            '../gin/gin.gyp:*',
+            '../gpu/gpu.gyp:*',
+            '../gpu/tools/tools.gyp:*',
+            '../ipc/ipc.gyp:*',
+            '../ipc/mojo/ipc_mojo.gyp:*',
+            '../jingle/jingle.gyp:*',
+            '../media/cast/cast.gyp:*',
+            '../media/media.gyp:*',
+            '../media/midi/midi.gyp:*',
+            '../mojo/mojo.gyp:*',
+            '../mojo/mojo_base.gyp:*',
+            '../ppapi/ppapi.gyp:*',
+            '../ppapi/ppapi_internal.gyp:*',
+            '../ppapi/tools/ppapi_tools.gyp:*',
+            '../printing/printing.gyp:*',
+            '../skia/skia.gyp:*',
+            '../sync/tools/sync_tools.gyp:*',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:*',
+            '../third_party/codesighs/codesighs.gyp:*',
+            '../third_party/ffmpeg/ffmpeg.gyp:*',
+            '../third_party/iccjpeg/iccjpeg.gyp:*',
+            '../third_party/libpng/libpng.gyp:*',
+            '../third_party/libusb/libusb.gyp:*',
+            '../third_party/libwebp/libwebp.gyp:*',
+            '../third_party/libxslt/libxslt.gyp:*',
+            '../third_party/lzma_sdk/lzma_sdk.gyp:*',
+            '../third_party/mesa/mesa.gyp:*',
+            '../third_party/modp_b64/modp_b64.gyp:*',
+            '../third_party/npapi/npapi.gyp:*',
+            '../third_party/ots/ots.gyp:*',
+            '../third_party/pdfium/samples/samples.gyp:*',
+            '../third_party/qcms/qcms.gyp:*',
+            '../tools/gn/gn.gyp:*',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../v8/tools/gyp/v8.gyp:*',
+            '<(libjpeg_gyp_path):*',
+          ],
+        }],
+        ['OS!="ios"', {
+          'dependencies': [
+            '../device/bluetooth/bluetooth.gyp:*',
+            '../device/device_tests.gyp:*',
+            '../gpu/skia_runner/skia_runner.gyp:*',
+          ],
+        }],
+        ['use_openssl==0 and (OS=="mac" or OS=="ios" or OS=="win")', {
+          'dependencies': [
+            '../third_party/nss/nss.gyp:*',
+           ],
+        }],
+        ['OS=="win" or OS=="ios" or OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:*',
+           ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../sandbox/sandbox.gyp:*',
+            '../third_party/crashpad/crashpad/crashpad.gyp:*',
+            '../third_party/ocmock/ocmock.gyp:*',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../courgette/courgette.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+          ],
+          'conditions': [
+            ['branding=="Chrome"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_packages_<(channel)',
+              ],
+            }],
+            ['enable_ipc_fuzzer==1', {
+              'dependencies': [
+                '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*',
+              ],
+            }],
+            ['use_dbus==1', {
+              'dependencies': [
+                '../dbus/dbus.gyp:*',
+              ],
+            }],
+          ],
+        }],
+        ['chromecast==1', {
+          'dependencies': [
+            '../chromecast/chromecast.gyp:*',
+          ],
+        }],
+        ['use_x11==1', {
+          'dependencies': [
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:*',
+          ],
+        }],
+        ['OS=="win"', {
+          'conditions': [
+            ['win_use_allocator_shim==1', {
+              'dependencies': [
+                '../base/allocator/allocator.gyp:*',
+              ],
+            }],
+          ],
+          'dependencies': [
+            '../chrome/tools/crash_service/caps/caps.gyp:*',
+            '../chrome_elf/chrome_elf.gyp:*',
+            '../cloud_print/cloud_print.gyp:*',
+            '../courgette/courgette.gyp:*',
+            '../rlz/rlz.gyp:*',
+            '../sandbox/sandbox.gyp:*',
+            '<(angle_path)/src/angle.gyp:*',
+            '../third_party/bspatch/bspatch.gyp:*',
+            '../tools/win/static_initializers/static_initializers.gyp:*',
+          ],
+        }, {
+          'dependencies': [
+            '../third_party/libevent/libevent.gyp:*',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/views/controls/webview/webview.gyp:*',
+            '../ui/views/views.gyp:*',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/aura/aura.gyp:*',
+            '../ui/aura_extra/aura_extra.gyp:*',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:*',
+          ],
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting_all.gyp:remoting_all',
+          ],
+        }],
+        ['use_openssl==0', {
+          'dependencies': [
+            '../net/third_party/nss/ssl.gyp:*',
+          ],
+        }],
+        ['use_openssl==1', {
+          'dependencies': [
+            '../third_party/boringssl/boringssl.gyp:*',
+            '../third_party/boringssl/boringssl_tests.gyp:*',
+          ],
+        }],
+        ['enable_app_list==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:*',
+          ],
+        }],
+        ['OS!="android" and OS!="ios"', {
+          'dependencies': [
+            '../google_apis/gcm/gcm.gyp:*',
+          ],
+        }],
+        ['(chromeos==1 or OS=="linux" or OS=="win" or OS=="mac") and chromecast==0', {
+          'dependencies': [
+            '../extensions/shell/app_shell.gyp:*',
+          ],
+        }],
+        ['envoy==1', {
+          'dependencies': [
+            '../envoy/envoy.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All
+    {
+      'target_name': 'All_syzygy',
+      'type': 'none',
+      'conditions': [
+        ['OS=="win" and fastbuild==0 and target_arch=="ia32" and '
+            '(syzyasan==1 or syzygy_optimize==1)', {
+          'dependencies': [
+            '../chrome/installer/mini_installer_syzygy.gyp:*',
+          ],
+        }],
+      ],
+    }, # target_name: All_syzygy
+    {
+      # Note: Android uses android_builder_tests below.
+      # TODO: Consider merging that with this target.
+      'target_name': 'chromium_builder_tests',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_unittests',
+        '../components/components_tests.gyp:components_unittests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../net/net.gyp:net_unittests',
+        '../skia/skia_tests.gyp:skia_unittests',
+        '../sql/sql.gyp:sql_unittests',
+        '../sync/sync.gyp:sync_unit_tests',
+        '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+        '../url/url.gyp:url_unittests',
+      ],
+      'conditions': [
+        ['OS!="ios"', {
+          'dependencies': [
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+          ],
+        }],
+        ['OS!="ios" and OS!="mac"', {
+          'dependencies': [
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+          ],
+        }],
+        ['OS!="ios" and OS!="android"', {
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../gin/gin.gyp:gin_unittests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/cast/cast.gyp:cast_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../mojo/mojo.gyp:mojo',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/WebKit/public/all.gyp:all_blink',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+        }],
+        ['OS!="ios" and OS!="android" and chromecast==0', {
+          'dependencies': [
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chromedriver_tests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../extensions/extensions_tests.gyp:extensions_browsertests',
+            '../extensions/extensions_tests.gyp:extensions_unittests',
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:setup_unittests',
+            # ../chrome/test/mini_installer requires mini_installer.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+          ],
+          'conditions': [
+            # remoting_host_installation uses lots of non-trivial GYP that tends
+            # to break because of differences between ninja and msbuild. Make
+            # sure this target is built by the builders on the main waterfall.
+            # See http://crbug.com/180600.
+            ['wix_exists == "True" and sas_dll_exists == "True"', {
+              'dependencies': [
+                '../remoting/remoting.gyp:remoting_host_installation',
+              ],
+            }],
+            ['syzyasan==1', {
+              'variables': {
+                # Disable incremental linking for all modules.
+                # 0: inherit, 1: disabled, 2: enabled.
+                'msvs_debug_link_incremental': '1',
+                'msvs_large_module_debug_link_mode': '1',
+                # Disable RTC. Syzygy explicitly doesn't support RTC
+                # instrumented binaries for now.
+                'win_debug_RuntimeChecks': '0',
+              },
+              'defines': [
+                # Disable iterator debugging (huge speed boost).
+                '_HAS_ITERATOR_DEBUGGING=0',
+              ],
+              'msvs_settings': {
+                'VCLinkerTool': {
+                  # Enable profile information (necessary for SyzyAsan
+                  # instrumentation). This is incompatible with incremental
+                  # linking.
+                  'Profile': 'true',
+                },
+              }
+            }],
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+          ],
+        }],
+        ['OS=="linux" and use_dbus==1', {
+          'dependencies': [
+            '../dbus/dbus.gyp:dbus_unittests',
+          ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/message_center/message_center.gyp:*',
+          ],
+        }],
+        ['test_isolation_mode != "noop"', {
+          'dependencies': [
+            'chromium_swarm_tests',
+          ],
+        }],
+        ['OS!="android"', {
+          'dependencies': [
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+          ],
+        }],
+        ['enable_basic_printing==1 or enable_print_preview==1', {
+          'dependencies': [
+            '../printing/printing.gyp:printing_unittests',
+          ],
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/compositor/compositor.gyp:compositor_unittests',
+          ],
+        }],
+        ['use_aura==1 and chromecast==0', {
+          'dependencies': [
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        }],
+        ['use_aura==1 or toolkit_views==1', {
+          'dependencies': [
+            '../ui/events/events.gyp:events_unittests',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:ash_unittests',
+          ],
+        }],
+        ['disable_nacl==0', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_loader_unittests',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0 and enable_nacl_nonsfi_test==1', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_helper_nonsfi_unittests',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0', {
+          'dependencies': [
+            '../mojo/mojo_nacl_untrusted.gyp:libmojo',
+            '../mojo/mojo_nacl.gyp:monacl_codegen',
+            '../mojo/mojo_nacl.gyp:monacl_sel',
+            '../mojo/mojo_nacl.gyp:monacl_shell',
+          ],
+        }],
+      ],
+    }, # target_name: chromium_builder_tests
+  ],
+  'conditions': [
+    # TODO(GYP): make gn_migration.gypi work unconditionally.
+    ['OS=="mac" or OS=="win" or (OS=="linux" and target_arch=="x64" and chromecast==0)', {
+      'includes': [
+        'gn_migration.gypi',
+      ],
+    }],
+    ['OS!="ios"', {
+      'targets': [
+        {
+          'target_name': 'blink_tests',
+          'type': 'none',
+          'dependencies': [
+            '../third_party/WebKit/public/all.gyp:all_blink',
+          ],
+          'conditions': [
+            ['OS=="android"', {
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell_apk',
+                '../breakpad/breakpad.gyp:dump_syms#host',
+                '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+              ],
+            }, {  # OS!="android"
+              'dependencies': [
+                '../content/content_shell_and_tests.gyp:content_shell',
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../components/test_runner/test_runner.gyp:layout_test_helper',
+                '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+              ],
+            }],
+            ['OS!="win" and OS!="android"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                '../components/test_runner/test_runner.gyp:layout_test_helper',
+                '../breakpad/breakpad.gyp:dump_syms#host',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:dump_syms#host',
+              ],
+            }],
+          ],
+        }, # target_name: blink_tests
+      ],
+    }], # OS!=ios
+    ['OS!="ios" and OS!="android" and chromecast==0', {
+      'targets': [
+        {
+          'target_name': 'chromium_builder_nacl_win_integration',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_tests',
+          ],
+        }, # target_name: chromium_builder_nacl_win_integration
+        {
+          'target_name': 'chromium_builder_perf',
+          'type': 'none',
+          'dependencies': [
+            '../cc/cc_tests.gyp:cc_perftests',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:load_library_perf_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_performance_tests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../gpu/gpu.gyp:gpu_perftests',
+            '../media/media.gyp:media_perftests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+                '../gpu/gpu.gyp:angle_perftests',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_builder_perf
+        {
+          'target_name': 'chromium_gpu_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',
+            '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_gpu_builder
+        {
+          'target_name': 'chromium_gpu_debug_builder',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',
+            '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../tools/telemetry/telemetry.gyp:*',
+          ],
+          'conditions': [
+            ['OS!="ios" and OS!="win"', {
+              'dependencies': [
+                '../breakpad/breakpad.gyp:minidump_stackwalk',
+              ],
+            }],
+            ['OS=="linux"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:linux_symbols'
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_gpu_debug_builder
+        {
+          # This target contains everything we need to run tests on the special
+          # device-equipped WebRTC bots. We have device-requiring tests in
+          # browser_tests and content_browsertests.
+          'target_name': 'chromium_builder_webrtc',
+          'type': 'none',
+          'dependencies': [
+            'chromium_builder_perf',
+            '../chrome/chrome.gyp:browser_tests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../third_party/webrtc/tools/tools.gyp:frame_analyzer',
+            '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter',
+          ],
+          'conditions': [
+            ['remoting==1', {
+              'dependencies': [
+                '../remoting/remoting.gyp:*',
+              ],
+            }],
+          ],
+        },  # target_name: chromium_builder_webrtc
+        {
+          'target_name': 'chromium_builder_chromedriver',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chromedriver',
+            '../chrome/chrome.gyp:chromedriver_tests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+          ],
+        },  # target_name: chromium_builder_chromedriver
+        {
+          'target_name': 'chromium_builder_asan',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+
+            # We refer to content_shell directly rather than blink_tests
+            # because we don't want the _unittests binaries.
+            '../content/content_shell_and_tests.gyp:content_shell',
+          ],
+          'conditions': [
+            ['OS!="win"', {
+              'dependencies': [
+                '../net/net.gyp:hpack_fuzz_wrapper',
+                '../net/net.gyp:dns_fuzz_stub',
+                '../skia/skia.gyp:filter_fuzz_stub',
+              ],
+            }],
+            ['enable_ipc_fuzzer==1 and component!="shared_library" and '
+                 '(OS=="linux" or OS=="win")', {
+              'dependencies': [
+                '../tools/ipc_fuzzer/ipc_fuzzer.gyp:*',
+              ],
+            }],
+            ['chromeos==0', {
+              'dependencies': [
+                '../v8/src/d8.gyp:d8#host',
+                '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+              ],
+            }],
+            ['internal_filter_fuzzer==1', {
+              'dependencies': [
+                '../skia/tools/clusterfuzz-data/fuzzers/filter_fuzzer/filter_fuzzer.gyp:filter_fuzzer',
+              ],
+            }], # internal_filter_fuzzer
+            ['clang==1', {
+              'dependencies': [
+                'sanitizers/sanitizers.gyp:llvm-symbolizer',
+              ],
+            }],
+            ['OS=="win" and fastbuild==0 and target_arch=="ia32" and syzyasan==1', {
+              'dependencies': [
+                '../chrome/chrome_syzygy.gyp:chrome_dll_syzygy',
+                '../content/content_shell_and_tests.gyp:content_shell_syzyasan',
+              ],
+              'conditions': [
+                ['chrome_multiple_dll==1', {
+                  'dependencies': [
+                    '../chrome/chrome_syzygy.gyp:chrome_child_dll_syzygy',
+                  ],
+                }],
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_nacl_sdk',
+          'type': 'none',
+          'dependencies': [
+            '../chrome/chrome.gyp:chrome',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:chrome_nacl_win64',
+              ]
+            }],
+          ],
+        },  #target_name: chromium_builder_nacl_sdk
+      ],  # targets
+    }], #OS!=ios and OS!=android
+    ['OS=="android"', {
+      'targets': [
+        {
+          # The current list of tests for Android.  This is temporary
+          # until the full set is supported.  If adding a new test here,
+          # please also add it to build/android/pylib/gtest/gtest_config.py,
+          # or else the test will not be run.
+          #
+          # WARNING:
+          # Do not add targets here without communicating the implications
+          # for tryserver triggers and load.  Please discuss with
+          # chrome-infrastructure-team.
+          'target_name': 'android_builder_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/android/jni_generator/jni_generator.gyp:jni_generator_tests',
+            '../base/base.gyp:base_unittests',
+            '../breakpad/breakpad.gyp:breakpad_unittests_deps',
+            # Also compile the tools needed to deal with minidumps; they are
+            # needed to run minidump tests upstream.
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:symupload#host',
+            '../breakpad/breakpad.gyp:minidump_dump#host',
+            '../breakpad/breakpad.gyp:minidump_stackwalk#host',
+            '../build/android/pylib/device/commands/commands.gyp:chromium_commands',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_perftests_apk',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_gl_tests',
+            '../content/content_shell_and_tests.gyp:content_junit_tests',
+            '../content/content_shell_and_tests.gyp:chromium_linker_test_apk',
+            '../content/content_shell_and_tests.gyp:content_shell_test_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../gpu/gpu.gyp:gl_tests',
+            '../gpu/gpu.gyp:gpu_perftests_apk',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../media/media.gyp:media_perftests_apk',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests_apk',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests_deps',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../testing/android/junit/junit_test.gyp:junit_unit_tests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/WebKit/public/all.gyp:*',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            '../tools/android/findbugs_plugin/findbugs_plugin.gyp:findbugs_plugin_test',
+            '../ui/android/ui_android.gyp:ui_android_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            # Unit test bundles packaged as an apk.
+            '../base/base.gyp:base_unittests_apk',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests_apk',
+            '../cc/cc_tests.gyp:cc_unittests_apk',
+            '../components/components_tests.gyp:components_browsertests_apk',
+            '../components/components_tests.gyp:components_unittests_apk',
+            '../content/content_shell_and_tests.gyp:content_browsertests_apk',
+            '../content/content_shell_and_tests.gyp:content_gl_tests_apk',
+            '../content/content_shell_and_tests.gyp:content_unittests_apk',
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest_apk',
+            '../gpu/gpu.gyp:gl_tests_apk',
+            '../gpu/gpu.gyp:gpu_unittests_apk',
+            '../ipc/ipc.gyp:ipc_tests_apk',
+            '../media/media.gyp:media_unittests_apk',
+            '../media/midi/midi.gyp:midi_unittests_apk',
+            '../net/net.gyp:net_unittests_apk',
+            '../sandbox/sandbox.gyp:sandbox_linux_jni_unittests_apk',
+            '../skia/skia_tests.gyp:skia_unittests_apk',
+            '../sql/sql.gyp:sql_unittests_apk',
+            '../sync/sync.gyp:sync_unit_tests_apk',
+            '../tools/android/heap_profiler/heap_profiler.gyp:heap_profiler_unittests_apk',
+            '../ui/android/ui_android.gyp:ui_android_unittests_apk',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests_apk',
+            '../ui/events/events.gyp:events_unittests_apk',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests_apk',
+            '../ui/gl/gl_tests.gyp:gl_unittests_apk',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_apk',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../android_webview/android_webview.gyp:android_webview_unittests',
+                '../chrome/chrome.gyp:unit_tests',
+                # Unit test bundles packaged as an apk.
+                '../android_webview/android_webview.gyp:android_webview_test_apk',
+                '../android_webview/android_webview.gyp:android_webview_unittests_apk',
+                '../chrome/android/chrome_apk.gyp:chrome_public_test_apk',
+                '../chrome/chrome.gyp:chrome_junit_tests',
+                '../chrome/chrome.gyp:chrome_shell_test_apk',
+                '../chrome/chrome.gyp:chrome_sync_shell_test_apk',
+                '../chrome/chrome.gyp:chrome_shell_uiautomator_tests',
+                '../chrome/chrome.gyp:chromedriver_webview_shell_apk',
+                '../chrome/chrome.gyp:unit_tests_apk',
+                '../third_party/custom_tabs_client/src/custom_tabs_client.gyp:custom_tabs_client_example_apk',
+              ],
+            }],
+          ],
+        },
+        {
+          # WebRTC Chromium tests to run on Android.
+          'target_name': 'android_builder_chromium_webrtc',
+          'type': 'none',
+          'dependencies': [
+            '../build/android/pylib/device/commands/commands.gyp:chromium_commands',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../tools/android/android_tools.gyp:android_tools',
+            '../tools/android/android_tools.gyp:memconsumer',
+            '../content/content_shell_and_tests.gyp:content_browsertests_apk',
+          ],
+        },  # target_name: android_builder_chromium_webrtc
+      ], # targets
+    }], # OS="android"
+    ['OS=="mac"', {
+      'targets': [
+        {
+          # Target to build everything plus the dmg.  We don't put the dmg
+          # in the All target because developers really don't need it.
+          'target_name': 'all_and_dmg',
+          'type': 'none',
+          'dependencies': [
+            'All',
+            '../chrome/chrome.gyp:build_app_dmg',
+          ],
+        },
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder_dbg',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../rlz/rlz.gyp:*',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_rel',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_valgrind_mac',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+      ],  # targets
+    }], # OS="mac"
+    ['OS=="win"', {
+      'targets': [
+        # These targets are here so the build bots can use them to build
+        # subsets of a full tree for faster cycle times.
+        {
+          'target_name': 'chromium_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:gcapi_test',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:setup_unittests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../content/content_shell_and_tests.gyp:copy_test_netscape_plugin',
+            # ../chrome/test/mini_installer requires mini_installer.
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:*',
+            '../tools/telemetry/telemetry.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            '../ui/views/views.gyp:views_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+          'conditions': [
+            ['target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_tsan_win',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_lkgr_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../components/test_runner/test_runner.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+          ],
+        },
+        {
+          'target_name': 'chromium_builder_dbg_drmemory_win',
+          'type': 'none',
+          'dependencies': [
+            '../ash/ash.gyp:ash_shell_unittests',
+            '../ash/ash.gyp:ash_unittests',
+            '../base/base.gyp:base_unittests',
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chrome_app_unittests',
+            '../chrome/chrome.gyp:chromedriver_unittests',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:setup_unittests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../components/test_runner/test_runner.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_shell',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../courgette/courgette.gyp:courgette_unittests',
+            '../crypto/crypto.gyp:crypto_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../extensions/extensions_tests.gyp:extensions_browsertests',
+            '../extensions/extensions_tests.gyp:extensions_unittests',
+            '../gin/gin.gyp:gin_shell',
+            '../gin/gin.gyp:gin_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../google_apis/google_apis.gyp:google_apis_unittests',
+            '../gpu/gpu.gyp:angle_unittests',
+            '../gpu/gpu.gyp:gpu_unittests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../media/cast/cast.gyp:cast_unittests',
+            '../media/media.gyp:media_unittests',
+            '../media/midi/midi.gyp:midi_unittests',
+            '../mojo/mojo.gyp:mojo',
+            '../net/net.gyp:net_unittests',
+            '../printing/printing.gyp:printing_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../sql/sql.gyp:sql_unittests',
+            '../sync/sync.gyp:sync_unit_tests',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+            '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+            '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+            '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests',
+            '../ui/accessibility/accessibility.gyp:accessibility_unittests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/compositor/compositor.gyp:compositor_unittests',
+            '../ui/display/display.gyp:display_unittests',
+            '../ui/events/events.gyp:events_unittests',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        },
+      ],  # targets
+      'conditions': [
+        ['branding=="Chrome"', {
+          'targets': [
+            {
+              'target_name': 'chrome_official_builder_no_unittests',
+              'type': 'none',
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+                '../chrome/chrome.gyp:gcapi_dll',
+                '../chrome/chrome.gyp:pack_policy_templates',
+                '../chrome/installer/mini_installer.gyp:mini_installer',
+                '../cloud_print/cloud_print.gyp:cloud_print',
+                '../courgette/courgette.gyp:courgette',
+                '../courgette/courgette.gyp:courgette64',
+                '../remoting/remoting.gyp:remoting_webapp',
+                '../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmadapter',
+              ],
+              'conditions': [
+                ['target_arch=="ia32"', {
+                  'dependencies': [
+                    '../chrome/chrome.gyp:crash_service_win64',
+                  ],
+                }],
+                ['component != "shared_library" and wix_exists == "True" and \
+                    sas_dll_exists == "True"', {
+                  'dependencies': [
+                    '../remoting/remoting.gyp:remoting_host_installation',
+                  ],
+                }], # component != "shared_library"
+              ]
+            }, {
+              'target_name': 'chrome_official_builder',
+              'type': 'none',
+              'dependencies': [
+                'chrome_official_builder_no_unittests',
+                '../base/base.gyp:base_unittests',
+                '../chrome/chrome.gyp:browser_tests',
+                '../chrome/chrome.gyp:sync_integration_tests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../media/media.gyp:media_unittests',
+                '../media/midi/midi.gyp:midi_unittests',
+                '../net/net.gyp:net_unittests',
+                '../printing/printing.gyp:printing_unittests',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+                '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+                '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+                '../ui/gl/gl_tests.gyp:gl_unittests',
+                '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+                '../ui/views/views.gyp:views_unittests',
+                '../url/url.gyp:url_unittests',
+              ],
+            },
+          ], # targets
+        }], # branding=="Chrome"
+      ],  # conditions
+    }], # OS="win"
+    ['chromeos==1', {
+      'targets': [
+        {
+          'target_name': 'chromiumos_preflight',
+          'type': 'none',
+          'dependencies': [
+            '../breakpad/breakpad.gyp:minidump_stackwalk',
+            '../chrome/chrome.gyp:chrome',
+            '../chrome/chrome.gyp:chromedriver',
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest',
+            '../content/content_shell_and_tests.gyp:video_encode_accelerator_unittest',
+            '../media/media.gyp:media_unittests',
+            '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode',
+            '../sandbox/sandbox.gyp:chrome_sandbox',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+            '../third_party/mesa/mesa.gyp:osmesa',
+            '../tools/telemetry/telemetry.gyp:bitmaptools#host',
+            '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache',
+          ],
+          'conditions': [
+            ['disable_nacl==0', {
+              'dependencies': [
+                '../components/nacl.gyp:nacl_helper',
+                '../native_client/src/trusted/service_runtime/linux/nacl_bootstrap.gyp:nacl_helper_bootstrap',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }], # "chromeos==1"
+    ['use_aura==1', {
+      'targets': [
+        {
+          'target_name': 'aura_builder',
+          'type': 'none',
+          'dependencies': [
+            '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+            '../cc/cc_tests.gyp:cc_unittests',
+            '../components/components_tests.gyp:components_unittests',
+            '../content/content_shell_and_tests.gyp:content_browsertests',
+            '../content/content_shell_and_tests.gyp:content_unittests',
+            '../device/device_tests.gyp:device_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+            '../remoting/remoting.gyp:remoting_unittests',
+            '../skia/skia_tests.gyp:skia_unittests',
+            '../ui/app_list/app_list.gyp:*',
+            '../ui/aura/aura.gyp:*',
+            '../ui/aura_extra/aura_extra.gyp:*',
+            '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+            '../ui/compositor/compositor.gyp:*',
+            '../ui/display/display.gyp:display_unittests',
+            '../ui/events/events.gyp:*',
+            '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+            '../ui/gl/gl_tests.gyp:gl_unittests',
+            '../ui/keyboard/keyboard.gyp:*',
+            '../ui/snapshot/snapshot.gyp:snapshot_unittests',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+            '../ui/wm/wm.gyp:*',
+            'blink_tests',
+          ],
+          'conditions': [
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service',
+              ],
+            }],
+            ['OS=="win" and target_arch=="ia32"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:crash_service_win64',
+              ],
+            }],
+            ['use_ash==1', {
+              'dependencies': [
+                '../ash/ash.gyp:ash_shell',
+                '../ash/ash.gyp:ash_unittests',
+              ],
+            }],
+            ['OS=="linux"', {
+              # Tests that currently only work on Linux.
+              'dependencies': [
+                '../base/base.gyp:base_unittests',
+                '../ipc/ipc.gyp:ipc_tests',
+                '../sql/sql.gyp:sql_unittests',
+                '../sync/sync.gyp:sync_unit_tests',
+              ],
+            }],
+            ['chromeos==1', {
+              'dependencies': [
+                '../chromeos/chromeos.gyp:chromeos_unittests',
+                '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests',
+              ],
+            }],
+            ['use_ozone==1', {
+              'dependencies': [
+                '../ui/ozone/ozone.gyp:*',
+                '../ui/ozone/demo/ozone_demos.gyp:*',
+              ],
+            }],
+            ['chromecast==0', {
+              'dependencies': [
+                '../chrome/chrome.gyp:browser_tests',
+                '../chrome/chrome.gyp:chrome',
+                '../chrome/chrome.gyp:interactive_ui_tests',
+                '../chrome/chrome.gyp:unit_tests',
+                '../ui/message_center/message_center.gyp:*',
+                '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+                '../ui/views/views.gyp:views',
+                '../ui/views/views.gyp:views_unittests',
+              ],
+            }],
+          ],
+        },
+      ],  # targets
+    }], # "use_aura==1"
+    ['test_isolation_mode != "noop"', {
+      'targets': [
+        {
+          'target_name': 'chromium_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../base/base.gyp:base_unittests_run',
+            '../content/content_shell_and_tests.gyp:content_browsertests_run',
+            '../content/content_shell_and_tests.gyp:content_unittests_run',
+            '../net/net.gyp:net_unittests_run',
+          ],
+          'conditions': [
+            ['chromecast==0', {
+              'dependencies': [
+                '../chrome/chrome.gyp:browser_tests_run',
+                '../chrome/chrome.gyp:interactive_ui_tests_run',
+                '../chrome/chrome.gyp:sync_integration_tests_run',
+                '../chrome/chrome.gyp:unit_tests_run',
+              ],
+            }],
+          ],
+        }, # target_name: chromium_swarm_tests
+      ],
+    }],
+    ['archive_chromoting_tests==1', {
+      'targets': [
+        {
+          'target_name': 'chromoting_swarm_tests',
+          'type': 'none',
+          'dependencies': [
+            '../testing/chromoting/integration_tests.gyp:*',
+          ],
+        }, # target_name: chromoting_swarm_tests
+      ]
+    }],
+    ['OS=="mac" and toolkit_views==1', {
+      'targets': [
+        {
+          'target_name': 'macviews_builder',
+          'type': 'none',
+          'dependencies': [
+            '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+            '../ui/views/views.gyp:views',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        },  # target_name: macviews_builder
+      ],  # targets
+    }],  # os=='mac' and toolkit_views==1
+  ],  # conditions
+}
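+# Illustrative usage (output directory and configuration name are assumptions):
+# once GYP has generated Ninja files, any of the grouping targets above can be
+# built directly by name, e.g.
+#   ninja -C out/Release chromium_builder_dbg_drmemory_win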
diff --git a/build/android/AndroidManifest.xml b/build/android/AndroidManifest.xml
new file mode 100644
index 0000000..f27872e
--- /dev/null
+++ b/build/android/AndroidManifest.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.  Use of this
+  source code is governed by a BSD-style license that can be found in the
+  LICENSE file.
+-->
+
+<!--
+  This is a dummy manifest which is required by:
+  1. aapt when generating R.java in java.gypi:
+     Nothing in the manifest is used, but it is still required by aapt.
+  2. lint: [min|target]SdkVersion are required by lint and should
+     be kept up-to-date.
+-->
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="dummy.package">
+
+    <uses-sdk android:minSdkVersion="16" android:targetSdkVersion="22" />
+
+</manifest>
diff --git a/build/android/BUILD.gn b/build/android/BUILD.gn
new file mode 100644
index 0000000..d90ad70
--- /dev/null
+++ b/build/android/BUILD.gn
@@ -0,0 +1,56 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+sun_tools_jar_path = "$root_gen_dir/sun_tools_jar/tools.jar"
+
+action("find_sun_tools_jar") {
+  script = "//build/android/gyp/find_sun_tools_jar.py"
+  depfile = "$target_gen_dir/$target_name.d"
+  outputs = [
+    depfile,
+    sun_tools_jar_path,
+  ]
+  args = [
+    "--depfile",
+    rebase_path(depfile, root_build_dir),
+    "--output",
+    rebase_path(sun_tools_jar_path, root_build_dir),
+  ]
+}
+
+java_prebuilt("sun_tools_java") {
+  jar_path = sun_tools_jar_path
+  jar_dep = ":find_sun_tools_jar"
+}
+
+action("cpplib_stripped") {
+  _strip_bin = "${android_tool_prefix}strip"
+  _soname = "libc++_shared.so"
+  _input_so = "${android_libcpp_root}/libs/${android_app_abi}/${_soname}"
+  _output_so = "${root_out_dir}/lib.stripped/${_soname}"
+
+  script = "//build/gn_run_binary.py"
+  inputs = [
+    _strip_bin,
+  ]
+  sources = [
+    _input_so,
+  ]
+  outputs = [
+    _output_so,
+  ]
+
+  _rebased_strip_bin = rebase_path(_strip_bin, root_out_dir)
+  _rebased_input_so = rebase_path(_input_so, root_out_dir)
+  _rebased_output_so = rebase_path(_output_so, root_out_dir)
+  args = [
+    _rebased_strip_bin,
+    "--strip-unneeded",
+    "-o",
+    _rebased_output_so,
+    _rebased_input_so,
+  ]
+}
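+
+# Illustrative usage (assumed caller, not part of this file): a target that
+# needs the stripped C++ shared library can depend on the action above, e.g.
+#   deps = [ "//build/android:cpplib_stripped" ]
+# and pick up libc++_shared.so from $root_out_dir/lib.stripped/.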
diff --git a/build/android/CheckInstallApk-debug.apk b/build/android/CheckInstallApk-debug.apk
new file mode 100644
index 0000000..3dc31910
--- /dev/null
+++ b/build/android/CheckInstallApk-debug.apk
Binary files differ
diff --git a/build/android/OWNERS b/build/android/OWNERS
new file mode 100644
index 0000000..9a5d270
--- /dev/null
+++ b/build/android/OWNERS
@@ -0,0 +1,3 @@
+jbudorick@chromium.org
+klundberg@chromium.org
+pasko@chromium.org
diff --git a/build/android/PRESUBMIT.py b/build/android/PRESUBMIT.py
new file mode 100644
index 0000000..6e0a3de
--- /dev/null
+++ b/build/android/PRESUBMIT.py
@@ -0,0 +1,64 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Presubmit script for android buildbot.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for
+details on the presubmit API built into depot_tools.
+"""
+
+
+def CommonChecks(input_api, output_api):
+  output = []
+
+  def J(*dirs):
+    """Returns a path relative to presubmit directory."""
+    return input_api.os_path.join(input_api.PresubmitLocalPath(), *dirs)
+
+  output.extend(input_api.canned_checks.RunPylint(
+      input_api,
+      output_api,
+      black_list=[r'pylib/symbols/.*\.py$', r'gyp/.*\.py$', r'gn/.*\.py'],
+      extra_paths_list=[
+          J(), J('..', '..', 'third_party', 'android_testrunner'),
+          J('buildbot')]))
+  output.extend(input_api.canned_checks.RunPylint(
+      input_api,
+      output_api,
+      white_list=[r'gyp/.*\.py$', r'gn/.*\.py'],
+      extra_paths_list=[J('gyp'), J('gn')]))
+
+  # Disabled due to http://crbug.com/410936
+  #output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+  #input_api, output_api, J('buildbot', 'tests')))
+
+  pylib_test_env = dict(input_api.environ)
+  pylib_test_env.update({
+      'PYTHONPATH': input_api.PresubmitLocalPath(),
+      'PYTHONDONTWRITEBYTECODE': '1',
+  })
+  output.extend(input_api.canned_checks.RunUnitTests(
+      input_api,
+      output_api,
+      unit_tests=[
+          J('pylib', 'base', 'test_dispatcher_unittest.py'),
+          J('pylib', 'device', 'battery_utils_test.py'),
+          J('pylib', 'device', 'device_utils_test.py'),
+          J('pylib', 'device', 'logcat_monitor_test.py'),
+          J('pylib', 'gtest', 'gtest_test_instance_test.py'),
+          J('pylib', 'instrumentation',
+            'instrumentation_test_instance_test.py'),
+          J('pylib', 'results', 'json_results_test.py'),
+          J('pylib', 'utils', 'md5sum_test.py'),
+      ],
+      env=pylib_test_env))
+  return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CommonChecks(input_api, output_api)
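+
+# Note (assumption about the surrounding workflow): these hooks are normally
+# invoked through depot_tools rather than run directly, e.g.
+#   git cl presubmit
+# or as part of "git cl upload", which call CheckChangeOnUpload() /
+# CheckChangeOnCommit() with the presubmit API objects documented at the URL
+# in the module docstring.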
diff --git a/build/android/adb_android_webview_command_line b/build/android/adb_android_webview_command_line
new file mode 100755
index 0000000..791e270
--- /dev/null
+++ b/build/android/adb_android_webview_command_line
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_android_webview_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_android_webview_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/tmp/android-webview-command-line
+REQUIRES_SU=0
+set_command_line "$@"
+
diff --git a/build/android/adb_chrome_public_command_line b/build/android/adb_chrome_public_command_line
new file mode 100755
index 0000000..9bf91c6
--- /dev/null
+++ b/build/android/adb_chrome_public_command_line
@@ -0,0 +1,19 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current Chrome flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the Chrome
+# flags. For example:
+#   adb_chrome_public_command_line --enable-webgl
+#
+# To remove all Chrome flags, pass an empty string for the flags:
+#   adb_chrome_public_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/chrome-command-line
+REQUIRES_SU=1
+set_command_line "$@"
diff --git a/build/android/adb_chrome_shell_command_line b/build/android/adb_chrome_shell_command_line
new file mode 100755
index 0000000..750f906
--- /dev/null
+++ b/build/android/adb_chrome_shell_command_line
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current chrome shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the chrome shell
+# flags. For example:
+#   adb_chrome_shell_command_line --enable-webgl
+#
+# To remove all chrome shell flags, pass an empty string for the flags:
+#   adb_chrome_shell_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/tmp/chrome-shell-command-line
+REQUIRES_SU=0
+set_command_line "$@"
+
diff --git a/build/android/adb_command_line_functions.sh b/build/android/adb_command_line_functions.sh
new file mode 100755
index 0000000..7ea98b09
--- /dev/null
+++ b/build/android/adb_command_line_functions.sh
@@ -0,0 +1,40 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Variables must be set before calling:
+# CMD_LINE_FILE - Path on device to flags file.
+# REQUIRES_SU - Set to 1 if path requires root.
+function set_command_line() {
+  SU_CMD=""
+  if [[ "$REQUIRES_SU" = 1 ]]; then
+    # Older Android versions accept "su -c", while newer ones use "su <uid>".
+    SDK_LEVEL=$(adb shell getprop ro.build.version.sdk | tr -d '\r')
+    # E.g. if no device connected.
+    if [[ -z "$SDK_LEVEL" ]]; then
+      exit 1
+    fi
+    SU_CMD="su -c"
+    if (( $SDK_LEVEL >= 21 )); then
+      SU_CMD="su 0"
+    fi
+  fi
+
+  if [ $# -eq 0 ] ; then
+    # If nothing specified, print the command line (stripping off "chrome ")
+    adb shell "cat $CMD_LINE_FILE 2>/dev/null" | cut -d ' ' -s -f2-
+  elif [ $# -eq 1 ] && [ "$1" = '' ] ; then
+    # If given an empty string, delete the command line.
+    set -x
+    adb shell $SU_CMD rm $CMD_LINE_FILE >/dev/null
+  else
+    # Else set it.
+    set -x
+    adb shell "echo 'chrome $*' | $SU_CMD dd of=$CMD_LINE_FILE"
+    # Prevent other apps from modifying the flags, since that could create security issues.
+    adb shell $SU_CMD chmod 0664 $CMD_LINE_FILE
+  fi
+}
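+
+# Illustrative example (flag values are placeholders): after a wrapper runs
+#   set_command_line --enable-webgl --show-fps-counter
+# the device file contains the single line
+#   chrome --enable-webgl --show-fps-counter
+# Calling the wrapper again with no arguments prints only the flags, since
+# the leading "chrome " token is stripped by the 'cut' above.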
+
diff --git a/build/android/adb_content_shell_command_line b/build/android/adb_content_shell_command_line
new file mode 100755
index 0000000..2ac7ece
--- /dev/null
+++ b/build/android/adb_content_shell_command_line
@@ -0,0 +1,20 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# If no flags are given, prints the current content shell flags.
+#
+# Otherwise, the given flags are used to REPLACE (not modify) the content shell
+# flags. For example:
+#   adb_content_shell_command_line --enable-webgl
+#
+# To remove all content shell flags, pass an empty string for the flags:
+#   adb_content_shell_command_line ""
+
+. $(dirname $0)/adb_command_line_functions.sh
+CMD_LINE_FILE=/data/local/tmp/content-shell-command-line
+REQUIRES_SU=0
+set_command_line "$@"
+
diff --git a/build/android/adb_device_functions.sh b/build/android/adb_device_functions.sh
new file mode 100755
index 0000000..66cc32f
--- /dev/null
+++ b/build/android/adb_device_functions.sh
@@ -0,0 +1,139 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# A collection of functions useful for maintaining android devices
+
+
+# Run an adb command on all connected devices in parallel.
+# Usage: adb_all command line to eval.  Quoting is optional.
+#
+# Examples:
+#  adb_all install Chrome.apk
+#  adb_all 'shell cat /path/to/file'
+#
+adb_all() {
+  if [[ $# == 0 ]]; then
+    echo "Usage: adb_all <adb command>.  Quoting is optional."
+    echo "Example: adb_all install Chrome.apk"
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices -b)
+  local NUM_DEVICES=$(echo $DEVICES | wc -w)
+  if (( $NUM_DEVICES > 1 )); then
+    echo "Looping over $NUM_DEVICES devices"
+  fi
+  _adb_multi "$DEVICES" "$*"
+}
+
+
+# Run a command on each connected device.  Quoting the command is suggested but
+# not required.  The script sets up the variable DEVICE to correspond to the
+# current serial number.  Intended for complex one-liners that don't work in
+# adb_all.
+# Usage: adb_device_loop 'command line to eval'
+adb_device_loop() {
+  if [[ $# == 0 ]]; then
+    echo "Intended for more complex one-liners that cannot be done with" \
+        "adb_all."
+    echo 'Usage: adb_device_loop "echo $DEVICE: $(adb root &&' \
+        'adb shell cat /data/local.prop)"'
+    return 1
+  fi
+  local DEVICES=$(adb_get_devices)
+  if [[ -z $DEVICES ]]; then
+    return
+  fi
+  # Do not change the DEVICE variable name - it is part of the API.
+  for DEVICE in $DEVICES; do
+    DEV_TYPE=$(adb -s $DEVICE shell getprop ro.product.device | sed 's/\r//')
+    echo "Running on $DEVICE ($DEV_TYPE)"
+    ANDROID_SERIAL=$DEVICE eval "$*"
+  done
+}
+
+# Erases data from any devices visible on adb.  To preserve a device,
+# disconnect it or:
+#  1) Reboot it into fastboot with 'adb reboot bootloader'
+#  2) Run wipe_all_devices to wipe remaining devices
+#  3) Restore the device with 'fastboot reboot'
+#
+#  Usage: wipe_all_devices [-f]
+#
+wipe_all_devices() {
+  if [[ -z $(which adb) || -z $(which fastboot) ]]; then
+    echo "aborting: adb and fastboot not in path"
+    return 1
+  elif ! $(groups | grep -q 'plugdev'); then
+    echo "If fastboot fails, run: 'sudo adduser $(whoami) plugdev'"
+  fi
+
+  local DEVICES=$(adb_get_devices -b)
+
+  if [[ $1 != '-f' ]]; then
+    echo "This will ERASE ALL DATA from $(echo $DEVICES | wc -w) device."
+    read -p "Hit enter to continue"
+  fi
+
+  _adb_multi "$DEVICES" "reboot bootloader"
+  # Subshell to isolate job list
+  (
+  for DEVICE in $DEVICES; do
+    fastboot_erase $DEVICE &
+  done
+  wait
+  )
+
+  # Reboot devices together
+  for DEVICE in $DEVICES; do
+    fastboot -s $DEVICE reboot
+  done
+}
+
+# Wipe a device in fastboot.
+# Usage fastboot_erase [serial]
+fastboot_erase() {
+  if [[ -n $1 ]]; then
+    echo "Wiping $1"
+    local SERIAL="-s $1"
+  else
+    if [ -z $(fastboot devices) ]; then
+      echo "No devices in fastboot, aborting."
+      echo "Check out wipe_all_devices to see if sufficient"
+      echo "You can put a device in fastboot using adb reboot bootloader"
+      return 1
+    fi
+    local SERIAL=""
+  fi
+  fastboot $SERIAL erase cache
+  fastboot $SERIAL erase userdata
+}
+
+# Get list of devices connected via adb
+# Args: -b block until adb detects a device
+adb_get_devices() {
+  local DEVICES="$(adb devices | grep 'device$')"
+  if [[ -z $DEVICES && $1 == '-b' ]]; then
+    echo '- waiting for device -' >&2
+    local DEVICES="$(adb wait-for-device devices | grep 'device$')"
+  fi
+  echo "$DEVICES" | awk -vORS=' ' '{print $1}' | sed 's/ $/\n/'
+}
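+
+# Example output (hypothetical serial numbers): a single line such as
+#   0123456789ABCDEF 1A2B3C4D5E6F
+# i.e. the serials of all attached devices, separated by spaces.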
+
+###################################################
+## HELPER FUNCTIONS
+###################################################
+
+# Run an adb command in parallel over a device list
+_adb_multi() {
+  local DEVICES=$1
+  local ADB_ARGS=$2
+  (
+    for DEVICE in $DEVICES; do
+      adb -s $DEVICE $ADB_ARGS &
+    done
+    wait
+  )
+}
diff --git a/build/android/adb_gdb b/build/android/adb_gdb
new file mode 100755
index 0000000..65ec7b2
--- /dev/null
+++ b/build/android/adb_gdb
@@ -0,0 +1,1047 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+# A generic script used to attach to a running Chromium process and
+# debug it. Most users should not use this directly, but one of the
+# wrapper scripts like adb_gdb_content_shell
+#
+# Use --help to print full usage instructions.
+#
+
+PROGNAME=$(basename "$0")
+PROGDIR=$(dirname "$0")
+
+# Location of Chromium-top-level sources.
+CHROMIUM_SRC=$(cd "$PROGDIR"/../.. >/dev/null && pwd 2>/dev/null)
+
+# Location of Chromium out/ directory.
+if [ -z "$CHROMIUM_OUT_DIR" ]; then
+  CHROMIUM_OUT_DIR=out
+fi
+
+TMPDIR=
+GDBSERVER_PIDFILE=
+TARGET_GDBSERVER=
+COMMAND_PREFIX=
+
+clean_exit () {
+  if [ "$TMPDIR" ]; then
+    GDBSERVER_PID=$(cat $GDBSERVER_PIDFILE 2>/dev/null)
+    if [ "$GDBSERVER_PID" ]; then
+      log "Killing background gdbserver process: $GDBSERVER_PID"
+      kill -9 $GDBSERVER_PID >/dev/null 2>&1
+    fi
+    if [ "$TARGET_GDBSERVER" ]; then
+      log "Removing target gdbserver binary: $TARGET_GDBSERVER."
+      "$ADB" shell "$COMMAND_PREFIX" rm "$TARGET_GDBSERVER" >/dev/null 2>&1
+    fi
+    log "Cleaning up: $TMPDIR"
+    rm -rf "$TMPDIR"
+  fi
+  trap "" EXIT
+  exit $1
+}
+
+# Ensure clean exit on Ctrl-C or normal exit.
+trap "clean_exit 1" INT HUP QUIT TERM
+trap "clean_exit \$?" EXIT
+
+panic () {
+  echo "ERROR: $@" >&2
+  exit 1
+}
+
+fail_panic () {
+  if [ $? != 0 ]; then panic "$@"; fi
+}
+
+log () {
+  if [ "$VERBOSE" -gt 0 ]; then
+    echo "$@"
+  fi
+}
+
+DEFAULT_PULL_LIBS_DIR=/tmp/$USER-adb-gdb-libs
+
+# NOTE: Allow wrapper scripts to set various defaults through ADB_GDB_XXX
+# environment variables. This is only for cosmetic reasons, i.e. to
+# display proper defaults in the --help output.
+
+# Allow wrapper scripts to set the default activity through
+# the ADB_GDB_ACTIVITY variable. Users are still able to change the
+# final activity name through --activity=<name> option.
+#
+# This is only for cosmetic reasons, i.e. to display the proper default
+# in the --help output.
+#
+DEFAULT_ACTIVITY=${ADB_GDB_ACTIVITY:-".Main"}
+
+# Allow wrapper scripts to set the program name through ADB_GDB_PROGNAME
+PROGNAME=${ADB_GDB_PROGNAME:-$(basename "$0")}
+
+ACTIVITY=$DEFAULT_ACTIVITY
+ADB=
+ANNOTATE=
+# Note: Ignore BUILDTYPE variable, because the Ninja build doesn't use it.
+BUILDTYPE=
+FORCE=
+GDBEXEPOSTFIX=gdb
+GDBINIT=
+GDBSERVER=
+HELP=
+NDK_DIR=
+NO_PULL_LIBS=
+PACKAGE_NAME=
+PID=
+PORT=
+PRIVILEGED=
+PRIVILEGED_INDEX=
+PROGRAM_NAME="activity"
+PULL_LIBS=
+PULL_LIBS_DIR=
+SANDBOXED=
+SANDBOXED_INDEX=
+START=
+SU_PREFIX=
+SYMBOL_DIR=
+TARGET_ARCH=
+TOOLCHAIN=
+VERBOSE=0
+
+for opt; do
+  optarg=$(expr "x$opt" : 'x[^=]*=\(.*\)')
+  case $opt in
+    --adb=*)
+      ADB=$optarg
+      ;;
+    --activity=*)
+      ACTIVITY=$optarg
+      ;;
+    --annotate=*)
+      ANNOTATE=$optarg
+      ;;
+    --force)
+      FORCE=true
+      ;;
+    --gdbserver=*)
+      GDBSERVER=$optarg
+      ;;
+    --gdb=*)
+      GDB=$optarg
+      ;;
+    --help|-h|-?)
+      HELP=true
+      ;;
+    --ndk-dir=*)
+      NDK_DIR=$optarg
+      ;;
+    --no-pull-libs)
+      NO_PULL_LIBS=true
+      ;;
+    --package-name=*)
+      PACKAGE_NAME=$optarg
+      ;;
+    --pid=*)
+      PID=$optarg
+      ;;
+    --port=*)
+      PORT=$optarg
+      ;;
+    --privileged)
+      PRIVILEGED=true
+      ;;
+    --privileged=*)
+      PRIVILEGED=true
+      PRIVILEGED_INDEX=$optarg
+      ;;
+    --program-name=*)
+      PROGRAM_NAME=$optarg
+      ;;
+    --pull-libs)
+      PULL_LIBS=true
+      ;;
+    --pull-libs-dir=*)
+      PULL_LIBS_DIR=$optarg
+      ;;
+    --sandboxed)
+      SANDBOXED=true
+      ;;
+    --sandboxed=*)
+      SANDBOXED=true
+      SANDBOXED_INDEX=$optarg
+      ;;
+    --script=*)
+      GDBINIT=$optarg
+      ;;
+    --start)
+      START=true
+      ;;
+    --su-prefix=*)
+      SU_PREFIX=$optarg
+      ;;
+    --symbol-dir=*)
+      SYMBOL_DIR=$optarg
+      ;;
+    --out-dir=*)
+      CHROMIUM_OUT_DIR=$optarg
+      ;;
+    --target-arch=*)
+      TARGET_ARCH=$optarg
+      ;;
+    --toolchain=*)
+      TOOLCHAIN=$optarg
+      ;;
+    --ui)
+      GDBEXEPOSTFIX=gdbtui
+      ;;
+    --verbose)
+      VERBOSE=$(( $VERBOSE + 1 ))
+      ;;
+    --debug)
+      BUILDTYPE=Debug
+      ;;
+    --release)
+      BUILDTYPE=Release
+      ;;
+    -*)
+      panic "Unknown option $OPT, see --help." >&2
+      ;;
+    *)
+      if [ "$PACKAGE_NAME" ]; then
+        panic "You can only provide a single package name as argument!\
+ See --help."
+      fi
+      PACKAGE_NAME=$opt
+      ;;
+  esac
+done
+
+print_help_options () {
+  cat <<EOF
+EOF
+}
+
+if [ "$HELP" ]; then
+  if [ "$ADB_GDB_PROGNAME" ]; then
+    # Assume wrapper scripts all provide a default package name.
+    cat <<EOF
+Usage: $PROGNAME [options]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+EOF
+  else
+    # Assume this is a direct call to adb_gdb
+  cat <<EOF
+Usage: $PROGNAME [options] [<package-name>]
+
+Attach gdb to a running Android $PROGRAM_NAME process.
+
+If provided, <package-name> must be the name of the Android application's
+package name to be debugged. You can also use --package-name=<name> to
+specify it.
+EOF
+  fi
+
+  cat <<EOF
+
+This script is used to debug a running $PROGRAM_NAME process.
+This can be a regular Android application process, sandboxed (if you use the
+--sandboxed or --sandboxed=<num> option) or a privileged (--privileged or
+--privileged=<num>) service.
+
+This script needs several things to work properly. It will try to pick
+them up automatically for you though:
+
+   - target gdbserver binary
+   - host gdb client (e.g. arm-linux-androideabi-gdb)
+   - directory with symbolic version of $PROGRAM_NAME's shared libraries.
+
+You can also use --ndk-dir=<path> to specify an alternative NDK installation
+directory.
+
+The script tries to find the most recent version of the debug version of
+shared libraries under one of the following directories:
+
+  \$CHROMIUM_SRC/<out>/Release/lib/           (used by Ninja builds)
+  \$CHROMIUM_SRC/<out>/Debug/lib/             (used by Ninja builds)
+  \$CHROMIUM_SRC/<out>/Release/lib.target/    (used by Make builds)
+  \$CHROMIUM_SRC/<out>/Debug/lib.target/      (used by Make builds)
+
+Where <out> is 'out' by default, unless the --out-dir=<name> option is used or
+the CHROMIUM_OUT_DIR environment variable is defined.
+
+You can restrict this search by using --release or --debug to specify the
+build type, or simply use --symbol-dir=<path> to specify the file manually.
+
+The script tries to extract the target architecture from your target device,
+but if this fails, it will default to 'arm'. Use --target-arch=<name> to force
+its value.
+
+Otherwise, the script will complain, but you can use the --gdbserver,
+--gdb and --symbol-lib options to specify everything manually.
+
+An alternative to --gdb=<file> is to use --toolchain=<path> to specify
+the path to the host target-specific cross-toolchain.
+
+You will also need the 'adb' tool in your path. Otherwise, use the --adb
+option. The script will complain if there is more than one device connected
+and ANDROID_SERIAL is not defined.
+
+The first time you use it on a device, the script will pull many system
+libraries required by the process into a temporary directory. This
+is done to greatly improve the debugging experience, e.g. by making thread
+stacks readable. The libraries are copied to the following
+directory by default:
+
+  $DEFAULT_PULL_LIBS_DIR/
+
+But you can use the --pull-libs-dir=<path> option to specify an
+alternative. The script can detect when you change the connected device,
+and will re-pull the libraries only in this case. You can however force it
+with the --pull-libs option.
+
+Any local .gdbinit script will be ignored, but it is possible to pass a
+gdb command script with the --script=<file> option. Note that its commands
+will be passed to gdb after the remote connection and library symbol
+loading have completed.
+
+Valid options:
+  --help|-h|-?          Print this message.
+  --verbose             Increase verbosity.
+
+  --sandboxed           Debug first sandboxed process we find.
+  --sandboxed=<num>     Debug specific sandboxed process.
+  --symbol-dir=<path>   Specify directory with symbol shared libraries.
+  --out-dir=<path>      Specify the out directory.
+  --package-name=<name> Specify package name (alternative to 1st argument).
+  --privileged          Debug first privileged process we find.
+  --privileged=<num>    Debug specific privileged process.
+  --program-name=<name> Specify program name (cosmetic only).
+  --pid=<pid>           Specify application process pid.
+  --force               Kill any previous debugging session, if any.
+  --start               Start package's activity on device.
+  --ui                  Use gdbtui instead of gdb
+  --activity=<name>     Activity name for --start [$DEFAULT_ACTIVITY].
+  --annotate=<num>      Enable gdb annotation.
+  --script=<file>       Specify extra GDB init script.
+
+  --gdbserver=<file>    Specify target gdbserver binary.
+  --gdb=<file>          Specify host gdb client binary.
+  --target-arch=<name>  Specify NDK target arch.
+  --adb=<file>          Specify host ADB binary.
+  --port=<port>         Specify the tcp port to use.
+
+  --su-prefix=<prefix>  Prepend <prefix> to 'adb shell' commands that are
+                        run by this script. This can be useful to use
+                        the 'su' program on rooted production devices.
+                        e.g. --su-prefix="su -c"
+
+  --pull-libs           Force system libraries extraction.
+  --no-pull-libs        Do not extract any system library.
+  --pull-libs-dir=<path> Specify system libraries extraction directory.
+
+  --debug               Use libraries under out/Debug.
+  --release             Use libraries under out/Release.
+
+EOF
+  exit 0
+fi
+
+if [ -z "$PACKAGE_NAME" ]; then
+  panic "Please specify a package name on the command line. See --help."
+fi
+
+if [ -z "$NDK_DIR" ]; then
+  ANDROID_NDK_ROOT=$(PYTHONPATH=$CHROMIUM_SRC/build/android python -c \
+'from pylib.constants import ANDROID_NDK_ROOT; print ANDROID_NDK_ROOT,')
+else
+  if [ ! -d "$NDK_DIR" ]; then
+    panic "Invalid directory: $NDK_DIR"
+  fi
+  if [ ! -f "$NDK_DIR/ndk-build" ]; then
+    panic "Not a valid NDK directory: $NDK_DIR"
+  fi
+  ANDROID_NDK_ROOT=$NDK_DIR
+fi
+
+if [ "$GDBINIT" -a ! -f "$GDBINIT" ]; then
+  panic "Unknown --script file: $GDBINIT"
+fi
+
+# Check that ADB is in our path
+if [ -z "$ADB" ]; then
+  ADB=$(which adb 2>/dev/null)
+  if [ -z "$ADB" ]; then
+    panic "Can't find 'adb' tool in your path. Install it or use \
+--adb=<file>"
+  fi
+  log "Auto-config: --adb=$ADB"
+fi
+
+# Check that it works minimally
+ADB_VERSION=$($ADB version 2>/dev/null)
+echo "$ADB_VERSION" | fgrep -q -e "Android Debug Bridge"
+if [ $? != 0 ]; then
+  panic "Your 'adb' tool seems invalid, use --adb=<file> to specify a \
+different one: $ADB"
+fi
+
+# If there are more than one device connected, and ANDROID_SERIAL is not
+# defined, print an error message.
+NUM_DEVICES_PLUS2=$($ADB devices 2>/dev/null | wc -l)
+if [ "$NUM_DEVICES_PLUS2" -lt 3 -a -z "$ANDROID_SERIAL" ]; then
+  echo "ERROR: There is more than one Android device connected to ADB."
+  echo "Please define ANDROID_SERIAL to specify which one to use."
+  exit 1
+fi
+
+# Run a command through adb shell, strip the extra \r from the output
+# and return the correct status code to detect failures. This assumes
+# that the adb shell command prints a final \n to stdout.
+# $1+: command to run
+# Out: command's stdout
+# Return: command's status
+# Note: the command's stderr is lost
+adb_shell () {
+  local TMPOUT="$(mktemp)"
+  local LASTLINE RET
+  local ADB=${ADB:-adb}
+
+  # The weird sed rule is to strip the final \r on each output line
+  # Since 'adb shell' never returns the command's proper exit/status code,
+  # we force it to print it as '%%<status>' in the temporary output file,
+  # which we will later strip from it.
+  $ADB shell $@ ";" echo "%%\$?" 2>/dev/null | \
+      sed -e 's![[:cntrl:]]!!g' > $TMPOUT
+  # Get last line in log, which contains the exit code from the command
+  LASTLINE=$(sed -e '$!d' $TMPOUT)
+  # Extract the status code from the end of the line, which must
+  # be '%%<code>'.
+  RET=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,RSTART+2); } }')
+  # Remove the status code from the last line. Note that this may result
+  # in an empty line.
+  LASTLINE=$(echo "$LASTLINE" | \
+    awk '{ if (match($0, "%%[0-9]+$")) { print substr($0,1,RSTART-1); } }')
+  # The output itself: all lines except the status code.
+  sed -e '$d' $TMPOUT && printf "%s" "$LASTLINE"
+  # Remove temp file.
+  rm -f $TMPOUT
+  # Exit with the appropriate status.
+  return $RET
+}
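+
+# Illustrative example: 'adb_shell getprop ro.build.version.sdk' makes the
+# device run
+#   getprop ro.build.version.sdk ; echo "%%$?"
+# so the raw output might be "21" followed by "%%0"; the code above strips
+# the "%%0" marker, prints "21" and returns exit status 0.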
+
+# Find the target architecture from the target device.
+# This returns an NDK-compatible architecture name.
+# out: NDK Architecture name, or empty string.
+get_gyp_target_arch () {
+  local ARCH=$(adb_shell getprop ro.product.cpu.abi)
+  case $ARCH in
+    mips|x86|x86_64) echo "$ARCH";;
+    arm64*) echo "arm64";;
+    arm*) echo "arm";;
+    *) echo "";
+  esac
+}
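+
+# Example (illustrative device): a phone reporting ro.product.cpu.abi as
+# "armeabi-v7a" matches the arm* pattern above and maps to 'arm'.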
+
+if [ -z "$TARGET_ARCH" ]; then
+  TARGET_ARCH=$(get_gyp_target_arch)
+  if [ -z "$TARGET_ARCH" ]; then
+    TARGET_ARCH=arm
+  fi
+else
+  # Nit: accept Chromium's 'ia32' as a valid target architecture. This
+  # script prefers the NDK 'x86' name instead because it is used to find
+  # NDK-specific files (e.g. the host gdb).
+  if [ "$TARGET_ARCH" = "ia32" ]; then
+    TARGET_ARCH=x86
+    log "Auto-config: --arch=$TARGET_ARCH  (equivalent to ia32)"
+  fi
+fi
+
+# Detect the NDK system name, i.e. the name used to identify the host.
+# out: NDK system name (e.g. 'linux' or 'darwin')
+get_ndk_host_system () {
+  local HOST_OS
+  if [ -z "$NDK_HOST_SYSTEM" ]; then
+    HOST_OS=$(uname -s)
+    case $HOST_OS in
+      Linux) NDK_HOST_SYSTEM=linux;;
+      Darwin) NDK_HOST_SYSTEM=darwin;;
+      *) panic "You can't run this script on this system: $HOST_OS";;
+    esac
+  fi
+  echo "$NDK_HOST_SYSTEM"
+}
+
+# Detect the NDK host architecture name.
+# out: NDK arch name (e.g. 'x86' or 'x86_64')
+get_ndk_host_arch () {
+  local HOST_ARCH HOST_OS
+  if [ -z "$NDK_HOST_ARCH" ]; then
+    HOST_OS=$(get_ndk_host_system)
+    HOST_ARCH=$(uname -p)
+    case $HOST_ARCH in
+      i?86) NDK_HOST_ARCH=x86;;
+      x86_64|amd64) NDK_HOST_ARCH=x86_64;;
+      *) panic "You can't run this script on this host architecture: $HOST_ARCH";;
+    esac
+    # Darwin trick: "uname -p" always returns i386 on 64-bit installations.
+    if [ "$HOST_OS" = darwin -a "$NDK_HOST_ARCH" = "x86" ]; then
+      # Use '/usr/bin/file', not just 'file' to avoid buggy MacPorts
+      # implementations of the tool. See http://b.android.com/53769
+      HOST_64BITS=$(/usr/bin/file -L "$SHELL" | grep -e "x86[_-]64")
+      if [ "$HOST_64BITS" ]; then
+        NDK_HOST_ARCH=x86_64
+      fi
+    fi
+  fi
+  echo "$NDK_HOST_ARCH"
+}
+
+# Convert an NDK architecture name into a GNU configure triplet.
+# $1: NDK architecture name (e.g. 'arm')
+# Out: Android GNU configure triplet (e.g. 'arm-linux-androideabi')
+get_arch_gnu_config () {
+  case $1 in
+    arm)
+      echo "arm-linux-androideabi"
+      ;;
+    arm64)
+      echo "aarch64-linux-android"
+      ;;
+    x86)
+      echo "i686-linux-android"
+      ;;
+    x86_64)
+      echo "x86_64-linux-android"
+      ;;
+    mips)
+      echo "mipsel-linux-android"
+      ;;
+    *)
+      echo "$ARCH-linux-android"
+      ;;
+  esac
+}
+
+# Convert an NDK architecture name into a toolchain name prefix
+# $1: NDK architecture name (e.g. 'arm')
+# Out: NDK toolchain name prefix (e.g. 'arm-linux-androideabi')
+get_arch_toolchain_prefix () {
+  # Return the configure triplet, except for x86!
+  if [ "$1" = "x86" ]; then
+    echo "$1"
+  else
+    get_arch_gnu_config $1
+  fi
+}
+
+# Find a NDK toolchain prebuilt file or sub-directory.
+# This will probe the various arch-specific toolchain directories
+# in the NDK for the needed file.
+# $1: NDK install path
+# $2: NDK architecture name
+# $3: prebuilt sub-path to look for.
+# Out: file path, or empty if none is found.
+get_ndk_toolchain_prebuilt () {
+  local NDK_DIR="${1%/}"
+  local ARCH="$2"
+  local SUBPATH="$3"
+  local NAME="$(get_arch_toolchain_prefix $ARCH)"
+  local FILE TARGET
+  FILE=$NDK_DIR/toolchains/$NAME-4.9/prebuilt/$SUBPATH
+  if [ ! -f "$FILE" ]; then
+    FILE=$NDK_DIR/toolchains/$NAME-4.8/prebuilt/$SUBPATH
+    if [ ! -f "$FILE" ]; then
+      FILE=
+    fi
+  fi
+  echo "$FILE"
+}
+
+# Find the path to an NDK's toolchain full prefix for a given architecture
+# $1: NDK install path
+# $2: NDK target architecture name
+# Out: install path + binary prefix (e.g.
+#      ".../path/to/bin/arm-linux-androideabi-")
+get_ndk_toolchain_fullprefix () {
+  local NDK_DIR="$1"
+  local ARCH="$2"
+  local TARGET NAME HOST_OS HOST_ARCH GCC CONFIG
+
+  # NOTE: This will need to be updated if the NDK changes the names or moves
+  #        the location of its prebuilt toolchains.
+  #
+  GCC=
+  HOST_OS=$(get_ndk_host_system)
+  HOST_ARCH=$(get_ndk_host_arch)
+  CONFIG=$(get_arch_gnu_config $ARCH)
+  GCC=$(get_ndk_toolchain_prebuilt \
+        "$NDK_DIR" "$ARCH" "$HOST_OS-$HOST_ARCH/bin/$CONFIG-gcc")
+  if [ -z "$GCC" -a "$HOST_ARCH" = "x86_64" ]; then
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/$CONFIG-gcc")
+  fi
+  if [ ! -f "$GCC" -a "$ARCH" = "x86" ]; then
+    # Special case, the x86 toolchain used to be incorrectly
+    # named i686-android-linux-gcc!
+    GCC=$(get_ndk_toolchain_prebuilt \
+          "$NDK_DIR" "$ARCH" "$HOST_OS-x86/bin/i686-android-linux-gcc")
+  fi
+  if [ -z "$GCC" ]; then
+    panic "Cannot find Android NDK toolchain for '$ARCH' architecture. \
+Please verify your NDK installation!"
+  fi
+  echo "${GCC%%gcc}"
+}
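+
+# Example result (assuming an 'arm' target and a linux-x86_64 host, with the
+# -4.9 toolchain present):
+#   $NDK_DIR/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin/arm-linux-androideabi-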
+
+# $1: NDK install path
+# $2: target architecture.
+get_ndk_gdbserver () {
+  local NDK_DIR="$1"
+  local ARCH=$2
+  local BINARY
+
+  # The location has moved after NDK r8
+  BINARY=$NDK_DIR/prebuilt/android-$ARCH/gdbserver/gdbserver
+  if [ ! -f "$BINARY" ]; then
+    BINARY=$(get_ndk_toolchain_prebuilt "$NDK_DIR" "$ARCH" gdbserver)
+  fi
+  echo "$BINARY"
+}
+
+# Check/probe the path to the Android toolchain installation. Always
+# use the NDK versions of gdb and gdbserver. They must match to avoid
+# issues when both binaries do not speak the same wire protocol.
+#
+if [ -z "$TOOLCHAIN" ]; then
+  ANDROID_TOOLCHAIN=$(get_ndk_toolchain_fullprefix \
+                      "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  ANDROID_TOOLCHAIN=$(dirname "$ANDROID_TOOLCHAIN")
+  log "Auto-config: --toolchain=$ANDROID_TOOLCHAIN"
+else
+  # Be flexible, allow one to specify either the install path or the bin
+  # sub-directory in --toolchain:
+  #
+  if [ -d "$TOOLCHAIN/bin" ]; then
+    TOOLCHAIN=$TOOLCHAIN/bin
+  fi
+  ANDROID_TOOLCHAIN=$TOOLCHAIN
+fi
+
+# Cosmetic: Remove trailing directory separator.
+ANDROID_TOOLCHAIN=${ANDROID_TOOLCHAIN%/}
+
+# Find host GDB client binary
+if [ -z "$GDB" ]; then
+  GDB=$(which $ANDROID_TOOLCHAIN/*-$GDBEXEPOSTFIX 2>/dev/null | head -1)
+  if [ -z "$GDB" ]; then
+    panic "Can't find Android gdb client in your path, check your \
+--toolchain or --gdb path."
+  fi
+  log "Host gdb client: $GDB"
+fi
+
+# Find gdbserver binary, we will later push it to /data/local/tmp
+# This ensures that both gdbserver and $GDB talk the same binary protocol,
+# otherwise weird problems will appear.
+#
+if [ -z "$GDBSERVER" ]; then
+  GDBSERVER=$(get_ndk_gdbserver "$ANDROID_NDK_ROOT" "$TARGET_ARCH")
+  if [ -z "$GDBSERVER" ]; then
+    panic "Can't find NDK gdbserver binary. use --gdbserver to specify \
+valid one!"
+  fi
+  log "Auto-config: --gdbserver=$GDBSERVER"
+fi
+
+# A unique ID for this script's session. This needs to be the same in all
+# sub-shell commands we're going to launch, so take the PID of the launcher
+# process.
+TMP_ID=$$
+
+# Temporary directory, will get cleaned up on exit.
+TMPDIR=/tmp/$USER-adb-gdb-tmp-$TMP_ID
+mkdir -p "$TMPDIR" && rm -rf "$TMPDIR"/*
+
+GDBSERVER_PIDFILE="$TMPDIR"/gdbserver-$TMP_ID.pid
+
+# If --force is specified, try to kill any gdbserver process started by the
+# same user on the device. Normally, these are killed automatically by the
+# script on exit, but there are a few corner cases where this would still
+# be needed.
+if [ "$FORCE" ]; then
+  GDBSERVER_PIDS=$(adb_shell ps | awk '$9 ~ /gdbserver/ { print $2; }')
+  for GDB_PID in $GDBSERVER_PIDS; do
+    log "Killing previous gdbserver (PID=$GDB_PID)"
+    adb_shell kill -9 $GDB_PID
+  done
+fi
+
+if [ "$START" ]; then
+  log "Starting $PROGRAM_NAME on device."
+  adb_shell am start -n $PACKAGE_NAME/$ACTIVITY 2>/dev/null
+  adb_shell ps | grep -q $PACKAGE_NAME
+  fail_panic "Could not start $PROGRAM_NAME on device. Are you sure the \
+package is installed?"
+fi
+
+# Return the timestamp of a given file, as the number of seconds since the epoch.
+# $1: file path
+# Out: file timestamp
+get_file_timestamp () {
+  stat -c %Y "$1" 2>/dev/null
+}
+
+# Detect the build type and symbol directory. This is done by finding
+# the most recent sub-directory containing debug shared libraries under
+# $CHROMIUM_SRC/$CHROMIUM_OUT_DIR/
+#
+# $1: $BUILDTYPE value, can be empty
+# Out: nothing, but this sets SYMBOL_DIR
+#
+detect_symbol_dir () {
+  local SUBDIRS SUBDIR LIST DIR DIR_LIBS TSTAMP
+  # Note: Ninja places debug libraries under out/$BUILDTYPE/lib/, while
+  # Make places them under out/$BUILDTYPE/lib.target.
+  if [ "$1" ]; then
+    SUBDIRS="$1/lib $1/lib.target"
+  else
+    SUBDIRS="Release/lib Debug/lib Release/lib.target Debug/lib.target"
+  fi
+  LIST=$TMPDIR/scan-subdirs-$$.txt
+  printf "" > "$LIST"
+  for SUBDIR in $SUBDIRS; do
+    DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
+    if [ -d "$DIR" ]; then
+      # Ignore build directories that don't contain symbol versions
+      # of the shared libraries.
+      DIR_LIBS=$(ls "$DIR"/lib*.so 2>/dev/null)
+      if [ -z "$DIR_LIBS" ]; then
+        echo "No shared libs: $DIR"
+        continue
+      fi
+      TSTAMP=$(get_file_timestamp "$DIR")
+      printf "%s %s\n" "$TSTAMP" "$SUBDIR" >> "$LIST"
+    fi
+  done
+  SUBDIR=$(cat $LIST | sort -r | head -1 | cut -d" " -f2)
+  rm -f "$LIST"
+
+  if [ -z "$SUBDIR" ]; then
+    if [ -z "$1" ]; then
+      panic "Could not find any build directory under \
+$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Please build the program first!"
+    else
+      panic "Could not find any $1 directory under \
+$CHROMIUM_SRC/$CHROMIUM_OUT_DIR. Check your build type!"
+    fi
+  fi
+
+  SYMBOL_DIR=$CHROMIUM_SRC/$CHROMIUM_OUT_DIR/$SUBDIR
+  log "Auto-config: --symbol-dir=$SYMBOL_DIR"
+}
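+
+# Example (illustrative layout): for a default Ninja Debug build this
+# typically resolves SYMBOL_DIR to $CHROMIUM_SRC/out/Debug/lib.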
+
+if [ -z "$SYMBOL_DIR" ]; then
+  detect_symbol_dir "$BUILDTYPE"
+fi
+
+# Allow several concurrent debugging sessions
+TARGET_GDBSERVER=/data/data/$PACKAGE_NAME/gdbserver-adb-gdb-$TMP_ID
+TMP_TARGET_GDBSERVER=/data/local/tmp/gdbserver-adb-gdb-$TMP_ID
+
+# Return the build fingerprint contained in a build.prop file.
+# $1: path to build.prop file
+get_build_fingerprint_from () {
+  cat "$1" | grep -e '^ro.build.fingerprint=' | cut -d= -f2
+}
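+
+# Example fingerprint value (purely illustrative):
+#   google/hammerhead/hammerhead:5.1.1/LMY48M/2167285:user/release-keys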
+
+
+ORG_PULL_LIBS_DIR=$PULL_LIBS_DIR
+PULL_LIBS_DIR=${PULL_LIBS_DIR:-$DEFAULT_PULL_LIBS_DIR}
+
+HOST_FINGERPRINT=
+DEVICE_FINGERPRINT=$(adb_shell getprop ro.build.fingerprint)
+log "Device build fingerprint: $DEVICE_FINGERPRINT"
+
+# If --pull-libs-dir is not specified, and this is a platform build, check
+# whether we can use the symbolic libraries under $ANDROID_PRODUCT_OUT/symbols/
+# directly, if the build fingerprint matches the device.
+if [ -z "$ORG_PULL_LIBS_DIR" -a \
+     "$ANDROID_PRODUCT_OUT" -a \
+     -f "$ANDROID_PRODUCT_OUT/system/build.prop" ]; then
+  ANDROID_FINGERPRINT=$(get_build_fingerprint_from \
+                        "$ANDROID_PRODUCT_OUT"/system/build.prop)
+  log "Android build fingerprint:  $ANDROID_FINGERPRINT"
+  if [ "$ANDROID_FINGERPRINT" = "$DEVICE_FINGERPRINT" ]; then
+    log "Perfect match!"
+    PULL_LIBS_DIR=$ANDROID_PRODUCT_OUT/symbols
+    HOST_FINGERPRINT=$ANDROID_FINGERPRINT
+    if [ "$PULL_LIBS" ]; then
+      log "Ignoring --pull-libs since the device and platform build \
+fingerprints match."
+      NO_PULL_LIBS=true
+    fi
+  fi
+fi
+
+# If neither --pull-libs nor --no-pull-libs was specified, compare the build
+# fingerprint of the device with that of the cached system libraries on the host.
+#
+if [ -z "$NO_PULL_LIBS" -a -z "$PULL_LIBS" ]; then
+  if [ ! -f "$PULL_LIBS_DIR/build.prop" ]; then
+    log "Auto-config: --pull-libs  (no cached libraries)"
+    PULL_LIBS=true
+  else
+    HOST_FINGERPRINT=$(get_build_fingerprint_from "$PULL_LIBS_DIR/build.prop")
+    log "Host build fingerprint:   $HOST_FINGERPRINT"
+    if [ "$HOST_FINGERPRINT" == "$DEVICE_FINGERPRINT" ]; then
+      log "Auto-config: --no-pull-libs (fingerprint match)"
+      NO_PULL_LIBS=true
+    else
+      log "Auto-config: --pull-libs  (fingerprint mismatch)"
+      PULL_LIBS=true
+    fi
+  fi
+fi
+
+# Extract the system libraries from the device if necessary.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+fi
+
+mkdir -p "$PULL_LIBS_DIR"
+fail_panic "Can't create --libs-dir directory: $PULL_LIBS_DIR"
+
+# If requested, add support for Emacs' M-x gdb.  The gdb indirections make it
+# difficult to pass --annotate=3 to the gdb binary itself.
+GDB_ARGS=
+if [ "$ANNOTATE" ]; then
+  GDB_ARGS=$GDB_ARGS" --annotate=$ANNOTATE"
+fi
+
+# Get the PID from the first argument or else find the PID of the
+# browser process.
+if [ -z "$PID" ]; then
+  PROCESSNAME=$PACKAGE_NAME
+  if [ "$SANDBOXED_INDEX" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process$SANDBOXED_INDEX
+  elif [ "$SANDBOXED" ]; then
+    PROCESSNAME=$PROCESSNAME:sandboxed_process
+    PID=$(adb_shell ps | \
+          awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1)
+  elif [ "$PRIVILEGED_INDEX" ]; then
+    PROCESSNAME=$PROCESSNAME:privileged_process$PRIVILEGED_INDEX
+  elif [ "$PRIVILEGED" ]; then
+    PROCESSNAME=$PROCESSNAME:privileged_process
+    PID=$(adb_shell ps | \
+          awk '$9 ~ /^'$PROCESSNAME'/ { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    PID=$(adb_shell ps | \
+          awk '$9 == "'$PROCESSNAME'" { print $2; }' | head -1)
+  fi
+  if [ -z "$PID" ]; then
+    if [ "$START" ]; then
+      panic "Can't find application process PID, did it crash?"
+    else
+      panic "Can't find application process PID, are you sure it is \
+running? Try using --start."
+    fi
+  fi
+  log "Found process PID: $PID"
+elif [ "$SANDBOXED" ]; then
+  echo "WARNING: --sandboxed option ignored due to use of --pid."
+elif [ "$PRIVILEGED" ]; then
+  echo "WARNING: --privileged option ignored due to use of --pid."
+fi
+
+# Determine if 'adb shell' runs as root or not.
+# If so, we can launch gdbserver directly, otherwise, we have to
+# use run-as $PACKAGE_NAME ..., which requires the package to be debuggable.
+#
+if [ "$SU_PREFIX" ]; then
+  # Need to check that this works properly.
+  SU_PREFIX_TEST_LOG=$TMPDIR/su-prefix.log
+  adb_shell $SU_PREFIX \"echo "foo"\" > $SU_PREFIX_TEST_LOG 2>&1
+  if [ $? != 0 -o "$(cat $SU_PREFIX_TEST_LOG)" != "foo" ]; then
+    echo "ERROR: Cannot use '$SU_PREFIX' as a valid su prefix:"
+    echo "$ adb shell $SU_PREFIX \"echo foo\""
+    cat $SU_PREFIX_TEST_LOG
+    exit 1
+  fi
+  COMMAND_PREFIX="$SU_PREFIX \""
+  COMMAND_SUFFIX="\""
+else
+  SHELL_UID=$(adb shell cat /proc/self/status | \
+              awk '$1 == "Uid:" { print $2; }')
+  log "Shell UID: $SHELL_UID"
+  if [ "$SHELL_UID" != 0 -o -n "$NO_ROOT" ]; then
+    COMMAND_PREFIX="run-as $PACKAGE_NAME"
+    COMMAND_SUFFIX=
+  else
+    COMMAND_PREFIX=
+    COMMAND_SUFFIX=
+  fi
+fi
+log "Command prefix: '$COMMAND_PREFIX'"
+log "Command suffix: '$COMMAND_SUFFIX'"
+
+# Pull the device's system libraries that are mapped by our process.
+# Pulling all system libraries would take too long, so determine which ones
+# we need by looking at /proc/$PID/maps instead.
+if [ "$PULL_LIBS" -a -z "$NO_PULL_LIBS" ]; then
+  echo "Extracting system libraries into: $PULL_LIBS_DIR"
+  rm -f $PULL_LIBS_DIR/build.prop
+  MAPPINGS=$(adb_shell $COMMAND_PREFIX cat /proc/$PID/maps $COMMAND_SUFFIX)
+  if [ $? != 0 ]; then
+    echo "ERROR: Could not list process's memory mappings."
+    if [ "$SU_PREFIX" ]; then
+      panic "Are you sure your --su-prefix is correct?"
+    else
+      panic "Use --su-prefix if the application is not debuggable."
+    fi
+  fi
+  SYSTEM_LIBS=$(echo "$MAPPINGS" | \
+      awk '$6 ~ /\/system\/.*\.so$/ { print $6; }' | sort -u)
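+  # Example (illustrative) /proc/$PID/maps line matched by the filter above:
+  #   b6f2a000-b6f4d000 r-xp 00000000 b3:19 1234   /system/lib/libm.so
+  # The sixth field is the mapped path; only /system/...*.so entries are kept.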
+  for SYSLIB in /system/bin/linker $SYSTEM_LIBS; do
+    echo "Pulling from device: $SYSLIB"
+    DST_FILE=$PULL_LIBS_DIR$SYSLIB
+    DST_DIR=$(dirname "$DST_FILE")
+    mkdir -p "$DST_DIR" && adb pull $SYSLIB "$DST_FILE" 2>/dev/null
+    fail_panic "Could not pull $SYSLIB from device !?"
+  done
+  echo "Pulling device build.prop"
+  adb pull /system/build.prop $PULL_LIBS_DIR/build.prop
+  fail_panic "Could not pull device build.prop !?"
+fi
+
+# Find all the sub-directories of $PULL_LIBS_DIR, up to depth 4
+# so we can add them to solib-search-path later.
+SOLIB_DIRS=$(find $PULL_LIBS_DIR -mindepth 1 -maxdepth 4 -type d | \
+             grep -v "^$" | tr '\n' ':')
+
+# This is a re-implementation of gdbclient, where we use compatible
+# versions of gdbserver and $GDBNAME to ensure that everything works
+# properly.
+#
+
+# Push gdbserver to the device
+log "Pushing gdbserver $GDBSERVER to $TARGET_GDBSERVER"
+adb push $GDBSERVER $TMP_TARGET_GDBSERVER &>/dev/null
+adb shell $COMMAND_PREFIX cp $TMP_TARGET_GDBSERVER $TARGET_GDBSERVER
+adb shell rm $TMP_TARGET_GDBSERVER
+fail_panic "Could not copy gdbserver to the device!"
+
+if [ -z "$PORT" ]; then
+    PORT=5039
+fi
+HOST_PORT=$PORT
+TARGET_PORT=$PORT
+
+# Select correct app_process for architecture.
+case $TARGET_ARCH in
+      arm|x86|mips) GDBEXEC=app_process;;
+      arm64|x86_64) GDBEXEC=app_process64;;
+      *) fail_panic "Unknown app_process for architecture!";;
+esac
+
+# Detect AddressSanitizer setup on the device. In that case app_process is a
+# script, and the real executable is app_process.real.
+GDBEXEC_ASAN=app_process.real
+adb_shell ls /system/bin/$GDBEXEC_ASAN
+if [ $? == 0 ]; then
+    GDBEXEC=$GDBEXEC_ASAN
+fi
+
+# Pull the app_process binary from the device.
+log "Pulling $GDBEXEC from device"
+adb pull /system/bin/$GDBEXEC "$TMPDIR"/$GDBEXEC &>/dev/null
+fail_panic "Could not retrieve $GDBEXEC from the device!"
+
+# Set up network redirection
+log "Setting network redirection (host:$HOST_PORT -> device:$TARGET_PORT)"
+adb forward tcp:$HOST_PORT tcp:$TARGET_PORT
+fail_panic "Could not setup network redirection from \
+host:localhost:$HOST_PORT to device:localhost:$TARGET_PORT!"
+
+# Start gdbserver in the background
+# Note that using run-as requires the package to be debuggable.
+#
+# If not, this will fail horribly. The alternative is to run the
+# program as root, which of course requires root privileges.
+# Maybe we should add a --root option to enable this?
+#
+log "Starting gdbserver in the background:"
+GDBSERVER_LOG=$TMPDIR/gdbserver-$TMP_ID.log
+log "adb shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+--attach $PID $COMMAND_SUFFIX"
+("$ADB" shell $COMMAND_PREFIX $TARGET_GDBSERVER :$TARGET_PORT \
+ --attach $PID $COMMAND_SUFFIX > $GDBSERVER_LOG 2>&1) &
+GDBSERVER_PID=$!
+echo "$GDBSERVER_PID" > $GDBSERVER_PIDFILE
+log "background job pid: $GDBSERVER_PID"
+
+# Check that it is still running after a few seconds. If not, this means we
+# could not properly attach to it.
+sleep 2
+log "Job control: $(jobs -l)"
+STATE=$(jobs -l | awk '$2 == "'$GDBSERVER_PID'" { print $3; }')
+if [ "$STATE" != "Running" ]; then
+  echo "ERROR: GDBServer could not attach to PID $PID!"
+  if [ $(adb_shell su -c getenforce) != "Permissive" ];  then
+    echo "Device mode is Enforcing. Changing Device mode to Permissive "
+    $(adb_shell su -c setenforce 0)
+    if [ $(adb_shell su -c getenforce) != "Permissive" ]; then
+      echo "ERROR: Failed to Change Device mode to Permissive"
+      echo "Failure log (use --verbose for more information):"
+      cat $GDBSERVER_LOG
+      exit 1
+    fi
+  else
+    echo "Failure log (use --verbose for more information):"
+    cat $GDBSERVER_LOG
+    exit 1
+  fi
+fi
+
+# Generate a file containing useful GDB initialization commands
+readonly COMMANDS=$TMPDIR/gdb.init
+log "Generating GDB initialization commands file: $COMMANDS"
+echo -n "" > $COMMANDS
+echo "set print pretty 1" >> $COMMANDS
+echo "python" >> $COMMANDS
+echo "import sys" >> $COMMANDS
+echo "sys.path.insert(0, '$CHROMIUM_SRC/tools/gdb/')" >> $COMMANDS
+echo "try:" >> $COMMANDS
+echo "  import gdb_chrome" >> $COMMANDS
+echo "finally:" >> $COMMANDS
+echo "  sys.path.pop(0)" >> $COMMANDS
+echo "end" >> $COMMANDS
+echo "file $TMPDIR/$GDBEXEC" >> $COMMANDS
+echo "directory $CHROMIUM_SRC" >> $COMMANDS
+echo "set solib-absolute-prefix $PULL_LIBS_DIR" >> $COMMANDS
+echo "set solib-search-path $SOLIB_DIRS:$PULL_LIBS_DIR:$SYMBOL_DIR" \
+    >> $COMMANDS
+echo "echo Attaching and reading symbols, this may take a while.." \
+    >> $COMMANDS
+echo "target remote :$HOST_PORT" >> $COMMANDS
+
+if [ "$GDBINIT" ]; then
+  cat "$GDBINIT" >> $COMMANDS
+fi
+
+if [ "$VERBOSE" -gt 0 ]; then
+  echo "### START $COMMANDS"
+  cat $COMMANDS
+  echo "### END $COMMANDS"
+fi
+
+log "Launching gdb client: $GDB $GDB_ARGS -x $COMMANDS"
+$GDB $GDB_ARGS -x $COMMANDS &&
+rm -f "$GDBSERVER_PIDFILE"
diff --git a/build/android/adb_gdb_android_webview_shell b/build/android/adb_gdb_android_webview_shell
new file mode 100755
index 0000000..f685fda
--- /dev/null
+++ b/build/android/adb_gdb_android_webview_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start an Android WebView shell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.AwShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=AwShellApplication \
+    --package-name=org.chromium.android_webview.shell \
+    "$@"
diff --git a/build/android/adb_gdb_chrome_public b/build/android/adb_gdb_chrome_public
new file mode 100755
index 0000000..4366c83
--- /dev/null
+++ b/build/android/adb_gdb_chrome_public
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ChromePublic process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=com.google.android.apps.chrome.Main
+"$PROGDIR"/adb_gdb \
+    --program-name=ChromePublic \
+    --package-name=org.chromium.chrome \
+    "$@"
diff --git a/build/android/adb_gdb_chrome_shell b/build/android/adb_gdb_chrome_shell
new file mode 100755
index 0000000..e5c8a30
--- /dev/null
+++ b/build/android/adb_gdb_chrome_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ChromeShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ChromeShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ChromeShell \
+    --package-name=org.chromium.chrome.shell \
+    "$@"
diff --git a/build/android/adb_gdb_content_shell b/build/android/adb_gdb_content_shell
new file mode 100755
index 0000000..18e1a61
--- /dev/null
+++ b/build/android/adb_gdb_content_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a ContentShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.ContentShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=ContentShell \
+    --package-name=org.chromium.content_shell_apk \
+    "$@"
diff --git a/build/android/adb_gdb_cronet_sample b/build/android/adb_gdb_cronet_sample
new file mode 100755
index 0000000..8d0c864
--- /dev/null
+++ b/build/android/adb_gdb_cronet_sample
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a CronetSample process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.CronetSampleActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=CronetSample \
+    --package-name=org.chromium.cronet_sample_apk \
+    "$@"
diff --git a/build/android/adb_gdb_mojo_shell b/build/android/adb_gdb_mojo_shell
new file mode 100755
index 0000000..ba91149
--- /dev/null
+++ b/build/android/adb_gdb_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Attach to or start a MojoShell process and debug it.
+# See --help for details.
+#
+PROGDIR=$(dirname "$0")
+export ADB_GDB_PROGNAME=$(basename "$0")
+export ADB_GDB_ACTIVITY=.MojoShellActivity
+"$PROGDIR"/adb_gdb \
+    --program-name=MojoShell \
+    --package-name=org.chromium.mojo_shell_apk \
+    "$@"
diff --git a/build/android/adb_install_apk.py b/build/android/adb_install_apk.py
new file mode 100755
index 0000000..50faea7
--- /dev/null
+++ b/build/android/adb_install_apk.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility script to install APKs from the command line quickly."""
+
+import argparse
+import glob
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import apk_helper
+from pylib.utils import run_tests_helper
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  apk_group = parser.add_mutually_exclusive_group(required=True)
+  apk_group.add_argument('--apk', dest='apk_name',
+                         help='DEPRECATED The name of the apk containing the'
+                              ' application (with the .apk extension).')
+  apk_group.add_argument('apk_path', nargs='?',
+                         help='The path to the APK to install.')
+
+  # TODO(jbudorick): Remove once no clients pass --apk_package
+  parser.add_argument('--apk_package', help='DEPRECATED unused')
+  parser.add_argument('--split',
+                      action='append',
+                      dest='splits',
+                      help='A glob matching the apk splits. '
+                           'Can be specified multiple times.')
+  parser.add_argument('--keep_data',
+                      action='store_true',
+                      default=False,
+                      help='Keep the package data when installing '
+                           'the application.')
+  parser.add_argument('--debug', action='store_const', const='Debug',
+                      dest='build_type',
+                      default=os.environ.get('BUILDTYPE', 'Debug'),
+                      help='If set, run test suites under out/Debug. '
+                           'Default is env var BUILDTYPE or Debug')
+  parser.add_argument('--release', action='store_const', const='Release',
+                      dest='build_type',
+                      help='If set, run test suites under out/Release. '
+                           'Default is env var BUILDTYPE or Debug.')
+  parser.add_argument('-d', '--device', dest='device',
+                      help='Target device for apk to install on.')
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Enable verbose logging.')
+
+  args = parser.parse_args()
+
+  run_tests_helper.SetLogLevel(args.verbose)
+  constants.SetBuildType(args.build_type)
+
+  apk = args.apk_path or args.apk_name
+  if not apk.endswith('.apk'):
+    apk += '.apk'
+  if not os.path.exists(apk):
+    apk = os.path.join(constants.GetOutDirectory(), 'apks', apk)
+    if not os.path.exists(apk):
+      parser.error('%s not found.' % apk)
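+  # Example (illustrative; the APK name is a placeholder):
+  # 'adb_install_apk.py ChromePublic.apk' falls back to
+  # <output dir>/apks/ChromePublic.apk when the bare path does not exist.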
+
+  if args.splits:
+    splits = []
+    base_apk_package = apk_helper.ApkHelper(apk).GetPackageName()
+    for split_glob in args.splits:
+      apks = [f for f in glob.glob(split_glob) if f.endswith('.apk')]
+      if not apks:
+        logging.warning('No apks matched for %s.' % split_glob)
+      for f in apks:
+        helper = apk_helper.ApkHelper(f)
+        if (helper.GetPackageName() == base_apk_package
+            and helper.GetSplitName()):
+          splits.append(f)
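+    # Example (illustrative; the glob is a placeholder):
+    # --split 'out/Release/apks/*-density.apk' adds only those matching APKs
+    # whose package name equals the base APK's and that declare a split name.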
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+
+  if args.device:
+    devices = [d for d in devices if d == args.device]
+    if not devices:
+      raise device_errors.DeviceUnreachableError(args.device)
+  elif not devices:
+    raise device_errors.NoDevicesError()
+
+  def blacklisting_install(device):
+    try:
+      if args.splits:
+        device.InstallSplitApk(apk, splits, reinstall=args.keep_data)
+      else:
+        device.Install(apk, reinstall=args.keep_data)
+    except device_errors.CommandFailedError:
+      logging.exception('Failed to install %s', apk)
+      device_blacklist.ExtendBlacklist([str(device)])
+      logging.warning('Blacklisting %s', str(device))
+    except device_errors.CommandTimeoutError:
+      logging.exception('Timed out while installing %s', apk)
+      device_blacklist.ExtendBlacklist([str(device)])
+      logging.warning('Blacklisting %s', str(device))
+
+  device_utils.DeviceUtils.parallel(devices).pMap(blacklisting_install)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/adb_kill_android_webview_shell b/build/android/adb_kill_android_webview_shell
new file mode 100755
index 0000000..5f287f0
--- /dev/null
+++ b/build/android/adb_kill_android_webview_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running android webview shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.android_webview.shell')
+VAL=$(echo "$SHELL_PID_LINES" | wc -l)
+if [ $VAL -lt 1 ] ; then
+   echo "Not running android webview shell."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set -
+   else
+     echo "Android webview shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_kill_chrome_public b/build/android/adb_kill_chrome_public
new file mode 100755
index 0000000..5b539a0
--- /dev/null
+++ b/build/android/adb_kill_chrome_public
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running instance of ChromePublic.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep -w 'org.chromium.chrome')
+VAL=$(echo "$SHELL_PID_LINES" | wc -l)
+if [ $VAL -lt 1 ] ; then
+   echo "Not running ChromePublic."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set -
+   else
+     echo "ChromePublic does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_kill_chrome_shell b/build/android/adb_kill_chrome_shell
new file mode 100755
index 0000000..2b63c9a
--- /dev/null
+++ b/build/android/adb_kill_chrome_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running chrome shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.chrome.shell')
+VAL=$(echo "$SHELL_PID_LINES" | wc -l)
+if [ $VAL -lt 1 ] ; then
+   echo "Not running Chrome shell."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set -
+   else
+     echo "Chrome shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_kill_content_shell b/build/android/adb_kill_content_shell
new file mode 100755
index 0000000..e379dd4
--- /dev/null
+++ b/build/android/adb_kill_content_shell
@@ -0,0 +1,24 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Kill a running content shell.
+#
+# Assumes you have sourced the build/android/envsetup.sh script.
+
+SHELL_PID_LINES=$(adb shell ps | grep ' org.chromium.content_shell_apk')
+VAL=$(echo "$SHELL_PID_LINES" | wc -l)
+if [ $VAL -lt 1 ] ; then
+   echo "Not running Content shell."
+else
+   SHELL_PID=$(echo $SHELL_PID_LINES | awk '{print $2}')
+   if [ "$SHELL_PID" != "" ] ; then
+      set -x
+      adb shell kill $SHELL_PID
+      set -
+   else
+     echo "Content shell does not appear to be running."
+   fi
+fi
diff --git a/build/android/adb_logcat_monitor.py b/build/android/adb_logcat_monitor.py
new file mode 100755
index 0000000..d3cc67d
--- /dev/null
+++ b/build/android/adb_logcat_monitor.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Saves logcats from all connected devices.
+
+Usage: adb_logcat_monitor.py <base_dir> [<adb_binary_path>]
+
+This script will repeatedly poll adb for new devices and save logcats
+inside the <base_dir> directory, which it attempts to create.  The
+script will run until killed by an external signal.  To test, run the
+script in a shell and <Ctrl>-C it after a while.  It should be
+resilient across phone disconnects and reconnects and start the logcat
+early enough to not miss anything.
+"""
+
+import logging
+import os
+import re
+import shutil
+import signal
+import subprocess
+import sys
+import time
+
+# Map from device_id -> (process, logcat_num)
+devices = {}
+
+
+class TimeoutException(Exception):
+  """Exception used to signal a timeout."""
+  pass
+
+
+class SigtermError(Exception):
+  """Exception used to catch a sigterm."""
+  pass
+
+
+def StartLogcatIfNecessary(device_id, adb_cmd, base_dir):
+  """Spawns a adb logcat process if one is not currently running."""
+  process, logcat_num = devices[device_id]
+  if process:
+    if process.poll() is None:
+      # Logcat process is still happily running
+      return
+    else:
+      logging.info('Logcat for device %s has died', device_id)
+      error_filter = re.compile('- waiting for device -')
+      for line in process.stderr:
+        if not error_filter.match(line):
+          logging.error(device_id + ':   ' + line)
+
+  logging.info('Starting logcat %d for device %s', logcat_num,
+               device_id)
+  logcat_filename = 'logcat_%s_%03d' % (device_id, logcat_num)
+  logcat_file = open(os.path.join(base_dir, logcat_filename), 'w')
+  process = subprocess.Popen([adb_cmd, '-s', device_id,
+                              'logcat', '-v', 'threadtime'],
+                             stdout=logcat_file,
+                             stderr=subprocess.PIPE)
+  devices[device_id] = (process, logcat_num + 1)
+
+
+def GetAttachedDevices(adb_cmd):
+  """Gets the device list from adb.
+
+  We use an alarm in this function to avoid deadlocking from an external
+  dependency.
+
+  Args:
+    adb_cmd: binary to run adb
+
+  Returns:
+    list of devices or an empty list on timeout
+  """
+  signal.alarm(2)
+  try:
+    out, err = subprocess.Popen([adb_cmd, 'devices'],
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE).communicate()
+    if err:
+      logging.warning('adb device error %s', err.strip())
+    return re.findall('^(\\S+)\tdevice$', out, re.MULTILINE)
+  except TimeoutException:
+    logging.warning('"adb devices" command timed out')
+    return []
+  except (IOError, OSError):
+    logging.exception('Exception from "adb devices"')
+    return []
+  finally:
+    signal.alarm(0)
+
+
+def main(base_dir, adb_cmd='adb'):
+  """Monitor adb forever.  Expects a SIGINT (Ctrl-C) to kill."""
+  # We create the directory to ensure 'run once' semantics
+  if os.path.exists(base_dir):
+    print 'adb_logcat_monitor: %s already exists? Cleaning' % base_dir
+    shutil.rmtree(base_dir, ignore_errors=True)
+
+  os.makedirs(base_dir)
+  logging.basicConfig(filename=os.path.join(base_dir, 'eventlog'),
+                      level=logging.INFO,
+                      format='%(asctime)-2s %(levelname)-8s %(message)s')
+
+  # Set up the alarm for calling 'adb devices'. This is to ensure
+  # our script doesn't get stuck waiting for a process response
+  def TimeoutHandler(_signum, _unused_frame):
+    raise TimeoutException()
+  signal.signal(signal.SIGALRM, TimeoutHandler)
+
+  # Handle SIGTERMs to ensure clean shutdown
+  def SigtermHandler(_signum, _unused_frame):
+    raise SigtermError()
+  signal.signal(signal.SIGTERM, SigtermHandler)
+
+  logging.info('Started with pid %d', os.getpid())
+  pid_file_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+
+  try:
+    with open(pid_file_path, 'w') as f:
+      f.write(str(os.getpid()))
+    while True:
+      for device_id in GetAttachedDevices(adb_cmd):
+        if not device_id in devices:
+          subprocess.call([adb_cmd, '-s', device_id, 'logcat', '-c'])
+          devices[device_id] = (None, 0)
+
+      for device in devices:
+        # This will spawn logcat watchers for any device ever detected
+        StartLogcatIfNecessary(device, adb_cmd, base_dir)
+
+      time.sleep(5)
+  except SigtermError:
+    logging.info('Received SIGTERM, shutting down')
+  except: # pylint: disable=bare-except
+    logging.exception('Unexpected exception in main.')
+  finally:
+    for process, _ in devices.itervalues():
+      if process:
+        try:
+          process.terminate()
+        except OSError:
+          pass
+    os.remove(pid_file_path)
+
+
+if __name__ == '__main__':
+  if 2 <= len(sys.argv) <= 3:
+    print 'adb_logcat_monitor: Initializing'
+    sys.exit(main(*sys.argv[1:3]))
+
+  print 'Usage: %s <base_dir> [<adb_binary_path>]' % sys.argv[0]
diff --git a/build/android/adb_logcat_printer.py b/build/android/adb_logcat_printer.py
new file mode 100755
index 0000000..55176ab
--- /dev/null
+++ b/build/android/adb_logcat_printer.py
@@ -0,0 +1,213 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shutdown adb_logcat_monitor and print accumulated logs.
+
+To test, call './adb_logcat_printer.py <base_dir>' where
+<base_dir> contains 'adb logcat -v threadtime' files named as
+logcat_<deviceID>_<sequenceNum>
+
+The script will print the files to the output (stdout by default), and will
+combine multiple logcats from a single device if there is overlap.
+
+Additionally, if a <base_dir>/LOGCAT_MONITOR_PID exists, the script
+will attempt to terminate the contained PID by sending a SIGTERM and
+monitoring for the deletion of the aforementioned file.
+"""
+# pylint: disable=W0702
+
+import cStringIO
+import logging
+import optparse
+import os
+import re
+import signal
+import sys
+import time
+
+
+# Set this to debug for more verbose output
+LOG_LEVEL = logging.INFO
+
+
+def CombineLogFiles(list_of_lists, logger):
+  """Splices together multiple logcats from the same device.
+
+  Args:
+    list_of_lists: list of pairs (filename, list of timestamped lines)
+    logger: handler to log events
+
+  Returns:
+    list of lines with duplicates removed
+  """
+  cur_device_log = ['']
+  for cur_file, cur_file_lines in list_of_lists:
+    # Ignore files with just the logcat header
+    if len(cur_file_lines) < 2:
+      continue
+    common_index = 0
+    # Skip this step if list just has empty string
+    if len(cur_device_log) > 1:
+      try:
+        line = cur_device_log[-1]
+        # Used to make sure we only splice on a timestamped line
+        if re.match(r'^\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d{3} ', line):
+          common_index = cur_file_lines.index(line)
+        else:
+          logger.warning('splice error - no timestamp in "%s"?', line.strip())
+      except ValueError:
+        # The last line was valid but wasn't found in the next file
+        cur_device_log += ['***** POSSIBLE INCOMPLETE LOGCAT *****']
+        logger.info('Unable to splice %s. Incomplete logcat?', cur_file)
+
+    cur_device_log += ['*'*30 + '  %s' % cur_file]
+    cur_device_log.extend(cur_file_lines[common_index:])
+
+  return cur_device_log
+
+
+def FindLogFiles(base_dir):
+  """Search a directory for logcat files.
+
+  Args:
+    base_dir: directory to search
+
+  Returns:
+    Mapping of device_id to a sorted list of file paths for a given device
+  """
+  logcat_filter = re.compile(r'^logcat_(\S+)_(\d+)$')
+  # list of tuples (<device_id>, <seq num>, <full file path>)
+  filtered_list = []
+  for cur_file in os.listdir(base_dir):
+    matcher = logcat_filter.match(cur_file)
+    if matcher:
+      filtered_list += [(matcher.group(1), int(matcher.group(2)),
+                         os.path.join(base_dir, cur_file))]
+  filtered_list.sort()
+  file_map = {}
+  for device_id, _, cur_file in filtered_list:
+    if device_id not in file_map:
+      file_map[device_id] = []
+
+    file_map[device_id] += [cur_file]
+  return file_map
+
+
+def GetDeviceLogs(log_filenames, logger):
+  """Read log files, combine and format.
+
+  Args:
+    log_filenames: mapping of device_id to sorted list of file paths
+    logger: logger handle for logging events
+
+  Returns:
+    list of formatted device logs, one for each device.
+  """
+  device_logs = []
+
+  for device, device_files in log_filenames.iteritems():
+    logger.debug('%s: %s', device, str(device_files))
+    device_file_lines = []
+    for cur_file in device_files:
+      with open(cur_file) as f:
+        device_file_lines += [(cur_file, f.read().splitlines())]
+    combined_lines = CombineLogFiles(device_file_lines, logger)
+    # Prepend each line with a short unique ID so it's easy to see
+    # when the device changes.  We don't use the start of the device
+    # ID because it can be the same among devices.  Example lines:
+    # AB324:  foo
+    # AB324:  blah
+    device_logs += [('\n' + device[-5:] + ':  ').join(combined_lines)]
+  return device_logs
+
+
+def ShutdownLogcatMonitor(base_dir, logger):
+  """Attempts to shutdown adb_logcat_monitor and blocks while waiting."""
+  try:
+    monitor_pid_path = os.path.join(base_dir, 'LOGCAT_MONITOR_PID')
+    with open(monitor_pid_path) as f:
+      monitor_pid = int(f.readline())
+
+    logger.info('Sending SIGTERM to %d', monitor_pid)
+    os.kill(monitor_pid, signal.SIGTERM)
+    i = 0
+    while True:
+      time.sleep(.2)
+      if not os.path.exists(monitor_pid_path):
+        return
+      if not os.path.exists('/proc/%d' % monitor_pid):
+        logger.warning('Monitor (pid %d) terminated uncleanly?', monitor_pid)
+        return
+      logger.info('Waiting for logcat process to terminate.')
+      i += 1
+      if i >= 10:
+        logger.warning('Monitor pid did not terminate. Continuing anyway.')
+        return
+
+  except (ValueError, IOError, OSError):
+    logger.exception('Error signaling logcat monitor - continuing')
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] <log dir>')
+  parser.add_option('--output-path',
+                    help='Output file path (if unspecified, prints to stdout)')
+  options, args = parser.parse_args(argv)
+  if len(args) != 1:
+    parser.error('Wrong number of unparsed args')
+  base_dir = args[0]
+  if options.output_path:
+    output_file = open(options.output_path, 'w')
+  else:
+    output_file = sys.stdout
+
+  log_stringio = cStringIO.StringIO()
+  logger = logging.getLogger('LogcatPrinter')
+  logger.setLevel(LOG_LEVEL)
+  sh = logging.StreamHandler(log_stringio)
+  sh.setFormatter(logging.Formatter('%(asctime)-2s %(levelname)-8s'
+                                    ' %(message)s'))
+  logger.addHandler(sh)
+
+  try:
+    # Wait at least 5 seconds after base_dir is created before printing.
+    #
+    # The idea is that 'adb logcat > file' output consists of 2 phases:
+    #  1 Dump all the saved logs to the file
+    #  2 Stream log messages as they are generated
+    #
+    # We want to give enough time for phase 1 to complete.  There's no
+    # good method to tell how long to wait, but it usually only takes a
+    # second.  On most bots, this code path won't occur at all, since the
+    # adb_logcat_monitor.py command will have been spawned more than 5 seconds
+    # prior to this script being called.
+    try:
+      sleep_time = 5 - (time.time() - os.path.getctime(base_dir))
+    except OSError:
+      sleep_time = 5
+    if sleep_time > 0:
+      logger.warning('Monitor just started? Sleeping %.1fs', sleep_time)
+      time.sleep(sleep_time)
+
+    assert os.path.exists(base_dir), '%s does not exist' % base_dir
+    ShutdownLogcatMonitor(base_dir, logger)
+    separator = '\n' + '*' * 80 + '\n\n'
+    for log in GetDeviceLogs(FindLogFiles(base_dir), logger):
+      output_file.write(log)
+      output_file.write(separator)
+    with open(os.path.join(base_dir, 'eventlog')) as f:
+      output_file.write('\nLogcat Monitor Event Log\n')
+      output_file.write(f.read())
+  except:
+    logger.exception('Unexpected exception')
+
+  logger.info('Done.')
+  sh.flush()
+  output_file.write('\nLogcat Printer Event Log\n')
+  output_file.write(log_stringio.getvalue())
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/adb_profile_chrome b/build/android/adb_profile_chrome
new file mode 100755
index 0000000..21f6faf
--- /dev/null
+++ b/build/android/adb_profile_chrome
@@ -0,0 +1,8 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Start / stop profiling in chrome.
+exec "$(dirname "$0")"/../../tools/profile_chrome.py "$@"
diff --git a/build/android/adb_reverse_forwarder.py b/build/android/adb_reverse_forwarder.py
new file mode 100755
index 0000000..3ce5359
--- /dev/null
+++ b/build/android/adb_reverse_forwarder.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Command line tool for forwarding ports from a device to the host.
+
+Allows an Android device to connect to services running on the host machine,
+i.e., "adb forward" in reverse. Requires |host_forwarder| and |device_forwarder|
+to be built.
+"""
+
+import logging
+import optparse
+import sys
+import time
+
+from pylib import constants
+from pylib import forwarder
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+
+def main(argv):
+  parser = optparse.OptionParser(usage='Usage: %prog [options] device_port '
+                                 'host_port [device_port_2 host_port_2] ...',
+                                 description=__doc__)
+  parser.add_option('-v',
+                    '--verbose',
+                    dest='verbose_count',
+                    default=0,
+                    action='count',
+                    help='Verbose level (multiple times for more)')
+  parser.add_option('--device',
+                    help='Serial number of device we should use.')
+  parser.add_option('--debug', action='store_const', const='Debug',
+                    dest='build_type', default='Release',
+                    help='Use Debug build of host tools instead of Release.')
+
+  options, args = parser.parse_args(argv)
+  run_tests_helper.SetLogLevel(options.verbose_count)
+
+  if len(args) < 2 or not len(args) % 2:
+    parser.error('Need even number of port pairs')
+    sys.exit(1)
+
+  try:
+    port_pairs = map(int, args[1:])
+    port_pairs = zip(port_pairs[::2], port_pairs[1::2])
+  except ValueError:
+    parser.error('Bad port number')
+    sys.exit(1)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+
+  if options.device:
+    device = next((d for d in devices if d == options.device), None)
+    if not device:
+      raise device_errors.DeviceUnreachableError(options.device)
+  elif devices:
+    device = devices[0]
+    logging.info('No device specified. Defaulting to %s', devices[0])
+  else:
+    raise device_errors.NoDevicesError()
+
+  constants.SetBuildType(options.build_type)
+  try:
+    forwarder.Forwarder.Map(port_pairs, device)
+    while True:
+      time.sleep(60)
+  except KeyboardInterrupt:
+    sys.exit(0)
+  finally:
+    forwarder.Forwarder.UnmapAllDevicePorts(device)
+
+if __name__ == '__main__':
+  main(sys.argv)
diff --git a/build/android/adb_run_android_webview_shell b/build/android/adb_run_android_webview_shell
new file mode 100755
index 0000000..1014a73
--- /dev/null
+++ b/build/android/adb_run_android_webview_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.android_webview.shell/.AwShellActivity \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_chrome_public b/build/android/adb_run_chrome_public
new file mode 100755
index 0000000..bf15071
--- /dev/null
+++ b/build/android/adb_run_chrome_public
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.chrome/com.google.android.apps.chrome.Main \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_chrome_shell b/build/android/adb_run_chrome_shell
new file mode 100755
index 0000000..79c4c32
--- /dev/null
+++ b/build/android/adb_run_chrome_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.chrome.shell/.ChromeShellActivity \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_content_shell b/build/android/adb_run_content_shell
new file mode 100755
index 0000000..3f01f3b
--- /dev/null
+++ b/build/android/adb_run_content_shell
@@ -0,0 +1,12 @@
+#!/bin/bash
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+
+adb shell am start \
+  -a android.intent.action.VIEW \
+  -n org.chromium.content_shell_apk/.ContentShellActivity \
+  ${optional_url:+-d "$optional_url"}
diff --git a/build/android/adb_run_mojo_shell b/build/android/adb_run_mojo_shell
new file mode 100755
index 0000000..b585e4a
--- /dev/null
+++ b/build/android/adb_run_mojo_shell
@@ -0,0 +1,16 @@
+#!/bin/bash
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+optional_url=$1
+parameters=$2
+
+adb logcat -c
+adb shell am start -S \
+  -a android.intent.action.VIEW \
+  -n org.chromium.mojo_shell_apk/.MojoShellActivity \
+  ${parameters:+--esa parameters "$parameters"} \
+  ${optional_url:+-d "$optional_url"}
+adb logcat -s MojoShellApplication MojoShellActivity chromium
diff --git a/build/android/android_no_jni_exports.lst b/build/android/android_no_jni_exports.lst
new file mode 100644
index 0000000..ffc6cf7
--- /dev/null
+++ b/build/android/android_no_jni_exports.lst
@@ -0,0 +1,17 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script makes all JNI exported symbols local, to prevent the JVM from
+# being able to find them, enforcing use of manual JNI function registration.
+# This is used for all Android binaries by default, unless they explicitly state
+# that they want JNI exported symbols to remain visible, as we need to ensure
+# the manual registration path is correct to maintain compatibility with the
+# crazy linker.
+# Check ld version script manual:
+# https://sourceware.org/binutils/docs-2.24/ld/VERSION.html#VERSION
+
+{
+  local:
+    Java_*;
+};
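+# Example (illustrative; the method name is made up): a JNI symbol such as
+#   Java_org_chromium_example_Foo_nativeInit
+# matches the Java_* pattern above and is therefore kept local (not exported).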
diff --git a/build/android/ant/BUILD.gn b/build/android/ant/BUILD.gn
new file mode 100644
index 0000000..a30fb54
--- /dev/null
+++ b/build/android/ant/BUILD.gn
@@ -0,0 +1,13 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+copy("keystore") {
+  sources = [
+    "chromium-debug.keystore",
+  ]
+
+  outputs = [
+    "$root_out_dir/chromium-debug.keystore",
+  ]
+}
diff --git a/build/android/ant/apk-package.xml b/build/android/ant/apk-package.xml
new file mode 100644
index 0000000..e8b76f7
--- /dev/null
+++ b/build/android/ant/apk-package.xml
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Copyright (C) 2005-2008 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+         http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+
+<project default="-package">
+  <property name="verbose" value="false" />
+  <property name="out.dir" location="${OUT_DIR}" />
+  <property name="out.absolute.dir" location="${out.dir}" />
+
+  <property name="sdk.dir" location="${ANDROID_SDK_ROOT}"/>
+  <property name="emma.device.jar" location="${EMMA_DEVICE_JAR}" />
+
+  <condition property="emma.enabled" value="true" else="false">
+    <equals arg1="${EMMA_INSTRUMENT}" arg2="1"/>
+  </condition>
+
+  <!-- jar file from where the tasks are loaded -->
+  <path id="android.antlibs">
+    <pathelement path="${sdk.dir}/tools/lib/ant-tasks.jar" />
+  </path>
+
+  <!-- Custom tasks -->
+  <taskdef resource="anttasks.properties" classpathref="android.antlibs" />
+
+  <condition property="build.target" value="release" else="debug">
+    <equals arg1="${CONFIGURATION_NAME}" arg2="Release" />
+  </condition>
+  <condition property="build.is.packaging.debug" value="true" else="false">
+    <equals arg1="${build.target}" arg2="debug" />
+  </condition>
+
+  <!-- Disables automatic signing. -->
+  <property name="build.is.signing.debug" value="false"/>
+
+  <!-- SDK tools assume that out.packaged.file is signed and name it "...-unaligned" -->
+  <property name="out.packaged.file" value="${UNSIGNED_APK_PATH}" />
+
+  <property name="native.libs.absolute.dir" location="${NATIVE_LIBS_DIR}" />
+
+  <!-- Intermediate files -->
+  <property name="resource.package.file.name" value="${RESOURCE_PACKAGED_APK_NAME}" />
+
+  <property name="intermediate.dex.file" location="${DEX_FILE_PATH}" />
+
+  <!-- Macro that enables passing a variable list of external jar files
+       to ApkBuilder. -->
+  <macrodef name="package-helper">
+    <element name="extra-jars" optional="yes" />
+    <sequential>
+      <apkbuilder
+          outfolder="${out.absolute.dir}"
+          resourcefile="${resource.package.file.name}"
+          apkfilepath="${out.packaged.file}"
+          debugpackaging="${build.is.packaging.debug}"
+          debugsigning="${build.is.signing.debug}"
+          verbose="${verbose}"
+          hascode="${HAS_CODE}"
+          previousBuildType="/"
+          buildType="${build.is.packaging.debug}/${build.is.signing.debug}">
+        <dex path="${intermediate.dex.file}"/>
+        <nativefolder path="${native.libs.absolute.dir}" />
+        <extra-jars/>
+      </apkbuilder>
+    </sequential>
+  </macrodef>
+
+
+  <!-- Packages the application. -->
+  <target name="-package">
+    <if condition="${emma.enabled}">
+      <then>
+        <package-helper>
+          <extra-jars>
+            <jarfile path="${emma.device.jar}" />
+          </extra-jars>
+        </package-helper>
+      </then>
+      <else>
+        <package-helper />
+      </else>
+    </if>
+  </target>
+</project>
diff --git a/build/android/ant/chromium-debug.keystore b/build/android/ant/chromium-debug.keystore
new file mode 100644
index 0000000..67eb0aa
--- /dev/null
+++ b/build/android/ant/chromium-debug.keystore
Binary files differ
diff --git a/build/android/ant/empty/res/.keep b/build/android/ant/empty/res/.keep
new file mode 100644
index 0000000..1fd038b
--- /dev/null
+++ b/build/android/ant/empty/res/.keep
@@ -0,0 +1,2 @@
+# This empty res folder can be passed to aapt while building Java libraries or
+# APKs that don't have any resources.
diff --git a/build/android/apkbuilder_action.gypi b/build/android/apkbuilder_action.gypi
new file mode 100644
index 0000000..27807d8
--- /dev/null
+++ b/build/android/apkbuilder_action.gypi
@@ -0,0 +1,79 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is a helper to java_apk.gypi. It should be used to create an
+# action that runs ApkBuilder via ANT.
+#
+# Required variables:
+#  apk_name - File name (minus path & extension) of the output apk.
+#  apk_path - Path to output apk.
+#  package_input_paths - Late-evaluated list of resource zips.
+#  native_libs_dir - Path to lib/ directory to use. Set to an empty directory
+#    if no native libs are needed.
+# Optional variables:
+#  has_code - Whether to include classes.dex in the apk.
+#  dex_path - Path to classes.dex. Used only when has_code=1.
+#  extra_inputs - List of extra action inputs.
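+#
+# Example (illustrative sketch; variable values are placeholders) of including
+# this helper from a target's 'actions' list:
+#   {
+#     'variables': { 'apk_name': 'Foo', 'apk_path': '<(out_dir)/Foo.apk',
+#                    'package_input_paths': [], 'native_libs_dir': '<(empty_dir)' },
+#     'includes': [ 'build/android/apkbuilder_action.gypi' ],
+#   }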
+{
+  'variables': {
+    'variables': {
+      'has_code%': 1,
+    },
+    'conditions': [
+      ['has_code == 0', {
+        'has_code_str': 'false',
+      }, {
+        'has_code_str': 'true',
+      }],
+    ],
+    'has_code%': '<(has_code)',
+    'extra_inputs%': [],
+    # Write the inputs list to a file, so that its mtime is updated when
+    # the list of inputs changes.
+    'inputs_list_file': '>|(apk_package.<(_target_name).<(apk_name).gypcmd >@(package_input_paths))',
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+  },
+  'action_name': 'apkbuilder_<(apk_name)',
+  'message': 'Packaging <(apk_name)',
+  'inputs': [
+    '<(DEPTH)/build/android/ant/apk-package.xml',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/ant.py',
+    '<(resource_packaged_apk_path)',
+    '<@(extra_inputs)',
+    '>@(package_input_paths)',
+    '>(inputs_list_file)',
+  ],
+  'outputs': [
+    '<(apk_path)',
+  ],
+  'conditions': [
+    ['has_code == 1', {
+      'inputs': ['<(dex_path)'],
+      'action': [
+        '-DDEX_FILE_PATH=<(dex_path)',
+      ]
+    }],
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/ant.py',
+    '--',
+    '-quiet',
+    '-DHAS_CODE=<(has_code_str)',
+    '-DANDROID_SDK_ROOT=<(android_sdk_root)',
+    '-DANDROID_SDK_TOOLS=<(android_sdk_tools)',
+    '-DRESOURCE_PACKAGED_APK_NAME=<(resource_packaged_apk_name)',
+    '-DNATIVE_LIBS_DIR=<(native_libs_dir)',
+    '-DAPK_NAME=<(apk_name)',
+    '-DCONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+    '-DOUT_DIR=<(intermediate_dir)',
+    '-DUNSIGNED_APK_PATH=<(apk_path)',
+    '-DEMMA_INSTRUMENT=<(emma_instrument)',
+    '-DEMMA_DEVICE_JAR=<(emma_device_jar)',
+    '-Dbasedir=.',
+    '-buildfile',
+    '<(DEPTH)/build/android/ant/apk-package.xml',
+  ]
+}
diff --git a/build/android/asan_symbolize.py b/build/android/asan_symbolize.py
new file mode 100755
index 0000000..10087a6
--- /dev/null
+++ b/build/android/asan_symbolize.py
@@ -0,0 +1,103 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import collections
+import optparse
+import os
+import re
+import sys
+
+from pylib import constants
+
+# Uses symbol.py from third_party/android_platform, not python's.
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                            'third_party/android_platform/development/scripts'))
+import symbol
+
+
+_RE_ASAN = re.compile(r'(.*?)(#\S*?) (\S*?) \((.*?)\+(.*?)\)')
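+# Example (illustrative) ASan stack frame and the fields extracted below:
+#     #04 0xb64f3b2f (/system/lib/libc.so+0x12b2f)
+# -> pos='#04', library='/system/lib/libc.so', rel_address='00012b2f'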
+
+def _ParseAsanLogLine(line):
+  m = re.match(_RE_ASAN, line)
+  if not m:
+    return None
+  return {
+      'prefix': m.group(1),
+      'library': m.group(4),
+      'pos': m.group(2),
+      'rel_address': '%08x' % int(m.group(5), 16),
+  }
+
+
+def _FindASanLibraries():
+  asan_lib_dir = os.path.join(constants.DIR_SOURCE_ROOT,
+                              'third_party', 'llvm-build',
+                              'Release+Asserts', 'lib')
+  asan_libs = []
+  for src_dir, _, files in os.walk(asan_lib_dir):
+    asan_libs += [os.path.relpath(os.path.join(src_dir, f))
+                  for f in files
+                  if f.endswith('.so')]
+  return asan_libs
+
+
+def _TranslateLibPath(library, asan_libs):
+  for asan_lib in asan_libs:
+    if os.path.basename(library) == os.path.basename(asan_lib):
+      return '/' + asan_lib
+  return symbol.TranslateLibPath(library)
+
+
+def _Symbolize(asan_input):
+  asan_libs = _FindASanLibraries()
+  libraries = collections.defaultdict(list)
+  asan_lines = []
+  for asan_log_line in [a.rstrip() for a in asan_input]:
+    m = _ParseAsanLogLine(asan_log_line)
+    if m:
+      libraries[m['library']].append(m)
+    asan_lines.append({'raw_log': asan_log_line, 'parsed': m})
+
+  all_symbols = collections.defaultdict(dict)
+  for library, items in libraries.iteritems():
+    libname = _TranslateLibPath(library, asan_libs)
+    lib_relative_addrs = set([i['rel_address'] for i in items])
+    info_dict = symbol.SymbolInformationForSet(libname,
+                                               lib_relative_addrs,
+                                               True)
+    if info_dict:
+      all_symbols[library]['symbols'] = info_dict
+
+  for asan_log_line in asan_lines:
+    m = asan_log_line['parsed']
+    if not m:
+      print asan_log_line['raw_log']
+      continue
+    if (m['library'] in all_symbols and
+        m['rel_address'] in all_symbols[m['library']]['symbols']):
+      s = all_symbols[m['library']]['symbols'][m['rel_address']][0]
+      print '%s%s %s %s' % (m['prefix'], m['pos'], s[0], s[1])
+    else:
+      print asan_log_line['raw_log']
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-l', '--logcat',
+                    help='File containing adb logcat output with ASan stacks. '
+                         'Use stdin if not specified.')
+  options, _ = parser.parse_args()
+  if options.logcat:
+    asan_input = file(options.logcat, 'r')
+  else:
+    asan_input = sys.stdin
+  _Symbolize(asan_input.readlines())
+
+
+if __name__ == "__main__":
+  sys.exit(main())
diff --git a/build/android/avd.py b/build/android/avd.py
new file mode 100755
index 0000000..c45544f
--- /dev/null
+++ b/build/android/avd.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Launches Android Virtual Devices with a set configuration for testing Chrome.
+
+The script will launch a specified number of Android Virtual Devices (AVDs).
+"""
+
+
+import install_emulator_deps
+import logging
+import optparse
+import os
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import emulator
+
+
+def main(argv):
+  # ANDROID_SDK_ROOT needs to be set to the location of the SDK used to launch
+  # the emulator to find the system images upon launch.
+  emulator_sdk = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
+  os.environ['ANDROID_SDK_ROOT'] = emulator_sdk
+
+  opt_parser = optparse.OptionParser(description='AVD script.')
+  opt_parser.add_option('--name', help='Optionally, name of an existing AVD '
+                        'to launch. If not specified, new AVDs will be created')
+  opt_parser.add_option('-n', '--num', dest='emulator_count',
+                        help='Number of emulators to launch (default is 1).',
+                        type='int', default='1')
+  opt_parser.add_option('--abi', default='x86',
+                        help='Platform of emulators to launch (x86 default).')
+  opt_parser.add_option('--api-level', dest='api_level',
+                        help='API level for the image, e.g. 19 for Android 4.4',
+                        type='int', default=constants.ANDROID_SDK_VERSION)
+
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  logging.basicConfig(level=logging.INFO,
+                      format='# %(asctime)-15s: %(message)s')
+  logging.root.setLevel(logging.INFO)
+
+  # Check if KVM is enabled for x86 AVDs and check for x86 system images.
+  # TODO(andrewhayden) Since we can fix all of these with install_emulator_deps
+  # why don't we just run it?
+  if options.abi == 'x86':
+    if not install_emulator_deps.CheckKVM():
+      logging.critical('ERROR: KVM must be enabled in BIOS, and installed. '
+                       'Enable KVM in BIOS and run install_emulator_deps.py')
+      return 1
+    elif not install_emulator_deps.CheckX86Image(options.api_level):
+      logging.critical('ERROR: System image for x86 AVD not installed. Run '
+                       'install_emulator_deps.py')
+      return 1
+
+  if not install_emulator_deps.CheckSDK():
+    logging.critical('ERROR: Emulator SDK not installed. Run '
+                     'install_emulator_deps.py.')
+    return 1
+
+  # If AVD is specified, check that the SDK has the required target. If not,
+  # check that the SDK has the desired target for the temporary AVDs.
+  api_level = options.api_level
+  if options.name:
+    android = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk', 'tools',
+                           'android')
+    avds_output = cmd_helper.GetCmdOutput([android, 'list', 'avd'])
+    names = re.findall(r'Name: (\w+)', avds_output)
+    api_levels = re.findall(r'API level (\d+)', avds_output)
+    try:
+      avd_index = names.index(options.name)
+    except ValueError:
+      logging.critical('ERROR: Specified AVD %s does not exist.' % options.name)
+      return 1
+    api_level = int(api_levels[avd_index])
+
+  if not install_emulator_deps.CheckSDKPlatform(api_level):
+    logging.critical('ERROR: Emulator SDK missing required target for API %d. '
+                     'Run install_emulator_deps.py.', api_level)
+    return 1
+
+  if options.name:
+    emulator.LaunchEmulator(options.name, options.abi)
+  else:
+    emulator.LaunchTempEmulators(options.emulator_count, options.abi,
+                                 options.api_level, True)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/bb_run_sharded_steps.py b/build/android/bb_run_sharded_steps.py
new file mode 100755
index 0000000..6aeba5b
--- /dev/null
+++ b/build/android/bb_run_sharded_steps.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""DEPRECATED!
+TODO(bulach): remove me once all other repositories reference
+'test_runner.py perf' directly.
+"""
+
+import optparse
+import sys
+
+from pylib import cmd_helper
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--steps',
+                    help='A JSON file containing all the steps to be '
+                         'sharded.')
+  parser.add_option('--flaky_steps',
+                    help='A JSON file containing steps that are flaky and '
+                         'whose exit codes will be ignored.')
+  parser.add_option('-p', '--print_results',
+                    help='Only prints the results for the previously '
+                         'executed step, do not run it again.')
+  options, _ = parser.parse_args(argv)
+  if options.print_results:
+    return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf',
+                              '--print-step', options.print_results])
+  flaky_options = []
+  if options.flaky_steps:
+    flaky_options = ['--flaky-steps', options.flaky_steps]
+  return cmd_helper.RunCmd(['build/android/test_runner.py', 'perf', '-v',
+                            '--steps', options.steps] + flaky_options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/OWNERS b/build/android/buildbot/OWNERS
new file mode 100644
index 0000000..f289720
--- /dev/null
+++ b/build/android/buildbot/OWNERS
@@ -0,0 +1,6 @@
+set noparent
+
+cmp@chromium.org
+jbudorick@chromium.org
+navabi@chromium.org
+
diff --git a/build/android/buildbot/bb_annotations.py b/build/android/buildbot/bb_annotations.py
new file mode 100644
index 0000000..059d673
--- /dev/null
+++ b/build/android/buildbot/bb_annotations.py
@@ -0,0 +1,46 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions to print buildbot messages."""
+
+def PrintLink(label, url):
+  """Adds a link with name |label| linking to |url| to current buildbot step.
+
+  Args:
+    label: A string with the name of the label.
+    url: A string of the URL.
+  """
+  print '@@@STEP_LINK@%s@%s@@@' % (label, url)
+
+
+def PrintMsg(msg):
+  """Appends |msg| to the current buildbot step text.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_TEXT@%s@@@' % msg
+
+
+def PrintSummaryText(msg):
+  """Appends |msg| to main build summary. Visible from waterfall.
+
+  Args:
+    msg: String to be appended.
+  """
+  print '@@@STEP_SUMMARY_TEXT@%s@@@' % msg
+
+
+def PrintError():
+  """Marks the current step as failed."""
+  print '@@@STEP_FAILURE@@@'
+
+
+def PrintWarning():
+  """Marks the current step with a warning."""
+  print '@@@STEP_WARNINGS@@@'
+
+
+def PrintNamedStep(step):
+  print '@@@BUILD_STEP %s@@@' % step
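+
+
+# Usage sketch (illustrative, not part of the original change): calling
+# PrintNamedStep('compile') and then PrintLink('results', 'http://example.com')
+# writes the following annotator markup to stdout:
+#
+#   @@@BUILD_STEP compile@@@
+#   @@@STEP_LINK@results@http://example.com@@@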
diff --git a/build/android/buildbot/bb_device_status_check.py b/build/android/buildbot/bb_device_status_check.py
new file mode 100755
index 0000000..917c51e
--- /dev/null
+++ b/build/android/buildbot/bb_device_status_check.py
@@ -0,0 +1,404 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A class to keep track of devices across builds and report state."""
+import json
+import logging
+import optparse
+import os
+import psutil
+import re
+import signal
+import smtplib
+import subprocess
+import sys
+import time
+import urllib
+
+import bb_annotations
+import bb_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__),
+                             os.pardir, os.pardir, 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper  # pylint: disable=F0401
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+from pylib.cmd_helper import GetCmdOutput
+from pylib.device import adb_wrapper
+from pylib.device import battery_utils
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_list
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+_RE_DEVICE_ID = re.compile(r'Device ID = (\d+)')
+
+def DeviceInfo(device, options):
+  """Gathers info on a device via various adb calls.
+
+  Args:
+    device: A DeviceUtils instance for the device to construct info about.
+    options: Command line options (used for the provisioning check).
+
+  Returns:
+    Tuple of build product, build id, battery level, a list of error messages,
+    a boolean indicating whether the device can be used for testing, and a
+    dict of device info for JSON output.
+  """
+  battery = battery_utils.BatteryUtils(device)
+
+  build_product = ''
+  build_id = ''
+  battery_level = 100
+  errors = []
+  dev_good = True
+  json_data = {}
+
+  try:
+    build_product = device.build_product
+    build_id = device.build_id
+
+    json_data = {
+      'serial': device.adb.GetDeviceSerial(),
+      'type': build_product,
+      'build': build_id,
+      'build_detail': device.GetProp('ro.build.fingerprint'),
+      'battery': {},
+      'imei_slice': 'Unknown',
+      'wifi_ip': device.GetProp('dhcp.wlan0.ipaddress'),
+    }
+
+    battery_info = {}
+    try:
+      battery_info = battery.GetBatteryInfo(timeout=5)
+      battery_level = int(battery_info.get('level', battery_level))
+      json_data['battery'] = battery_info
+    except device_errors.CommandFailedError:
+      logging.exception('Failed to get battery information for %s', str(device))
+
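+    # 'dumpsys iphonesubinfo' output includes a line of the form
+    # 'Device ID = <digits>'; only the last six digits are stored below,
+    # presumably to avoid recording the full IMEI.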
+    try:
+      for l in device.RunShellCommand(['dumpsys', 'iphonesubinfo'],
+                                      check_return=True, timeout=5):
+        m = _RE_DEVICE_ID.match(l)
+        if m:
+          json_data['imei_slice'] = m.group(1)[-6:]
+    except device_errors.CommandFailedError:
+      logging.exception('Failed to get IMEI slice for %s', str(device))
+
+    if battery_level < 15:
+      errors += ['Device critically low in battery.']
+      dev_good = False
+      if not battery.GetCharging():
+        battery.SetCharging(True)
+    if not options.no_provisioning_check:
+      setup_wizard_disabled = (
+          device.GetProp('ro.setupwizard.mode') == 'DISABLED')
+      if not setup_wizard_disabled and device.build_type != 'user':
+        errors += ['Setup wizard not disabled. Was it provisioned correctly?']
+    if (device.product_name == 'mantaray' and
+        battery_info.get('AC powered', None) != 'true'):
+      errors += ['Mantaray device not connected to AC power.']
+  except device_errors.CommandFailedError:
+    logging.exception('Failure while getting device status.')
+    dev_good = False
+  except device_errors.CommandTimeoutError:
+    logging.exception('Timeout while getting device status.')
+    dev_good = False
+
+  return (build_product, build_id, battery_level, errors, dev_good, json_data)
+
+
+def CheckForMissingDevices(options, devices):
+  """Uses file of previous online devices to detect broken phones.
+
+  Args:
+    options: Command line options; options.out_dir is used as the base
+      directory for loading and updating the device cache files.
+    devices: A list of DeviceUtils instances for the currently visible and
+      online attached devices.
+
+  Returns:
+    A list of error messages if devices are missing, otherwise None.
+  """
+  out_dir = os.path.abspath(options.out_dir)
+  device_serials = set(d.adb.GetDeviceSerial() for d in devices)
+
+  # last_devices denotes all known devices prior to this run
+  last_devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
+  last_missing_devices_path = os.path.join(out_dir,
+      device_list.LAST_MISSING_DEVICES_FILENAME)
+  try:
+    last_devices = device_list.GetPersistentDeviceList(last_devices_path)
+  except IOError:
+    # Ignore error, file might not exist
+    last_devices = []
+
+  try:
+    last_missing_devices = device_list.GetPersistentDeviceList(
+        last_missing_devices_path)
+  except IOError:
+    last_missing_devices = []
+
+  missing_devs = list(set(last_devices) - device_serials)
+  new_missing_devs = list(set(missing_devs) - set(last_missing_devices))
+
+  if new_missing_devs and os.environ.get('BUILDBOT_SLAVENAME'):
+    logging.info('new_missing_devs %s' % new_missing_devs)
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    bb_annotations.PrintSummaryText(devices_missing_msg)
+
+    from_address = 'chrome-bot@chromium.org'
+    to_addresses = ['chrome-labs-tech-ticket@google.com',
+                    'chrome-android-device-alert@google.com']
+    cc_addresses = ['chrome-android-device-alert@google.com']
+    subject = 'Devices offline on %s, %s, %s' % (
+      os.environ.get('BUILDBOT_SLAVENAME'),
+      os.environ.get('BUILDBOT_BUILDERNAME'),
+      os.environ.get('BUILDBOT_BUILDNUMBER'))
+    msg = ('Please reboot the following devices:\n%s' %
+           '\n'.join(map(str, new_missing_devs)))
+    SendEmail(from_address, to_addresses, cc_addresses, subject, msg)
+
+  all_known_devices = list(device_serials | set(last_devices))
+  device_list.WritePersistentDeviceList(last_devices_path, all_known_devices)
+  device_list.WritePersistentDeviceList(last_missing_devices_path, missing_devs)
+
+  if not all_known_devices:
+    # This can happen if for some reason the .last_devices file is not
+    # present or if it was empty.
+    return ['No online devices. Have any devices been plugged in?']
+  if missing_devs:
+    devices_missing_msg = '%d devices not detected.' % len(missing_devs)
+    bb_annotations.PrintSummaryText(devices_missing_msg)
+    return ['Current online devices: %s' % ', '.join(device_serials),
+            '%s are no longer visible. Were they removed?' % missing_devs]
+  else:
+    new_devs = device_serials - set(last_devices)
+    if new_devs and os.path.exists(last_devices_path):
+      bb_annotations.PrintWarning()
+      bb_annotations.PrintSummaryText(
+          '%d new devices detected' % len(new_devs))
+      logging.info('New devices detected:')
+      for d in new_devs:
+        logging.info('  %s', d)
+
+
+def SendEmail(from_address, to_addresses, cc_addresses, subject, msg):
+  msg_body = '\r\n'.join(['From: %s' % from_address,
+                          'To: %s' % ', '.join(to_addresses),
+                          'CC: %s' % ', '.join(cc_addresses),
+                          'Subject: %s' % subject, '', msg])
+  try:
+    server = smtplib.SMTP('localhost')
+    server.sendmail(from_address, to_addresses, msg_body)
+    server.quit()
+  except Exception:
+    logging.exception('Failed to send alert email.')
+
+
+def RestartUsb():
+  if not os.path.isfile('/usr/bin/restart_usb'):
+    logging.error('Could not restart USB: /usr/bin/restart_usb not '
+                  'installed on host (see BUG=305769).')
+    return False
+
+  lsusb_proc = bb_utils.SpawnCmd(['lsusb'], stdout=subprocess.PIPE)
+  lsusb_output, _ = lsusb_proc.communicate()
+  if lsusb_proc.returncode:
+    logging.error('Could not get list of USB ports (i.e. lsusb).')
+    return False
+
+  usb_devices = [re.findall(r'Bus (\d\d\d) Device (\d\d\d)', lsusb_line)[0]
+                 for lsusb_line in lsusb_output.strip().split('\n')]
+
+  all_restarted = True
+  # Walk USB devices from leaves up (i.e. reverse sorted), restarting the
+  # connection. If a parent node (e.g. usb hub) is restarted before the
+  # devices connected to it, the (bus, dev) for the hub can change, making the
+  # output we have wrong. This way we restart the devices before the hub.
+  for (bus, dev) in reversed(sorted(usb_devices)):
+    # Cannot restart root USB connections.
+    if dev != '001':
+      return_code = bb_utils.RunCmd(['/usr/bin/restart_usb', bus, dev])
+      if return_code:
+        logging.error('Error restarting USB device /dev/bus/usb/%s/%s',
+                      bus, dev)
+        all_restarted = False
+      else:
+        logging.info('Restarted USB device /dev/bus/usb/%s/%s', bus, dev)
+
+  return all_restarted
+
+
+def KillAllAdb():
+  def GetAllAdb():
+    for p in psutil.process_iter():
+      try:
+        if 'adb' in p.name:
+          yield p
+      except (psutil.NoSuchProcess, psutil.AccessDenied):
+        pass
+
+  for sig in [signal.SIGTERM, signal.SIGQUIT, signal.SIGKILL]:
+    for p in GetAllAdb():
+      try:
+        logging.info('kill %d %d (%s [%s])', sig, p.pid, p.name,
+                     ' '.join(p.cmdline))
+        p.send_signal(sig)
+      except (psutil.NoSuchProcess, psutil.AccessDenied):
+        pass
+  for p in GetAllAdb():
+    try:
+      logging.error('Unable to kill %d (%s [%s])', p.pid, p.name,
+                    ' '.join(p.cmdline))
+    except (psutil.NoSuchProcess, psutil.AccessDenied):
+      pass
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--out-dir',
+                    help='Directory where the device path is stored',
+                    default=os.path.join(constants.DIR_SOURCE_ROOT, 'out'))
+  parser.add_option('--no-provisioning-check', action='store_true',
+                    help='Will not check if devices are provisioned properly.')
+  parser.add_option('--device-status-dashboard', action='store_true',
+                    help='Output device status data for dashboard.')
+  parser.add_option('--restart-usb', action='store_true',
+                    help='Restart USB ports before running device check.')
+  parser.add_option('--json-output',
+                    help='Output JSON information into a specified file.')
+  parser.add_option('-v', '--verbose', action='count', default=1,
+                    help='Log more information.')
+
+  options, args = parser.parse_args()
+  if args:
+    parser.error('Unknown options %s' % args)
+
+  run_tests_helper.SetLogLevel(options.verbose)
+
+  # Remove the last build's "bad devices" before checking device statuses.
+  device_blacklist.ResetBlacklist()
+
+  try:
+    expected_devices = device_list.GetPersistentDeviceList(
+        os.path.join(options.out_dir, device_list.LAST_DEVICES_FILENAME))
+  except IOError:
+    expected_devices = []
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  device_serials = [d.adb.GetDeviceSerial() for d in devices]
+  # Only restart usb if devices are missing.
+  if set(expected_devices) != set(device_serials):
+    logging.warning('expected_devices: %s', expected_devices)
+    logging.warning('devices: %s', device_serials)
+    KillAllAdb()
+    retries = 5
+    usb_restarted = True
+    if options.restart_usb:
+      if not RestartUsb():
+        usb_restarted = False
+        bb_annotations.PrintWarning()
+        logging.error('USB reset stage failed, '
+                      'wait for any device to come back.')
+    while retries:
+      logging.info('retry adb devices...')
+      time.sleep(1)
+      devices = device_utils.DeviceUtils.HealthyDevices()
+      device_serials = [d.adb.GetDeviceSerial() for d in devices]
+      if set(expected_devices) == set(device_serials):
+        # All devices are online, keep going.
+        break
+      if not usb_restarted and devices:
+        # The USB wasn't restarted, but there's at least one device online.
+        # No point in trying to wait for all devices.
+        break
+      retries -= 1
+
+  types, builds, batteries, errors, devices_ok, json_data = (
+      [], [], [], [], [], [])
+  if devices:
+    types, builds, batteries, errors, devices_ok, json_data = (
+        zip(*[DeviceInfo(dev, options) for dev in devices]))
+
+  # Write device info to file for buildbot info display.
+  if os.path.exists('/home/chrome-bot'):
+    with open('/home/chrome-bot/.adb_device_info', 'w') as f:
+      for device in json_data:
+        try:
+          f.write('%s %s %s %.1fC %s%%\n' % (device['serial'], device['type'],
+              device['build'], float(device['battery']['temperature']) / 10,
+              device['battery']['level']))
+        except Exception:
+          pass
+
+  err_msg = CheckForMissingDevices(options, devices) or []
+
+  unique_types = list(set(types))
+  unique_builds = list(set(builds))
+
+  bb_annotations.PrintMsg('Online devices: %d. Device types %s, builds %s'
+                           % (len(devices), unique_types, unique_builds))
+
+  for j in json_data:
+    logging.info('Device %s (%s)', j.get('serial'), j.get('type'))
+    logging.info('  Build: %s (%s)', j.get('build'), j.get('build_detail'))
+    logging.info('  Current Battery Service state:')
+    for k, v in j.get('battery', {}).iteritems():
+      logging.info('    %s: %s', k, v)
+    logging.info('  IMEI slice: %s', j.get('imei_slice'))
+    logging.info('  WiFi IP: %s', j.get('wifi_ip'))
+
+
+  for dev, dev_errors in zip(devices, errors):
+    if dev_errors:
+      err_msg += ['%s errors:' % str(dev)]
+      err_msg += ['    %s' % error for error in dev_errors]
+
+  if err_msg:
+    bb_annotations.PrintWarning()
+    for e in err_msg:
+      logging.error(e)
+    from_address = 'buildbot@chromium.org'
+    to_addresses = ['chromium-android-device-alerts@google.com']
+    bot_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    slave_name = os.environ.get('BUILDBOT_SLAVENAME')
+    subject = 'Device status check errors on %s, %s.' % (slave_name, bot_name)
+    SendEmail(from_address, to_addresses, [], subject, '\n'.join(err_msg))
+
+  if options.device_status_dashboard:
+    offline_devices = [
+        device_utils.DeviceUtils(a)
+        for a in adb_wrapper.AdbWrapper.Devices(is_ready=False)
+        if a.GetState() == 'offline']
+
+    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OnlineDevices',
+                                              [len(devices)], 'devices')
+    perf_tests_results_helper.PrintPerfResult('BotDevices', 'OfflineDevices',
+                                              [len(offline_devices)], 'devices',
+                                              'unimportant')
+    for dev, battery in zip(devices, batteries):
+      perf_tests_results_helper.PrintPerfResult('DeviceBattery', str(dev),
+                                                [battery], '%',
+                                                'unimportant')
+
+  if options.json_output:
+    with open(options.json_output, 'wb') as f:
+      f.write(json.dumps(json_data, indent=4))
+
+  num_failed_devs = 0
+  for device_ok, device in zip(devices_ok, devices):
+    if not device_ok:
+      logging.warning('Blacklisting %s', str(device))
+      device_blacklist.ExtendBlacklist([str(device)])
+      num_failed_devs += 1
+
+  if not devices:
+    return 1
+
+  if num_failed_devs == len(devices):
+    return 2
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/buildbot/bb_device_steps.py b/build/android/buildbot/bb_device_steps.py
new file mode 100755
index 0000000..8ad42b9
--- /dev/null
+++ b/build/android/buildbot/bb_device_steps.py
@@ -0,0 +1,796 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import glob
+import hashlib
+import json
+import os
+import random
+import re
+import shutil
+import sys
+
+import bb_utils
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+import provision_devices
+from pylib import constants
+from pylib.device import device_utils
+from pylib.gtest import gtest_config
+
+CHROME_SRC_DIR = bb_utils.CHROME_SRC
+DIR_BUILD_ROOT = os.path.dirname(CHROME_SRC_DIR)
+CHROME_OUT_DIR = bb_utils.CHROME_OUT_DIR
+BLINK_SCRIPTS_DIR = 'third_party/WebKit/Tools/Scripts'
+
+SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
+LOGCAT_DIR = os.path.join(bb_utils.CHROME_OUT_DIR, 'logcat')
+GS_URL = 'https://storage.googleapis.com'
+GS_AUTH_URL = 'https://storage.cloud.google.com'
+
+# Describes an instrumentation test suite:
+#   test: Name of test we're running.
+#   apk: apk to be installed.
+#   apk_package: package for the apk to be installed.
+#   test_apk: apk to run tests on.
+#   test_data: data folder in format destination:source.
+#   host_driven_root: The host-driven test root directory.
+#   annotation: Annotation of the tests to include.
+#   exclude_annotation: The annotation of the tests to exclude.
+I_TEST = collections.namedtuple('InstrumentationTest', [
+    'name', 'apk', 'apk_package', 'test_apk', 'test_data', 'isolate_file_path',
+    'host_driven_root', 'annotation', 'exclude_annotation', 'extra_flags'])
+
+
+def SrcPath(*path):
+  return os.path.join(CHROME_SRC_DIR, *path)
+
+
+def I(name, apk, apk_package, test_apk, test_data, isolate_file_path=None,
+      host_driven_root=None, annotation=None, exclude_annotation=None,
+      extra_flags=None):
+  return I_TEST(name, apk, apk_package, test_apk, test_data, isolate_file_path,
+                host_driven_root, annotation, exclude_annotation, extra_flags)
+
+INSTRUMENTATION_TESTS = dict((suite.name, suite) for suite in [
+    I('ContentShell',
+      'ContentShell.apk',
+      'org.chromium.content_shell_apk',
+      'ContentShellTest',
+      'content:content/test/data/android/device_files',
+      isolate_file_path='content/content_shell_test_apk.isolate'),
+    I('ChromeShell',
+      'ChromeShell.apk',
+      'org.chromium.chrome.shell',
+      'ChromeShellTest',
+      'chrome:chrome/test/data/android/device_files',
+      isolate_file_path='chrome/chrome_shell_test_apk.isolate',
+      host_driven_root=constants.CHROME_SHELL_HOST_DRIVEN_DIR),
+    I('AndroidWebView',
+      'AndroidWebView.apk',
+      'org.chromium.android_webview.shell',
+      'AndroidWebViewTest',
+      'webview:android_webview/test/data/device_files',
+      isolate_file_path='android_webview/android_webview_test_apk.isolate'),
+    I('ChromeSyncShell',
+      'ChromeSyncShell.apk',
+      'org.chromium.chrome.browser.sync',
+      'ChromeSyncShellTest',
+      None),
+    ])
+
+InstallablePackage = collections.namedtuple('InstallablePackage', [
+    'name', 'apk', 'apk_package'])
+
+INSTALLABLE_PACKAGES = dict((package.name, package) for package in (
+    [InstallablePackage(i.name, i.apk, i.apk_package)
+     for i in INSTRUMENTATION_TESTS.itervalues()] +
+    [InstallablePackage('ChromeDriverWebViewShell',
+                        'ChromeDriverWebViewShell.apk',
+                        'org.chromium.chromedriver_webview_shell')]))
+
+VALID_TESTS = set([
+    'base_junit_tests',
+    'chromedriver',
+    'chrome_proxy',
+    'components_browsertests',
+    'gfx_unittests',
+    'gl_unittests',
+    'gpu',
+    'python_unittests',
+    'telemetry_unittests',
+    'telemetry_perf_unittests',
+    'ui',
+    'unit',
+    'webkit',
+    'webkit_layout'
+])
+
+RunCmd = bb_utils.RunCmd
+
+
+def _GetRevision(options):
+  """Get the SVN revision number.
+
+  Args:
+    options: options object.
+
+  Returns:
+    The revision number.
+  """
+  revision = options.build_properties.get('got_revision')
+  if not revision:
+    revision = options.build_properties.get('revision', 'testing')
+  return revision
+
+
+def _RunTest(options, cmd, suite):
+  """Run test command with runtest.py.
+
+  Args:
+    options: options object.
+    cmd: the command to run.
+    suite: test name.
+  """
+  property_args = bb_utils.EncodeProperties(options)
+  args = [os.path.join(SLAVE_SCRIPTS_DIR, 'runtest.py')] + property_args
+  args += ['--test-platform', 'android']
+  if options.factory_properties.get('generate_gtest_json'):
+    args.append('--generate-json-file')
+    args += ['-o', 'gtest-results/%s' % suite,
+             '--annotate', 'gtest',
+             '--build-number', str(options.build_properties.get('buildnumber',
+                                                                '')),
+             '--builder-name', options.build_properties.get('buildername', '')]
+  if options.target == 'Release':
+    args += ['--target', 'Release']
+  else:
+    args += ['--target', 'Debug']
+  if options.flakiness_server:
+    args += ['--flakiness-dashboard-server=%s' %
+                options.flakiness_server]
+  args += cmd
+  RunCmd(args, cwd=DIR_BUILD_ROOT)
+
+
+def RunTestSuites(options, suites, suites_options=None):
+  """Manages an invocation of test_runner.py for gtests.
+
+  Args:
+    options: options object.
+    suites: List of suite names to run.
+    suites_options: Command line options dictionary for particular suites.
+                    For example,
+                    {'content_browsertests': ['--num_retries=1', '--release']}
+                    will add the options only to content_browsertests.
+  """
+
+  if not suites_options:
+    suites_options = {}
+
+  args = ['--verbose']
+  if options.target == 'Release':
+    args.append('--release')
+  if options.asan:
+    args.append('--tool=asan')
+  if options.gtest_filter:
+    args.append('--gtest-filter=%s' % options.gtest_filter)
+
+  for suite in suites:
+    bb_annotations.PrintNamedStep(suite)
+    cmd = [suite] + args
+    cmd += suites_options.get(suite, [])
+    if suite == 'content_browsertests' or suite == 'components_browsertests':
+      cmd.append('--num_retries=1')
+    _RunTest(options, cmd, suite)
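+
+
+# RunTestSuites usage sketch (suite names here are illustrative, not a
+# canonical list): add a retry flag for one suite only.
+#
+#   RunTestSuites(options, ['base_unittests', 'content_browsertests'],
+#                 suites_options={'content_browsertests': ['--num_retries=1']})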
+
+
+def RunJunitSuite(suite):
+  bb_annotations.PrintNamedStep(suite)
+  RunCmd(['build/android/test_runner.py', 'junit', '-s', suite])
+
+
+def RunChromeDriverTests(options):
+  """Run all the steps for running chromedriver tests."""
+  bb_annotations.PrintNamedStep('chromedriver_annotation')
+  RunCmd(['chrome/test/chromedriver/run_buildbot_steps.py',
+          '--android-packages=%s,%s,%s,%s' %
+          ('chrome_shell',
+           'chrome_stable',
+           'chrome_beta',
+           'chromedriver_webview_shell'),
+          '--revision=%s' % _GetRevision(options),
+          '--update-log'])
+
+def RunChromeProxyTests(options):
+  """Run the chrome_proxy tests.
+
+  Args:
+    options: options object.
+  """
+  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
+  args = ['--browser', 'android-chrome-shell']
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if devices:
+    args = args + ['--device', devices[0].adb.GetDeviceSerial()]
+  bb_annotations.PrintNamedStep('chrome_proxy')
+  RunCmd(['tools/chrome_proxy/run_tests'] + args)
+
+
+def RunTelemetryTests(options, step_name, run_tests_path):
+  """Runs either telemetry_perf_unittests or telemetry_unittests.
+
+  Args:
+    options: options object.
+    step_name: either 'telemetry_unittests' or 'telemetry_perf_unittests'
+    run_tests_path: path to run_tests script (tools/perf/run_tests for
+                    perf_unittests and tools/telemetry/run_tests for
+                    telemetry_unittests)
+  """
+  InstallApk(options, INSTRUMENTATION_TESTS['ChromeShell'], False)
+  args = ['--browser', 'android-chrome-shell']
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if devices:
+    args = args + ['--device', 'android']
+  bb_annotations.PrintNamedStep(step_name)
+  RunCmd([run_tests_path] + args)
+
+
+def InstallApk(options, test, print_step=False):
+  """Install an apk to all phones.
+
+  Args:
+    options: options object
+    test: An I_TEST namedtuple
+    print_step: Print a buildbot step
+  """
+  if print_step:
+    bb_annotations.PrintNamedStep('install_%s' % test.name.lower())
+
+  args = ['--apk_package', test.apk_package]
+  if options.target == 'Release':
+    args.append('--release')
+  args.append(test.apk)
+
+  RunCmd(['build/android/adb_install_apk.py'] + args, halt_on_failure=True)
+
+
+def RunInstrumentationSuite(options, test, flunk_on_failure=True,
+                            python_only=False, official_build=False):
+  """Manages an invocation of test_runner.py for instrumentation tests.
+
+  Args:
+    options: options object
+    test: An I_TEST namedtuple
+    flunk_on_failure: Flunk the step if tests fail.
+    python_only: Run only host-driven Python tests.
+    official_build: Run official-build tests.
+  """
+  bb_annotations.PrintNamedStep('%s_instrumentation_tests' % test.name.lower())
+
+  if test.apk:
+    InstallApk(options, test)
+  args = ['--test-apk', test.test_apk, '--verbose']
+  if test.test_data:
+    args.extend(['--test_data', test.test_data])
+  if options.target == 'Release':
+    args.append('--release')
+  if options.asan:
+    args.append('--tool=asan')
+  if options.flakiness_server:
+    args.append('--flakiness-dashboard-server=%s' %
+                options.flakiness_server)
+  if options.coverage_bucket:
+    args.append('--coverage-dir=%s' % options.coverage_dir)
+  if test.isolate_file_path:
+    args.append('--isolate-file-path=%s' % test.isolate_file_path)
+  if test.host_driven_root:
+    args.append('--host-driven-root=%s' % test.host_driven_root)
+  if test.annotation:
+    args.extend(['-A', test.annotation])
+  if test.exclude_annotation:
+    args.extend(['-E', test.exclude_annotation])
+  if test.extra_flags:
+    args.extend(test.extra_flags)
+  if python_only:
+    args.append('-p')
+  if official_build:
+    # The option needs to be assigned 'True' as it does not have an action
+    # associated with it.
+    args.append('--official-build')
+
+  RunCmd(['build/android/test_runner.py', 'instrumentation'] + args,
+         flunk_on_failure=flunk_on_failure)
+
+
+def RunWebkitLint():
+  """Lint WebKit's TestExpectation files."""
+  bb_annotations.PrintNamedStep('webkit_lint')
+  RunCmd([SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'lint-test-expectations'))])
+
+
+def RunWebkitLayoutTests(options):
+  """Run layout tests on an actual device."""
+  bb_annotations.PrintNamedStep('webkit_tests')
+  cmd_args = [
+      '--no-show-results',
+      '--no-new-test-results',
+      '--full-results-html',
+      '--clobber-old-results',
+      '--exit-after-n-failures', '5000',
+      '--exit-after-n-crashes-or-timeouts', '100',
+      '--debug-rwt-logging',
+      '--results-directory', '../layout-test-results',
+      '--target', options.target,
+      '--builder-name', options.build_properties.get('buildername', ''),
+      '--build-number', str(options.build_properties.get('buildnumber', '')),
+      '--master-name', 'ChromiumWebkit',  # TODO: Get this from the cfg.
+      '--build-name', options.build_properties.get('buildername', ''),
+      '--platform=android']
+
+  for flag in 'test_results_server', 'driver_name', 'additional_driver_flag':
+    if flag in options.factory_properties:
+      cmd_args.extend(['--%s' % flag.replace('_', '-'),
+                       options.factory_properties.get(flag)])
+
+  for f in options.factory_properties.get('additional_expectations', []):
+    cmd_args.extend(
+        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])
+
+  # TODO(dpranke): Remove this block after
+  # https://codereview.chromium.org/12927002/ lands.
+  for f in options.factory_properties.get('additional_expectations_files', []):
+    cmd_args.extend(
+        ['--additional-expectations=%s' % os.path.join(CHROME_SRC_DIR, *f)])
+
+  exit_code = RunCmd(
+      [SrcPath(os.path.join(BLINK_SCRIPTS_DIR, 'run-webkit-tests'))] + cmd_args)
+  if exit_code == 255: # test_run_results.UNEXPECTED_ERROR_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (crashed or hung)')
+  elif exit_code == 254: # test_run_results.NO_DEVICES_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (no devices found)')
+  elif exit_code == 253: # test_run_results.NO_TESTS_EXIT_STATUS
+    bb_annotations.PrintMsg('?? (no tests found)')
+  else:
+    full_results_path = os.path.join('..', 'layout-test-results',
+                                     'full_results.json')
+    if os.path.exists(full_results_path):
+      full_results = json.load(open(full_results_path))
+      unexpected_passes, unexpected_failures, unexpected_flakes = (
+          _ParseLayoutTestResults(full_results))
+      if unexpected_failures:
+        _PrintDashboardLink('failed', unexpected_failures.keys(),
+                            max_tests=25)
+      elif unexpected_passes:
+        _PrintDashboardLink('unexpected passes', unexpected_passes.keys(),
+                            max_tests=10)
+      if unexpected_flakes:
+        _PrintDashboardLink('unexpected flakes', unexpected_flakes.keys(),
+                            max_tests=10)
+
+      if exit_code == 0 and (unexpected_passes or unexpected_flakes):
+        # If exit_code != 0, RunCmd() will have already printed an error.
+        bb_annotations.PrintWarning()
+    else:
+      bb_annotations.PrintError()
+      bb_annotations.PrintMsg('?? (results missing)')
+
+  if options.factory_properties.get('archive_webkit_results', False):
+    bb_annotations.PrintNamedStep('archive_webkit_results')
+    base = 'https://storage.googleapis.com/chromium-layout-test-archives'
+    builder_name = options.build_properties.get('buildername', '')
+    build_number = str(options.build_properties.get('buildnumber', ''))
+    results_link = '%s/%s/%s/layout-test-results/results.html' % (
+        base, EscapeBuilderName(builder_name), build_number)
+    bb_annotations.PrintLink('results', results_link)
+    bb_annotations.PrintLink('(zip)', '%s/%s/%s/layout-test-results.zip' % (
+        base, EscapeBuilderName(builder_name), build_number))
+    gs_bucket = 'gs://chromium-layout-test-archives'
+    RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'chromium',
+                         'archive_layout_test_results.py'),
+            '--results-dir', '../../layout-test-results',
+            '--build-number', build_number,
+            '--builder-name', builder_name,
+            '--gs-bucket', gs_bucket],
+            cwd=DIR_BUILD_ROOT)
+
+
+def _ParseLayoutTestResults(results):
+  """Extract the failures from the test run."""
+  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
+  tests = _ConvertTrieToFlatPaths(results['tests'])
+  failures = {}
+  flakes = {}
+  passes = {}
+  for (test, result) in tests.iteritems():
+    if result.get('is_unexpected'):
+      actual_results = result['actual'].split()
+      expected_results = result['expected'].split()
+      if len(actual_results) > 1:
+        # We report the first failure type back, even if the second
+        # was more severe.
+        if actual_results[1] in expected_results:
+          flakes[test] = actual_results[0]
+        else:
+          failures[test] = actual_results[0]
+      elif actual_results[0] == 'PASS':
+        passes[test] = result
+      else:
+        failures[test] = actual_results[0]
+
+  return (passes, failures, flakes)
+
+
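+# Illustrative example for _ConvertTrieToFlatPaths (made-up data): a nested
+# results trie such as
+#   {'fast': {'dom': {'a.html': {'actual': 'FAIL', 'expected': 'PASS'}}}}
+# is flattened to
+#   {'fast/dom/a.html': {'actual': 'FAIL', 'expected': 'PASS'}}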
+def _ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flatten the trie of failures into a list."""
+  # Cloned from third_party/WebKit/Tools/Scripts/print-json-test-results
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if len(data) and 'actual' not in data and 'expected' not in data:
+      result.update(_ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
+
+
+def _PrintDashboardLink(link_text, tests, max_tests):
+  """Add a link to the flakiness dashboard in the step annotations."""
+  if len(tests) > max_tests:
+    test_list_text = ' '.join(tests[:max_tests]) + ' and more'
+  else:
+    test_list_text = ' '.join(tests)
+
+  dashboard_base = ('http://test-results.appspot.com'
+                    '/dashboards/flakiness_dashboard.html#'
+                    'master=ChromiumWebkit&tests=')
+
+  bb_annotations.PrintLink('%d %s: %s' %
+                           (len(tests), link_text, test_list_text),
+                           dashboard_base + ','.join(tests))
+
+
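+# EscapeBuilderName example (made-up builder name): 'Android Tests (dbg)' is
+# escaped to 'Android_Tests__dbg_'.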
+def EscapeBuilderName(builder_name):
+  return re.sub('[ ()]', '_', builder_name)
+
+
+def SpawnLogcatMonitor():
+  shutil.rmtree(LOGCAT_DIR, ignore_errors=True)
+  bb_utils.SpawnCmd([
+      os.path.join(CHROME_SRC_DIR, 'build', 'android', 'adb_logcat_monitor.py'),
+      LOGCAT_DIR])
+
+  # Wait for logcat_monitor to pull existing logcat
+  RunCmd(['sleep', '5'])
+
+
+def ProvisionDevices(options):
+  bb_annotations.PrintNamedStep('provision_devices')
+
+  if not bb_utils.TESTING:
+    # Restart adb to work around bugs, sleep to wait for usb discovery.
+    device_utils.RestartServer()
+    RunCmd(['sleep', '1'])
+  provision_cmd = ['build/android/provision_devices.py', '-t', options.target]
+  if options.auto_reconnect:
+    provision_cmd.append('--auto-reconnect')
+  if options.skip_wipe:
+    provision_cmd.append('--skip-wipe')
+  if options.disable_location:
+    provision_cmd.append('--disable-location')
+  RunCmd(provision_cmd, halt_on_failure=True)
+
+
+def DeviceStatusCheck(options):
+  bb_annotations.PrintNamedStep('device_status_check')
+  cmd = ['build/android/buildbot/bb_device_status_check.py']
+  if options.restart_usb:
+    cmd.append('--restart-usb')
+  RunCmd(cmd, halt_on_failure=True)
+
+
+def GetDeviceSetupStepCmds():
+  return [
+      ('device_status_check', DeviceStatusCheck),
+      ('provision_devices', ProvisionDevices),
+  ]
+
+
+def RunUnitTests(options):
+  suites = gtest_config.STABLE_TEST_SUITES
+  if options.asan:
+    suites = [s for s in suites
+              if s not in gtest_config.ASAN_EXCLUDED_TEST_SUITES]
+  RunTestSuites(options, suites)
+
+
+def RunTelemetryUnitTests(options):
+  RunTelemetryTests(options, 'telemetry_unittests', 'tools/telemetry/run_tests')
+
+
+def RunTelemetryPerfUnitTests(options):
+  RunTelemetryTests(options, 'telemetry_perf_unittests', 'tools/perf/run_tests')
+
+
+def RunInstrumentationTests(options):
+  for test in INSTRUMENTATION_TESTS.itervalues():
+    RunInstrumentationSuite(options, test)
+
+
+def RunWebkitTests(options):
+  RunTestSuites(options, ['webkit_unit_tests', 'blink_heap_unittests'])
+  RunWebkitLint()
+
+
+def RunGPUTests(options):
+  revision = _GetRevision(options)
+  builder_name = options.build_properties.get('buildername', 'noname')
+
+  bb_annotations.PrintNamedStep('pixel_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          'pixel', '-v',
+          '--browser',
+          'android-content-shell',
+          '--build-revision',
+          str(revision),
+          '--upload-refimg-to-cloud-storage',
+          '--refimg-cloud-storage-bucket',
+          'chromium-gpu-archive/reference-images',
+          '--os-type',
+          'android',
+          '--test-machine-name',
+          EscapeBuilderName(builder_name)])
+
+  bb_annotations.PrintNamedStep('webgl_conformance_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py', '-v',
+          '--browser=android-content-shell', 'webgl_conformance',
+          '--webgl-conformance-version=1.0.1'])
+
+  bb_annotations.PrintNamedStep('android_webview_webgl_conformance_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py', '-v',
+          '--browser=android-webview-shell', 'webgl_conformance',
+          '--webgl-conformance-version=1.0.1'])
+
+  bb_annotations.PrintNamedStep('gpu_rasterization_tests')
+  RunCmd(['content/test/gpu/run_gpu_test.py',
+          'gpu_rasterization', '-v',
+          '--browser',
+          'android-content-shell',
+          '--build-revision',
+          str(revision),
+          '--test-machine-name',
+          EscapeBuilderName(builder_name)])
+
+
+def RunPythonUnitTests(_options):
+  for suite in constants.PYTHON_UNIT_TEST_SUITES:
+    bb_annotations.PrintNamedStep(suite)
+    RunCmd(['build/android/test_runner.py', 'python', '-s', suite])
+
+
+def GetTestStepCmds():
+  return [
+      ('base_junit_tests',
+          lambda _options: RunJunitSuite('base_junit_tests')),
+      ('chromedriver', RunChromeDriverTests),
+      ('chrome_proxy', RunChromeProxyTests),
+      ('components_browsertests',
+          lambda options: RunTestSuites(options, ['components_browsertests'])),
+      ('gfx_unittests',
+          lambda options: RunTestSuites(options, ['gfx_unittests'])),
+      ('gl_unittests',
+          lambda options: RunTestSuites(options, ['gl_unittests'])),
+      ('gpu', RunGPUTests),
+      ('python_unittests', RunPythonUnitTests),
+      ('telemetry_unittests', RunTelemetryUnitTests),
+      ('telemetry_perf_unittests', RunTelemetryPerfUnitTests),
+      ('ui', RunInstrumentationTests),
+      ('unit', RunUnitTests),
+      ('webkit', RunWebkitTests),
+      ('webkit_layout', RunWebkitLayoutTests),
+  ]
+
+
+def MakeGSPath(options, gs_base_dir):
+  revision = _GetRevision(options)
+  bot_id = options.build_properties.get('buildername', 'testing')
+  randhash = hashlib.sha1(str(random.random())).hexdigest()
+  gs_path = '%s/%s/%s/%s' % (gs_base_dir, bot_id, revision, randhash)
+  # Remove double slashes; they happen with blank revisions and confuse gsutil.
+  gs_path = re.sub('/+', '/', gs_path)
+  return gs_path
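+
+# MakeGSPath example (hypothetical values): with gs_base_dir
+# 'chromium-android/logcat_dumps', builder 'fyi-tests' and revision '12345',
+# the result is 'chromium-android/logcat_dumps/fyi-tests/12345/<sha1 hex>'.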
+
+def UploadHTML(options, gs_base_dir, dir_to_upload, link_text,
+               link_rel_path='index.html', gs_url=GS_URL):
+  """Uploads directory at |dir_to_upload| to Google Storage and output a link.
+
+  Args:
+    options: Command line options.
+    gs_base_dir: The Google Storage base directory (e.g.
+      'chromium-code-coverage/java')
+    dir_to_upload: Absolute path to the directory to be uploaded.
+    link_text: Link text to be displayed on the step.
+    link_rel_path: Link path relative to |dir_to_upload|.
+    gs_url: Google storage URL.
+  """
+  gs_path = MakeGSPath(options, gs_base_dir)
+  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-R', dir_to_upload, 'gs://%s' % gs_path])
+  bb_annotations.PrintLink(link_text,
+                           '%s/%s/%s' % (gs_url, gs_path, link_rel_path))
+
+
+def GenerateJavaCoverageReport(options):
+  """Generates an HTML coverage report using EMMA and uploads it."""
+  bb_annotations.PrintNamedStep('java_coverage_report')
+
+  coverage_html = os.path.join(options.coverage_dir, 'coverage_html')
+  RunCmd(['build/android/generate_emma_html.py',
+          '--coverage-dir', options.coverage_dir,
+          '--metadata-dir', os.path.join(CHROME_OUT_DIR, options.target),
+          '--cleanup',
+          '--output', os.path.join(coverage_html, 'index.html')])
+  return coverage_html
+
+
+def LogcatDump(options):
+  # Print logcat, kill logcat monitor
+  bb_annotations.PrintNamedStep('logcat_dump')
+  logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
+  RunCmd([SrcPath('build', 'android', 'adb_logcat_printer.py'),
+          '--output-path', logcat_file, LOGCAT_DIR])
+  gs_path = MakeGSPath(options, 'chromium-android/logcat_dumps')
+  RunCmd([bb_utils.GSUTIL_PATH, 'cp', '-z', 'txt', logcat_file,
+          'gs://%s' % gs_path])
+  bb_annotations.PrintLink('logcat dump', '%s/%s' % (GS_AUTH_URL, gs_path))
+
+
+def RunStackToolSteps(options):
+  """Run stack tool steps.
+
+  Stack tool is run for logcat dump, optionally for ASAN.
+  """
+  bb_annotations.PrintNamedStep('Run stack tool with logcat dump')
+  logcat_file = os.path.join(CHROME_OUT_DIR, options.target, 'full_log.txt')
+  RunCmd([os.path.join(CHROME_SRC_DIR, 'third_party', 'android_platform',
+          'development', 'scripts', 'stack'),
+          '--more-info', logcat_file])
+  if options.asan_symbolize:
+    bb_annotations.PrintNamedStep('Run stack tool for ASAN')
+    RunCmd([
+        os.path.join(CHROME_SRC_DIR, 'build', 'android', 'asan_symbolize.py'),
+        '-l', logcat_file])
+
+
+def GenerateTestReport(options):
+  bb_annotations.PrintNamedStep('test_report')
+  for report in glob.glob(
+      os.path.join(CHROME_OUT_DIR, options.target, 'test_logs', '*.log')):
+    RunCmd(['cat', report])
+    os.remove(report)
+
+
+def MainTestWrapper(options):
+  try:
+    # Spawn logcat monitor
+    SpawnLogcatMonitor()
+
+    # Run all device setup steps
+    for _, cmd in GetDeviceSetupStepCmds():
+      cmd(options)
+
+    if options.install:
+      for i in options.install:
+        install_obj = INSTALLABLE_PACKAGES[i]
+        InstallApk(options, install_obj, print_step=True)
+
+    if options.test_filter:
+      bb_utils.RunSteps(options.test_filter, GetTestStepCmds(), options)
+
+    if options.coverage_bucket:
+      coverage_html = GenerateJavaCoverageReport(options)
+      UploadHTML(options, '%s/java' % options.coverage_bucket, coverage_html,
+                 'Coverage Report')
+      shutil.rmtree(coverage_html, ignore_errors=True)
+
+    if options.experimental:
+      RunTestSuites(options, gtest_config.EXPERIMENTAL_TEST_SUITES)
+
+  finally:
+    # Run all post test steps
+    LogcatDump(options)
+    if not options.disable_stack_tool:
+      RunStackToolSteps(options)
+    GenerateTestReport(options)
+    # KillHostHeartbeat() has logic to check if heartbeat process is running,
+    # and kills only if it finds the process is running on the host.
+    provision_devices.KillHostHeartbeat()
+    if options.cleanup:
+      shutil.rmtree(os.path.join(CHROME_OUT_DIR, options.target),
+          ignore_errors=True)
+
+
+def GetDeviceStepsOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--experimental', action='store_true',
+                    help='Run experimental tests.')
+  parser.add_option('-f', '--test-filter', metavar='<filter>', default=[],
+                    action='append',
+                    help=('Run a test suite. Test suites: "%s"' %
+                          '", "'.join(VALID_TESTS)))
+  parser.add_option('--gtest-filter',
+                    help='Filter for running a subset of tests of a gtest test')
+  parser.add_option('--asan', action='store_true', help='Run tests with asan.')
+  parser.add_option('--install', metavar='<apk name>', action="append",
+                    help='Install an apk by name')
+  parser.add_option('--no-reboot', action='store_true',
+                    help='Do not reboot devices during provisioning.')
+  parser.add_option('--coverage-bucket',
+                    help=('Bucket name to store coverage results. Coverage is '
+                          'only run if this is set.'))
+  parser.add_option('--restart-usb', action='store_true',
+                    help='Restart usb ports before device status check.')
+  parser.add_option(
+      '--flakiness-server',
+      help=('The flakiness dashboard server to which the results should be '
+            'uploaded.'))
+  parser.add_option(
+      '--auto-reconnect', action='store_true',
+      help='Push script to device which restarts adbd on disconnections.')
+  parser.add_option('--skip-wipe', action='store_true',
+                    help='Do not wipe devices during provisioning.')
+  parser.add_option('--disable-location', action='store_true',
+                    help='Disable location settings.')
+  parser.add_option(
+      '--logcat-dump-output',
+      help='The logcat dump output will be "tee"-ed into this file')
+  # While processing perf bisects, a separate working directory is created in
+  # which builds are produced, so look for the relevant output files under
+  # that directory (/b/build/slave/<slave_name>/build/bisect/src/out).
+  parser.add_option(
+      '--chrome-output-dir',
+      help='Chrome output directory to be used while bisecting.')
+
+  parser.add_option('--disable-stack-tool', action='store_true',
+      help='Do not run stack tool.')
+  parser.add_option('--asan-symbolize', action='store_true',
+      help='Run stack tool for ASAN')
+  parser.add_option('--cleanup', action='store_true',
+      help='Delete out/<target> directory at the end of the run.')
+  return parser
+
+
+def main(argv):
+  parser = GetDeviceStepsOptParser()
+  options, args = parser.parse_args(argv[1:])
+
+  if args:
+    return sys.exit('Unused args %s' % args)
+
+  unknown_tests = set(options.test_filter) - VALID_TESTS
+  if unknown_tests:
+    return sys.exit('Unknown tests %s' % list(unknown_tests))
+
+  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+
+  if options.chrome_output_dir:
+    global CHROME_OUT_DIR
+    global LOGCAT_DIR
+    CHROME_OUT_DIR = options.chrome_output_dir
+    LOGCAT_DIR = os.path.join(CHROME_OUT_DIR, 'logcat')
+
+  if options.coverage_bucket:
+    setattr(options, 'coverage_dir',
+            os.path.join(CHROME_OUT_DIR, options.target, 'coverage'))
+
+  MainTestWrapper(options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_host_steps.py b/build/android/buildbot/bb_host_steps.py
new file mode 100755
index 0000000..1e927fb
--- /dev/null
+++ b/build/android/buildbot/bb_host_steps.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import json
+import sys
+
+import bb_utils
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+SLAVE_SCRIPTS_DIR = os.path.join(bb_utils.BB_BUILD_DIR, 'scripts', 'slave')
+VALID_HOST_TESTS = set(['check_webview_licenses'])
+
+DIR_BUILD_ROOT = os.path.dirname(constants.DIR_SOURCE_ROOT)
+
+# Shorthand for RunCmd, which is used extensively in this file.
+RunCmd = bb_utils.RunCmd
+
+
+def SrcPath(*path):
+  return os.path.join(constants.DIR_SOURCE_ROOT, *path)
+
+
+def CheckWebViewLicenses(_):
+  bb_annotations.PrintNamedStep('check_licenses')
+  RunCmd([SrcPath('android_webview', 'tools', 'webview_licenses.py'), 'scan'],
+         warning_code=1)
+
+
+def RunHooks(build_type):
+  RunCmd([SrcPath('build', 'landmines.py')])
+  build_path = SrcPath('out', build_type)
+  landmine_path = os.path.join(build_path, '.landmines_triggered')
+  clobber_env = os.environ.get('BUILDBOT_CLOBBER')
+  if clobber_env or os.path.isfile(landmine_path):
+    bb_annotations.PrintNamedStep('Clobber')
+    if not clobber_env:
+      print 'Clobbering due to triggered landmines:'
+      with open(landmine_path) as f:
+        print f.read()
+    RunCmd(['rm', '-rf', build_path])
+
+  bb_annotations.PrintNamedStep('runhooks')
+  RunCmd(['gclient', 'runhooks'], halt_on_failure=True)
+
+
+def Compile(options):
+  RunHooks(options.target)
+  cmd = [os.path.join(SLAVE_SCRIPTS_DIR, 'compile.py'),
+         '--build-tool=ninja',
+         '--compiler=goma',
+         '--target=%s' % options.target,
+         '--goma-dir=%s' % bb_utils.GOMA_DIR]
+  bb_annotations.PrintNamedStep('compile')
+  if options.build_targets:
+    build_targets = options.build_targets.split(',')
+    cmd += ['--build-args', ' '.join(build_targets)]
+  RunCmd(cmd, halt_on_failure=True, cwd=DIR_BUILD_ROOT)
+
+
+def ZipBuild(options):
+  bb_annotations.PrintNamedStep('zip_build')
+  RunCmd([
+      os.path.join(SLAVE_SCRIPTS_DIR, 'zip_build.py'),
+      '--src-dir', constants.DIR_SOURCE_ROOT,
+      '--exclude-files', 'lib.target,gen,android_webview,jingle_unittests']
+      + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
+
+
+def ExtractBuild(options):
+  bb_annotations.PrintNamedStep('extract_build')
+  RunCmd([os.path.join(SLAVE_SCRIPTS_DIR, 'extract_build.py')]
+         + bb_utils.EncodeProperties(options), cwd=DIR_BUILD_ROOT)
+
+
+def BisectPerfRegression(options):
+  args = []
+  if options.extra_src:
+    args = ['--extra_src', options.extra_src]
+  RunCmd([SrcPath('tools', 'prepare-bisect-perf-regression.py'),
+          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir)])
+  RunCmd([SrcPath('tools', 'run-bisect-perf-regression.py'),
+          '-w', os.path.join(constants.DIR_SOURCE_ROOT, os.pardir),
+          '--build-properties=%s' % json.dumps(options.build_properties)] +
+          args)
+
+
+def GetHostStepCmds():
+  return [
+      ('compile', Compile),
+      ('extract_build', ExtractBuild),
+      ('check_webview_licenses', CheckWebViewLicenses),
+      ('bisect_perf_regression', BisectPerfRegression),
+      ('zip_build', ZipBuild)
+  ]
+
+
+def GetHostStepsOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--steps', help='Comma separated list of host tests.')
+  parser.add_option('--build-targets', default='',
+                    help='Comma separated list of build targets.')
+  parser.add_option('--experimental', action='store_true',
+                    help='Indicate whether to compile experimental targets.')
+  parser.add_option('--extra_src', default='',
+                    help='Path to extra source file. If this is supplied, '
+                    'bisect script will use it to override default behavior.')
+
+  return parser
+
+
+def main(argv):
+  parser = GetHostStepsOptParser()
+  options, args = parser.parse_args(argv[1:])
+  if args:
+    return sys.exit('Unused args %s' % args)
+
+  setattr(options, 'target', options.factory_properties.get('target', 'Debug'))
+  setattr(options, 'extra_src',
+          options.factory_properties.get('extra_src', ''))
+
+  if options.steps:
+    bb_utils.RunSteps(options.steps.split(','), GetHostStepCmds(), options)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/buildbot/bb_run_bot.py b/build/android/buildbot/bb_run_bot.py
new file mode 100755
index 0000000..0c8a977
--- /dev/null
+++ b/build/android/buildbot/bb_run_bot.py
@@ -0,0 +1,320 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import copy
+import json
+import os
+import pipes
+import re
+import subprocess
+import sys
+
+import bb_utils
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage'
+
+_BotConfig = collections.namedtuple(
+    'BotConfig', ['bot_id', 'host_obj', 'test_obj'])
+
+HostConfig = collections.namedtuple(
+    'HostConfig',
+    ['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch'])
+
+TestConfig = collections.namedtuple('Tests', ['script', 'tests', 'extra_args'])
+
+
+def BotConfig(bot_id, host_object, test_object=None):
+  return _BotConfig(bot_id, host_object, test_object)
+
+
+def DictDiff(d1, d2):
+  diff = []
+  for key in sorted(set(d1.keys() + d2.keys())):
+    if key in d1 and d1[key] != d2.get(key):
+      diff.append('- %s=%s' % (key, pipes.quote(d1[key])))
+    if key in d2 and d2[key] != d1.get(key):
+      diff.append('+ %s=%s' % (key, pipes.quote(d2[key])))
+  return '\n'.join(diff)
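+
+
+# DictDiff example (made-up environments):
+#   DictDiff({'A': '1', 'B': '2'}, {'B': '3'})
+# returns the string:
+#   - A=1
+#   - B=2
+#   + B=3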
+
+
+def GetEnvironment(host_obj, testing, extra_env_vars=None):
+  init_env = dict(os.environ)
+  init_env['GYP_GENERATORS'] = 'ninja'
+  if extra_env_vars:
+    init_env.update(extra_env_vars)
+  envsetup_cmd = '. build/android/envsetup.sh'
+  if testing:
+    # Skip envsetup to avoid presubmit dependence on android deps.
+    print 'Testing mode - skipping "%s"' % envsetup_cmd
+    envsetup_cmd = ':'
+  else:
+    print 'Running %s' % envsetup_cmd
+  proc = subprocess.Popen(['bash', '-exc',
+    envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'],
+    stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+    cwd=bb_utils.CHROME_SRC, env=init_env)
+  json_env, envsetup_output = proc.communicate()
+  if proc.returncode != 0:
+    print >> sys.stderr, 'FATAL Failure in envsetup.'
+    print >> sys.stderr, envsetup_output
+    sys.exit(1)
+  env = json.loads(json_env)
+  env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \
+      ' OS=android fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR
+  if host_obj.target_arch:
+    env['GYP_DEFINES'] += ' target_arch=%s' % host_obj.target_arch
+  extra_gyp = host_obj.extra_gyp_defines
+  if extra_gyp:
+    env['GYP_DEFINES'] += ' %s' % extra_gyp
+    if re.search('(asan|clang)=1', extra_gyp):
+      env.pop('CXX_target', None)
+
+  # Bots check out chrome in /b/build/slave/<name>/build/src.
+  build_internal_android = os.path.abspath(os.path.join(
+      bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal',
+      'scripts', 'slave', 'android'))
+  if os.path.exists(build_internal_android):
+    env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']])
+  return env
+
+
+def GetCommands(options, bot_config):
+  """Get a formatted list of commands.
+
+  Args:
+    options: Options object.
+    bot_config: A BotConfig named tuple.
+
+  Returns:
+    A list of commands, each a list of argv strings.
+  """
+  property_args = bb_utils.EncodeProperties(options)
+  commands = [[bot_config.host_obj.script,
+               '--steps=%s' % ','.join(bot_config.host_obj.host_steps)] +
+              property_args + (bot_config.host_obj.extra_args or [])]
+
+  test_obj = bot_config.test_obj
+  if test_obj:
+    run_test_cmd = [test_obj.script] + property_args
+    for test in test_obj.tests:
+      run_test_cmd.extend(['-f', test])
+    if test_obj.extra_args:
+      run_test_cmd.extend(test_obj.extra_args)
+    commands.append(run_test_cmd)
+  return commands
+
+
+def GetBotStepMap():
+  compile_step = ['compile']
+  chrome_proxy_tests = ['chrome_proxy']
+  python_unittests = ['python_unittests']
+  std_host_tests = ['check_webview_licenses']
+  std_build_steps = ['compile', 'zip_build']
+  std_test_steps = ['extract_build']
+  std_tests = ['ui', 'unit']
+  telemetry_tests = ['telemetry_perf_unittests']
+  telemetry_tests_user_build = ['telemetry_unittests',
+                                'telemetry_perf_unittests']
+  trial_tests = [
+      'base_junit_tests',
+      'components_browsertests',
+      'gfx_unittests',
+      'gl_unittests',
+  ]
+  flakiness_server = (
+      '--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
+  experimental = ['--experimental']
+  bisect_chrome_output_dir = os.path.abspath(
+      os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+                   os.pardir, 'bisect', 'src', 'out'))
+  B = BotConfig
+  H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None:
+       HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
+                  extra_gyp, target_arch))
+  T = (lambda tests, extra_args=None:
+       TestConfig('build/android/buildbot/bb_device_steps.py', tests,
+                  extra_args))
+
+  bot_configs = [
+      # Main builders
+      B('main-builder-dbg', H(std_build_steps + std_host_tests)),
+      B('main-builder-rel', H(std_build_steps)),
+      B('main-clang-builder',
+        H(compile_step, extra_gyp='clang=1 component=shared_library')),
+      B('main-clobber', H(compile_step)),
+      B('main-tests-rel', H(std_test_steps),
+        T(std_tests + telemetry_tests + chrome_proxy_tests,
+          ['--cleanup', flakiness_server])),
+      B('main-tests', H(std_test_steps),
+        T(std_tests, ['--cleanup', flakiness_server])),
+
+      # Other waterfalls
+      B('asan-builder-tests', H(compile_step,
+                                extra_gyp='asan=1 component=shared_library'),
+        T(std_tests, ['--asan', '--asan-symbolize'])),
+      B('blink-try-builder', H(compile_step)),
+      B('chromedriver-fyi-tests-dbg', H(std_test_steps),
+        T(['chromedriver'],
+          ['--install=ChromeShell', '--install=ChromeDriverWebViewShell',
+           '--skip-wipe', '--disable-location', '--cleanup'])),
+      B('fyi-x86-builder-dbg',
+        H(compile_step + std_host_tests, experimental, target_arch='ia32')),
+      B('fyi-builder-dbg',
+        H(std_build_steps + std_host_tests, experimental,
+          extra_gyp='emma_coverage=1')),
+      B('x86-builder-dbg',
+        H(compile_step + std_host_tests, target_arch='ia32')),
+      B('fyi-builder-rel', H(std_build_steps, experimental)),
+      B('fyi-tests', H(std_test_steps),
+        T(std_tests + python_unittests,
+                      ['--experimental', flakiness_server,
+                      '--coverage-bucket', CHROMIUM_COVERAGE_BUCKET,
+                      '--cleanup'])),
+      B('user-build-fyi-tests-dbg', H(std_test_steps),
+        T(sorted(telemetry_tests_user_build + trial_tests))),
+      B('fyi-component-builder-tests-dbg',
+        H(compile_step, extra_gyp='component=shared_library'),
+        T(std_tests, ['--experimental', flakiness_server])),
+      B('gpu-builder-tests-dbg',
+        H(compile_step),
+        T(['gpu'], ['--install=ContentShell'])),
+      # Pass empty T([]) so that logcat monitor and device status check are run.
+      B('perf-bisect-builder-tests-dbg',
+        H(['bisect_perf_regression']),
+        T([], ['--chrome-output-dir', bisect_chrome_output_dir])),
+      B('perf-tests-rel', H(std_test_steps),
+        T([], ['--install=ChromeShell', '--cleanup'])),
+      B('webkit-latest-webkit-tests', H(std_test_steps),
+        T(['webkit_layout', 'webkit'], ['--cleanup', '--auto-reconnect'])),
+      B('webkit-latest-contentshell', H(compile_step),
+        T(['webkit_layout'], ['--auto-reconnect'])),
+      B('builder-unit-tests', H(compile_step), T(['unit'])),
+
+      # Generic builder config (for substring match).
+      B('builder', H(std_build_steps)),
+  ]
+
+  bot_map = dict((config.bot_id, config) for config in bot_configs)
+
+  # These bots have identical configuration to ones defined earlier.
+  copy_map = [
+      ('lkgr-clobber', 'main-clobber'),
+      ('try-builder-dbg', 'main-builder-dbg'),
+      ('try-builder-rel', 'main-builder-rel'),
+      ('try-clang-builder', 'main-clang-builder'),
+      ('try-fyi-builder-dbg', 'fyi-builder-dbg'),
+      ('try-x86-builder-dbg', 'x86-builder-dbg'),
+      ('try-tests-rel', 'main-tests-rel'),
+      ('try-tests', 'main-tests'),
+      ('try-fyi-tests', 'fyi-tests'),
+      ('webkit-latest-tests', 'main-tests'),
+  ]
+  for to_id, from_id in copy_map:
+    assert to_id not in bot_map
+    # pylint: disable=W0212
+    bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id)
+
+    # Trybots do not upload to the flakiness dashboard. They should otherwise
+    # be identical in configuration to their trunk-building counterparts.
+    test_obj = bot_map[to_id].test_obj
+    if to_id.startswith('try') and test_obj:
+      extra_args = test_obj.extra_args
+      if extra_args and flakiness_server in extra_args:
+        extra_args.remove(flakiness_server)
+  return bot_map
+
+
+# Return an object from the map, looking first for an exact id match.
+# If this fails, look for an id which is a substring of the specified id.
+# Choose the longest of all substring matches.
+# pylint: disable=W0622
+def GetBestMatch(id_map, id):
+  config = id_map.get(id)
+  if not config:
+    substring_matches = [x for x in id_map.iterkeys() if x in id]
+    if substring_matches:
+      max_id = max(substring_matches, key=len)
+      print 'Using config from id="%s" (substring match).' % max_id
+      config = id_map[max_id]
+  return config
+
+
+def GetRunBotOptParser():
+  parser = bb_utils.GetParser()
+  parser.add_option('--bot-id', help='Specify bot id directly.')
+  parser.add_option('--testing', action='store_true',
+                    help='For testing: print, but do not run commands')
+
+  return parser
+
+
+def GetBotConfig(options, bot_step_map):
+  bot_id = options.bot_id or options.factory_properties.get('android_bot_id')
+  if not bot_id:
+    print >> sys.stderr, (
+        'A bot id must be specified through option or factory_props.')
+    return
+
+  bot_config = GetBestMatch(bot_step_map, bot_id)
+  if not bot_config:
+    print 'Error: config for id="%s" cannot be inferred.' % bot_id
+  return bot_config
+
+
+def RunBotCommands(options, commands, env):
+  print 'Environment changes:'
+  print DictDiff(dict(os.environ), env)
+
+  for command in commands:
+    print bb_utils.CommandToString(command)
+    sys.stdout.flush()
+    if options.testing:
+      env['BUILDBOT_TESTING'] = '1'
+    return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env)
+    if return_code != 0:
+      return return_code
+
+
+def main(argv):
+  proc = subprocess.Popen(
+      ['/bin/hostname', '-f'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+  hostname_stdout, hostname_stderr = proc.communicate()
+  if proc.returncode == 0:
+    print 'Running on: ' + hostname_stdout
+  else:
+    print >> sys.stderr, 'WARNING: failed to run hostname'
+    print >> sys.stderr, hostname_stdout
+    print >> sys.stderr, hostname_stderr
+    sys.exit(1)
+
+  parser = GetRunBotOptParser()
+  options, args = parser.parse_args(argv[1:])
+  if args:
+    parser.error('Unused args: %s' % args)
+
+  bot_config = GetBotConfig(options, GetBotStepMap())
+  if not bot_config:
+    sys.exit(1)
+
+  print 'Using config:', bot_config
+
+  commands = GetCommands(options, bot_config)
+  for command in commands:
+    print 'Will run: ', bb_utils.CommandToString(command)
+  print
+
+  env = GetEnvironment(bot_config.host_obj, options.testing)
+  return RunBotCommands(options, commands, env)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
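GetBestMatch above falls back from an exact bot id to the longest configured id
that is a substring of the requested one; the generic 'builder' entry exists for
exactly that fallback. A minimal sketch of the selection logic, using
hypothetical bot ids (not part of the patch):

    # Prefer an exact key; otherwise take the longest key contained in the id.
    configs = {'builder': 'generic', 'main-builder-dbg': 'debug builder'}
    requested_id = 'main-builder-dbg-recipe'  # hypothetical id
    config = configs.get(requested_id)
    if not config:
      matches = [k for k in configs if k in requested_id]
      if matches:
        config = configs[max(matches, key=len)]
    print(config)  # -> 'debug builder'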
diff --git a/build/android/buildbot/bb_utils.py b/build/android/buildbot/bb_utils.py
new file mode 100644
index 0000000..3c16cc2
--- /dev/null
+++ b/build/android/buildbot/bb_utils.py
@@ -0,0 +1,100 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+import bb_annotations
+
+sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
+from pylib import constants
+
+
+TESTING = 'BUILDBOT_TESTING' in os.environ
+
+BB_BUILD_DIR = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
+    os.pardir, os.pardir, os.pardir, os.pardir))
+
+CHROME_SRC = os.path.abspath(
+    os.path.join(os.path.dirname(__file__), '..', '..', '..'))
+
+# TODO: Figure out how to merge this with pylib.cmd_helper.OutDirectory().
+CHROME_OUT_DIR = os.path.join(CHROME_SRC, 'out')
+
+GOMA_DIR = os.environ.get('GOMA_DIR', os.path.join(BB_BUILD_DIR, 'goma'))
+
+GSUTIL_PATH = os.path.join(BB_BUILD_DIR, 'third_party', 'gsutil', 'gsutil')
+
+def CommandToString(command):
+  """Returns quoted command that can be run in bash shell."""
+  return ' '.join(map(pipes.quote, command))
+
+
+def SpawnCmd(command, stdout=None, cwd=CHROME_SRC):
+  """Spawn a process without waiting for termination."""
+  print '>', CommandToString(command)
+  sys.stdout.flush()
+  if TESTING:
+    class MockPopen(object):
+      @staticmethod
+      def wait():
+        return 0
+      @staticmethod
+      def communicate():
+        return '', ''
+    return MockPopen()
+  return subprocess.Popen(command, cwd=cwd, stdout=stdout)
+
+
+def RunCmd(command, flunk_on_failure=True, halt_on_failure=False,
+           warning_code=constants.WARNING_EXIT_CODE, stdout=None,
+           cwd=CHROME_SRC):
+  """Run a command relative to the chrome source root."""
+  code = SpawnCmd(command, stdout, cwd).wait()
+  print '<', CommandToString(command)
+  if code != 0:
+    print 'ERROR: process exited with code %d' % code
+    if code != warning_code and flunk_on_failure:
+      bb_annotations.PrintError()
+    else:
+      bb_annotations.PrintWarning()
+    # Allow steps to have both halting (i.e. 1) and non-halting exit codes.
+    if code != warning_code and halt_on_failure:
+      print 'FATAL %d != %d' % (code, warning_code)
+      sys.exit(1)
+  return code
+
+
+def GetParser():
+  def ConvertJson(option, _, value, parser):
+    setattr(parser.values, option.dest, json.loads(value))
+  parser = optparse.OptionParser()
+  parser.add_option('--build-properties', action='callback',
+                    callback=ConvertJson, type='string', default={},
+                    help='build properties in JSON format')
+  parser.add_option('--factory-properties', action='callback',
+                    callback=ConvertJson, type='string', default={},
+                    help='factory properties in JSON format')
+  return parser
+
+
+def EncodeProperties(options):
+  return ['--factory-properties=%s' % json.dumps(options.factory_properties),
+          '--build-properties=%s' % json.dumps(options.build_properties)]
+
+
+def RunSteps(steps, step_cmds, options):
+  unknown_steps = set(steps) - set(step for step, _ in step_cmds)
+  if unknown_steps:
+    print >> sys.stderr, 'FATAL: Unknown steps %s' % list(unknown_steps)
+    sys.exit(1)
+
+  for step, cmd in step_cmds:
+    if step in steps:
+      cmd(options)
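RunSteps above is the hook that step scripts such as bb_host_steps.py and
bb_device_steps.py (referenced from bb_run_bot.py) are expected to use: they
pair step names like 'compile' and 'zip_build' with callables, and --steps
selects which ones actually run. A hypothetical, much-simplified caller, for
illustration only:

    # Hypothetical host-steps script; the commands are placeholders.
    import bb_utils

    def Compile(options):
      bb_utils.RunCmd(['ninja', '-C', 'out/Release'], halt_on_failure=True)

    def ZipBuild(options):
      bb_utils.RunCmd(['echo', 'zip_build placeholder'])

    def main(argv):
      parser = bb_utils.GetParser()
      parser.add_option('--steps', help='Comma separated list of steps to run.')
      options, _ = parser.parse_args(argv[1:])
      steps = options.steps.split(',') if options.steps else []
      bb_utils.RunSteps(steps, [('compile', Compile), ('zip_build', ZipBuild)],
                        options)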
diff --git a/build/android/buildbot/env_to_json.py b/build/android/buildbot/env_to_json.py
new file mode 100755
index 0000000..f9a7a44
--- /dev/null
+++ b/build/android/buildbot/env_to_json.py
@@ -0,0 +1,11 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Encode current environment into json.
+
+import json
+import os
+
+print json.dumps(dict(os.environ))
diff --git a/build/android/buildbot/tests/bb_run_bot_test.py b/build/android/buildbot/tests/bb_run_bot_test.py
new file mode 100755
index 0000000..810c60d
--- /dev/null
+++ b/build/android/buildbot/tests/bb_run_bot_test.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import subprocess
+import sys
+
+BUILDBOT_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILDBOT_DIR)
+import bb_run_bot
+
+def RunBotProcesses(bot_process_map):
+  code = 0
+  for bot, proc in bot_process_map:
+    _, err = proc.communicate()
+    code |= proc.returncode
+    if proc.returncode != 0:
+      print 'Error running the bot script with id="%s"' % bot, err
+
+  return code
+
+
+def main():
+  procs = [
+      (bot, subprocess.Popen(
+          [os.path.join(BUILDBOT_DIR, 'bb_run_bot.py'), '--bot-id', bot,
+          '--testing'], stdout=subprocess.PIPE, stderr=subprocess.PIPE))
+      for bot in bb_run_bot.GetBotStepMap()]
+  return RunBotProcesses(procs)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/chrome_with_libs.gyp b/build/android/chrome_with_libs.gyp
new file mode 100644
index 0000000..690be88
--- /dev/null
+++ b/build/android/chrome_with_libs.gyp
@@ -0,0 +1,82 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to add more loadable libs into Chrome_apk.
+#
+# This is useful when building Chrome_apk with loadable modules that are not
+# included in Chrome_apk.
+# As an example, when building Chrome_apk with
+# libpeer_target_type=loadable_module,
+# the libpeerconnection.so is not included in Chrome_apk. To add the missing
+# lib, follow the steps below:
+# - Run gyp:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
+# - Build chrome_with_libs:
+#     ninja (or make) chrome_with_libs
+#
+# This tool also allows replacing the loadable module with a new one via the
+# following steps:
+# - Build Chrome_apk with the gyp define:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" build/gyp_chromium
+#     ninja (or make) Chrome_apk
+# - Replace libpeerconnection.so with a new one:
+#     cp the_new_one path/to/libpeerconnection.so
+# - Run gyp:
+#     GYP_DEFINES="$GYP_DEFINES libpeer_target_type=loadable_module" CHROMIUM_GYP_FILE="build/android/chrome_with_libs.gyp" build/gyp_chromium
+# - Build chrome_with_libs:
+#     ninja (or make) chrome_with_libs
+{
+  'targets': [
+    {
+      # An "All" target is required for a top-level gyp-file.
+      'target_name': 'All',
+      'type': 'none',
+      'dependencies': [
+        'chrome_with_libs',
+      ],
+    },
+    {
+      'target_name': 'chrome_with_libs',
+      'type': 'none',
+      'variables': {
+        'intermediate_dir': '<(PRODUCT_DIR)/prebuilt_libs/',
+        'chrome_unsigned_path': '<(PRODUCT_DIR)/chrome_apk/Chrome-unsigned.apk',
+        'chrome_with_libs_unsigned': '<(intermediate_dir)/Chrome-with-libs-unsigned.apk',
+        'chrome_with_libs_final': '<(PRODUCT_DIR)/apks/Chrome-with-libs.apk',
+      },
+      'dependencies': [
+        '<(DEPTH)/clank/native/framework/clank.gyp:chrome_apk'
+      ],
+      'copies': [
+        {
+          'destination': '<(intermediate_dir)/lib/<(android_app_abi)',
+          'files': [
+            '<(PRODUCT_DIR)/libpeerconnection.so',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'action_name': 'put_libs_in_chrome',
+          'variables': {
+            'inputs': [
+              '<(intermediate_dir)/lib/<(android_app_abi)/libpeerconnection.so',
+            ],
+            'input_apk_path': '<(chrome_unsigned_path)',
+            'output_apk_path': '<(chrome_with_libs_unsigned)',
+            'libraries_top_dir%': '<(intermediate_dir)',
+          },
+          'includes': [ 'create_standalone_apk_action.gypi' ],
+        },
+        {
+          'action_name': 'finalize_chrome_with_libs',
+          'variables': {
+            'input_apk_path': '<(chrome_with_libs_unsigned)',
+            'output_apk_path': '<(chrome_with_libs_final)',
+          },
+          'includes': [ 'finalize_apk_action.gypi'],
+        },
+      ],
+    }],
+}
diff --git a/build/android/create_standalone_apk_action.gypi b/build/android/create_standalone_apk_action.gypi
new file mode 100644
index 0000000..d17af7c
--- /dev/null
+++ b/build/android/create_standalone_apk_action.gypi
@@ -0,0 +1,41 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# combines a directory of shared libraries and an incomplete APK into a
+# standalone APK.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some descriptive action name',
+#    'variables': {
+#      'inputs': [ 'input_path1', 'input_path2' ],
+#      'input_apk_path': '<(unsigned_apk_path)',
+#      'output_apk_path': '<(unsigned_standalone_apk_path)',
+#      'libraries_top_dir': '<(libraries_top_dir)',
+#    },
+#    'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ],
+#  },
+
+{
+  'message': 'Creating standalone APK: <(output_apk_path)',
+  'variables': {
+    'inputs': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+    '<(input_apk_path)',
+    '>@(inputs)',
+  ],
+  'outputs': [
+    '<(output_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+    '--libraries-top-dir=<(libraries_top_dir)',
+    '--input-apk-path=<(input_apk_path)',
+    '--output-apk-path=<(output_apk_path)',
+  ],
+}
diff --git a/build/android/developer_recommended_flags.gypi b/build/android/developer_recommended_flags.gypi
new file mode 100644
index 0000000..79c201de
--- /dev/null
+++ b/build/android/developer_recommended_flags.gypi
@@ -0,0 +1,61 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is the set of recommended gyp variable settings for Chrome for Android development.
+#
+# These can be used by copying this file to $CHROME_SRC/chrome/supplement.gypi.
+#
+# Even better, create chrome/supplement.gypi containing the following:
+#   {
+#     'includes': [ '../build/android/developer_recommended_flags.gypi' ]
+#   }
+# and you'll get new settings automatically.
+# When using this method, you can override individual settings by setting them unconditionally (with
+# no %) in chrome/supplement.gypi.
+# I.e. to disable gyp_managed_install but use everything else:
+#   {
+#     'variables': {
+#       'gyp_managed_install': 0,
+#     },
+#     'includes': [ '../build/android/developer_recommended_flags.gypi' ]
+#   }
+
+{
+  'variables': {
+    'variables': {
+      # Set component to 'shared_library' to enable the component build. This builds native code as
+      # many small shared libraries instead of one monolithic library. This slightly reduces the time
+      # required for incremental builds.
+      'component%': 'shared_library',
+    },
+    'component%': '<(component)',
+
+    # When gyp_managed_install is set to 1, building an APK will install that APK on the connected
+    # device(/emulator). To install on multiple devices (or onto a new device), build the APK once
+    # with each device attached. This greatly reduces the time required for incremental builds.
+    #
+    # This comes with some caveats:
+    #   Only works with a single device connected (it will print a warning if
+    #     zero or multiple devices are attached).
+    #   Device must be flashed with a user-debug unsigned Android build.
+    #   Some actions are always run (i.e. ninja will never say "no work to do").
+    'gyp_managed_install%': 1,
+
+    # With gyp_managed_install, we do not necessarily need a standalone APK.
+    # When create_standalone_apk is set to 1, we will build a standalone APK
+    # anyway. For even faster builds, you can set create_standalone_apk to 0.
+    'create_standalone_apk%': 1,
+
+    # Set clang to 1 to use the clang compiler. Clang has much (much, much) better warning/error
+    # messages than gcc.
+    # TODO(cjhopman): Enable this when http://crbug.com/156420 is addressed. Until then, users can
+    # set clang to 1, but Android stack traces will sometimes be incomplete.
+    #'clang%': 1,
+
+    # Set fastbuild to 1 to build with less debugging information. This can greatly decrease linking
+    # time. The downside is that stack traces will be missing useful information (like line
+    # numbers).
+    #'fastbuild%': 1,
+  },
+}
diff --git a/build/android/dex_action.gypi b/build/android/dex_action.gypi
new file mode 100644
index 0000000..56d386f
--- /dev/null
+++ b/build/android/dex_action.gypi
@@ -0,0 +1,60 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that dexes
+# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME ==
+# "Release", then it will dex the proguard_enabled_input_path instead of the
+# normal dex_input_paths/dex_generated_input_paths.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some name for the action',
+#    'variables': {
+#      'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ],
+#      'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ],
+#
+#      # For targets that use proguard:
+#      'proguard_enabled': 'true',
+#      'proguard_enabled_input_path': 'path to dex when using proguard',
+#    },
+#    'includes': [ 'relative/path/to/dex_action.gypi' ],
+#  },
+#
+
+{
+  'message': 'Creating dex file: <(output_path)',
+  'variables': {
+    'dex_input_paths': [],
+    'dex_generated_input_dirs': [],
+    'proguard_enabled%': 'false',
+    'proguard_enabled_input_path%': '',
+    'dex_no_locals%': 0,
+    'dex_additional_options': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/dex.py',
+    '>@(dex_input_paths)',
+  ],
+  'outputs': [
+    '<(output_path)',
+    '<(output_path).inputs',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/dex.py',
+    '--dex-path=<(output_path)',
+    '--android-sdk-tools=<(android_sdk_tools)',
+    '--output-directory=<(PRODUCT_DIR)',
+    '--configuration-name=<(CONFIGURATION_NAME)',
+    '--proguard-enabled=>(proguard_enabled)',
+    '--proguard-enabled-input-path=<(proguard_enabled_input_path)',
+    '--no-locals=>(dex_no_locals)',
+    '>@(dex_additional_options)',
+    '>@(dex_input_paths)',
+    '>@(dex_generated_input_dirs)',
+  ]
+}
diff --git a/build/android/disable_lto.gypi b/build/android/disable_lto.gypi
new file mode 100644
index 0000000..e379cfd
--- /dev/null
+++ b/build/android/disable_lto.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to disable LTO on a target.
+
+{
+  'target_conditions': [
+    ['_toolset=="target"', {
+      'conditions': [
+        ['OS=="android" and (use_lto==1 or use_lto_o2==1)', {
+          'cflags!': [
+            '-flto',
+            '-ffat-lto-objects',
+          ],
+        }],
+      ],
+    }],
+  ],
+}
diff --git a/build/android/empty/src/.keep b/build/android/empty/src/.keep
new file mode 100644
index 0000000..0f710b6
--- /dev/null
+++ b/build/android/empty/src/.keep
@@ -0,0 +1,6 @@
+This is a file that needs to live here until http://crbug.com/158155 has
+been fixed.
+
+The ant build system requires that a src folder is always present, and for
+some of our targets that is not the case. Giving it an empty src-folder works
+nicely though.
diff --git a/build/android/empty_proguard.flags b/build/android/empty_proguard.flags
new file mode 100644
index 0000000..53484fe
--- /dev/null
+++ b/build/android/empty_proguard.flags
@@ -0,0 +1 @@
+# Used for apk targets that do not need proguard. See build/java_apk.gypi.
diff --git a/build/android/enable_asserts.py b/build/android/enable_asserts.py
new file mode 100755
index 0000000..8fb7dca
--- /dev/null
+++ b/build/android/enable_asserts.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Enables dalvik vm asserts in the android device."""
+
+import argparse
+import sys
+
+from pylib.device import device_utils
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  set_asserts_group = parser.add_mutually_exclusive_group(required=True)
+  set_asserts_group.add_argument(
+      '--enable_asserts', dest='set_asserts', action='store_true',
+      help='Sets the dalvik.vm.enableassertions property to "all"')
+  set_asserts_group.add_argument(
+      '--disable_asserts', dest='set_asserts', action='store_false',
+      help='Removes the dalvik.vm.enableassertions property')
+
+  args = parser.parse_args()
+
+  # TODO(jbudorick): Accept optional serial number and run only for the
+  # specified device when present.
+  devices = device_utils.DeviceUtils.parallel()
+
+  def set_java_asserts_and_restart(device):
+    if device.SetJavaAsserts(args.set_asserts):
+      device.RunShellCommand('stop')
+      device.RunShellCommand('start')
+
+  devices.pMap(set_java_asserts_and_restart)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/envsetup.sh b/build/android/envsetup.sh
new file mode 100755
index 0000000..0545330
--- /dev/null
+++ b/build/android/envsetup.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Sets up environment for building Chromium on Android.
+
+# Make sure we're being sourced (possibly by another script). Check for bash
+# since zsh sets $0 when sourcing.
+if [[ -n "$BASH_VERSION" && "${BASH_SOURCE:-$0}" == "$0" ]]; then
+  echo "ERROR: envsetup must be sourced."
+  exit 1
+fi
+
+# This only exists to set local variables. Don't call this manually.
+android_envsetup_main() {
+  local SCRIPT_PATH="$1"
+  local SCRIPT_DIR="$(dirname "$SCRIPT_PATH")"
+
+  local CURRENT_DIR="$(readlink -f "${SCRIPT_DIR}/../../")"
+  if [[ -z "${CHROME_SRC}" ]]; then
+    # If $CHROME_SRC was not set, assume current directory is CHROME_SRC.
+    local CHROME_SRC="${CURRENT_DIR}"
+  fi
+
+  if [[ "${CURRENT_DIR/"${CHROME_SRC}"/}" == "${CURRENT_DIR}" ]]; then
+    # If the current directory is not in $CHROME_SRC, CHROME_SRC might be set
+    # for another source tree. If $CHROME_SRC was set correctly and we are in
+    # the correct directory, "${CURRENT_DIR/"${CHROME_SRC}"/}" will be "".
+    # Otherwise, it will equal "${CURRENT_DIR}".
+    echo "Warning: Current directory is out of CHROME_SRC, it may not be \
+  the one you want."
+    echo "${CHROME_SRC}"
+  fi
+
+  # Allow the caller to override a few environment variables. If any of them is
+  # unset, we default to a sane value that's known to work. This allows for
+  # experimentation with a custom SDK.
+  if [[ -z "${ANDROID_SDK_ROOT}" || ! -d "${ANDROID_SDK_ROOT}" ]]; then
+    local ANDROID_SDK_ROOT="${CHROME_SRC}/third_party/android_tools/sdk/"
+  fi
+
+  # Add Android SDK tools to system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/platform-tools
+
+  # Add Android utility tools to the system path.
+  export PATH=$PATH:${ANDROID_SDK_ROOT}/tools/
+
+  # Add Chromium Android development scripts to system path.
+  # Must be after CHROME_SRC is set.
+  export PATH=$PATH:${CHROME_SRC}/build/android
+
+  export ENVSETUP_GYP_CHROME_SRC=${CHROME_SRC}  # TODO(thakis): Remove.
+}
+# In zsh, $0 is the name of the file being sourced.
+android_envsetup_main "${BASH_SOURCE:-$0}"
+unset -f android_envsetup_main
+
+android_gyp() {
+  echo "Please call build/gyp_chromium instead. android_gyp is going away."
+  "${ENVSETUP_GYP_CHROME_SRC}/build/gyp_chromium" --depth="${ENVSETUP_GYP_CHROME_SRC}" --check "$@"
+}
diff --git a/build/android/finalize_apk_action.gypi b/build/android/finalize_apk_action.gypi
new file mode 100644
index 0000000..644f9e8
--- /dev/null
+++ b/build/android/finalize_apk_action.gypi
@@ -0,0 +1,49 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns an APK.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'some descriptive action name',
+#    'variables': {
+#      'input_apk_path': 'relative/path/to/input.apk',
+#      'output_apk_path': 'relative/path/to/output.apk',
+#    },
+#    'includes': [ '../../build/android/finalize_apk_action.gypi' ],
+#  },
+#
+
+{
+  'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)',
+  'variables': {
+    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+    'keystore_name%': 'chromiumdebugkey',
+    'keystore_password%': 'chromium',
+    'zipalign_path%': '<(android_sdk_tools)/zipalign',
+    'rezip_apk_jar_path%': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar',
+    'load_library_from_zip%': 0,
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(keystore_path)',
+    '<(input_apk_path)',
+  ],
+  'outputs': [
+    '<(output_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '--zipalign-path=<(zipalign_path)',
+    '--unsigned-apk-path=<(input_apk_path)',
+    '--final-apk-path=<(output_apk_path)',
+    '--key-path=<(keystore_path)',
+    '--key-name=<(keystore_name)',
+    '--key-passwd=<(keystore_password)',
+    '--load-library-from-zip=<(load_library_from_zip)',
+    '--rezip-apk-jar-path=<(rezip_apk_jar_path)',
+  ],
+}
diff --git a/build/android/finalize_splits_action.gypi b/build/android/finalize_splits_action.gypi
new file mode 100644
index 0000000..daa7f83
--- /dev/null
+++ b/build/android/finalize_splits_action.gypi
@@ -0,0 +1,76 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns split APKs.
+#
+# Required variables:
+#  apk_name - Base name of the apk.
+# Optional variables:
+#  density_splits - Whether to process density splits
+#  language_splits - List of languages to create splits for
+
+{
+  'variables': {
+    'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+    'keystore_name%': 'chromiumdebugkey',
+    'keystore_password%': 'chromium',
+    'zipalign_path%': '<(android_sdk_tools)/zipalign',
+    'density_splits%': 0,
+    'language_splits%': [],
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+    'base_output_path': '<(PRODUCT_DIR)/apks/<(apk_name)',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/finalize_splits.py',
+    '<(DEPTH)/build/android/gyp/finalize_apk.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(keystore_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/finalize_splits.py',
+    '--resource-packaged-apk-path=<(resource_packaged_apk_path)',
+    '--base-output-path=<(base_output_path)',
+    '--zipalign-path=<(zipalign_path)',
+    '--key-path=<(keystore_path)',
+    '--key-name=<(keystore_name)',
+    '--key-passwd=<(keystore_password)',
+  ],
+  'conditions': [
+    ['density_splits == 1', {
+      'message': 'Signing/aligning <(_target_name) density splits',
+      'inputs': [
+        '<(resource_packaged_apk_path)_hdpi',
+        '<(resource_packaged_apk_path)_xhdpi',
+        '<(resource_packaged_apk_path)_xxhdpi',
+        '<(resource_packaged_apk_path)_xxxhdpi',
+        '<(resource_packaged_apk_path)_tvdpi',
+      ],
+      'outputs': [
+        '<(base_output_path)-density-hdpi.apk',
+        '<(base_output_path)-density-xhdpi.apk',
+        '<(base_output_path)-density-xxhdpi.apk',
+        '<(base_output_path)-density-xxxhdpi.apk',
+        '<(base_output_path)-density-tvdpi.apk',
+      ],
+      'action': [
+        '--densities=hdpi,xhdpi,xxhdpi,xxxhdpi,tvdpi',
+      ],
+    }],
+    ['language_splits != []', {
+      'message': 'Signing/aligning <(_target_name) language splits',
+      'inputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(resource_packaged_apk_path)_ZZLOCALE' <(language_splits))",
+      ],
+      'outputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(base_output_path)-lang-ZZLOCALE.apk' <(language_splits))",
+      ],
+      'action': [
+        '--languages=<(language_splits)',
+      ],
+    }],
+  ],
+}
+
diff --git a/build/android/findbugs_action.gypi b/build/android/findbugs_action.gypi
new file mode 100644
index 0000000..e3b3d36
--- /dev/null
+++ b/build/android/findbugs_action.gypi
@@ -0,0 +1,22 @@
+
+{
+  'action_name': 'findbugs_<(_target_name)',
+  'message': 'Running findbugs on <(_target_name)',
+  'variables': {
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/findbugs_diff.py',
+    '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+    '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+    '<(findbugs_target_jar_path)',
+  ],
+  'outputs': [
+    '<(stamp_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/findbugs_diff.py',
+    '--auxclasspath-gyp', '>(auxclasspath)',
+    '--stamp', '<(stamp_path)',
+    '<(findbugs_target_jar_path)',
+  ],
+}
diff --git a/build/android/findbugs_diff.py b/build/android/findbugs_diff.py
new file mode 100755
index 0000000..f55e462
--- /dev/null
+++ b/build/android/findbugs_diff.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs findbugs, and returns an error code if there are new warnings.
+
+Other options
+  --only-analyze  only analyze the classes or packages you are interested in.
+  --release-build analyze the classes in the out/Release directory.
+  --findbug-args  pass additional arguments through to findbugs.
+
+Run
+  $CHROMIUM_SRC/third_party/findbugs/bin/findbugs -textui for details.
+
+"""
+
+import argparse
+import os
+import sys
+
+from pylib import constants
+from pylib.utils import findbugs
+
+_DEFAULT_BASE_DIR = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'build', 'android', 'findbugs_filter')
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
+from util import build_utils
+
+
+def main():
+  parser = argparse.ArgumentParser()
+
+  parser.add_argument(
+      '-a', '--auxclasspath', default=None, dest='auxclasspath',
+      help='Set aux classpath for analysis.')
+  parser.add_argument(
+      '--auxclasspath-gyp', dest='auxclasspath_gyp',
+      help='A gyp list containing the aux classpath for analysis')
+  parser.add_argument(
+      '-o', '--only-analyze', default=None,
+      dest='only_analyze', help='Only analyze the given classes and packages.')
+  parser.add_argument(
+      '-e', '--exclude', default=None, dest='exclude',
+      help='Exclude bugs matching given filter.')
+  parser.add_argument(
+      '-l', '--release-build', action='store_true', dest='release_build',
+      help='Analyze release build instead of debug.')
+  parser.add_argument(
+      '-f', '--findbug-args', default=None, dest='findbug_args',
+      help='Additional findbug arguments.')
+  parser.add_argument(
+      '-b', '--base-dir', default=_DEFAULT_BASE_DIR,
+      dest='base_dir', help='Base directory for configuration file.')
+  parser.add_argument(
+      '--output-file', dest='output_file',
+      help='Path to save the output to.')
+  parser.add_argument(
+      '--stamp', help='Path to touch on success.')
+  parser.add_argument(
+      '--depfile', help='Path to the depfile. This must be specified as the '
+                        "action's first output.")
+
+  parser.add_argument(
+      'jar_paths', metavar='JAR_PATH', nargs='+',
+      help='JAR file to analyze')
+
+  args = parser.parse_args(build_utils.ExpandFileArgs(sys.argv[1:]))
+  if args.auxclasspath:
+    args.auxclasspath = args.auxclasspath.split(':')
+  elif args.auxclasspath_gyp:
+    args.auxclasspath = build_utils.ParseGypList(args.auxclasspath_gyp)
+
+  if args.base_dir:
+    if not args.exclude:
+      args.exclude = os.path.join(args.base_dir, 'findbugs_exclude.xml')
+
+  findbugs_command, findbugs_warnings = findbugs.Run(
+      args.exclude, args.only_analyze, args.auxclasspath,
+      args.output_file, args.findbug_args, args.jar_paths)
+
+  if findbugs_warnings:
+    print
+    print '*' * 80
+    print 'FindBugs run via:'
+    print findbugs_command
+    print
+    print 'FindBugs reported the following issues:'
+    for warning in sorted(findbugs_warnings):
+      print str(warning)
+    print '*' * 80
+    print
+  else:
+    if args.depfile:
+      build_utils.WriteDepfile(
+          args.depfile,
+          build_utils.GetPythonDependencies() + args.auxclasspath
+              + args.jar_paths)
+    if args.stamp:
+      build_utils.Touch(args.stamp)
+
+  return len(findbugs_warnings)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
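findbugs_diff.py returns the number of reported warnings, so a zero exit status
means a clean run. A hypothetical direct invocation (the jar path and class
pattern are placeholders, not from the patch):

    # Illustration only; run from the source root with a built jar available.
    import subprocess
    warnings = subprocess.call([
        'build/android/findbugs_diff.py',
        '--only-analyze', 'org.chromium.example.*',
        'out/Release/lib.java/example_java.jar',
    ])
    print('new findbugs warnings: %d' % warnings)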
diff --git a/build/android/findbugs_filter/findbugs_exclude.xml b/build/android/findbugs_filter/findbugs_exclude.xml
new file mode 100644
index 0000000..dbff9d9
--- /dev/null
+++ b/build/android/findbugs_filter/findbugs_exclude.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Copyright (c) 2012 The Chromium Authors. All rights reserved.
+  Use of this source code is governed by a BSD-style license that can be
+  found in the LICENSE file.
+-->
+
+<!--
+Documentation: http://findbugs.sourceforge.net/manual/filter.html
+In particular, ~ at the start of a string means it's a regex.
+-->
+<FindBugsFilter>
+  <!-- Skip the generated resource classes (including nested classes). -->
+  <Match>
+    <Class name="~.*\.R(\$\w+)?" />
+  </Match>
+  <Match>
+    <Class name="~org\.chromium\..*\.Manifest(\$\w+)?" />
+  </Match>
+  <Bug pattern="DM_STRING_CTOR" />
+  <!-- Ignore "reliance on default String encoding" warnings, as we're not multi-platform -->
+  <Bug pattern="DM_DEFAULT_ENCODING" />
+</FindBugsFilter>
diff --git a/build/android/generate_emma_html.py b/build/android/generate_emma_html.py
new file mode 100755
index 0000000..93b0b0e
--- /dev/null
+++ b/build/android/generate_emma_html.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Aggregates EMMA coverage files to produce html output."""
+
+import fnmatch
+import json
+import optparse
+import os
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+def _GetFilesWithExt(root_dir, ext):
+  """Gets all files with a given extension.
+
+  Args:
+    root_dir: Directory in which to search for files.
+    ext: Extension to look for (including dot)
+
+  Returns:
+    A list of absolute paths to files that match.
+  """
+  files = []
+  for root, _, filenames in os.walk(root_dir):
+    basenames = fnmatch.filter(filenames, '*.' + ext)
+    files.extend([os.path.join(root, basename)
+                  for basename in basenames])
+
+  return files
+
+
+def main():
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--output', help='HTML output filename.')
+  option_parser.add_option('--coverage-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage data (.ec) files.'))
+  option_parser.add_option('--metadata-dir', default=None,
+                           help=('Root of the directory in which to search for '
+                                 'coverage metadata (.em) files.'))
+  option_parser.add_option('--cleanup', action='store_true',
+                           help=('If set, removes coverage files generated at '
+                                 'runtime.'))
+  options, _ = option_parser.parse_args()
+
+  if not (options.coverage_dir and options.metadata_dir and options.output):
+    option_parser.error('One or more mandatory options are missing.')
+
+  coverage_files = _GetFilesWithExt(options.coverage_dir, 'ec')
+  metadata_files = _GetFilesWithExt(options.metadata_dir, 'em')
+  print 'Found coverage files: %s' % str(coverage_files)
+  print 'Found metadata files: %s' % str(metadata_files)
+
+  sources = []
+  for f in metadata_files:
+    sources_file = os.path.splitext(f)[0] + '_sources.txt'
+    with open(sources_file, 'r') as sf:
+      sources.extend(json.load(sf))
+  sources = [os.path.join(constants.DIR_SOURCE_ROOT, s) for s in sources]
+  print 'Sources: %s' % sources
+
+  input_args = []
+  for f in coverage_files + metadata_files:
+    input_args.append('-in')
+    input_args.append(f)
+
+  output_args = ['-Dreport.html.out.file', options.output]
+  source_args = ['-sp', ','.join(sources)]
+
+  exit_code = cmd_helper.RunCmd(
+      ['java', '-cp',
+       os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'lib', 'emma.jar'),
+       'emma', 'report', '-r', 'html']
+      + input_args + output_args + source_args)
+
+  if options.cleanup:
+    for f in coverage_files:
+      os.remove(f)
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gn/zip.py b/build/android/gn/zip.py
new file mode 100755
index 0000000..5050ea0
--- /dev/null
+++ b/build/android/gn/zip.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import ast
+import optparse
+import os
+import sys
+import zipfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, 'gyp'))
+from util import build_utils
+
+def DoZip(inputs, output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for f in inputs:
+      outfile.write(f, os.path.relpath(f, base_dir))
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--inputs', help='List of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  parser.add_option('--base-dir',
+                    help='If provided, the paths in the archive will be '
+                    'relative to this directory', default='.')
+
+  options, _ = parser.parse_args()
+
+  inputs = ast.literal_eval(options.inputs)
+  output = options.output
+  base_dir = options.base_dir
+
+  DoZip(inputs, output, base_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
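Note that --inputs above is parsed with ast.literal_eval, so callers (the GN
build in practice) pass the file list as a Python list literal rather than as
separate arguments. A minimal sketch of an equivalent manual invocation, with
placeholder paths:

    # Illustration only; paths are placeholders.
    import subprocess
    subprocess.check_call([
        'python', 'build/android/gn/zip.py',
        '--inputs=["out/Default/a.txt", "out/Default/b.txt"]',
        '--output=out/Default/files.zip',
        '--base-dir=out/Default',
    ])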
diff --git a/build/android/gyp/aidl.py b/build/android/gyp/aidl.py
new file mode 100755
index 0000000..d5aa546
--- /dev/null
+++ b/build/android/gyp/aidl.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Invokes Android's aidl
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(option_parser)
+  option_parser.add_option('--aidl-path', help='Path to the aidl binary.')
+  option_parser.add_option('--imports', help='Files to import.')
+  option_parser.add_option('--includes',
+                           help='Directories to add as import search paths.')
+  option_parser.add_option('--srcjar', help='Path for srcjar output.')
+  options, args = option_parser.parse_args(argv[1:])
+
+  with build_utils.TempDir() as temp_dir:
+    for f in args:
+      classname = os.path.splitext(os.path.basename(f))[0]
+      output = os.path.join(temp_dir, classname + '.java')
+      aidl_cmd = [options.aidl_path]
+      aidl_cmd += [
+        '-p' + s for s in build_utils.ParseGypList(options.imports)
+      ]
+      if options.includes is not None:
+        aidl_cmd += [
+          '-I' + s for s in build_utils.ParseGypList(options.includes)
+        ]
+      aidl_cmd += [
+        f,
+        output
+      ]
+      build_utils.CheckOutput(aidl_cmd)
+
+    build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/ant.py b/build/android/gyp/ant.py
new file mode 100755
index 0000000..5394b9e
--- /dev/null
+++ b/build/android/gyp/ant.py
@@ -0,0 +1,65 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An Ant wrapper that suppresses useless Ant output.
+
+Ant build scripts output "BUILD SUCCESSFUL" and build timing at the end of
+every build. In the Android build, this just adds a lot of useless noise to the
+build output. This script forwards its arguments to ant, and prints Ant's
+output up until the BUILD SUCCESSFUL line.
+
+Also, when a command fails, this script will re-run that ant command with the
+'-verbose' argument so that the failure is easier to debug.
+"""
+
+import optparse
+import sys
+import traceback
+
+from util import build_utils
+
+
+def main(argv):
+  option_parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(option_parser)
+  options, args = option_parser.parse_args(argv[1:])
+
+  try:
+    stdout = build_utils.CheckOutput(['ant'] + args)
+  except build_utils.CalledProcessError:
+    # It is very difficult to diagnose ant failures without the '-verbose'
+    # argument. So, when an ant command fails, re-run it with '-verbose' so that
+    # the cause of the failure is easier to identify.
+    verbose_args = ['-verbose'] + [a for a in args if a != '-quiet']
+    try:
+      stdout = build_utils.CheckOutput(['ant'] + verbose_args)
+    except build_utils.CalledProcessError:
+      traceback.print_exc()
+      sys.exit(1)
+
+    # If this did sys.exit(1), building again would succeed (which would be
+    # awkward). Instead, just print a big warning.
+    build_utils.PrintBigWarning(
+        'This is unexpected. `ant ' + ' '.join(args) + '` failed. ' +
+        'But running `ant ' + ' '.join(verbose_args) + '` passed.')
+
+  stdout = stdout.strip().split('\n')
+  for line in stdout:
+    if line.strip() == 'BUILD SUCCESSFUL':
+      break
+    print line
+
+  if options.depfile:
+    assert '-buildfile' in args
+    ant_buildfile = args[args.index('-buildfile') + 1]
+
+    build_utils.WriteDepfile(
+        options.depfile,
+        [ant_buildfile] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/apk_install.py b/build/android/gyp/apk_install.py
new file mode 100755
index 0000000..a512e50
--- /dev/null
+++ b/build/android/gyp/apk_install.py
@@ -0,0 +1,118 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs an APK.
+
+"""
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+from pylib.utils import apk_helper
+
+
+def GetNewMetadata(device, apk_package):
+  """Gets the metadata on the device for the apk_package apk."""
+  output = device.RunShellCommand('ls -l /data/app/')
+  # Matches lines like:
+  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+  # org.chromium.chrome.shell.apk
+  # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+  # org.chromium.chrome.shell-1.apk
+  apk_matcher = lambda s: re.match('.*%s(-[0-9]*)?(.apk)?$' % apk_package, s)
+  matches = filter(apk_matcher, output)
+  return matches[0] if matches else None
+
+def HasInstallMetadataChanged(device, apk_package, metadata_path):
+  """Checks if the metadata on the device for apk_package has changed."""
+  if not os.path.exists(metadata_path):
+    return True
+
+  with open(metadata_path, 'r') as expected_file:
+    return expected_file.read() != device.GetInstallMetadata(apk_package)
+
+
+def RecordInstallMetadata(device, apk_package, metadata_path):
+  """Records the metadata from the device for apk_package."""
+  metadata = GetNewMetadata(device, apk_package)
+  if not metadata:
+    raise Exception('APK install failed unexpectedly.')
+
+  with open(metadata_path, 'w') as outfile:
+    outfile.write(metadata)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--apk-path',
+      help='Path to .apk to install.')
+  parser.add_option('--split-apk-path',
+      help='Path to .apk splits (can specify multiple times, causes '
+      '--install-multiple to be used).',
+      action='append')
+  parser.add_option('--android-sdk-tools',
+      help='Path to the Android SDK build tools folder. ' +
+           'Required when using --split-apk-path.')
+  parser.add_option('--install-record',
+      help='Path to install record (touched only when APK is installed).')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--stamp',
+      help='Path to touch on success.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args()
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  constants.SetBuildType(options.configuration_name)
+
+  serial_number = device.GetSerialNumber()
+  apk_package = apk_helper.GetPackageName(options.apk_path)
+
+  metadata_path = '%s.%s.device.time.stamp' % (options.apk_path, serial_number)
+
+  # If the APK on the device does not match the one that was last installed by
+  # the build, then the APK has to be installed (regardless of the md5 record).
+  force_install = HasInstallMetadataChanged(device, apk_package, metadata_path)
+
+
+  def Install():
+    if options.split_apk_path:
+      device.InstallSplitApk(options.apk_path, options.split_apk_path)
+    else:
+      device.Install(options.apk_path, reinstall=True)
+
+    RecordInstallMetadata(device, apk_package, metadata_path)
+    build_utils.Touch(options.install_record)
+
+
+  record_path = '%s.%s.md5.stamp' % (options.apk_path, serial_number)
+  md5_check.CallAndRecordIfStale(
+      Install,
+      record_path=record_path,
+      input_paths=[options.apk_path],
+      force=force_install)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/apk_obfuscate.py b/build/android/gyp/apk_obfuscate.py
new file mode 100755
index 0000000..b075758
--- /dev/null
+++ b/build/android/gyp/apk_obfuscate.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates the obfuscated jar and test jar for an apk.
+
+If proguard is not enabled or 'Release' is not in the configuration name,
+obfuscation will be a no-op.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import proguard_util
+
+
+def ParseArgs(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--android-sdk-tools',
+                    help='path to the Android SDK build tools folder')
+  parser.add_option('--android-sdk-jar',
+                    help='path to Android SDK\'s android.jar')
+  parser.add_option('--proguard-jar-path',
+                    help='Path to proguard.jar in the sdk')
+  parser.add_option('--input-jars-paths',
+                    help='Path to jars to include in obfuscated jar')
+
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard config files')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp configuration name (e.g. Debug, Release)')
+  parser.add_option('--proguard-enabled', action='store_true',
+                    help='Set if proguard is enabled for this target.')
+
+  parser.add_option('--obfuscated-jar-path',
+                    help='Output path for obfuscated jar.')
+
+  parser.add_option('--testapp', action='store_true',
+                    help='Set this if building an instrumentation test apk')
+  parser.add_option('--tested-apk-obfuscated-jar-path',
+                    help='Path to obfuscated jar of the tested apk')
+  parser.add_option('--test-jar-path',
+                    help='Output path for jar containing all the test apk\'s '
+                    'code.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  (options, args) = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given. ' + str(args))
+
+  # Check that required options have been provided.
+  required_options = (
+      'android_sdk',
+      'android_sdk_tools',
+      'android_sdk_jar',
+      'proguard_jar_path',
+      'input_jars_paths',
+      'configuration_name',
+      'obfuscated_jar_path',
+      )
+
+  if options.testapp:
+    required_options += (
+        'test_jar_path',
+        )
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+  return options, args
+
+
+def DoProguard(options):
+  proguard = proguard_util.ProguardCmdBuilder(options.proguard_jar_path)
+  proguard.outjar(options.obfuscated_jar_path)
+
+  library_classpath = [options.android_sdk_jar]
+  input_jars = build_utils.ParseGypList(options.input_jars_paths)
+
+  exclude_paths = []
+  configs = build_utils.ParseGypList(options.proguard_configs)
+  if options.tested_apk_obfuscated_jar_path:
+    # configs should only contain the process_resources.py generated config.
+    assert len(configs) == 1, (
+        'test apks should not have custom proguard configs: ' + str(configs))
+    tested_jar_info = build_utils.ReadJson(
+        options.tested_apk_obfuscated_jar_path + '.info')
+    exclude_paths = tested_jar_info['inputs']
+    configs = tested_jar_info['configs']
+
+    proguard.is_test(True)
+    proguard.mapping(options.tested_apk_obfuscated_jar_path + '.mapping')
+    library_classpath.append(options.tested_apk_obfuscated_jar_path)
+
+  proguard.libraryjars(library_classpath)
+  proguard_injars = [p for p in input_jars if p not in exclude_paths]
+  proguard.injars(proguard_injars)
+  proguard.configs(configs)
+
+  proguard.CheckOutput()
+
+  this_info = {
+    'inputs': proguard_injars,
+    'configs': configs
+  }
+
+  build_utils.WriteJson(
+      this_info, options.obfuscated_jar_path + '.info')
+
+
+def main(argv):
+  options, _ = ParseArgs(argv)
+
+  input_jars = build_utils.ParseGypList(options.input_jars_paths)
+
+  if options.testapp:
+    dependency_class_filters = [
+        '*R.class', '*R$*.class', '*Manifest.class', '*BuildConfig.class']
+    build_utils.MergeZips(
+        options.test_jar_path, input_jars, dependency_class_filters)
+
+  if options.configuration_name == 'Release' and options.proguard_enabled:
+    DoProguard(options)
+  else:
+    output_files = [
+        options.obfuscated_jar_path,
+        options.obfuscated_jar_path + '.info',
+        options.obfuscated_jar_path + '.dump',
+        options.obfuscated_jar_path + '.seeds',
+        options.obfuscated_jar_path + '.usage',
+        options.obfuscated_jar_path + '.mapping']
+    for f in output_files:
+      if os.path.exists(f):
+        os.remove(f)
+      build_utils.Touch(f)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/copy_ex.py b/build/android/gyp/copy_ex.py
new file mode 100755
index 0000000..a474e77
--- /dev/null
+++ b/build/android/gyp/copy_ex.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies files to a directory."""
+
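+# Example invocation (paths are hypothetical):
+#
+#   copy_ex.py --dest=out/Default/gen/assets --clear \
+#       --files="out/Default/icudtl.dat out/Default/flutter_assets" \
+#       --stamp=out/Default/copy_assets.stamp
+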
+import optparse
+import os
+import shutil
+import sys
+
+from util import build_utils
+
+
+def _get_all_files(base):
+  """Returns a list of all the files in |base|. Each entry is relative to the
+  last path entry of |base|."""
+  result = []
+  dirname = os.path.dirname(base)
+  for root, _, files in os.walk(base):
+    result.extend([os.path.join(root[len(dirname):], f) for f in files])
+  return result
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--dest', help='Directory to copy files to.')
+  parser.add_option('--files', action='append',
+                    help='List of files to copy.')
+  parser.add_option('--clear', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(args)
+
+  if options.clear:
+    build_utils.DeleteDirectory(options.dest)
+    build_utils.MakeDirectory(options.dest)
+
+  files = []
+  for file_arg in options.files:
+    files += build_utils.ParseGypList(file_arg)
+
+  deps = []
+
+  for f in files:
+    if os.path.isdir(f):
+      if not options.clear:
+        print ('To avoid stale files you must use --clear when copying '
+               'directories')
+        sys.exit(-1)
+      shutil.copytree(f, os.path.join(options.dest, os.path.basename(f)))
+      deps.extend(_get_all_files(f))
+    else:
+      shutil.copy(f, options.dest)
+      deps.append(f)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        deps + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
diff --git a/build/android/gyp/create_device_library_links.py b/build/android/gyp/create_device_library_links.py
new file mode 100755
index 0000000..3e630b6
--- /dev/null
+++ b/build/android/gyp/create_device_library_links.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates symlinks to native libraries for an APK.
+
+The native libraries should have previously been pushed to the device (in
+options.target_dir). This script then creates links in an apk's lib/ folder to
+those native libraries.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_device
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+from pylib.utils import apk_helper
+
+def RunShellCommand(device, cmd):
+  output = device.RunShellCommand(cmd)
+
+  if output:
+    raise Exception(
+        'Unexpected output running command: ' + cmd + '\n' +
+        '\n'.join(output))
+
+
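+# For a --libraries value of "libfoo.so" (name is illustrative), the script
+# generated by CreateSymlinkScript() looks like:
+#
+#   #!/bin/sh
+#   rm $APK_LIBRARIES_DIR/libfoo.so > /dev/null 2>&1
+#   ln -s $STRIPPED_LIBRARIES_DIR/libfoo.so $APK_LIBRARIES_DIR/libfoo.so
+#
+# TriggerSymlinkScript() then defines both variables on the device and
+# sources the script there.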
+def CreateSymlinkScript(options):
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  link_cmd = (
+      'rm $APK_LIBRARIES_DIR/%(lib_basename)s > /dev/null 2>&1 \n'
+      'ln -s $STRIPPED_LIBRARIES_DIR/%(lib_basename)s '
+        '$APK_LIBRARIES_DIR/%(lib_basename)s \n'
+      )
+
+  script = '#!/bin/sh \n'
+
+  for lib in libraries:
+    script += link_cmd % { 'lib_basename': lib }
+
+  with open(options.script_host_path, 'w') as scriptfile:
+    scriptfile.write(script)
+
+
+def TriggerSymlinkScript(options):
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  apk_package = apk_helper.GetPackageName(options.apk)
+  apk_libraries_dir = '/data/data/%s/lib' % apk_package
+
+  device_dir = os.path.dirname(options.script_device_path)
+  mkdir_cmd = ('if [ ! -e %(dir)s ]; then mkdir -p %(dir)s; fi ' %
+      { 'dir': device_dir })
+  RunShellCommand(device, mkdir_cmd)
+  device.PushChangedFiles([(options.script_host_path,
+                            options.script_device_path)])
+
+  trigger_cmd = (
+      'APK_LIBRARIES_DIR=%(apk_libraries_dir)s; '
+      'STRIPPED_LIBRARIES_DIR=%(target_dir)s; '
+      '. %(script_device_path)s'
+      ) % {
+          'apk_libraries_dir': apk_libraries_dir,
+          'target_dir': options.target_dir,
+          'script_device_path': options.script_device_path
+          }
+  RunShellCommand(device, trigger_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--apk', help='Path to the apk.')
+  parser.add_option('--script-host-path',
+      help='Path on the host for the symlink script.')
+  parser.add_option('--script-device-path',
+      help='Path on the device to push the created symlink script.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--target-dir',
+      help='Device directory that contains the target libraries for symlinks.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['apk', 'libraries', 'script_host_path',
+      'script_device_path', 'target_dir', 'configuration_name']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  CreateSymlinkScript(options)
+  TriggerSymlinkScript(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_dist_jar.py b/build/android/gyp/create_dist_jar.py
new file mode 100755
index 0000000..0d31c5d
--- /dev/null
+++ b/build/android/gyp/create_dist_jar.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Merges a list of jars into a single jar."""
+
+import optparse
+import sys
+
+from util import build_utils
+
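+# Example invocation (paths are hypothetical):
+#
+#   create_dist_jar.py --output=out/Default/dist/shell.jar \
+#       --inputs="out/Default/gen/a_java.jar out/Default/gen/b_java.jar"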
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Path to output jar.')
+  parser.add_option('--inputs', action='append', help='List of jar inputs.')
+  options, _ = parser.parse_args(args)
+  build_utils.CheckOptions(options, parser, ['output', 'inputs'])
+
+  input_jars = []
+  for inputs_arg in options.inputs:
+    input_jars.extend(build_utils.ParseGypList(inputs_arg))
+
+  build_utils.MergeZips(options.output, input_jars)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_jars + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_flutter_jar.py b/build/android/gyp/create_flutter_jar.py
new file mode 100644
index 0000000..c30bae2
--- /dev/null
+++ b/build/android/gyp/create_flutter_jar.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# Copyright 2016 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create a JAR incorporating all the components required to build a Flutter application"""
+
+import optparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
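+# Rough layout of the output jar (entry names are illustrative):
+#
+#   **/*.class                    class files copied from --dist_jar
+#   lib/<android_abi>/libfoo.so   one entry per --native_lib
+#   assets/<file>                 one entry per file in --asset_dir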
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Path to output jar.')
+  parser.add_option('--dist_jar', help='Flutter shell Java code jar.')
+  parser.add_option('--native_lib', action='append', help='Native code library.')
+  parser.add_option('--android_abi', help='Native code ABI.')
+  parser.add_option('--asset_dir', help='Path to assets.')
+  options, _ = parser.parse_args(args)
+  build_utils.CheckOptions(options, parser, [
+    'output', 'dist_jar', 'native_lib', 'android_abi', 'asset_dir'
+  ])
+
+  input_deps = []
+
+  with zipfile.ZipFile(options.output, 'w', zipfile.ZIP_DEFLATED) as out_zip:
+    input_deps.append(options.dist_jar)
+    with zipfile.ZipFile(options.dist_jar, 'r') as dist_zip:
+      for dist_file in dist_zip.infolist():
+        if dist_file.filename.endswith('.class'):
+          out_zip.writestr(dist_file.filename, dist_zip.read(dist_file.filename))
+
+    for native_lib in options.native_lib:
+      input_deps.append(native_lib)
+      out_zip.write(native_lib,
+                    'lib/%s/%s' % (options.android_abi, os.path.basename(native_lib)))
+
+    for asset_file in os.listdir(options.asset_dir):
+      input_deps.append(os.path.join(options.asset_dir, asset_file))
+      out_zip.write(os.path.join(options.asset_dir, asset_file),
+                    'assets/%s' % asset_file)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_deps + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_java_binary_script.py b/build/android/gyp/create_java_binary_script.py
new file mode 100755
index 0000000..5de43f2
--- /dev/null
+++ b/build/android/gyp/create_java_binary_script.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a simple script to run a java "binary".
+
+This creates a script that sets up the java command line for running a java
+jar. This includes correctly setting the classpath and the main class.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# The java command must be executed in the current directory because there may
+# be user-supplied paths in the args. The script receives the classpath relative
+# to the directory that the script is written in and then, when run, must
+# recalculate the paths relative to the current directory.
+script_template = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_java_binary_script.py
+
+import os
+import sys
+
+self_dir = os.path.dirname(__file__)
+classpath = [{classpath}]
+if os.getcwd() != self_dir:
+  offset = os.path.relpath(self_dir, os.getcwd())
+  classpath = [os.path.join(offset, p) for p in classpath]
+java_args = [
+  "java",
+  "-classpath", ":".join(classpath),
+  "-enableassertions",
+  \"{main_class}\"] + sys.argv[1:]
+os.execvp("java", java_args)
+"""
+
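+# Example invocation (paths and the main class are hypothetical):
+#
+#   create_java_binary_script.py --output=out/Default/bin/lint \
+#       --jar-path=out/Default/lib.java/lint.jar \
+#       --main-class=com.example.lint.Main \
+#       --classpath=out/Default/lib.java/guava.jar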
+def main(argv):
+  argv = build_utils.ExpandFileArgs(argv)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--output', help='Output path for executable script.')
+  parser.add_option('--jar-path', help='Path to the main jar.')
+  parser.add_option('--main-class',
+      help='Name of the java class with the "main" entry point.')
+  parser.add_option('--classpath', action='append',
+      help='Classpath for running the jar.')
+  options, _ = parser.parse_args(argv)
+
+  classpath = [options.jar_path]
+  for cp_arg in options.classpath:
+    classpath += build_utils.ParseGypList(cp_arg)
+
+  run_dir = os.path.dirname(options.output)
+  classpath = [os.path.relpath(p, run_dir) for p in classpath]
+
+  with open(options.output, 'w') as script:
+    script.write(script_template.format(
+      classpath=('"%s"' % '", "'.join(classpath)),
+      main_class=options.main_class))
+
+  os.chmod(options.output, 0750)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/create_placeholder_files.py b/build/android/gyp/create_placeholder_files.py
new file mode 100755
index 0000000..103e1df
--- /dev/null
+++ b/build/android/gyp/create_placeholder_files.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Create placeholder files.
+"""
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '--dest-lib-dir',
+      help='Destination directory to have placeholder files.')
+  parser.add_option(
+      '--stamp',
+      help='Path to touch on success')
+
+  options, args = parser.parse_args()
+
+  for name in args:
+    target_path = os.path.join(options.dest_lib_dir, name)
+    build_utils.Touch(target_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/create_standalone_apk.py b/build/android/gyp/create_standalone_apk.py
new file mode 100755
index 0000000..c560599
--- /dev/null
+++ b/build/android/gyp/create_standalone_apk.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Combines stripped libraries and incomplete APK into single standalone APK.
+
+"""
+
+import optparse
+import os
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+from util import md5_check
+
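+# CreateStandaloneApk() below zips the 'lib/' subtree of --libraries-top-dir
+# into a copy of the incomplete APK, so (with a hypothetical ABI and name)
+# <libraries-top-dir>/lib/armeabi-v7a/libfoo.so ends up as
+# lib/armeabi-v7a/libfoo.so inside the output APK.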
+def CreateStandaloneApk(options):
+  def DoZip():
+    with tempfile.NamedTemporaryFile(suffix='.zip') as intermediate_file:
+      intermediate_path = intermediate_file.name
+      shutil.copy(options.input_apk_path, intermediate_path)
+      apk_path_abs = os.path.abspath(intermediate_path)
+      build_utils.CheckOutput(
+          ['zip', '-r', '-1', apk_path_abs, 'lib'],
+          cwd=options.libraries_top_dir)
+      shutil.copy(intermediate_path, options.output_apk_path)
+
+  input_paths = [options.input_apk_path, options.libraries_top_dir]
+  record_path = '%s.standalone.stamp' % options.input_apk_path
+  md5_check.CallAndRecordIfStale(
+      DoZip,
+      record_path=record_path,
+      input_paths=input_paths)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-top-dir',
+      help='Top directory that contains libraries '
+      '(i.e. library paths are like '
+      'libraries_top_dir/lib/android_app_abi/foo.so).')
+  parser.add_option('--input-apk-path', help='Path to incomplete APK.')
+  parser.add_option('--output-apk-path', help='Path for standalone APK.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  options, _ = parser.parse_args()
+
+  required_options = ['libraries_top_dir', 'input_apk_path', 'output_apk_path']
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  CreateStandaloneApk(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/create_test_runner_script.py b/build/android/gyp/create_test_runner_script.py
new file mode 100755
index 0000000..247bf20
--- /dev/null
+++ b/build/android/gyp/create_test_runner_script.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a script to run an android test using build/android/test_runner.py.
+"""
+
+import argparse
+import os
+import sys
+
+from util import build_utils
+
+SCRIPT_TEMPLATE = """\
+#!/usr/bin/env python
+#
+# This file was generated by build/android/gyp/create_test_runner_script.py
+
+import logging
+import os
+import sys
+
+def main():
+  script_directory = os.path.dirname(__file__)
+
+  def ResolvePath(path):
+    \"\"\"Returns an absolute filepath given a path relative to this script.
+    \"\"\"
+    return os.path.abspath(os.path.join(script_directory, path))
+
+  test_runner_path = ResolvePath('{test_runner_path}')
+  test_runner_args = {test_runner_args}
+  test_runner_path_args = {test_runner_path_args}
+  for arg, path in test_runner_path_args.iteritems():
+    test_runner_args.extend([arg, ResolvePath(path)])
+
+  test_runner_cmd = ' '.join(
+      [test_runner_path] + test_runner_args + sys.argv[1:])
+  logging.critical(test_runner_cmd)
+  os.system(test_runner_cmd)
+
+if __name__ == '__main__':
+  sys.exit(main())
+"""
+
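+# The {test_runner_args} and {test_runner_path_args} placeholders above are
+# filled with the str() of a list and a dict, e.g. (values are illustrative):
+#
+#   test_runner_args = ['gtest', '-s', 'some_unittests']
+#   test_runner_path_args = {'--output-directory': '../..'}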
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--script-output-path',
+                      help='Output path for executable script.')
+  parser.add_argument('--depfile',
+                      help='Path to the depfile. This must be specified as '
+                           "the action's first output.")
+  # We need to intercept any test runner path arguments and make all
+  # of the paths relative to the output script directory.
+  group = parser.add_argument_group('Test runner path arguments.')
+  group.add_argument('--output-directory')
+  group.add_argument('--isolate-file-path')
+  group.add_argument('--support-apk')
+  args, test_runner_args = parser.parse_known_args()
+
+  def RelativizePathToScript(path):
+    """Returns the path relative to the output script directory."""
+    return os.path.relpath(path, os.path.dirname(args.script_output_path))
+
+  test_runner_path = os.path.join(
+      os.path.dirname(__file__), os.path.pardir, 'test_runner.py')
+  test_runner_path = RelativizePathToScript(test_runner_path)
+
+  test_runner_path_args = {}
+  if args.output_directory:
+    test_runner_path_args['--output-directory'] = RelativizePathToScript(
+        args.output_directory)
+  if args.isolate_file_path:
+    test_runner_path_args['--isolate-file-path'] = RelativizePathToScript(
+        args.isolate_file_path)
+  if args.support_apk:
+    test_runner_path_args['--support-apk'] = RelativizePathToScript(
+        args.support_apk)
+
+  with open(args.script_output_path, 'w') as script:
+    script.write(SCRIPT_TEMPLATE.format(
+        test_runner_path=str(test_runner_path),
+        test_runner_args=str(test_runner_args),
+        test_runner_path_args=str(test_runner_path_args)))
+
+  os.chmod(args.script_output_path, 0750)
+
+  if args.depfile:
+    build_utils.WriteDepfile(
+        args.depfile,
+        build_utils.GetPythonDependencies())
+
+if __name__ == '__main__':
+  sys.exit(main())
\ No newline at end of file
diff --git a/build/android/gyp/dex.py b/build/android/gyp/dex.py
new file mode 100755
index 0000000..c26d23a
--- /dev/null
+++ b/build/android/gyp/dex.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import md5_check
+
+
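+# For --dex-path=out/Default/classes.dex with inputs a.jar and b.jar, the
+# underlying command built below is roughly (paths are illustrative):
+#
+#   <android-sdk-tools>/dx --dex --force-jumbo [--no-locals] \
+#       --output out/Default/classes.dex a.jar b.jar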
+def DoDex(options, paths):
+  dx_binary = os.path.join(options.android_sdk_tools, 'dx')
+  # See http://crbug.com/272064 for context on --force-jumbo.
+  dex_cmd = [dx_binary, '--dex', '--force-jumbo', '--output', options.dex_path]
+  if options.no_locals != '0':
+    dex_cmd.append('--no-locals')
+
+  dex_cmd += paths
+
+  record_path = '%s.md5.stamp' % options.dex_path
+  md5_check.CallAndRecordIfStale(
+      lambda: build_utils.CheckOutput(dex_cmd, print_stderr=False),
+      record_path=record_path,
+      input_paths=paths,
+      input_strings=dex_cmd,
+      force=not os.path.exists(options.dex_path))
+  build_utils.WriteJson(
+      [os.path.relpath(p, options.output_directory) for p in paths],
+      options.dex_path + '.inputs')
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk-tools',
+                    help='Android sdk build tools directory.')
+  parser.add_option('--output-directory',
+                    default=os.getcwd(),
+                    help='Path to the output build directory.')
+  parser.add_option('--dex-path', help='Dex output path.')
+  parser.add_option('--configuration-name',
+                    help='The build CONFIGURATION_NAME.')
+  parser.add_option('--proguard-enabled',
+                    help='"true" if proguard is enabled.')
+  parser.add_option('--proguard-enabled-input-path',
+                    help=('Path to dex in Release mode when proguard '
+                          'is enabled.'))
+  parser.add_option('--no-locals',
+                    help='Exclude locals list from the dex file.')
+  parser.add_option('--inputs', help='A list of additional input paths.')
+  parser.add_option('--excluded-paths',
+                    help='A list of paths to exclude from the dex file.')
+
+  options, paths = parser.parse_args(args)
+
+  required_options = ('android_sdk_tools',)
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.proguard_enabled == 'true'
+      and options.configuration_name == 'Release'):
+    paths = [options.proguard_enabled_input_path]
+
+  if options.inputs:
+    paths += build_utils.ParseGypList(options.inputs)
+
+  if options.excluded_paths:
+    # Excluded paths are relative to the output directory.
+    exclude_paths = build_utils.ParseGypList(options.excluded_paths)
+    paths = [p for p in paths if not
+             os.path.relpath(p, options.output_directory) in exclude_paths]
+
+  DoDex(options, paths)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        paths + build_utils.GetPythonDependencies())
+
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/emma_instr.py b/build/android/gyp/emma_instr.py
new file mode 100755
index 0000000..6f3555a
--- /dev/null
+++ b/build/android/gyp/emma_instr.py
@@ -0,0 +1,207 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Instruments classes and jar files.
+
+This script corresponds to the 'emma_instr' action in the java build process.
+Depending on whether emma_instrument is set, the 'emma_instr' action will either
+call one of the instrument commands, or the copy command.
+
+Possible commands are:
+- instrument_jar: Accepts a jar and instruments it using emma.jar.
+- instrument_classes: Accepts a directory containing java classes and
+      instruments it using emma.jar.
+- copy: Called when EMMA coverage is not enabled. This allows us to make
+      this a required step without necessarily instrumenting on every build.
+      Also removes any stale coverage files.
+"""
+
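+# Example invocation (paths are hypothetical):
+#
+#   emma_instr.py instrument_jar \
+#       --input-path=lib.java/foo.jar \
+#       --output-path=lib.java/foo.instr.jar \
+#       --coverage-file=foo.em \
+#       --sources-file=foo_sources.txt \
+#       --sources="java/src" \
+#       --src-root=. \
+#       --emma-jar=third_party/android_tools/emma.jar
+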
+import collections
+import json
+import os
+import shutil
+import sys
+import tempfile
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+from pylib.utils import command_option_parser
+
+from util import build_utils
+
+
+def _AddCommonOptions(option_parser):
+  """Adds common options to |option_parser|."""
+  option_parser.add_option('--input-path',
+                           help=('Path to input file(s). Either the classes '
+                                 'directory, or the path to a jar.'))
+  option_parser.add_option('--output-path',
+                           help=('Path to output final file(s) to. Either the '
+                                 'final classes directory, or the directory in '
+                                 'which to place the instrumented/copied jar.'))
+  option_parser.add_option('--stamp', help='Path to touch when done.')
+  option_parser.add_option('--coverage-file',
+                           help='File to create with coverage metadata.')
+  option_parser.add_option('--sources-file',
+                           help='File to create with the list of sources.')
+
+
+def _AddInstrumentOptions(option_parser):
+  """Adds options related to instrumentation to |option_parser|."""
+  _AddCommonOptions(option_parser)
+  option_parser.add_option('--sources',
+                           help='Space separated list of sources.')
+  option_parser.add_option('--src-root',
+                           help='Root of the src repository.')
+  option_parser.add_option('--emma-jar',
+                           help='Path to emma.jar.')
+  option_parser.add_option(
+      '--filter-string', default='',
+      help=('Filter string consisting of a list of inclusion/exclusion '
+            'patterns separated with whitespace and/or comma.'))
+
+
+def _RunCopyCommand(_command, options, _, option_parser):
+  """Copies the jar from input to output locations.
+
+  Also removes any old coverage/sources file.
+
+  Args:
+    command: String indicating the command that was received to trigger
+        this function.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    An exit code.
+  """
+  if not (options.input_path and options.output_path and
+          options.coverage_file and options.sources_file):
+    option_parser.error('All arguments are required.')
+
+  coverage_file = os.path.join(os.path.dirname(options.output_path),
+                               options.coverage_file)
+  sources_file = os.path.join(os.path.dirname(options.output_path),
+                              options.sources_file)
+  if os.path.exists(coverage_file):
+    os.remove(coverage_file)
+  if os.path.exists(sources_file):
+    os.remove(sources_file)
+
+  if os.path.isdir(options.input_path):
+    shutil.rmtree(options.output_path, ignore_errors=True)
+    shutil.copytree(options.input_path, options.output_path)
+  else:
+    shutil.copy(options.input_path, options.output_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+def _CreateSourcesFile(sources_string, sources_file, src_root):
+  """Adds all normalized source directories to |sources_file|.
+
+  Args:
+    sources_string: String generated from gyp containing the list of sources.
+    sources_file: File into which to write the JSON list of sources.
+    src_root: Root which sources added to the file should be relative to.
+
+  Returns:
+    An exit code.
+  """
+  src_root = os.path.abspath(src_root)
+  sources = build_utils.ParseGypList(sources_string)
+  relative_sources = []
+  for s in sources:
+    abs_source = os.path.abspath(s)
+    if abs_source[:len(src_root)] != src_root:
+      print ('Error: found source directory not under repository root: %s %s'
+             % (abs_source, src_root))
+      return 1
+    rel_source = os.path.relpath(abs_source, src_root)
+
+    relative_sources.append(rel_source)
+
+  with open(sources_file, 'w') as f:
+    json.dump(relative_sources, f)
+
+
+def _RunInstrumentCommand(command, options, _, option_parser):
+  """Instruments the classes/jar files using EMMA.
+
+  Args:
+    command: 'instrument_jar' or 'instrument_classes'. This distinguishes
+        whether we copy the output from the created lib/ directory, or classes/
+        directory.
+    options: optparse options dictionary.
+    args: List of extra args from optparse.
+    option_parser: optparse.OptionParser object.
+
+  Returns:
+    An exit code.
+  """
+  if not (options.input_path and options.output_path and
+          options.coverage_file and options.sources_file and options.sources and
+          options.src_root and options.emma_jar):
+    option_parser.error('All arguments are required.')
+
+  coverage_file = os.path.join(os.path.dirname(options.output_path),
+                               options.coverage_file)
+  sources_file = os.path.join(os.path.dirname(options.output_path),
+                              options.sources_file)
+  if os.path.exists(coverage_file):
+    os.remove(coverage_file)
+  temp_dir = tempfile.mkdtemp()
+  try:
+    cmd = ['java', '-cp', options.emma_jar,
+           'emma', 'instr',
+           '-ip', options.input_path,
+           '-ix', options.filter_string,
+           '-d', temp_dir,
+           '-out', coverage_file,
+           '-m', 'fullcopy']
+    build_utils.CheckOutput(cmd)
+
+    if command == 'instrument_jar':
+      for jar in os.listdir(os.path.join(temp_dir, 'lib')):
+        shutil.copy(os.path.join(temp_dir, 'lib', jar),
+                    options.output_path)
+    else:  # 'instrument_classes'
+      if os.path.isdir(options.output_path):
+        shutil.rmtree(options.output_path, ignore_errors=True)
+      shutil.copytree(os.path.join(temp_dir, 'classes'),
+                      options.output_path)
+  finally:
+    shutil.rmtree(temp_dir)
+
+  _CreateSourcesFile(options.sources, sources_file, options.src_root)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+CommandFunctionTuple = collections.namedtuple(
+    'CommandFunctionTuple', ['add_options_func', 'run_command_func'])
+VALID_COMMANDS = {
+    'copy': CommandFunctionTuple(_AddCommonOptions,
+                                 _RunCopyCommand),
+    'instrument_jar': CommandFunctionTuple(_AddInstrumentOptions,
+                                           _RunInstrumentCommand),
+    'instrument_classes': CommandFunctionTuple(_AddInstrumentOptions,
+                                               _RunInstrumentCommand),
+}
+
+
+def main():
+  option_parser = command_option_parser.CommandOptionParser(
+      commands_dict=VALID_COMMANDS)
+  command_option_parser.ParseAndExecute(option_parser)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/finalize_apk.py b/build/android/gyp/finalize_apk.py
new file mode 100755
index 0000000..0a80035
--- /dev/null
+++ b/build/android/gyp/finalize_apk.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns APK.
+
+"""
+
+import optparse
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
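+# Note on ordering: signing happens first and alignment last, because running
+# jarsigner on an already aligned APK would disturb the alignment.
+# FinalizeApk() below therefore signs the (optionally renamed/inflated) APK
+# and then zipaligns or reorders it into the final output.
+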
+def RenameInflateAndAddPageAlignment(
+    rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'renamealign',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def ReorderAndAlignApk(rezip_apk_jar_path, in_zip_file, out_zip_file):
+  rezip_apk_cmd = [
+      'java',
+      '-classpath',
+      rezip_apk_jar_path,
+      'RezipApk',
+      'reorder',
+      in_zip_file,
+      out_zip_file,
+    ]
+  build_utils.CheckOutput(rezip_apk_cmd)
+
+
+def JarSigner(key_path, key_name, key_passwd, unsigned_path, signed_path):
+  shutil.copy(unsigned_path, signed_path)
+  sign_cmd = [
+      'jarsigner',
+      '-sigalg', 'MD5withRSA',
+      '-digestalg', 'SHA1',
+      '-keystore', key_path,
+      '-storepass', key_passwd,
+      signed_path,
+      key_name,
+    ]
+  build_utils.CheckOutput(sign_cmd)
+
+
+def AlignApk(zipalign_path, unaligned_path, final_path):
+  align_cmd = [
+      zipalign_path,
+      '-f', '4',  # 4 bytes
+      unaligned_path,
+      final_path,
+      ]
+  build_utils.CheckOutput(align_cmd)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--rezip-apk-jar-path',
+                    help='Path to the RezipApk jar file.')
+  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
+  parser.add_option('--unsigned-apk-path', help='Path to input unsigned APK.')
+  parser.add_option('--final-apk-path',
+      help='Path to output signed and aligned APK.')
+  parser.add_option('--key-path', help='Path to keystore for signing.')
+  parser.add_option('--key-passwd', help='Keystore password')
+  parser.add_option('--key-name', help='Keystore name')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--load-library-from-zip', type='int',
+      help='If non-zero, build the APK such that the library can be loaded ' +
+           'directly from the zip file using the crazy linker. The library ' +
+           'will be renamed, uncompressed and page aligned.')
+
+  options, _ = parser.parse_args()
+
+  FinalizeApk(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile, build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+def FinalizeApk(options):
+  with tempfile.NamedTemporaryFile() as signed_apk_path_tmp, \
+      tempfile.NamedTemporaryFile() as apk_to_sign_tmp:
+
+    if options.load_library_from_zip:
+      # We alter the name of the library so that the Android Package Manager
+      # does not extract it into a separate file. This must be done before
+      # signing, as the filename is part of the signed manifest. At the same
+      # time we uncompress the library, which is necessary so that it can be
+      # loaded directly from the APK.
+      # Move the library to a page boundary by adding a page alignment file.
+      apk_to_sign = apk_to_sign_tmp.name
+      RenameInflateAndAddPageAlignment(
+          options.rezip_apk_jar_path, options.unsigned_apk_path, apk_to_sign)
+    else:
+      apk_to_sign = options.unsigned_apk_path
+
+    signed_apk_path = signed_apk_path_tmp.name
+    JarSigner(options.key_path, options.key_name, options.key_passwd,
+              apk_to_sign, signed_apk_path)
+
+    if options.load_library_from_zip:
+      # Reorder the contents of the APK. This re-establishes the canonical
+      # order which means the library will be back at its page aligned location.
+      # This step also aligns uncompressed items to 4 bytes.
+      ReorderAndAlignApk(
+          options.rezip_apk_jar_path, signed_apk_path, options.final_apk_path)
+    else:
+      # Align uncompressed items to 4 bytes
+      AlignApk(options.zipalign_path, signed_apk_path, options.final_apk_path)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/finalize_splits.py b/build/android/gyp/finalize_splits.py
new file mode 100755
index 0000000..a6796bb
--- /dev/null
+++ b/build/android/gyp/finalize_splits.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Signs and zipaligns split APKs.
+
+This script is required only by GYP (not GN).
+"""
+
+import optparse
+import sys
+
+import finalize_apk
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--zipalign-path', help='Path to the zipalign tool.')
+  parser.add_option('--resource-packaged-apk-path',
+      help='Base path to the input .ap_ files.')
+  parser.add_option('--base-output-path',
+      help='Path to output .apk, minus extension.')
+  parser.add_option('--key-path', help='Path to keystore for signing.')
+  parser.add_option('--key-passwd', help='Keystore password')
+  parser.add_option('--key-name', help='Keystore name')
+  parser.add_option('--densities',
+      help='Comma separated list of densities to finalize.')
+  parser.add_option('--languages',
+      help='GYP list of language splits to finalize.')
+
+  options, _ = parser.parse_args()
+  options.load_library_from_zip = 0
+
+  if options.densities:
+    for density in options.densities.split(','):
+      options.unsigned_apk_path = ("%s_%s" %
+          (options.resource_packaged_apk_path, density))
+      options.final_apk_path = ("%s-density-%s.apk" %
+          (options.base_output_path, density))
+      finalize_apk.FinalizeApk(options)
+
+  if options.languages:
+    for lang in build_utils.ParseGypList(options.languages):
+      options.unsigned_apk_path = ("%s_%s" %
+          (options.resource_packaged_apk_path, lang))
+      options.final_apk_path = ("%s-lang-%s.apk" %
+          (options.base_output_path, lang))
+      finalize_apk.FinalizeApk(options)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/find.py b/build/android/gyp/find.py
new file mode 100755
index 0000000..a9f1d49
--- /dev/null
+++ b/build/android/gyp/find.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Finds files in directories.
+"""
+
+import fnmatch
+import optparse
+import os
+import sys
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--pattern', default='*', help='File pattern to match.')
+  options, directories = parser.parse_args(argv)
+
+  for d in directories:
+    if not os.path.exists(d):
+      print >> sys.stderr, '%s does not exist' % d
+      return 1
+    for root, _, filenames in os.walk(d):
+      for f in fnmatch.filter(filenames, options.pattern):
+        print os.path.join(root, f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/find_sun_tools_jar.py b/build/android/gyp/find_sun_tools_jar.py
new file mode 100755
index 0000000..2f15a15
--- /dev/null
+++ b/build/android/gyp/find_sun_tools_jar.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This finds the java distribution's tools.jar and copies it somewhere.
+"""
+
+import argparse
+import os
+import re
+import shutil
+import sys
+
+from util import build_utils
+
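+# Matches a line like the following in the output of 'java -verbose -version'
+# (the path is illustrative):
+#
+#   [Opened /usr/lib/jvm/java-7-openjdk-amd64/jre/lib/rt.jar]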
+RT_JAR_FINDER = re.compile(r'\[Opened (.*)/jre/lib/rt.jar\]')
+
+def main():
+  parser = argparse.ArgumentParser(description='Find Sun Tools Jar')
+  parser.add_argument('--depfile',
+                      help='Path to depfile. This must be specified as the '
+                           'action\'s first output.')
+  parser.add_argument('--output', required=True)
+  args = parser.parse_args()
+
+  sun_tools_jar_path = FindSunToolsJarPath()
+
+  if sun_tools_jar_path is None:
+    raise Exception("Couldn\'t find tools.jar")
+
+  # Using copyfile instead of copy() because copy() also calls copymode().
+  # We don't want the destination to inherit a locked (read-only) mode
+  # because we may copy over this file again.
+  shutil.copyfile(sun_tools_jar_path, args.output)
+
+  if args.depfile:
+    build_utils.WriteDepfile(
+        args.depfile,
+        [sun_tools_jar_path] + build_utils.GetPythonDependencies())
+
+
+def FindSunToolsJarPath():
+  # This works with at least openjdk 1.6, 1.7 and sun java 1.6, 1.7
+  stdout = build_utils.CheckOutput(
+      ["java", "-verbose", "-version"], print_stderr=False)
+  for ln in stdout.splitlines():
+    match = RT_JAR_FINDER.match(ln)
+    if match:
+      return os.path.join(match.group(1), 'lib', 'tools.jar')
+
+  return None
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/gcc_preprocess.py b/build/android/gyp/gcc_preprocess.py
new file mode 100755
index 0000000..03becf9
--- /dev/null
+++ b/build/android/gyp/gcc_preprocess.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
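+# Each --defines value becomes a '-D <macro>' pair on the gcc command line,
+# e.g. '--defines FOO --defines BAR=1' adds '-D FOO -D BAR=1' below.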
+def DoGcc(options):
+  build_utils.MakeDirectory(os.path.dirname(options.output))
+
+  gcc_cmd = [ 'gcc' ]  # invoke host gcc.
+  if options.defines:
+    gcc_cmd.extend(sum(map(lambda w: ['-D', w], options.defines), []))
+  gcc_cmd.extend([
+      '-E',                  # stop after preprocessing.
+      '-D', 'ANDROID',       # Specify ANDROID define for pre-processor.
+      '-x', 'c-header',      # treat sources as C header files
+      '-P',                  # disable line markers, i.e. '#line 309'
+      '-I', options.include_path,
+      '-o', options.output,
+      options.template
+      ])
+
+  build_utils.CheckOutput(gcc_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--include-path', help='Include path for gcc.')
+  parser.add_option('--template', help='Path to template.')
+  parser.add_option('--output', help='Path for generated file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--defines', help='Macros to pre-define.', action='append')
+
+  options, _ = parser.parse_args(args)
+
+  DoGcc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/generate_split_manifest.py b/build/android/gyp/generate_split_manifest.py
new file mode 100755
index 0000000..9cb3bca
--- /dev/null
+++ b/build/android/gyp/generate_split_manifest.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates an AndroidManifest.xml for an APK split.
+
+Given the manifest file for the main APK, generates an AndroidManifest.xml with
+the values required for a split APK (package, versionCode, etc.).
+"""
+
+import optparse
+import xml.etree.ElementTree
+
+from util import build_utils
+
+MANIFEST_TEMPLATE = """<?xml version="1.0" encoding="utf-8"?>
+<manifest
+    xmlns:android="http://schemas.android.com/apk/res/android"
+    package="%(package)s"
+    split="%(split)s">
+  <uses-sdk android:minSdkVersion="21" />
+  <application android:hasCode="%(has_code)s">
+  </application>
+</manifest>
+"""
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--main-manifest', help='The main manifest of the app')
+  parser.add_option('--out-manifest', help='The output manifest')
+  parser.add_option('--split', help='The name of the split')
+  parser.add_option(
+      '--has-code',
+      action='store_true',
+      default=False,
+      help='Whether the split will contain a .dex file')
+
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('main_manifest', 'out_manifest', 'split')
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def Build(main_manifest, split, has_code):
+  """Builds a split manifest based on the manifest of the main APK.
+
+  Args:
+    main_manifest: the XML manifest of the main APK as a string
+    split: the name of the split as a string
+    has_code: whether this split APK will contain .dex files
+
+  Returns:
+    The XML split manifest as a string
+  """
+
+  doc = xml.etree.ElementTree.fromstring(main_manifest)
+  package = doc.get('package')
+
+  return MANIFEST_TEMPLATE % {
+      'package': package,
+      'split': split.replace('-', '_'),
+      'has_code': str(has_code).lower()
+  }
+
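+# For example, Build(main_manifest, 'density-xxhdpi', False) emits a manifest
+# with split="density_xxhdpi" and android:hasCode="false".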
+
+def main():
+  options = ParseArgs()
+  main_manifest = file(options.main_manifest).read()
+  split_manifest = Build(
+      main_manifest,
+      options.split,
+      options.has_code)
+
+  with file(options.out_manifest, 'w') as f:
+    f.write(split_manifest)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        [options.main_manifest] + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/generate_v14_compatible_resources.py b/build/android/gyp/generate_v14_compatible_resources.py
new file mode 100755
index 0000000..9c8ff3b
--- /dev/null
+++ b/build/android/gyp/generate_v14_compatible_resources.py
@@ -0,0 +1,319 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Convert Android xml resources to API 14 compatible.
+
+There are two reasons that we cannot just use API 17 attributes,
+so we are generating another set of resources by this script.
+
+1. paddingStart attribute can cause a crash on Galaxy Tab 2.
+2. There is a bug that paddingStart does not override paddingLeft on
+   JB-MR1. This is fixed on JB-MR2. b/8654490
+
+Therefore, this resource generation script can be removed when
+we drop the support for JB-MR1.
+
+Please refer to http://crbug.com/235118 for the details.
+"""
+
+import optparse
+import os
+import re
+import shutil
+import sys
+import xml.dom.minidom as minidom
+
+from util import build_utils
+
+# Note that we are assuming 'android:' is an alias of
+# the namespace 'http://schemas.android.com/apk/res/android'.
+
+GRAVITY_ATTRIBUTES = ('android:gravity', 'android:layout_gravity')
+
+# Almost all attributes that have "Start" or "End" in
+# their names should be mapped.
+ATTRIBUTES_TO_MAP = {'paddingStart' : 'paddingLeft',
+                     'drawableStart' : 'drawableLeft',
+                     'layout_alignStart' : 'layout_alignLeft',
+                     'layout_marginStart' : 'layout_marginLeft',
+                     'layout_alignParentStart' : 'layout_alignParentLeft',
+                     'layout_toStartOf' : 'layout_toLeftOf',
+                     'paddingEnd' : 'paddingRight',
+                     'drawableEnd' : 'drawableRight',
+                     'layout_alignEnd' : 'layout_alignRight',
+                     'layout_marginEnd' : 'layout_marginRight',
+                     'layout_alignParentEnd' : 'layout_alignParentRight',
+                     'layout_toEndOf' : 'layout_toRightOf'}
+
+ATTRIBUTES_TO_MAP = dict(['android:' + k, 'android:' + v] for k, v
+                         in ATTRIBUTES_TO_MAP.iteritems())
+
+ATTRIBUTES_TO_MAP_REVERSED = dict([v, k] for k, v
+                                  in ATTRIBUTES_TO_MAP.iteritems())
+
+
+def IterateXmlElements(node):
+  """minidom helper function that iterates all the element nodes.
+  Iteration order is pre-order depth-first."""
+  if node.nodeType == node.ELEMENT_NODE:
+    yield node
+  for child_node in node.childNodes:
+    for child_node_element in IterateXmlElements(child_node):
+      yield child_node_element
+
+
+def ParseAndReportErrors(filename):
+  try:
+    return minidom.parse(filename)
+  except Exception:
+    import traceback
+    traceback.print_exc()
+    sys.stderr.write('Failed to parse XML file: %s\n' % filename)
+    sys.exit(1)
+
+
+def AssertNotDeprecatedAttribute(name, value, filename):
+  """Raises an exception if the given attribute is deprecated."""
+  msg = None
+  if name in ATTRIBUTES_TO_MAP_REVERSED:
+    msg = '{0} should use {1} instead of {2}'.format(filename,
+        ATTRIBUTES_TO_MAP_REVERSED[name], name)
+  elif name in GRAVITY_ATTRIBUTES and ('left' in value or 'right' in value):
+    msg = '{0} should use start/end instead of left/right for {1}'.format(
+        filename, name)
+
+  if msg:
+    msg += ('\nFor background, see: http://android-developers.blogspot.com/'
+            '2013/03/native-rtl-support-in-android-42.html\n'
+            'If you have a legitimate need for this attribute, discuss with '
+            'kkimlabs@chromium.org or newt@chromium.org')
+    raise Exception(msg)
+
+
+def WriteDomToFile(dom, filename):
+  """Write the given dom to filename."""
+  build_utils.MakeDirectory(os.path.dirname(filename))
+  with open(filename, 'w') as f:
+    dom.writexml(f, '', '  ', '\n', encoding='utf-8')
+
+
+def HasStyleResource(dom):
+  """Return True if the dom is a style resource, False otherwise."""
+  root_node = IterateXmlElements(dom).next()
+  return bool(root_node.nodeName == 'resources' and
+              list(root_node.getElementsByTagName('style')))
+
+
+def ErrorIfStyleResourceExistsInDir(input_dir):
+  """If a style resource is in input_dir, raises an exception."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    dom = ParseAndReportErrors(input_filename)
+    if HasStyleResource(dom):
+      raise Exception('error: style file ' + input_filename +
+                      ' should be under ' + input_dir +
+                      '-v17 directory. Please refer to '
+                      'http://crbug.com/243952 for the details.')
+
+
+def GenerateV14LayoutResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert layout resource to API 14 compatible layout resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  # Iterate all the elements' attributes to find attributes to convert.
+  for element in IterateXmlElements(dom):
+    for name, value in list(element.attributes.items()):
+      # Convert any API 17 Start/End attributes to Left/Right attributes.
+      # For example, from paddingStart="10dp" to paddingLeft="10dp"
+      # Note: gravity attributes are not necessary to convert because
+      # start/end values are backward-compatible. Explained at
+      # https://plus.sandbox.google.com/+RomanNurik/posts/huuJd8iVVXY?e=Showroom
+      if name in ATTRIBUTES_TO_MAP:
+        element.setAttribute(ATTRIBUTES_TO_MAP[name], value)
+        del element.attributes[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14StyleResourceDom(dom, filename, assert_not_deprecated=True):
+  """Convert style resource to API 14 compatible style resource.
+
+  Args:
+    dom: Parsed minidom object to be modified.
+    filename: Filename that the DOM was parsed from.
+    assert_not_deprecated: Whether deprecated attributes (e.g. paddingLeft) will
+                           cause an exception to be thrown.
+
+  Returns:
+    True if dom is modified, False otherwise.
+  """
+  is_modified = False
+
+  for style_element in dom.getElementsByTagName('style'):
+    for item_element in style_element.getElementsByTagName('item'):
+      name = item_element.attributes['name'].value
+      value = item_element.childNodes[0].nodeValue
+      if name in ATTRIBUTES_TO_MAP:
+        item_element.attributes['name'].value = ATTRIBUTES_TO_MAP[name]
+        is_modified = True
+      elif assert_not_deprecated:
+        AssertNotDeprecatedAttribute(name, value, filename)
+
+  return is_modified
+
+
+def GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename):
+  """Convert API 17 layout resource to API 14 compatible layout resource.
+
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  If the generated resource is identical to the original resource,
+  don't do anything. If not, write the generated resource to
+  output_v14_filename, and copy the original resource to output_v17_filename.
+  """
+  dom = ParseAndReportErrors(input_filename)
+  is_modified = GenerateV14LayoutResourceDom(dom, input_filename)
+
+  if is_modified:
+    # Write the generated resource.
+    WriteDomToFile(dom, output_v14_filename)
+
+    # Copy the original resource.
+    build_utils.MakeDirectory(os.path.dirname(output_v17_filename))
+    shutil.copy2(input_filename, output_v17_filename)
+
+
+def GenerateV14StyleResource(input_filename, output_v14_filename):
+  """Convert API 17 style resources to API 14 compatible style resource.
+
+  Write the generated style resource to output_v14_filename.
+  It's mostly a simple replacement, s/Start/Left s/End/Right,
+  on the attribute names.
+  """
+  dom = ParseAndReportErrors(input_filename)
+  GenerateV14StyleResourceDom(dom, input_filename)
+
+  # Write the generated resource.
+  WriteDomToFile(dom, output_v14_filename)
+
+
+def GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir, output_v17_dir):
+  """Convert layout resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    output_v17_filename = os.path.join(output_v17_dir, rel_filename)
+    GenerateV14LayoutResource(input_filename, output_v14_filename,
+                              output_v17_filename)
+
+
+def GenerateV14StyleResourcesInDir(input_dir, output_v14_dir):
+  """Convert style resources to API 14 compatible resources in input_dir."""
+  for input_filename in build_utils.FindInDirectory(input_dir, '*.xml'):
+    rel_filename = os.path.relpath(input_filename, input_dir)
+    output_v14_filename = os.path.join(output_v14_dir, rel_filename)
+    GenerateV14StyleResource(input_filename, output_v14_filename)
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  parser.add_option('--res-dir',
+                    help='directory containing resources '
+                         'used to generate v14 compatible resources')
+  parser.add_option('--res-v14-compatibility-dir',
+                    help='output directory into which '
+                         'v14 compatible resources will be generated')
+  parser.add_option('--stamp', help='File to touch on success')
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('res_dir', 'res_v14_compatibility_dir')
+  build_utils.CheckOptions(options, parser, required=required_options)
+  return options
+
+def GenerateV14Resources(res_dir, res_v14_dir):
+  for name in os.listdir(res_dir):
+    if not os.path.isdir(os.path.join(res_dir, name)):
+      continue
+
+    dir_pieces = name.split('-')
+    resource_type = dir_pieces[0]
+    qualifiers = dir_pieces[1:]
+
+    api_level_qualifier_index = -1
+    api_level_qualifier = ''
+    for index, qualifier in enumerate(qualifiers):
+      if re.match('v[0-9]+$', qualifier):
+        api_level_qualifier_index = index
+        api_level_qualifier = qualifier
+        break
+
+    # Android pre-v17 API doesn't support RTL. Skip.
+    if 'ldrtl' in qualifiers:
+      continue
+
+    input_dir = os.path.abspath(os.path.join(res_dir, name))
+
+    # We also need to copy the original v17 resource to *-v17 directory
+    # because the generated v14 resource will hide the original resource.
+    output_v14_dir = os.path.join(res_v14_dir, name)
+    output_v17_dir = os.path.join(res_v14_dir, name + '-v17')
+
+    # We only convert layout resources under layout*/, xml*/,
+    # and style resources under values*/.
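+    # For example, styles in values-v17/ are regenerated into values/ (the
+    # v17 qualifier is dropped), while layout-v17/ and xml-v17/ directories
+    # are skipped entirely.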
+    if resource_type in ('layout', 'xml'):
+      if not api_level_qualifier:
+        GenerateV14LayoutResourcesInDir(input_dir, output_v14_dir,
+                                        output_v17_dir)
+    elif resource_type == 'values':
+      if api_level_qualifier == 'v17':
+        output_qualifiers = qualifiers[:]
+        del output_qualifiers[api_level_qualifier_index]
+        output_v14_dir = os.path.join(res_v14_dir,
+                                      '-'.join([resource_type] +
+                                               output_qualifiers))
+        GenerateV14StyleResourcesInDir(input_dir, output_v14_dir)
+      elif not api_level_qualifier:
+        ErrorIfStyleResourceExistsInDir(input_dir)
+
+def main():
+  options = ParseArgs()
+
+  res_v14_dir = options.res_v14_compatibility_dir
+
+  build_utils.DeleteDirectory(res_v14_dir)
+  build_utils.MakeDirectory(res_v14_dir)
+
+  GenerateV14Resources(options.res_dir, res_v14_dir)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/get_device_configuration.py b/build/android/gyp/get_device_configuration.py
new file mode 100755
index 0000000..390eb2f
--- /dev/null
+++ b/build/android/gyp/get_device_configuration.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Gets and writes the configurations of the attached devices.
+
+This configuration is used by later build steps to determine which devices to
+install to and what needs to be installed to those devices.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+from util import build_device
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('--stamp', action='store')
+  parser.add_option('--output', action='store')
+  options, _ = parser.parse_args(argv)
+
+  devices = build_device.GetAttachedDevices()
+
+  device_configurations = []
+  for d in devices:
+    configuration, is_online, has_root = (
+        build_device.GetConfigurationForDevice(d))
+
+    if not is_online:
+      build_utils.PrintBigWarning(
+          '%s is not online. Skipping managed install for this device. '
+          'Try rebooting the device to fix this warning.' % d)
+      continue
+
+    if not has_root:
+      build_utils.PrintBigWarning(
+          '"adb root" failed on device: %s\n'
+          'Skipping managed install for this device.'
+          % configuration['description'])
+      continue
+
+    device_configurations.append(configuration)
+
+  if len(device_configurations) == 0:
+    build_utils.PrintBigWarning(
+        'No valid devices attached. Skipping managed install steps.')
+  elif len(devices) > 1:
+    # Note that this checks len(devices) and not len(device_configurations).
+    # This way, any time there are multiple devices attached it is
+    # explicitly stated which device we will install things to even if all but
+    # one device were rejected for other reasons (e.g. two devices attached,
+    # one w/o root).
+    build_utils.PrintBigWarning(
+        'Multiple devices attached. '
+        'Installing to the preferred device: '
+        '%(id)s (%(description)s)' % (device_configurations[0]))
+
+
+  build_device.WriteConfigurations(device_configurations, options.output)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/insert_chromium_version.py b/build/android/gyp/insert_chromium_version.py
new file mode 100755
index 0000000..171f9d4
--- /dev/null
+++ b/build/android/gyp/insert_chromium_version.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Insert a version string into a library as a section '.chromium.version'.
+"""
+
+import optparse
+import os
+import sys
+import tempfile
+
+from util import build_utils
+
+def InsertChromiumVersion(android_objcopy,
+                          library_path,
+                          version_string):
+  # Remove existing .chromium.version section from .so
+  objcopy_command = [android_objcopy,
+                     '--remove-section=.chromium.version',
+                     library_path]
+  build_utils.CheckOutput(objcopy_command)
+
+  # Add a .chromium.version section.
+  with tempfile.NamedTemporaryFile() as stream:
+    stream.write(version_string)
+    stream.flush()
+    objcopy_command = [android_objcopy,
+                       '--add-section', '.chromium.version=%s' % stream.name,
+                       library_path]
+    build_utils.CheckOutput(objcopy_command)
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+
+  parser.add_option('--android-objcopy',
+      help='Path to the toolchain\'s objcopy binary')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory of native libraries')
+  parser.add_option('--libraries',
+      help='List of libraries')
+  parser.add_option('--version-string',
+      help='Version string to be inserted')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+
+    InsertChromiumVersion(options.android_objcopy,
+                          library_path,
+                          options.version_string)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/jar.py b/build/android/gyp/jar.py
new file mode 100755
index 0000000..48abf5e
--- /dev/null
+++ b/build/android/gyp/jar.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import optparse
+import os
+import sys
+
+from util import build_utils
+from util import md5_check
+
+
+def Jar(class_files, classes_dir, jar_path, manifest_file=None):
+  jar_path = os.path.abspath(jar_path)
+
+  # The paths of the files in the jar will be the same as they are passed in to
+  # the command. Because of this, the command should be run in classes_dir so
+  # the .class file paths in the jar are correct.
+  jar_cwd = classes_dir
+  class_files_rel = [os.path.relpath(f, jar_cwd) for f in class_files]
+  jar_cmd = ['jar', 'cf0', jar_path]
+  if manifest_file:
+    jar_cmd[1] += 'm'
+    jar_cmd.append(os.path.abspath(manifest_file))
+  jar_cmd.extend(class_files_rel)
+
+  with build_utils.TempDir() as temp_dir:
+    empty_file = os.path.join(temp_dir, '.empty')
+    build_utils.Touch(empty_file)
+    jar_cmd.append(os.path.relpath(empty_file, jar_cwd))
+    record_path = '%s.md5.stamp' % jar_path
+    md5_check.CallAndRecordIfStale(
+        lambda: build_utils.CheckOutput(jar_cmd, cwd=jar_cwd),
+        record_path=record_path,
+        input_paths=class_files,
+        input_strings=jar_cmd,
+        force=not os.path.exists(jar_path),
+        )
+
+    build_utils.Touch(jar_path, fail_if_missing=True)
+
+
+def JarDirectory(classes_dir, excluded_classes, jar_path, manifest_file=None):
+  class_files = build_utils.FindInDirectory(classes_dir, '*.class')
+  for exclude in excluded_classes:
+    class_files = filter(
+        lambda f: not fnmatch.fnmatch(f, exclude), class_files)
+
+  Jar(class_files, classes_dir, jar_path, manifest_file=manifest_file)
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--classes-dir', help='Directory containing .class files.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option('--excluded-classes',
+      help='List of .class file patterns to exclude from the jar.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  if options.excluded_classes:
+    excluded_classes = build_utils.ParseGypList(options.excluded_classes)
+  else:
+    excluded_classes = []
+  JarDirectory(options.classes_dir,
+               excluded_classes,
+               options.jar_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/gyp/jar_toc.py b/build/android/gyp/jar_toc.py
new file mode 100755
index 0000000..00d97d2
--- /dev/null
+++ b/build/android/gyp/jar_toc.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Creates a TOC file from a Java jar.
+
+The TOC file contains the non-package API of the jar. This includes all
+public/protected/package classes/functions/members and the values of static
+final variables (members with package access are kept because in some cases we
+have multiple libraries with the same package, particularly test+non-test). Some
+other information (major/minor javac version) is also included.
+
+This TOC file can then be used to determine whether a dependent library should
+be rebuilt when this jar changes: any change to the jar that would require a
+rebuild will have a corresponding change in the TOC file.
+"""
+
+import optparse
+import os
+import re
+import sys
+import zipfile
+
+from util import build_utils
+from util import md5_check
+
+
+def GetClassesInZipFile(zip_file):
+  classes = []
+  files = zip_file.namelist()
+  for f in files:
+    if f.endswith('.class'):
+      # f is of the form org/chromium/base/Class$Inner.class
+      classes.append(f.replace('/', '.')[:-6])
+  return classes
+
+
+def CallJavap(classpath, classes):
+  javap_cmd = [
+      'javap',
+      '-package',  # Show public/protected/package.
+      # -verbose is required to get constant values (which can be inlined in
+      # dependents).
+      '-verbose',
+      '-J-XX:NewSize=4m',
+      '-classpath', classpath
+      ] + classes
+  return build_utils.CheckOutput(javap_cmd)
+
+
+def ExtractToc(disassembled_classes):
+  # javap output is structured by indent (2-space) levels.
+  good_patterns = [
+      '^[^ ]', # This includes all class/function/member signatures.
+      '^  SourceFile:',
+      '^  minor version:',
+      '^  major version:',
+      '^  Constant value:',
+      ]
+  bad_patterns = [
+      '^const #', # Matches the constant pool (i.e. literals used in the class).
+    ]
+
+  def JavapFilter(line):
+    return (re.match('|'.join(good_patterns), line) and
+        not re.match('|'.join(bad_patterns), line))
+  toc = filter(JavapFilter, disassembled_classes.split('\n'))
+
+  return '\n'.join(toc)
+
+
+def UpdateToc(jar_path, toc_path):
+  classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
+  toc = []
+
+  limit = 1000 # Split into multiple calls to stay under command size limit
+  for i in xrange(0, len(classes), limit):
+    javap_output = CallJavap(classpath=jar_path, classes=classes[i:i+limit])
+    toc.append(ExtractToc(javap_output))
+
+  with open(toc_path, 'w') as tocfile:
+    tocfile.write(''.join(toc))
+
+
+def DoJarToc(options):
+  jar_path = options.jar_path
+  toc_path = options.toc_path
+  record_path = '%s.md5.stamp' % toc_path
+  md5_check.CallAndRecordIfStale(
+      lambda: UpdateToc(jar_path, toc_path),
+      record_path=record_path,
+      input_paths=[jar_path],
+      force=not os.path.exists(toc_path),
+      )
+  build_utils.Touch(toc_path, fail_if_missing=True)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--jar-path', help='Input .jar path.')
+  parser.add_option('--toc-path', help='Output .jar.TOC path.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  DoJarToc(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/java_cpp_enum.py b/build/android/gyp/java_cpp_enum.py
new file mode 100755
index 0000000..c2f1764
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum.py
@@ -0,0 +1,340 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import re
+import optparse
+import os
+from string import Template
+import sys
+
+from util import build_utils
+
+# List of C++ types that are compatible with the Java code generated by this
+# script.
+#
+# This script can parse .idl files; however, at present it ignores special
+# rules such as [cpp_enum_prefix_override="ax_attr"].
+ENUM_FIXED_TYPE_WHITELIST = ['char', 'unsigned char',
+  'short', 'unsigned short',
+  'int', 'int8_t', 'int16_t', 'int32_t', 'uint8_t', 'uint16_t']
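+# For example, "enum Foo : int" is accepted, while a fixed type not in this
+# list (e.g. int64_t, whose values may not fit in a Java int) causes
+# EnumDefinition._Validate() to raise an exception.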
+
+class EnumDefinition(object):
+  def __init__(self, original_enum_name=None, class_name_override=None,
+               enum_package=None, entries=None, fixed_type=None):
+    self.original_enum_name = original_enum_name
+    self.class_name_override = class_name_override
+    self.enum_package = enum_package
+    self.entries = collections.OrderedDict(entries or [])
+    self.prefix_to_strip = None
+    self.fixed_type = fixed_type
+
+  def AppendEntry(self, key, value):
+    if key in self.entries:
+      raise Exception('Multiple definitions of key %s found.' % key)
+    self.entries[key] = value
+
+  @property
+  def class_name(self):
+    return self.class_name_override or self.original_enum_name
+
+  def Finalize(self):
+    self._Validate()
+    self._AssignEntryIndices()
+    self._StripPrefix()
+
+  def _Validate(self):
+    assert self.class_name
+    assert self.enum_package
+    assert self.entries
+    if self.fixed_type and self.fixed_type not in ENUM_FIXED_TYPE_WHITELIST:
+      raise Exception('Fixed type %s for enum %s not whitelisted.' %
+          (self.fixed_type, self.class_name))
+
+  def _AssignEntryIndices(self):
+    # Enums, if given no value, are given the value of the previous enum + 1.
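+    # For example, entries {A: None, B: '2', C: None} resolve to
+    # {A: 0, B: 2, C: 3}, and an entry whose value names another entry
+    # (e.g. B = A) takes that entry's resolved value.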
+    if not all(self.entries.values()):
+      prev_enum_value = -1
+      for key, value in self.entries.iteritems():
+        if not value:
+          self.entries[key] = prev_enum_value + 1
+        elif value in self.entries:
+          self.entries[key] = self.entries[value]
+        else:
+          try:
+            self.entries[key] = int(value)
+          except ValueError:
+            raise Exception('Could not interpret integer from enum value "%s" '
+                            'for key %s.' % (value, key))
+        prev_enum_value = self.entries[key]
+
+
+  def _StripPrefix(self):
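+    # For example, original_enum_name 'ClassName' yields the implicit prefix
+    # 'CLASS_NAME_', so an entry CLASS_NAME_A is emitted as A. If any entry
+    # lacks the prefix, nothing is stripped implicitly.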
+    prefix_to_strip = self.prefix_to_strip
+    if not prefix_to_strip:
+      prefix_to_strip = self.original_enum_name
+      prefix_to_strip = re.sub('(?!^)([A-Z]+)', r'_\1', prefix_to_strip).upper()
+      prefix_to_strip += '_'
+      if not all([w.startswith(prefix_to_strip) for w in self.entries.keys()]):
+        prefix_to_strip = ''
+
+    entries = collections.OrderedDict()
+    for (k, v) in self.entries.iteritems():
+      stripped_key = k.replace(prefix_to_strip, '', 1)
+      if isinstance(v, basestring):
+        stripped_value = v.replace(prefix_to_strip, '', 1)
+      else:
+        stripped_value = v
+      entries[stripped_key] = stripped_value
+
+    self.entries = entries
+
+class DirectiveSet(object):
+  class_name_override_key = 'CLASS_NAME_OVERRIDE'
+  enum_package_key = 'ENUM_PACKAGE'
+  prefix_to_strip_key = 'PREFIX_TO_STRIP'
+
+  known_keys = [class_name_override_key, enum_package_key, prefix_to_strip_key]
+
+  def __init__(self):
+    self._directives = {}
+
+  def Update(self, key, value):
+    if key not in DirectiveSet.known_keys:
+      raise Exception("Unknown directive: " + key)
+    self._directives[key] = value
+
+  @property
+  def empty(self):
+    return len(self._directives) == 0
+
+  def UpdateDefinition(self, definition):
+    definition.class_name_override = self._directives.get(
+        DirectiveSet.class_name_override_key, '')
+    definition.enum_package = self._directives.get(
+        DirectiveSet.enum_package_key)
+    definition.prefix_to_strip = self._directives.get(
+        DirectiveSet.prefix_to_strip_key)
+
+
+class HeaderParser(object):
+  single_line_comment_re = re.compile(r'\s*//')
+  multi_line_comment_start_re = re.compile(r'\s*/\*')
+  enum_line_re = re.compile(r'^\s*(\w+)(\s*\=\s*([^,\n]+))?,?')
+  enum_end_re = re.compile(r'^\s*}\s*;\.*$')
+  generator_directive_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*([\.\w]+)$')
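+  # Matches single-line directives such as (package name is illustrative):
+  #   // GENERATED_JAVA_ENUM_PACKAGE: org.chromium.ui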
+  multi_line_generator_directive_start_re = re.compile(
+      r'^\s*//\s+GENERATED_JAVA_(\w+)\s*:\s*\(([\.\w]*)$')
+  multi_line_directive_continuation_re = re.compile(
+      r'^\s*//\s+([\.\w]+)$')
+  multi_line_directive_end_re = re.compile(
+      r'^\s*//\s+([\.\w]*)\)$')
+
+  optional_class_or_struct_re = r'(class|struct)?'
+  enum_name_re = r'(\w+)'
+  optional_fixed_type_re = r'(\:\s*(\w+\s*\w+?))?'
+  enum_start_re = re.compile(r'^\s*(?:\[cpp.*\])?\s*enum\s+' +
+      optional_class_or_struct_re + '\s*' + enum_name_re + '\s*' +
+      optional_fixed_type_re + '\s*{\s*$')
+
+  def __init__(self, lines, path=None):
+    self._lines = lines
+    self._path = path
+    self._enum_definitions = []
+    self._in_enum = False
+    self._current_definition = None
+    self._generator_directives = DirectiveSet()
+    self._multi_line_generator_directive = None
+
+  def _ApplyGeneratorDirectives(self):
+    self._generator_directives.UpdateDefinition(self._current_definition)
+    self._generator_directives = DirectiveSet()
+
+  def ParseDefinitions(self):
+    for line in self._lines:
+      self._ParseLine(line)
+    return self._enum_definitions
+
+  def _ParseLine(self, line):
+    if self._multi_line_generator_directive:
+      self._ParseMultiLineDirectiveLine(line)
+    elif not self._in_enum:
+      self._ParseRegularLine(line)
+    else:
+      self._ParseEnumLine(line)
+
+  def _ParseEnumLine(self, line):
+    if HeaderParser.single_line_comment_re.match(line):
+      return
+    if HeaderParser.multi_line_comment_start_re.match(line):
+      raise Exception('Multi-line comments in enums are not supported in ' +
+                      self._path)
+    enum_end = HeaderParser.enum_end_re.match(line)
+    enum_entry = HeaderParser.enum_line_re.match(line)
+    if enum_end:
+      self._ApplyGeneratorDirectives()
+      self._current_definition.Finalize()
+      self._enum_definitions.append(self._current_definition)
+      self._in_enum = False
+    elif enum_entry:
+      enum_key = enum_entry.groups()[0]
+      enum_value = enum_entry.groups()[2]
+      self._current_definition.AppendEntry(enum_key, enum_value)
+
+  def _ParseMultiLineDirectiveLine(self, line):
+    multi_line_directive_continuation = (
+        HeaderParser.multi_line_directive_continuation_re.match(line))
+    multi_line_directive_end = (
+        HeaderParser.multi_line_directive_end_re.match(line))
+
+    if multi_line_directive_continuation:
+      value_cont = multi_line_directive_continuation.groups()[0]
+      self._multi_line_generator_directive[1].append(value_cont)
+    elif multi_line_directive_end:
+      directive_name = self._multi_line_generator_directive[0]
+      directive_value = "".join(self._multi_line_generator_directive[1])
+      directive_value += multi_line_directive_end.groups()[0]
+      self._multi_line_generator_directive = None
+      self._generator_directives.Update(directive_name, directive_value)
+    else:
+      raise Exception('Malformed multi-line directive declaration in ' +
+                      self._path)
+
+  def _ParseRegularLine(self, line):
+    enum_start = HeaderParser.enum_start_re.match(line)
+    generator_directive = HeaderParser.generator_directive_re.match(line)
+    multi_line_generator_directive_start = (
+        HeaderParser.multi_line_generator_directive_start_re.match(line))
+
+    if generator_directive:
+      directive_name = generator_directive.groups()[0]
+      directive_value = generator_directive.groups()[1]
+      self._generator_directives.Update(directive_name, directive_value)
+    elif multi_line_generator_directive_start:
+      directive_name = multi_line_generator_directive_start.groups()[0]
+      directive_value = multi_line_generator_directive_start.groups()[1]
+      self._multi_line_generator_directive = (directive_name, [directive_value])
+    elif enum_start:
+      if self._generator_directives.empty:
+        return
+      self._current_definition = EnumDefinition(
+          original_enum_name=enum_start.groups()[1],
+          fixed_type=enum_start.groups()[3])
+      self._in_enum = True
+
+def GetScriptName():
+  script_components = os.path.abspath(sys.argv[0]).split(os.path.sep)
+  build_index = script_components.index('build')
+  return os.sep.join(script_components[build_index:])
+
+
+def DoGenerate(output_dir, source_paths, print_output_only=False):
+  output_paths = []
+  for source_path in source_paths:
+    enum_definitions = DoParseHeaderFile(source_path)
+    if not enum_definitions:
+      raise Exception('No enums found in %s\n'
+                      'Did you forget prefixing enums with '
+                      '"// GENERATED_JAVA_ENUM_PACKAGE: foo"?' %
+                      source_path)
+    for enum_definition in enum_definitions:
+      package_path = enum_definition.enum_package.replace('.', os.path.sep)
+      file_name = enum_definition.class_name + '.java'
+      output_path = os.path.join(output_dir, package_path, file_name)
+      output_paths.append(output_path)
+      if not print_output_only:
+        build_utils.MakeDirectory(os.path.dirname(output_path))
+        DoWriteOutput(source_path, output_path, enum_definition)
+  return output_paths
+
+
+def DoParseHeaderFile(path):
+  with open(path) as f:
+    return HeaderParser(f.readlines(), path).ParseDefinitions()
+
+
+def GenerateOutput(source_path, enum_definition):
+  template = Template("""
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     ${SCRIPT_NAME}
+// From
+//     ${SOURCE_PATH}
+
+package ${PACKAGE};
+
+public class ${CLASS_NAME} {
+${ENUM_ENTRIES}
+}
+""")
+
+  enum_template = Template('  public static final int ${NAME} = ${VALUE};')
+  enum_entries_string = []
+  for enum_name, enum_value in enum_definition.entries.iteritems():
+    values = {
+        'NAME': enum_name,
+        'VALUE': enum_value,
+    }
+    enum_entries_string.append(enum_template.substitute(values))
+  enum_entries_string = '\n'.join(enum_entries_string)
+
+  values = {
+      'CLASS_NAME': enum_definition.class_name,
+      'ENUM_ENTRIES': enum_entries_string,
+      'PACKAGE': enum_definition.enum_package,
+      'SCRIPT_NAME': GetScriptName(),
+      'SOURCE_PATH': source_path,
+  }
+  return template.substitute(values)
+
+
+def DoWriteOutput(source_path, output_path, enum_definition):
+  with open(output_path, 'w') as out_file:
+    out_file.write(GenerateOutput(source_path, enum_definition))
+
+def AssertFilesList(output_paths, assert_files_list):
+  actual = set(output_paths)
+  expected = set(assert_files_list)
+  if not actual == expected:
+    need_to_add = list(actual - expected)
+    need_to_remove = list(expected - actual)
+    raise Exception('Output files list does not match expectations. Please '
+                    'add %s and remove %s.' % (need_to_add, need_to_remove))
+
+def DoMain(argv):
+  usage = 'usage: %prog [options] output_dir input_file(s)...'
+  parser = optparse.OptionParser(usage=usage)
+
+  parser.add_option('--assert_file', action="append", default=[],
+                    dest="assert_files_list", help='Assert that the given '
+                    'file is an output. There can be multiple occurrences of '
+                    'this flag.')
+  parser.add_option('--print_output_only', help='Only print output paths.',
+                    action='store_true')
+  parser.add_option('--verbose', help='Print more information.',
+                    action='store_true')
+
+  options, args = parser.parse_args(argv)
+  if len(args) < 2:
+    parser.error('Need to specify output directory and at least one input file')
+  output_paths = DoGenerate(args[0], args[1:],
+                            print_output_only=options.print_output_only)
+
+  if options.assert_files_list:
+    AssertFilesList(output_paths, options.assert_files_list)
+
+  if options.verbose:
+    print 'Output paths:'
+    print '\n'.join(output_paths)
+
+  return ' '.join(output_paths)
+
+if __name__ == '__main__':
+  DoMain(sys.argv[1:])
diff --git a/build/android/gyp/java_cpp_enum_tests.py b/build/android/gyp/java_cpp_enum_tests.py
new file mode 100755
index 0000000..44f9766
--- /dev/null
+++ b/build/android/gyp/java_cpp_enum_tests.py
@@ -0,0 +1,436 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for enum_preprocess.py.
+
+This test suite contains various tests for the C++ -> Java enum generator.
+"""
+
+import collections
+import optparse
+import os
+import sys
+import unittest
+
+import java_cpp_enum
+from java_cpp_enum import EnumDefinition, GenerateOutput, GetScriptName
+from java_cpp_enum import HeaderParser
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "gyp"))
+from util import build_utils
+
+class TestPreprocess(unittest.TestCase):
+  def testOutput(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='some.package',
+                                entries=[('E1', 1), ('E2', '2 << 2')])
+    output = GenerateOutput('path/to/file', definition)
+    expected = """
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file is autogenerated by
+//     %s
+// From
+//     path/to/file
+
+package some.package;
+
+public class ClassName {
+  public static final int E1 = 1;
+  public static final int E2 = 2 << 2;
+}
+"""
+    self.assertEqual(expected % GetScriptName(), output)
+
+  def testParseSimpleEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO,
+        VALUE_ONE,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', 0),
+                                              ('VALUE_ONE', 1)]),
+                     definition.entries)
+
+  def testParseBitShifts(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumName {
+        VALUE_ZERO = 1 << 0,
+        VALUE_ONE = 1 << 1,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumName', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('VALUE_ZERO', '1 << 0'),
+                                              ('VALUE_ONE', '1 << 1')]),
+                     definition.entries)
+
+  def testParseClassNameOverride(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OverrideName
+      enum EnumName {
+        FOO
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: OtherOverride
+      enum PrefixTest {
+        PREFIX_TEST_A,
+        PREFIX_TEST_B,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('OverrideName', definition.class_name)
+
+    definition = definitions[1]
+    self.assertEqual('OtherOverride', definition.class_name)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseTwoEnums(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum EnumOne {
+        ENUM_ONE_A = 1,
+        // Comment there
+        ENUM_ONE_B = A,
+      };
+
+      enum EnumIgnore {
+        C, D, E
+      };
+
+      // GENERATED_JAVA_ENUM_PACKAGE: other.package
+      // GENERATED_JAVA_PREFIX_TO_STRIP: P_
+      enum EnumTwo {
+        P_A,
+        P_B
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(2, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('EnumOne', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', 'A')]),
+                     definition.entries)
+
+    definition = definitions[1]
+    self.assertEqual('EnumTwo', definition.class_name)
+    self.assertEqual('other.package', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1)]),
+                     definition.entries)
+
+  def testParseThrowsOnUnknownDirective(self):
+    test_data = """
+      // GENERATED_JAVA_UNKNOWN: Value
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseReturnsEmptyListWithoutDirectives(self):
+    test_data = """
+      enum EnumName {
+        VALUE_ONE,
+      };
+    """.split('\n')
+    self.assertEqual([], HeaderParser(test_data).ParseDefinitions())
+
+  def testParseEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseEnumStruct(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum struct Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnum(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum Foo : int {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('int', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseFixedTypeEnumClass(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: unsigned short {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual(1, len(definitions))
+    definition = definitions[0]
+    self.assertEqual('Foo', definition.class_name)
+    self.assertEqual('test.namespace', definition.enum_package)
+    self.assertEqual('unsigned short', definition.fixed_type)
+    self.assertEqual(collections.OrderedDict([('A', 0)]),
+                     definition.entries)
+
+  def testParseUnknownFixedTypeRaises(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: test.namespace
+      enum class Foo: foo_type {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseSimpleMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (te
+      //   st.name
+      //   space)
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+
+  def testParseMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.namespace)
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: (
+      //   Ba
+      //   r
+      //   )
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    definitions = HeaderParser(test_data).ParseDefinitions()
+    self.assertEqual('test.namespace', definitions[0].enum_package)
+    self.assertEqual('Bar', definitions[0].class_name)
+
+  def testParseMalformedMultiLineDirectiveWithOtherDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      // GENERATED_JAVA_CLASS_NAME_OVERRIDE: Bar
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirective(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      //   test.name
+      //   space
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testParseMalformedMultiLineDirectiveShort(self):
+    test_data = """
+      // GENERATED_JAVA_ENUM_PACKAGE: (
+      enum Foo {
+        FOO_A,
+      };
+    """.split('\n')
+    with self.assertRaises(Exception):
+      HeaderParser(test_data).ParseDefinitions()
+
+  def testEnumValueAssignmentNoneDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentAllDefined(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', '1')
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', '3')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', '1'),
+                                              ('B', '2'),
+                                              ('C', '3')]),
+                     definition.entries)
+
+  def testEnumValueAssignmentReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', None)
+    definition.AppendEntry('D', 'C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 1),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSet(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', '2')
+    definition.AppendEntry('C', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 2),
+                                              ('C', 3)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentSetReferences(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'A')
+    definition.AppendEntry('C', 'B')
+    definition.AppendEntry('D', None)
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 0),
+                                              ('C', 0),
+                                              ('D', 1)]),
+                     definition.entries)
+
+  def testEnumValueAssignmentRaises(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', 'foo')
+    definition.AppendEntry('C', None)
+    with self.assertRaises(Exception):
+      definition.Finalize()
+
+  def testExplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='c', enum_package='p')
+    definition.AppendEntry('P_A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('P_C', None)
+    definition.AppendEntry('P_LAST', 'P_C')
+    definition.prefix_to_strip = 'P_'
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStripping(self):
+    definition = EnumDefinition(original_enum_name='ClassName',
+                                enum_package='p')
+    definition.AppendEntry('CLASS_NAME_A', None)
+    definition.AppendEntry('CLASS_NAME_B', None)
+    definition.AppendEntry('CLASS_NAME_C', None)
+    definition.AppendEntry('CLASS_NAME_LAST', 'CLASS_NAME_C')
+    definition.Finalize()
+    self.assertEqual(collections.OrderedDict([('A', 0),
+                                              ('B', 1),
+                                              ('C', 2),
+                                              ('LAST', 2)]),
+                     definition.entries)
+
+  def testImplicitPrefixStrippingRequiresAllConstantsToBePrefixed(self):
+    definition = EnumDefinition(original_enum_name='Name',
+                                enum_package='p')
+    definition.AppendEntry('A', None)
+    definition.AppendEntry('B', None)
+    definition.AppendEntry('NAME_LAST', None)
+    definition.Finalize()
+    self.assertEqual(['A', 'B', 'NAME_LAST'], definition.entries.keys())
+
+  def testGenerateThrowsOnEmptyInput(self):
+    with self.assertRaises(Exception):
+      original_do_parse = java_cpp_enum.DoParseHeaderFile
+      try:
+        java_cpp_enum.DoParseHeaderFile = lambda _: []
+        java_cpp_enum.DoGenerate('dir', ['file'])
+      finally:
+        java_cpp_enum.DoParseHeaderFile = original_do_parse
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option("--stamp", help="File to touch on success.")
+  options, _ = parser.parse_args(argv)
+
+  suite = unittest.TestLoader().loadTestsFromTestCase(TestPreprocess)
+  unittest.TextTestRunner(verbosity=0).run(suite)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/build/android/gyp/javac.py b/build/android/gyp/javac.py
new file mode 100755
index 0000000..dafe5df
--- /dev/null
+++ b/build/android/gyp/javac.py
@@ -0,0 +1,321 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import optparse
+import os
+import shutil
+import re
+import sys
+import textwrap
+
+from util import build_utils
+from util import md5_check
+
+import jar
+
+sys.path.append(build_utils.COLORAMA_ROOT)
+import colorama
+
+
+def ColorJavacOutput(output):
+  fileline_prefix = r'(?P<fileline>(?P<file>[-.\w/\\]+.java):(?P<line>[0-9]+):)'
+  warning_re = re.compile(
+      fileline_prefix + r'(?P<full_message> warning: (?P<message>.*))$')
+  error_re = re.compile(
+      fileline_prefix + r'(?P<full_message> (?P<message>.*))$')
+  marker_re = re.compile(r'\s*(?P<marker>\^)\s*$')
+
+  warning_color = ['full_message', colorama.Fore.YELLOW + colorama.Style.DIM]
+  error_color = ['full_message', colorama.Fore.MAGENTA + colorama.Style.BRIGHT]
+  marker_color = ['marker', colorama.Fore.BLUE + colorama.Style.BRIGHT]
+
+  def Colorize(line, regex, color):
+    match = regex.match(line)
+    start = match.start(color[0])
+    end = match.end(color[0])
+    return (line[:start]
+            + color[1] + line[start:end]
+            + colorama.Fore.RESET + colorama.Style.RESET_ALL
+            + line[end:])
+
+  def ApplyColor(line):
+    if warning_re.match(line):
+      line = Colorize(line, warning_re, warning_color)
+    elif error_re.match(line):
+      line = Colorize(line, error_re, error_color)
+    elif marker_re.match(line):
+      line = Colorize(line, marker_re, marker_color)
+    return line
+
+  return '\n'.join(map(ApplyColor, output.split('\n')))
+
+
+ERRORPRONE_OPTIONS = [
+  '-Xepdisable:'
+  # Something in chrome_private_java makes this check crash.
+  'com.google.errorprone.bugpatterns.ClassCanBeStatic,'
+  # These crash on lots of targets.
+  'com.google.errorprone.bugpatterns.WrongParameterPackage,'
+  'com.google.errorprone.bugpatterns.GuiceOverridesGuiceInjectableMethod,'
+  'com.google.errorprone.bugpatterns.GuiceOverridesJavaxInjectableMethod,'
+  'com.google.errorprone.bugpatterns.ElementsCountedInLoop'
+]
+
+def DoJavac(
+    bootclasspath, classpath, classes_dir, chromium_code,
+    use_errorprone_path, java_files):
+  """Runs javac.
+
+  Builds |java_files| with the provided |classpath| and puts the generated
+  .class files into |classes_dir|. If |chromium_code| is true, extra lint
+  checking will be enabled.
+  """
+
+  jar_inputs = []
+  for path in classpath:
+    if os.path.exists(path + '.TOC'):
+      jar_inputs.append(path + '.TOC')
+    else:
+      jar_inputs.append(path)
+
+  javac_args = [
+      '-g',
+      # Chromium only allows UTF8 source files.  Being explicit avoids
+      # javac pulling a default encoding from the user's environment.
+      '-encoding', 'UTF-8',
+      '-classpath', ':'.join(classpath),
+      '-d', classes_dir]
+
+  if bootclasspath:
+    javac_args.extend([
+        '-bootclasspath', ':'.join(bootclasspath),
+        '-source', '1.7',
+        '-target', '1.7',
+        ])
+
+  if chromium_code:
+    # TODO(aurimas): re-enable '-Xlint:deprecation' checks once they are fixed.
+    javac_args.extend(['-Xlint:unchecked'])
+  else:
+    # XDignore.symbol.file makes javac compile against rt.jar instead of
+    # ct.sym. This means that using a java internal package/class will not
+    # trigger a compile warning or error.
+    javac_args.extend(['-XDignore.symbol.file'])
+
+  if use_errorprone_path:
+    javac_cmd = [use_errorprone_path] + ERRORPRONE_OPTIONS
+  else:
+    javac_cmd = ['javac']
+
+  javac_cmd = javac_cmd + javac_args + java_files
+
+  def Compile():
+    build_utils.CheckOutput(
+        javac_cmd,
+        print_stdout=chromium_code,
+        stderr_filter=ColorJavacOutput)
+
+  record_path = os.path.join(classes_dir, 'javac.md5.stamp')
+  md5_check.CallAndRecordIfStale(
+      Compile,
+      record_path=record_path,
+      input_paths=java_files + jar_inputs,
+      input_strings=javac_cmd)
+
+
+_MAX_MANIFEST_LINE_LEN = 72
+
+
+def CreateManifest(manifest_path, classpath, main_class=None,
+                   manifest_entries=None):
+  """Creates a manifest file with the given parameters.
+
+  This generates a manifest file that complies with the spec found at
+  http://docs.oracle.com/javase/7/docs/technotes/guides/jar/jar.html#JAR_Manifest
+
+  Args:
+    manifest_path: The path to the manifest file that should be created.
+    classpath: The JAR files that should be listed on the manifest file's
+      classpath.
+    main_class: If present, the class containing the main() function.
+    manifest_entries: If present, a list of (key, value) pairs to add to
+      the manifest.
+
+  """
+  output = ['Manifest-Version: 1.0']
+  if main_class:
+    output.append('Main-Class: %s' % main_class)
+  if manifest_entries:
+    for k, v in manifest_entries:
+      output.append('%s: %s' % (k, v))
+  if classpath:
+    sanitized_paths = []
+    for path in classpath:
+      sanitized_paths.append(os.path.basename(path.strip('"')))
+    output.append('Class-Path: %s' % ' '.join(sanitized_paths))
+  output.append('Created-By: ')
+  output.append('')
+
+  wrapper = textwrap.TextWrapper(break_long_words=True,
+                                 drop_whitespace=False,
+                                 subsequent_indent=' ',
+                                 width=_MAX_MANIFEST_LINE_LEN - 2)
+  output = '\r\n'.join(w for l in output for w in wrapper.wrap(l))
+
+  with open(manifest_path, 'w') as f:
+    f.write(output)
+
+
+def main(argv):
+  colorama.init()
+
+  argv = build_utils.ExpandFileArgs(argv)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option(
+      '--src-gendirs',
+      help='Directories containing generated java files.')
+  parser.add_option(
+      '--java-srcjars',
+      action='append',
+      default=[],
+      help='List of srcjars to include in compilation.')
+  parser.add_option(
+      '--bootclasspath',
+      action='append',
+      default=[],
+      help='Boot classpath for javac. If this is specified multiple times, '
+      'they will all be appended to construct the boot classpath.')
+  parser.add_option(
+      '--classpath',
+      action='append',
+      help='Classpath for javac. If this is specified multiple times, they '
+      'will all be appended to construct the classpath.')
+  parser.add_option(
+      '--javac-includes',
+      help='A list of file patterns. If provided, only java files that match '
+      'one of the patterns will be compiled.')
+  parser.add_option(
+      '--jar-excluded-classes',
+      default='',
+      help='List of .class file patterns to exclude from the jar.')
+
+  parser.add_option(
+      '--chromium-code',
+      type='int',
+      help='Whether code being compiled should be built with stricter '
+      'warnings for chromium code.')
+
+  parser.add_option(
+      '--use-errorprone-path',
+      help='Use the Errorprone compiler at this path.')
+
+  parser.add_option(
+      '--classes-dir',
+      help='Directory for compiled .class files.')
+  parser.add_option('--jar-path', help='Jar output path.')
+  parser.add_option(
+      '--main-class',
+      help='The class containing the main method.')
+  parser.add_option(
+      '--manifest-entry',
+      action='append',
+      help='Key:value pairs to add to the .jar manifest.')
+
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, args = parser.parse_args(argv)
+
+  if options.main_class and not options.jar_path:
+    parser.error('--main-class requires --jar-path')
+
+  bootclasspath = []
+  for arg in options.bootclasspath:
+    bootclasspath += build_utils.ParseGypList(arg)
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+
+  java_srcjars = []
+  for arg in options.java_srcjars:
+    java_srcjars += build_utils.ParseGypList(arg)
+
+  java_files = args
+  if options.src_gendirs:
+    src_gendirs = build_utils.ParseGypList(options.src_gendirs)
+    java_files += build_utils.FindInDirectories(src_gendirs, '*.java')
+
+  input_files = bootclasspath + classpath + java_srcjars + java_files
+  with build_utils.TempDir() as temp_dir:
+    classes_dir = os.path.join(temp_dir, 'classes')
+    os.makedirs(classes_dir)
+    if java_srcjars:
+      java_dir = os.path.join(temp_dir, 'java')
+      os.makedirs(java_dir)
+      for srcjar in java_srcjars:
+        build_utils.ExtractAll(srcjar, path=java_dir, pattern='*.java')
+      java_files += build_utils.FindInDirectory(java_dir, '*.java')
+
+    if options.javac_includes:
+      javac_includes = build_utils.ParseGypList(options.javac_includes)
+      filtered_java_files = []
+      for f in java_files:
+        for include in javac_includes:
+          if fnmatch.fnmatch(f, include):
+            filtered_java_files.append(f)
+            break
+      java_files = filtered_java_files
+
+    if len(java_files) != 0:
+      DoJavac(
+          bootclasspath,
+          classpath,
+          classes_dir,
+          options.chromium_code,
+          options.use_errorprone_path,
+          java_files)
+
+    if options.jar_path:
+      if options.main_class or options.manifest_entry:
+        if options.manifest_entry:
+          entries = map(lambda e: e.split(":"), options.manifest_entry)
+        else:
+          entries = []
+        manifest_file = os.path.join(temp_dir, 'manifest')
+        CreateManifest(manifest_file, classpath, options.main_class, entries)
+      else:
+        manifest_file = None
+      jar.JarDirectory(classes_dir,
+                       build_utils.ParseGypList(options.jar_excluded_classes),
+                       options.jar_path,
+                       manifest_file=manifest_file)
+
+    if options.classes_dir:
+      # Delete the old classes directory. This ensures that all .class files in
+      # the output are actually from the input .java files. For example, if a
+      # .java file is deleted or an inner class is removed, the classes
+      # directory should not contain the corresponding old .class file after
+      # running this action.
+      build_utils.DeleteDirectory(options.classes_dir)
+      shutil.copytree(classes_dir, options.classes_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        input_files + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
+
+
diff --git a/build/android/gyp/jinja_template.py b/build/android/gyp/jinja_template.py
new file mode 100755
index 0000000..e7c9a34
--- /dev/null
+++ b/build/android/gyp/jinja_template.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Renders one or more template files using the Jinja template engine."""
+
+import codecs
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.append(os.path.join(os.path.dirname(__file__), '../../../third_party'))
+import jinja2  # pylint: disable=F0401
+
+
+class RecordingFileSystemLoader(jinja2.FileSystemLoader):
+  '''A FileSystemLoader that stores a list of loaded templates.'''
+  def __init__(self, searchpath):
+    jinja2.FileSystemLoader.__init__(self, searchpath)
+    self.loaded_templates = set()
+
+  def get_source(self, environment, template):
+    contents, filename, uptodate = jinja2.FileSystemLoader.get_source(
+        self, environment, template)
+    self.loaded_templates.add(os.path.relpath(filename))
+    return contents, filename, uptodate
+
+  def get_loaded_templates(self):
+    return list(self.loaded_templates)
+
+
+def ProcessFile(env, input_filename, loader_base_dir, output_filename,
+                variables):
+  input_rel_path = os.path.relpath(input_filename, loader_base_dir)
+  template = env.get_template(input_rel_path)
+  output = template.render(variables)
+  with codecs.open(output_filename, 'w', 'utf-8') as output_file:
+    output_file.write(output)
+
+
+def ProcessFiles(env, input_filenames, loader_base_dir, inputs_base_dir,
+                 outputs_zip, variables):
+  with build_utils.TempDir() as temp_dir:
+    for input_filename in input_filenames:
+      relpath = os.path.relpath(os.path.abspath(input_filename),
+                                os.path.abspath(inputs_base_dir))
+      if relpath.startswith(os.pardir):
+        raise Exception('input file %s is not contained in inputs base dir %s'
+                        % (input_filename, inputs_base_dir))
+
+      output_filename = os.path.join(temp_dir, relpath)
+      parent_dir = os.path.dirname(output_filename)
+      build_utils.MakeDirectory(parent_dir)
+      ProcessFile(env, input_filename, loader_base_dir, output_filename,
+                  variables)
+
+    build_utils.ZipDir(outputs_zip, temp_dir)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--inputs', help='The template files to process.')
+  parser.add_option('--output', help='The output file to generate. Valid '
+                    'only if there is a single input.')
+  parser.add_option('--outputs-zip', help='A zip file containing the processed '
+                    'templates. Required if there are multiple inputs.')
+  parser.add_option('--inputs-base-dir', help='A common ancestor directory of '
+                    'the inputs. Each output\'s path in the output zip will '
+                    'match the relative path from INPUTS_BASE_DIR to the '
+                    'input. Required if --outputs-zip is given.')
+  parser.add_option('--loader-base-dir', help='Base path used by the template '
+                    'loader. Must be a common ancestor directory of '
+                    'the inputs. Defaults to CHROMIUM_SRC.',
+                    default=build_utils.CHROMIUM_SRC)
+  parser.add_option('--variables', help='Variables to be made available in the '
+                    'template processing environment, as a GYP list (e.g. '
+                    '--variables "channel=beta mstone=39")', default='')
+  options, args = parser.parse_args()
+
+  build_utils.CheckOptions(options, parser, required=['inputs'])
+  inputs = build_utils.ParseGypList(options.inputs)
+
+  if (options.output is None) == (options.outputs_zip is None):
+    parser.error('Exactly one of --output and --outputs-zip must be given')
+  if options.output and len(inputs) != 1:
+    parser.error('--output cannot be used with multiple inputs')
+  if options.outputs_zip and not options.inputs_base_dir:
+    parser.error('--inputs-base-dir must be given when --outputs-zip is used')
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  variables = {}
+  for v in build_utils.ParseGypList(options.variables):
+    if '=' not in v:
+      parser.error('--variables argument must contain "=": ' + v)
+    name, _, value = v.partition('=')
+    variables[name] = value
+
+  loader = RecordingFileSystemLoader(options.loader_base_dir)
+  env = jinja2.Environment(loader=loader, undefined=jinja2.StrictUndefined,
+                           line_comment_prefix='##')
+  if options.output:
+    ProcessFile(env, inputs[0], options.loader_base_dir, options.output,
+                variables)
+  else:
+    ProcessFiles(env, inputs, options.loader_base_dir, options.inputs_base_dir,
+                 options.outputs_zip, variables)
+
+  if options.depfile:
+    deps = loader.get_loaded_templates() + build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, deps)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/lint.py b/build/android/gyp/lint.py
new file mode 100755
index 0000000..6c4645a
--- /dev/null
+++ b/build/android/gyp/lint.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs Android's lint tool."""
+
+
+import optparse
+import os
+import sys
+from xml.dom import minidom
+
+from util import build_utils
+
+
+_SRC_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                         '..', '..', '..'))
+
+
+def _RunLint(lint_path, config_path, processed_config_path, manifest_path,
+             result_path, product_dir, sources, jar_path, resource_dir=None):
+
+  def _RelativizePath(path):
+    """Returns relative path to top-level src dir.
+
+    Args:
+      path: A path relative to cwd.
+    """
+    return os.path.relpath(os.path.abspath(path), _SRC_ROOT)
+
+  def _ProcessConfigFile():
+    if not build_utils.IsTimeStale(processed_config_path, [config_path]):
+      return
+
+    with open(config_path, 'rb') as f:
+      content = f.read().replace(
+          'PRODUCT_DIR', _RelativizePath(product_dir))
+
+    with open(processed_config_path, 'wb') as f:
+      f.write(content)
+
+  def _ProcessResultFile():
+    with open(result_path, 'rb') as f:
+      content = f.read().replace(
+          _RelativizePath(product_dir), 'PRODUCT_DIR')
+
+    with open(result_path, 'wb') as f:
+      f.write(content)
+
+  def _ParseAndShowResultFile():
+    dom = minidom.parse(result_path)
+    issues = dom.getElementsByTagName('issue')
+    print >> sys.stderr
+    for issue in issues:
+      issue_id = issue.attributes['id'].value
+      message = issue.attributes['message'].value
+      location_elem = issue.getElementsByTagName('location')[0]
+      path = location_elem.attributes['file'].value
+      line = location_elem.getAttribute('line')
+      if line:
+        error = '%s:%s %s: %s [warning]' % (path, line, message, issue_id)
+      else:
+        # Issues in class files don't have a line number.
+        error = '%s %s: %s [warning]' % (path, message, issue_id)
+      print >> sys.stderr, error
+      for attr in ['errorLine1', 'errorLine2']:
+        error_line = issue.getAttribute(attr)
+        if error_line:
+          print >> sys.stderr, error_line
+    return len(issues)
+
+  with build_utils.TempDir() as temp_dir:
+    _ProcessConfigFile()
+
+    cmd = [
+        _RelativizePath(lint_path), '-Werror', '--exitcode', '--showall',
+        '--config', _RelativizePath(processed_config_path),
+        '--classpath', _RelativizePath(jar_path),
+        '--xml', _RelativizePath(result_path),
+    ]
+    if resource_dir:
+      cmd.extend(['--resources', _RelativizePath(resource_dir)])
+
+    # There may be multiple source files with the same basename (but in
+    # different directories). It is difficult to determine what part of the path
+    # corresponds to the java package, and so instead just link the source files
+    # into temporary directories (creating a new one whenever there is a name
+    # conflict).
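+    # For example (file names illustrative): with sources a/Foo.java,
+    # b/Foo.java and a/Bar.java, a/Foo.java and a/Bar.java are linked into
+    # <temp_dir>/0/, while b/Foo.java, whose basename collides, goes into a
+    # new directory <temp_dir>/1/.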
+    src_dirs = []
+    def NewSourceDir():
+      new_dir = os.path.join(temp_dir, str(len(src_dirs)))
+      os.mkdir(new_dir)
+      src_dirs.append(new_dir)
+      cmd.extend(['--sources', _RelativizePath(new_dir)])
+      return new_dir
+
+    def PathInDir(d, src):
+      return os.path.join(d, os.path.basename(src))
+
+    for src in sources:
+      src_dir = None
+      for d in src_dirs:
+        if not os.path.exists(PathInDir(d, src)):
+          src_dir = d
+          break
+      if not src_dir:
+        src_dir = NewSourceDir()
+      os.symlink(os.path.abspath(src), PathInDir(src_dir, src))
+
+    cmd.append(_RelativizePath(os.path.join(manifest_path, os.pardir)))
+
+    if os.path.exists(result_path):
+      os.remove(result_path)
+
+    try:
+      build_utils.CheckOutput(cmd, cwd=_SRC_ROOT)
+    except build_utils.CalledProcessError as e:
+      # There is a problem with lint usage
+      if not os.path.exists(result_path):
+        print 'Something is wrong:'
+        print e
+        return 1
+
+      # There are actual lint issues
+      else:
+        try:
+          num_issues = _ParseAndShowResultFile()
+        except Exception:
+          print 'Lint created an unparseable XML file...'
+          print 'File contents:'
+          with open(result_path) as f:
+            print f.read()
+          return 1
+
+        _ProcessResultFile()
+        msg = ('\nLint found %d new issues.\n'
+               ' - For a full explanation, refer to %s\n'
+               ' - Want to suppress these issues?\n'
+               '    1. Read comment in %s\n'
+               '    2. Run "python %s %s"\n' %
+               (num_issues,
+                _RelativizePath(result_path),
+                _RelativizePath(config_path),
+                _RelativizePath(os.path.join(_SRC_ROOT, 'build', 'android',
+                                             'lint', 'suppress.py')),
+                _RelativizePath(result_path)))
+        print >> sys.stderr, msg
+        return 1
+
+  return 0
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--lint-path', help='Path to lint executable.')
+  parser.add_option('--config-path', help='Path to lint suppressions file.')
+  parser.add_option('--processed-config-path',
+                    help='Path to processed lint suppressions file.')
+  parser.add_option('--manifest-path', help='Path to AndroidManifest.xml')
+  parser.add_option('--result-path', help='Path to XML lint result file.')
+  parser.add_option('--product-dir', help='Path to product dir.')
+  parser.add_option('--src-dirs', help='Directories containing java files.')
+  parser.add_option('--java-files', help='Paths to java files.')
+  parser.add_option('--jar-path', help='Jar file containing class files.')
+  parser.add_option('--resource-dir', help='Path to resource dir.')
+  parser.add_option('--can-fail-build', action='store_true',
+                    help='If set, script will exit with nonzero exit status'
+                    ' if lint errors are present')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--enable', action='store_true',
+                    help='Run lint instead of just touching stamp.')
+
+  options, _ = parser.parse_args()
+
+  build_utils.CheckOptions(
+      options, parser, required=['lint_path', 'config_path',
+                                 'processed_config_path', 'manifest_path',
+                                 'result_path', 'product_dir',
+                                 'jar_path'])
+
+  rc = 0
+
+  if options.enable:
+    sources = []
+    if options.src_dirs:
+      src_dirs = build_utils.ParseGypList(options.src_dirs)
+      sources = build_utils.FindInDirectories(src_dirs, '*.java')
+    elif options.java_files:
+      sources = build_utils.ParseGypList(options.java_files)
+    else:
+      print 'One of --src-dirs or --java-files must be specified.'
+      return 1
+    rc = _RunLint(options.lint_path, options.config_path,
+                  options.processed_config_path,
+                  options.manifest_path, options.result_path,
+                  options.product_dir, sources, options.jar_path,
+                  options.resource_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        build_utils.GetPythonDependencies())
+
+  if options.stamp and not rc:
+    build_utils.Touch(options.stamp)
+
+  return rc if options.can_fail_build else 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/locale_pak_resources.py b/build/android/gyp/locale_pak_resources.py
new file mode 100755
index 0000000..84c4a37
--- /dev/null
+++ b/build/android/gyp/locale_pak_resources.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Creates a resources.zip for locale .pak files.
+
+Places the locale.pak files into appropriate resource configs
+(e.g. en-GB.pak -> res/raw-en/en_gb.lpak). Also generates a locale_paks
+TypedArray so that resource files can be enumerated at runtime.
+"""
+
+import collections
+import optparse
+import os
+import sys
+import zipfile
+
+from util import build_utils
+
+
+# This should stay in sync with:
+# base/android/java/src/org/chromium/base/LocaleUtils.java
+_CHROME_TO_ANDROID_LOCALE_MAP = {
+    'he': 'iw',
+    'id': 'in',
+    'fil': 'tl',
+}
+
+
+def ToResourceFileName(name):
+  """Returns the resource-compatible file name for the given file."""
+  # Resource file names must consist of [a-z0-9_.].
+  # The extension is changed to .lpak so that compression can be toggled
+  # separately for locale pak files and other pak files.
+  return name.replace('-', '_').replace('.pak', '.lpak').lower()
+
+
+def CreateLocalePaksXml(names):
+  """Creates the contents for the locale-paks.xml files."""
+  VALUES_FILE_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
+<resources>
+  <array name="locale_paks">%s
+  </array>
+</resources>
+'''
+  VALUES_ITEM_TEMPLATE = '''
+    <item>@raw/%s</item>'''
+
+  res_names = (os.path.splitext(name)[0] for name in names)
+  items = ''.join((VALUES_ITEM_TEMPLATE % name for name in res_names))
+  return VALUES_FILE_TEMPLATE % items
+
+
+def ComputeMappings(sources):
+  """Computes the mappings of sources -> resources.
+
+  Returns a tuple of:
+    - mappings: List of (src, dest) paths
+    - lang_to_locale_map: Map of language -> list of resource names
+      e.g. "en" -> ["en_gb.lpak"]
+  """
+  lang_to_locale_map = collections.defaultdict(list)
+  mappings = []
+  for src_path in sources:
+    basename = os.path.basename(src_path)
+    name = os.path.splitext(basename)[0]
+    res_name = ToResourceFileName(basename)
+    if name == 'en-US':
+      dest_dir = 'raw'
+    else:
+      # Chrome uses different region mapping logic from Android, so include
+      # all regions for each language.
+      android_locale = _CHROME_TO_ANDROID_LOCALE_MAP.get(name, name)
+      lang = android_locale[0:2]
+      dest_dir = 'raw-' + lang
+      lang_to_locale_map[lang].append(res_name)
+    mappings.append((src_path, os.path.join(dest_dir, res_name)))
+  return mappings, lang_to_locale_map
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--locale-paks', help='List of files for res/raw-LOCALE')
+  parser.add_option('--resources-zip', help='Path to output resources.zip')
+  parser.add_option('--print-languages',
+      action='store_true',
+      help='Print out the list of languages that cover the given locale paks '
+           '(using Android\'s language codes)')
+
+  options, _ = parser.parse_args()
+  build_utils.CheckOptions(options, parser,
+                           required=['locale_paks'])
+
+  sources = build_utils.ParseGypList(options.locale_paks)
+
+  if options.depfile:
+    deps = sources + build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, deps)
+
+  mappings, lang_to_locale_map = ComputeMappings(sources)
+  if options.print_languages:
+    print '\n'.join(sorted(lang_to_locale_map))
+
+  if options.resources_zip:
+    with zipfile.ZipFile(options.resources_zip, 'w', zipfile.ZIP_STORED) as out:
+      for mapping in mappings:
+        out.write(mapping[0], mapping[1])
+
+      # Create TypedArray resources so ResourceExtractor can enumerate files.
+      def WriteValuesFile(lang, names):
+        dest_dir = 'values'
+        if lang:
+          dest_dir += '-' + lang
+        # Always extract en-US.lpak since it's the fallback.
+        xml = CreateLocalePaksXml(names + ['en_us.lpak'])
+        out.writestr(os.path.join(dest_dir, 'locale-paks.xml'), xml)
+
+      for lang, names in lang_to_locale_map.iteritems():
+        WriteValuesFile(lang, names)
+      WriteValuesFile(None, [])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/gyp/pack_relocations.py b/build/android/gyp/pack_relocations.py
new file mode 100755
index 0000000..02e4499
--- /dev/null
+++ b/build/android/gyp/pack_relocations.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pack relocations in a library (or copy unchanged).
+
+If --enable-packing and --configuration-name=='Release', invoke the
+relocation_packer tool to pack the .rel.dyn or .rela.dyn section in the given
+library files.  This step is inserted after the libraries are stripped.
+
+If --enable-packing is zero, the script copies files verbatim, with no
+attempt to pack relocations.
+
+Any library listed in --exclude-packing-list is also copied verbatim,
+irrespective of any --enable-packing setting.  Typically this would be
+'libchromium_android_linker.so'.
+"""
+
+import optparse
+import os
+import shlex
+import shutil
+import sys
+import tempfile
+
+from util import build_utils
+
+def PackLibraryRelocations(android_pack_relocations, library_path, output_path):
+  shutil.copy(library_path, output_path)
+  pack_command = [android_pack_relocations, output_path]
+  build_utils.CheckOutput(pack_command)
+
+
+def CopyLibraryUnchanged(library_path, output_path):
+  shutil.copy(library_path, output_path)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--clear-dir', action='store_true',
+                    help='If set, the destination directory will be deleted '
+                    'before copying files to it. This is highly recommended to '
+                    'ensure that no stale files are left in the directory.')
+
+  parser.add_option('--configuration-name',
+      default='Release',
+      help='Gyp configuration name (e.g. Debug, Release)')
+  parser.add_option('--enable-packing',
+      choices=['0', '1'],
+      help=('Pack relocations if 1 and configuration name is \'Release\','
+            ' otherwise plain file copy'))
+  parser.add_option('--exclude-packing-list',
+      default='',
+      help='Names of any libraries explicitly not packed')
+  parser.add_option('--android-pack-relocations',
+      help='Path to the relocations packer binary')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory for stripped libraries')
+  parser.add_option('--packed-libraries-dir',
+      help='Directory for packed libraries')
+  parser.add_option('--libraries', action='append',
+      help='List of libraries')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+  enable_packing = (options.enable_packing == '1' and
+                    options.configuration_name == 'Release')
+  exclude_packing_set = set(shlex.split(options.exclude_packing_list))
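+  # For illustration (libfoo.so is hypothetical; the linker library comes from
+  # the module docstring): with --enable-packing=1 and
+  # --configuration-name=Release, libfoo.so is run through the relocation
+  # packer below, while libchromium_android_linker.so, being on the exclude
+  # list, is copied unchanged.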
+
+  libraries = []
+  for libs_arg in options.libraries:
+    libraries += build_utils.ParseGypList(libs_arg)
+
+  if options.clear_dir:
+    build_utils.DeleteDirectory(options.packed_libraries_dir)
+
+  build_utils.MakeDirectory(options.packed_libraries_dir)
+
+  for library in libraries:
+    library_path = os.path.join(options.stripped_libraries_dir, library)
+    output_path = os.path.join(
+        options.packed_libraries_dir, os.path.basename(library))
+
+    if enable_packing and library not in exclude_packing_set:
+      PackLibraryRelocations(options.android_pack_relocations,
+                             library_path,
+                             output_path)
+    else:
+      CopyLibraryUnchanged(library_path, output_path)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/package_resources.py b/build/android/gyp/package_resources.py
new file mode 100755
index 0000000..d17d1fe
--- /dev/null
+++ b/build/android/gyp/package_resources.py
@@ -0,0 +1,270 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=C0301
+"""Package resources into an apk.
+
+See https://android.googlesource.com/platform/tools/base/+/master/legacy/ant-tasks/src/main/java/com/android/ant/AaptExecTask.java
+and
+https://android.googlesource.com/platform/sdk/+/master/files/ant/build.xml
+"""
+# pylint: enable=C0301
+
+import optparse
+import os
+import re
+import shutil
+import zipfile
+
+from util import build_utils
+
+
+# List is generated from the chrome_apk.apk_intermediates.ap_ via:
+#     unzip -l $FILE_AP_ | cut -c31- | grep res/draw | cut -d'/' -f 2 | sort \
+#     | uniq | grep -- -tvdpi- | cut -c10-
+# and then manually sorted.
+# Note that we can't just do a cross-product of dimensions because the filenames
+# become too big and aapt fails to create the files.
+# This leaves all default drawables (mdpi) in the main apk. Android gets upset
+# though if any drawables are missing from the default drawables/ directory.
+DENSITY_SPLITS = {
+    'hdpi': (
+        'hdpi-v4', # Order matters for output file names.
+        'ldrtl-hdpi-v4',
+        'sw600dp-hdpi-v13',
+        'ldrtl-hdpi-v17',
+        'ldrtl-sw600dp-hdpi-v17',
+        'hdpi-v21',
+    ),
+    'xhdpi': (
+        'xhdpi-v4',
+        'ldrtl-xhdpi-v4',
+        'sw600dp-xhdpi-v13',
+        'ldrtl-xhdpi-v17',
+        'ldrtl-sw600dp-xhdpi-v17',
+        'xhdpi-v21',
+    ),
+    'xxhdpi': (
+        'xxhdpi-v4',
+        'ldrtl-xxhdpi-v4',
+        'sw600dp-xxhdpi-v13',
+        'ldrtl-xxhdpi-v17',
+        'ldrtl-sw600dp-xxhdpi-v17',
+        'xxhdpi-v21',
+    ),
+    'xxxhdpi': (
+        'xxxhdpi-v4',
+        'ldrtl-xxxhdpi-v4',
+        'sw600dp-xxxhdpi-v13',
+        'ldrtl-xxxhdpi-v17',
+        'ldrtl-sw600dp-xxxhdpi-v17',
+        'xxxhdpi-v21',
+    ),
+    'tvdpi': (
+        'tvdpi-v4',
+        'sw600dp-tvdpi-v13',
+        'ldrtl-sw600dp-tvdpi-v17',
+    ),
+}
+
+
+def ParseArgs():
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--aapt-path',
+                    help='path to the Android aapt tool')
+
+  parser.add_option('--configuration-name',
+                    help='Gyp\'s configuration name (Debug or Release).')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--version-code', help='Version code for apk.')
+  parser.add_option('--version-name', help='Version name for apk.')
+  parser.add_option(
+      '--shared-resources',
+      action='store_true',
+      help='Make a resource package that can be loaded by a different '
+      'application at runtime to access the package\'s resources.')
+  parser.add_option('--resource-zips',
+                    help='zip files containing resources to be packaged')
+  parser.add_option('--asset-dir',
+                    help='directory containing assets to be packaged')
+  parser.add_option('--no-compress', help='disables compression for the '
+                    'given comma-separated list of extensions')
+  parser.add_option(
+      '--create-density-splits',
+      action='store_true',
+      help='Enables density splits')
+  parser.add_option('--language-splits',
+                    help='GYP list of languages to create splits for')
+
+  parser.add_option('--apk-path',
+                    help='Path to output (partial) apk.')
+
+  (options, args) = parser.parse_args()
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = ('android_sdk', 'aapt_path', 'configuration_name',
+                      'android_manifest', 'version_code', 'version_name',
+                      'apk_path')
+
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  return options
+
+
+def MoveImagesToNonMdpiFolders(res_root):
+  """Move images from drawable-*-mdpi-* folders to drawable-* folders.
+
+  Why? http://crbug.com/289843
+  """
+  for src_dir_name in os.listdir(res_root):
+    src_components = src_dir_name.split('-')
+    if src_components[0] != 'drawable' or 'mdpi' not in src_components:
+      continue
+    src_dir = os.path.join(res_root, src_dir_name)
+    if not os.path.isdir(src_dir):
+      continue
+    dst_components = [c for c in src_components if c != 'mdpi']
+    assert dst_components != src_components
+    dst_dir_name = '-'.join(dst_components)
+    dst_dir = os.path.join(res_root, dst_dir_name)
+    build_utils.MakeDirectory(dst_dir)
+    for src_file_name in os.listdir(src_dir):
+      if not src_file_name.endswith('.png'):
+        continue
+      src_file = os.path.join(src_dir, src_file_name)
+      dst_file = os.path.join(dst_dir, src_file_name)
+      assert not os.path.lexists(dst_file)
+      shutil.move(src_file, dst_file)
+
+
+def PackageArgsForExtractedZip(d):
+  """Returns the aapt args for an extracted resources zip.
+
+  A resources zip either contains the resources for a single target or for
+  multiple targets. If it is multiple targets merged into one, the actual
+  resource directories will be contained in the subdirectories 0, 1, 2, ...
+  """
+  subdirs = [os.path.join(d, s) for s in os.listdir(d)]
+  subdirs = [s for s in subdirs if os.path.isdir(s)]
+  is_multi = '0' in [os.path.basename(s) for s in subdirs]
+  if is_multi:
+    res_dirs = sorted(subdirs, key=lambda p : int(os.path.basename(p)))
+  else:
+    res_dirs = [d]
+  package_command = []
+  for d in res_dirs:
+    MoveImagesToNonMdpiFolders(d)
+    package_command += ['-S', d]
+  return package_command
+
+
+def RenameDensitySplits(apk_path):
+  """Renames all density splits to have shorter / predictable names."""
+  for density, config in DENSITY_SPLITS.iteritems():
+    src_path = '%s_%s' % (apk_path, '_'.join(config))
+    dst_path = '%s_%s' % (apk_path, density)
+    if src_path != dst_path:
+      if os.path.exists(dst_path):
+        os.unlink(dst_path)
+      os.rename(src_path, dst_path)
+
+
+def CheckForMissedConfigs(apk_path, check_density, languages):
+  """Raises an exception if apk_path contains any unexpected configs."""
+  triggers = []
+  if check_density:
+    triggers.extend(re.compile('-%s' % density) for density in DENSITY_SPLITS)
+  if languages:
+    triggers.extend(re.compile(r'-%s\b' % lang) for lang in languages)
+  with zipfile.ZipFile(apk_path) as main_apk_zip:
+    for name in main_apk_zip.namelist():
+      for trigger in triggers:
+        if trigger.search(name) and 'mipmap-' not in name:
+          raise Exception(('Found config in main apk that should have been ' +
+                           'put into a split: %s\nYou need to update ' +
+                           'package_resources.py to include this new ' +
+                           'config (trigger=%s)') % (name, trigger.pattern))
+
+
+def main():
+  options = ParseArgs()
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = options.aapt_path
+
+  with build_utils.TempDir() as temp_dir:
+    package_command = [aapt,
+                       'package',
+                       '--version-code', options.version_code,
+                       '--version-name', options.version_name,
+                       '-M', options.android_manifest,
+                       '--no-crunch',
+                       '-f',
+                       '--auto-add-overlay',
+                       '-I', android_jar,
+                       '-F', options.apk_path,
+                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN,
+                       ]
+
+    if options.no_compress:
+      for ext in options.no_compress.split(','):
+        package_command += ['-0', ext]
+    if options.shared_resources:
+      package_command.append('--shared-lib')
+
+    if options.asset_dir and os.path.exists(options.asset_dir):
+      package_command += ['-A', options.asset_dir]
+
+    if options.resource_zips:
+      dep_zips = build_utils.ParseGypList(options.resource_zips)
+      for z in dep_zips:
+        subdir = os.path.join(temp_dir, os.path.basename(z))
+        if os.path.exists(subdir):
+          raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+        build_utils.ExtractAll(z, path=subdir)
+        package_command += PackageArgsForExtractedZip(subdir)
+
+    if options.create_density_splits:
+      for config in DENSITY_SPLITS.itervalues():
+        package_command.extend(('--split', ','.join(config)))
+
+    language_splits = None
+    if options.language_splits:
+      language_splits = build_utils.ParseGypList(options.language_splits)
+      for lang in language_splits:
+        package_command.extend(('--split', lang))
+
+    if 'Debug' in options.configuration_name:
+      package_command += ['--debug-mode']
+
+    build_utils.CheckOutput(
+        package_command, print_stdout=False, print_stderr=False)
+
+    if options.create_density_splits or language_splits:
+      CheckForMissedConfigs(
+          options.apk_path, options.create_density_splits, language_splits)
+
+    if options.create_density_splits:
+      RenameDensitySplits(options.apk_path)
+
+    if options.depfile:
+      build_utils.WriteDepfile(
+          options.depfile,
+          build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/process_resources.py b/build/android/gyp/process_resources.py
new file mode 100755
index 0000000..d227954
--- /dev/null
+++ b/build/android/gyp/process_resources.py
@@ -0,0 +1,420 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Process Android resources to generate R.java, and prepare for packaging.
+
+This will crunch images and generate v14 compatible resources
+(see generate_v14_compatible_resources.py).
+"""
+
+import codecs
+import optparse
+import os
+import re
+import shutil
+import sys
+import zipfile
+
+import generate_v14_compatible_resources
+
+from util import build_utils
+
+# Import jinja2 from third_party/jinja2
+sys.path.insert(1,
+    os.path.join(os.path.dirname(__file__), '../../../third_party'))
+from jinja2 import Template # pylint: disable=F0401
+
+
+def ParseArgs(args):
+  """Parses command line options.
+
+  Returns:
+    An options object as from optparse.OptionsParser.parse_args()
+  """
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-sdk', help='path to the Android SDK folder')
+  parser.add_option('--aapt-path',
+                    help='path to the Android aapt tool')
+  parser.add_option('--non-constant-id', action='store_true')
+
+  parser.add_option('--android-manifest', help='AndroidManifest.xml path')
+  parser.add_option('--custom-package', help='Java package for R.java')
+  parser.add_option(
+      '--shared-resources',
+      action='store_true',
+      help='Make a resource package that can be loaded by a different '
+      'application at runtime to access the package\'s resources.')
+
+  parser.add_option('--resource-dirs',
+                    help='Directories containing resources of this target.')
+  parser.add_option('--dependencies-res-zips',
+                    help='Resources from dependents.')
+
+  parser.add_option('--resource-zip-out',
+                    help='Path for output zipped resources.')
+
+  parser.add_option('--R-dir',
+                    help='directory to hold generated R.java.')
+  parser.add_option('--srcjar-out',
+                    help='Path to srcjar to contain generated R.java.')
+  parser.add_option('--r-text-out',
+                    help='Path to store the R.txt file generated by aapt.')
+
+  parser.add_option('--proguard-file',
+                    help='Path to the generated proguard.txt file')
+
+  parser.add_option(
+      '--v14-skip',
+      action="store_true",
+      help='Do not generate nor verify v14 resources')
+
+  parser.add_option(
+      '--extra-res-packages',
+      help='Additional package names to generate R.java files for')
+  parser.add_option(
+      '--extra-r-text-files',
+      help='For each additional package, the R.txt file should contain a '
+      'list of resources to be included in the R.java file in the format '
+      'generated by aapt')
+  parser.add_option(
+      '--include-all-resources',
+      action='store_true',
+      help='Include every resource ID in every generated R.java file '
+      '(ignoring R.txt).')
+
+  parser.add_option(
+      '--all-resources-zip-out',
+      help='Path for output of all resources. This includes resources in '
+      'dependencies.')
+
+  parser.add_option('--stamp', help='File to touch on success')
+
+  (options, args) = parser.parse_args(args)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+  # Check that required options have been provided.
+  required_options = (
+      'android_sdk',
+      'aapt_path',
+      'android_manifest',
+      'dependencies_res_zips',
+      'resource_dirs',
+      'resource_zip_out',
+      )
+  build_utils.CheckOptions(options, parser, required=required_options)
+
+  if (options.R_dir is None) == (options.srcjar_out is None):
+    raise Exception('Exactly one of --R-dir or --srcjar-out must be specified.')
+
+  return options
+
+
+def CreateExtraRJavaFiles(
+      r_dir, extra_packages, extra_r_text_files, shared_resources, include_all):
+  if include_all:
+    java_files = build_utils.FindInDirectory(r_dir, "R.java")
+    if len(java_files) != 1:
+      return
+    r_java_file = java_files[0]
+    r_java_contents = codecs.open(r_java_file, encoding='utf-8').read()
+
+    for package in extra_packages:
+      package_r_java_dir = os.path.join(r_dir, *package.split('.'))
+      build_utils.MakeDirectory(package_r_java_dir)
+      package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+      new_r_java = re.sub(r'package [.\w]*;', u'package %s;' % package,
+                          r_java_contents)
+      codecs.open(package_r_java_path, 'w', encoding='utf-8').write(new_r_java)
+  else:
+    if len(extra_packages) != len(extra_r_text_files):
+      raise Exception('Need one R.txt file per extra package')
+
+    all_resources = {}
+    r_txt_file = os.path.join(r_dir, 'R.txt')
+    if not os.path.exists(r_txt_file):
+      return
+    with open(r_txt_file) as f:
+      for line in f:
+        m = re.match(r'(int(?:\[\])?) (\w+) (\w+) (.+)$', line)
+        if not m:
+          raise Exception('Unexpected line in R.txt: %s' % line)
+        java_type, resource_type, name, value = m.groups()
+        all_resources[(resource_type, name)] = (java_type, value)
+
+    for package, r_text_file in zip(extra_packages, extra_r_text_files):
+      if os.path.exists(r_text_file):
+        package_r_java_dir = os.path.join(r_dir, *package.split('.'))
+        build_utils.MakeDirectory(package_r_java_dir)
+        package_r_java_path = os.path.join(package_r_java_dir, 'R.java')
+        CreateExtraRJavaFile(
+            package, package_r_java_path, r_text_file, all_resources,
+            shared_resources)
+
+
+def CreateExtraRJavaFile(
+      package, r_java_path, r_text_file, all_resources, shared_resources):
+  resources = {}
+  with open(r_text_file) as f:
+    for line in f:
+      m = re.match(r'int(?:\[\])? (\w+) (\w+) ', line)
+      if not m:
+        raise Exception('Unexpected line in R.txt: %s' % line)
+      resource_type, name = m.groups()
+      java_type, value = all_resources[(resource_type, name)]
+      if resource_type not in resources:
+        resources[resource_type] = []
+      resources[resource_type].append((name, java_type, value))
+
+  template = Template("""/* AUTO-GENERATED FILE.  DO NOT MODIFY. */
+
+package {{ package }};
+
+public final class R {
+    {% for resource_type in resources %}
+    public static final class {{ resource_type }} {
+        {% for name, java_type, value in resources[resource_type] %}
+        {% if shared_resources %}
+        public static {{ java_type }} {{ name }} = {{ value }};
+        {% else %}
+        public static final {{ java_type }} {{ name }} = {{ value }};
+        {% endif %}
+        {% endfor %}
+    }
+    {% endfor %}
+    {% if shared_resources %}
+    public static void onResourcesLoaded(int packageId) {
+        {% for resource_type in resources %}
+        {% for name, java_type, value in resources[resource_type] %}
+        {% if java_type == 'int[]' %}
+        for(int i = 0; i < {{ resource_type }}.{{ name }}.length; ++i) {
+            {{ resource_type }}.{{ name }}[i] =
+                    ({{ resource_type }}.{{ name }}[i] & 0x00ffffff)
+                    | (packageId << 24);
+        }
+        {% else %}
+        {{ resource_type }}.{{ name }} =
+                ({{ resource_type }}.{{ name }} & 0x00ffffff)
+                | (packageId << 24);
+        {% endif %}
+        {% endfor %}
+        {% endfor %}
+    }
+    {% endif %}
+}
+""", trim_blocks=True, lstrip_blocks=True)
+
+  output = template.render(package=package, resources=resources,
+                           shared_resources=shared_resources)
+  with open(r_java_path, 'w') as f:
+    f.write(output)
+
+
+def CrunchDirectory(aapt, input_dir, output_dir):
+  """Crunches the images in input_dir and its subdirectories into output_dir.
+
+  If an image is already optimized, crunching often increases image size. In
+  this case, the crunched image is overwritten with the original image.
+  """
+  aapt_cmd = [aapt,
+              'crunch',
+              '-C', output_dir,
+              '-S', input_dir,
+              '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+  build_utils.CheckOutput(aapt_cmd, stderr_filter=FilterCrunchStderr,
+                          fail_func=DidCrunchFail)
+
+  # Check for images whose size increased during crunching and replace them
+  # with their originals (except for 9-patches, which must be crunched).
+  for dir_, _, files in os.walk(output_dir):
+    for crunched in files:
+      if crunched.endswith('.9.png'):
+        continue
+      if not crunched.endswith('.png'):
+        raise Exception('Unexpected file in crunched dir: ' + crunched)
+      crunched = os.path.join(dir_, crunched)
+      original = os.path.join(input_dir, os.path.relpath(crunched, output_dir))
+      original_size = os.path.getsize(original)
+      crunched_size = os.path.getsize(crunched)
+      if original_size < crunched_size:
+        shutil.copyfile(original, crunched)
+
+
+def FilterCrunchStderr(stderr):
+  """Filters out lines from aapt crunch's stderr that can safely be ignored."""
+  filtered_lines = []
+  for line in stderr.splitlines(True):
+    # Ignore this libpng warning, which is a known non-error condition.
+    # http://crbug.com/364355
+    if ('libpng warning: iCCP: Not recognizing known sRGB profile that has '
+        + 'been edited' in line):
+      continue
+    filtered_lines.append(line)
+  return ''.join(filtered_lines)
+
+
+def DidCrunchFail(returncode, stderr):
+  """Determines whether aapt crunch failed from its return code and output.
+
+  Because aapt's return code cannot be trusted, any output to stderr is
+  an indication that aapt has failed (http://crbug.com/314885).
+  """
+  return returncode != 0 or stderr
+
+
+def ZipResources(resource_dirs, zip_path):
+  # Python zipfile does not provide a way to replace a file (it just writes
+  # another file with the same name). So, first collect all the files to put
+  # in the zip (with proper overriding), and then zip them.
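+  # For instance (directory names illustrative): if resource_dirs is
+  # [res_dir, v14_dir] and both contain values/strings.xml, the entry recorded
+  # last wins, so the copy from v14_dir is the one written to the zip.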
+  files_to_zip = dict()
+  for d in resource_dirs:
+    for root, _, files in os.walk(d):
+      for f in files:
+        archive_path = os.path.join(os.path.relpath(root, d), f)
+        path = os.path.join(root, f)
+        files_to_zip[archive_path] = path
+  with zipfile.ZipFile(zip_path, 'w') as outzip:
+    for archive_path, path in files_to_zip.iteritems():
+      outzip.write(path, archive_path)
+
+
+def CombineZips(zip_files, output_path):
+  # When packaging resources, if the top-level directories in the zip file are
+  # of the form 0, 1, ..., then each subdirectory will be passed to aapt as a
+  # resources directory. While some resources just clobber others (image files,
+  # etc), other resources (particularly .xml files) need to be more
+  # intelligently merged. That merging is left up to aapt.
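+  # Sketch of the resulting layout: combining [a.zip, b.zip] prefixes every
+  # entry with its zip's index, e.g. a.zip's values/strings.xml becomes
+  # 0/values/strings.xml and b.zip's becomes 1/values/strings.xml, so each
+  # index can later be handed to aapt as its own -S directory.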
+  with zipfile.ZipFile(output_path, 'w') as outzip:
+    for i, z in enumerate(zip_files):
+      with zipfile.ZipFile(z, 'r') as inzip:
+        for name in inzip.namelist():
+          new_name = '%d/%s' % (i, name)
+          outzip.writestr(new_name, inzip.read(name))
+
+
+def main():
+  args = build_utils.ExpandFileArgs(sys.argv[1:])
+
+  options = ParseArgs(args)
+  android_jar = os.path.join(options.android_sdk, 'android.jar')
+  aapt = options.aapt_path
+
+  input_files = []
+
+  with build_utils.TempDir() as temp_dir:
+    deps_dir = os.path.join(temp_dir, 'deps')
+    build_utils.MakeDirectory(deps_dir)
+    v14_dir = os.path.join(temp_dir, 'v14')
+    build_utils.MakeDirectory(v14_dir)
+
+    gen_dir = os.path.join(temp_dir, 'gen')
+    build_utils.MakeDirectory(gen_dir)
+
+    input_resource_dirs = build_utils.ParseGypList(options.resource_dirs)
+
+    if not options.v14_skip:
+      for resource_dir in input_resource_dirs:
+        generate_v14_compatible_resources.GenerateV14Resources(
+            resource_dir,
+            v14_dir)
+
+    dep_zips = build_utils.ParseGypList(options.dependencies_res_zips)
+    input_files += dep_zips
+    dep_subdirs = []
+    for z in dep_zips:
+      subdir = os.path.join(deps_dir, os.path.basename(z))
+      if os.path.exists(subdir):
+        raise Exception('Resource zip name conflict: ' + os.path.basename(z))
+      build_utils.ExtractAll(z, path=subdir)
+      dep_subdirs.append(subdir)
+
+    # Generate R.java. This R.java contains non-final constants and is used only
+    # while compiling the library jar (e.g. chromium_content.jar). When building
+    # an apk, a new R.java file with the correct resource -> ID mappings will be
+    # generated by merging the resources from all libraries and the main apk
+    # project.
+    package_command = [aapt,
+                       'package',
+                       '-m',
+                       '-M', options.android_manifest,
+                       '--auto-add-overlay',
+                       '-I', android_jar,
+                       '--output-text-symbols', gen_dir,
+                       '-J', gen_dir,
+                       '--ignore-assets', build_utils.AAPT_IGNORE_PATTERN]
+
+    for d in input_resource_dirs:
+      package_command += ['-S', d]
+
+    for d in dep_subdirs:
+      package_command += ['-S', d]
+
+    if options.non_constant_id:
+      package_command.append('--non-constant-id')
+    if options.custom_package:
+      package_command += ['--custom-package', options.custom_package]
+    if options.proguard_file:
+      package_command += ['-G', options.proguard_file]
+    if options.shared_resources:
+      package_command.append('--shared-lib')
+    build_utils.CheckOutput(package_command, print_stderr=False)
+
+    if options.extra_res_packages:
+      CreateExtraRJavaFiles(
+          gen_dir,
+          build_utils.ParseGypList(options.extra_res_packages),
+          build_utils.ParseGypList(options.extra_r_text_files),
+          options.shared_resources,
+          options.include_all_resources)
+
+    # This is the list of directories with resources to put in the final .zip
+    # file. The order of these is important so that crunched/v14 resources
+    # override the normal ones.
+    zip_resource_dirs = input_resource_dirs + [v14_dir]
+
+    base_crunch_dir = os.path.join(temp_dir, 'crunch')
+
+    # Crunch image resources. This shrinks png files and is necessary for
+    # 9-patch images to display correctly. 'aapt crunch' accepts only a single
+    # directory at a time and deletes everything in the output directory.
+    for idx, input_dir in enumerate(input_resource_dirs):
+      crunch_dir = os.path.join(base_crunch_dir, str(idx))
+      build_utils.MakeDirectory(crunch_dir)
+      zip_resource_dirs.append(crunch_dir)
+      CrunchDirectory(aapt, input_dir, crunch_dir)
+
+    ZipResources(zip_resource_dirs, options.resource_zip_out)
+
+    if options.all_resources_zip_out:
+      CombineZips([options.resource_zip_out] + dep_zips,
+                  options.all_resources_zip_out)
+
+    if options.R_dir:
+      build_utils.DeleteDirectory(options.R_dir)
+      shutil.copytree(gen_dir, options.R_dir)
+    else:
+      build_utils.ZipDir(options.srcjar_out, gen_dir)
+
+    if options.r_text_out:
+      r_text_path = os.path.join(gen_dir, 'R.txt')
+      if os.path.exists(r_text_path):
+        shutil.copyfile(r_text_path, options.r_text_out)
+      else:
+        open(options.r_text_out, 'w').close()
+
+  if options.depfile:
+    input_files += build_utils.GetPythonDependencies()
+    build_utils.WriteDepfile(options.depfile, input_files)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/gyp/proguard.py b/build/android/gyp/proguard.py
new file mode 100755
index 0000000..5127100
--- /dev/null
+++ b/build/android/gyp/proguard.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import sys
+
+from util import build_utils
+from util import proguard_util
+
+def DoProguard(options):
+  proguard = proguard_util.ProguardCmdBuilder(options.proguard_path)
+  proguard.injars(build_utils.ParseGypList(options.input_paths))
+  proguard.configs(build_utils.ParseGypList(options.proguard_configs))
+  proguard.outjar(options.output_path)
+
+  if options.mapping:
+    proguard.mapping(options.mapping)
+
+  if options.is_test:
+    proguard.is_test(True)
+
+  classpath = []
+  for arg in options.classpath:
+    classpath += build_utils.ParseGypList(arg)
+  classpath = list(set(classpath))
+  proguard.libraryjars(classpath)
+
+  proguard.CheckOutput()
+
+  return proguard.GetInputs()
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--proguard-path',
+                    help='Path to the proguard executable.')
+  parser.add_option('--input-paths',
+                    help='Paths to the .jar files proguard should run on.')
+  parser.add_option('--output-path', help='Path to the generated .jar file.')
+  parser.add_option('--proguard-configs',
+                    help='Paths to proguard configuration files.')
+  parser.add_option('--mapping', help='Path to proguard mapping to apply.')
+  parser.add_option('--is-test', action='store_true',
+      help='If true, extra proguard options for instrumentation tests will be '
+      'added.')
+  parser.add_option('--classpath', action='append',
+                    help='Classpath for proguard.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args(args)
+
+  inputs = DoProguard(options)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        inputs + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/push_libraries.py b/build/android/gyp/push_libraries.py
new file mode 100755
index 0000000..6b31a2e
--- /dev/null
+++ b/build/android/gyp/push_libraries.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Pushes native libraries to a device.
+
+"""
+
+import optparse
+import os
+import sys
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), os.pardir)
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import constants
+
+from util import build_device
+from util import build_utils
+from util import md5_check
+
+def DoPush(options):
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  device = build_device.GetBuildDeviceFromPath(
+      options.build_device_configuration)
+  if not device:
+    return
+
+  serial_number = device.GetSerialNumber()
+  # A one-element list (rather than a plain bool) so that the nested Push()
+  # closure below can clear it; Python 2 has no 'nonlocal'.
+  needs_directory = [True]
+  for lib in libraries:
+    device_path = os.path.join(options.device_dir, lib)
+    host_path = os.path.join(options.libraries_dir, lib)
+
+    def Push():
+      if needs_directory:
+        device.RunShellCommand('mkdir -p ' + options.device_dir)
+        needs_directory[:] = [] # = False
+      device.PushChangedFiles([(host_path, device_path)])
+
+    record_path = '%s.%s.push.md5.stamp' % (host_path, serial_number)
+    md5_check.CallAndRecordIfStale(
+        Push,
+        record_path=record_path,
+        input_paths=[host_path],
+        input_strings=[device_path])
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+  parser = optparse.OptionParser()
+  parser.add_option('--libraries-dir',
+      help='Directory that contains stripped libraries.')
+  parser.add_option('--device-dir',
+      help='Device directory to push the libraries to.')
+  parser.add_option('--libraries',
+      help='List of native libraries.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+  parser.add_option('--build-device-configuration',
+      help='Path to build device configuration.')
+  parser.add_option('--configuration-name',
+      help='The build CONFIGURATION_NAME')
+  options, _ = parser.parse_args(args)
+
+  required_options = ['libraries', 'device_dir', 'libraries_dir']
+  build_utils.CheckOptions(options, parser, required=required_options)
+  constants.SetBuildType(options.configuration_name)
+
+  DoPush(options)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/strip_library_for_device.py b/build/android/gyp/strip_library_for_device.py
new file mode 100755
index 0000000..9e2daae
--- /dev/null
+++ b/build/android/gyp/strip_library_for_device.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import optparse
+import os
+import sys
+
+from util import build_utils
+
+
+def StripLibrary(android_strip, android_strip_args, library_path, output_path):
+  if build_utils.IsTimeStale(output_path, [library_path]):
+    strip_cmd = ([android_strip] +
+                 android_strip_args +
+                 ['-o', output_path, library_path])
+    build_utils.CheckOutput(strip_cmd)
+
+
+def main(args):
+  args = build_utils.ExpandFileArgs(args)
+
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--android-strip',
+      help='Path to the toolchain\'s strip binary')
+  parser.add_option('--android-strip-arg', action='append',
+      help='Argument to be passed to strip')
+  parser.add_option('--libraries-dir',
+      help='Directory for un-stripped libraries')
+  parser.add_option('--stripped-libraries-dir',
+      help='Directory for stripped libraries')
+  parser.add_option('--libraries',
+      help='List of libraries to strip')
+  parser.add_option('--stamp', help='Path to touch on success')
+
+  options, _ = parser.parse_args(args)
+
+  libraries = build_utils.ParseGypList(options.libraries)
+
+  build_utils.MakeDirectory(options.stripped_libraries_dir)
+
+  for library in libraries:
+    for base_path in options.libraries_dir.split(','):
+      library_path = os.path.join(base_path, library)
+      if os.path.exists(library_path):
+        break
+    stripped_library_path = os.path.join(
+        options.stripped_libraries_dir, library)
+    StripLibrary(options.android_strip, options.android_strip_arg, library_path,
+        stripped_library_path)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/test/BUILD.gn b/build/android/gyp/test/BUILD.gn
new file mode 100644
index 0000000..2deac1d
--- /dev/null
+++ b/build/android/gyp/test/BUILD.gn
@@ -0,0 +1,13 @@
+import("//build/config/android/rules.gni")
+
+java_library("hello_world_java") {
+  java_files = [ "java/org/chromium/helloworld/HelloWorldPrinter.java" ]
+}
+
+java_binary("hello_world") {
+  deps = [
+    ":hello_world_java",
+  ]
+  java_files = [ "java/org/chromium/helloworld/HelloWorldMain.java" ]
+  main_class = "org.chromium.helloworld.HelloWorldMain"
+}
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
new file mode 100644
index 0000000..10860d8
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldMain.java
@@ -0,0 +1,15 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldMain {
+    public static void main(String[] args) {
+        if (args.length > 0) {
+            System.exit(Integer.parseInt(args[0]));
+        }
+        HelloWorldPrinter.print();
+    }
+}
+
diff --git a/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
new file mode 100644
index 0000000..b09673e
--- /dev/null
+++ b/build/android/gyp/test/java/org/chromium/helloworld/HelloWorldPrinter.java
@@ -0,0 +1,12 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.helloworld;
+
+public class HelloWorldPrinter {
+    public static void print() {
+        System.out.println("Hello, world!");
+    }
+}
+
diff --git a/build/android/gyp/touch.py b/build/android/gyp/touch.py
new file mode 100755
index 0000000..7b4375e
--- /dev/null
+++ b/build/android/gyp/touch.py
@@ -0,0 +1,16 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import sys
+
+from util import build_utils
+
+def main(argv):
+  for f in argv[1:]:
+    build_utils.Touch(f)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/gyp/util/__init__.py b/build/android/gyp/util/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/gyp/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/gyp/util/build_device.py b/build/android/gyp/util/build_device.py
new file mode 100644
index 0000000..8ab1112
--- /dev/null
+++ b/build/android/gyp/util/build_device.py
@@ -0,0 +1,108 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" A simple device interface for build steps.
+
+"""
+
+import logging
+import os
+import re
+import sys
+
+from util import build_utils
+
+BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..', '..')
+sys.path.append(BUILD_ANDROID_DIR)
+
+from pylib import android_commands
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+GetAttachedDevices = android_commands.GetAttachedDevices
+
+
+class BuildDevice(object):
+  def __init__(self, configuration):
+    self.id = configuration['id']
+    self.description = configuration['description']
+    self.install_metadata = configuration['install_metadata']
+    self.device = device_utils.DeviceUtils(self.id)
+
+  def RunShellCommand(self, *args, **kwargs):
+    return self.device.RunShellCommand(*args, **kwargs)
+
+  def PushChangedFiles(self, *args, **kwargs):
+    return self.device.PushChangedFiles(*args, **kwargs)
+
+  def GetSerialNumber(self):
+    return self.id
+
+  def Install(self, *args, **kwargs):
+    return self.device.Install(*args, **kwargs)
+
+  def InstallSplitApk(self, *args, **kwargs):
+    return self.device.InstallSplitApk(*args, **kwargs)
+
+  def GetInstallMetadata(self, apk_package):
+    """Gets the metadata on the device for the apk_package apk."""
+    # Matches lines like:
+    # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+    #   org.chromium.chrome.shell.apk
+    # -rw-r--r-- system   system    7376582 2013-04-19 16:34 \
+    #   org.chromium.chrome.shell-1.apk
+    apk_matcher = lambda s: re.match('.*%s(-[0-9]*)?.apk$' % apk_package, s)
+    matches = filter(apk_matcher, self.install_metadata)
+    return matches[0] if matches else None
+
+
+def GetConfigurationForDevice(device_id):
+  device = device_utils.DeviceUtils(device_id)
+  configuration = None
+  has_root = False
+  is_online = device.IsOnline()
+  if is_online:
+    cmd = 'ls -l /data/app; getprop ro.build.description'
+    cmd_output = device.RunShellCommand(cmd)
+    has_root = 'Permission denied' not in cmd_output[0]
+    if not has_root:
+      # Disable warning log messages from EnableRoot()
+      logging.getLogger().disabled = True
+      try:
+        device.EnableRoot()
+        has_root = True
+      except device_errors.CommandFailedError:
+        has_root = False
+      finally:
+        logging.getLogger().disabled = False
+      cmd_output = device.RunShellCommand(cmd)
+
+    configuration = {
+        'id': device_id,
+        'description': cmd_output[-1],
+        'install_metadata': cmd_output[:-1],
+      }
+  return configuration, is_online, has_root
+
+
+def WriteConfigurations(configurations, path):
+  # Currently we only support installing to the first device.
+  build_utils.WriteJson(configurations[:1], path, only_if_changed=True)
+
+
+def ReadConfigurations(path):
+  return build_utils.ReadJson(path)
+
+
+def GetBuildDevice(configurations):
+  assert len(configurations) == 1
+  return BuildDevice(configurations[0])
+
+
+def GetBuildDeviceFromPath(path):
+  configurations = ReadConfigurations(path)
+  if len(configurations) > 0:
+    return GetBuildDevice(configurations)
+  return None
+
diff --git a/build/android/gyp/util/build_utils.py b/build/android/gyp/util/build_utils.py
new file mode 100644
index 0000000..65b1a64
--- /dev/null
+++ b/build/android/gyp/util/build_utils.py
@@ -0,0 +1,376 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import ast
+import contextlib
+import fnmatch
+import json
+import os
+import pipes
+import re
+import shlex
+import shutil
+import subprocess
+import sys
+import tempfile
+import zipfile
+
+
+CHROMIUM_SRC = os.path.normpath(
+    os.path.join(os.path.dirname(__file__),
+                 os.pardir, os.pardir, os.pardir, os.pardir))
+COLORAMA_ROOT = os.path.join(CHROMIUM_SRC,
+                             'third_party', 'colorama', 'src')
+# aapt should ignore OWNERS files in addition to the default ignore pattern.
+AAPT_IGNORE_PATTERN = ('!OWNERS:!.svn:!.git:!.ds_store:!*.scc:.*:<dir>_*:' +
+                       '!CVS:!thumbs.db:!picasa.ini:!*~:!*.d.stamp')
+
+
+@contextlib.contextmanager
+def TempDir():
+  dirname = tempfile.mkdtemp()
+  try:
+    yield dirname
+  finally:
+    shutil.rmtree(dirname)
+
+
+def MakeDirectory(dir_path):
+  try:
+    os.makedirs(dir_path)
+  except OSError:
+    pass
+
+
+def DeleteDirectory(dir_path):
+  if os.path.exists(dir_path):
+    shutil.rmtree(dir_path)
+
+
+def Touch(path, fail_if_missing=False):
+  if fail_if_missing and not os.path.exists(path):
+    raise Exception(path + ' doesn\'t exist.')
+
+  MakeDirectory(os.path.dirname(path))
+  with open(path, 'a'):
+    os.utime(path, None)
+
+
+def FindInDirectory(directory, filename_filter):
+  files = []
+  for root, _dirnames, filenames in os.walk(directory):
+    matched_files = fnmatch.filter(filenames, filename_filter)
+    files.extend((os.path.join(root, f) for f in matched_files))
+  return files
+
+
+def FindInDirectories(directories, filename_filter):
+  all_files = []
+  for directory in directories:
+    all_files.extend(FindInDirectory(directory, filename_filter))
+  return all_files
+
+
+def ParseGnList(gn_string):
+  return ast.literal_eval(gn_string)
+
+
+def ParseGypList(gyp_string):
+  # The ninja generator doesn't support $ in strings, so use ## to
+  # represent $.
+  # TODO(cjhopman): Remove when
+  # https://code.google.com/p/gyp/issues/detail?id=327
+  # is addressed.
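+  # For example, 'a.jar b.jar' parses to ['a.jar', 'b.jar'], while a GN-style
+  # value such as '["a.jar", "b.jar"]' is handled by ParseGnList above.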
+  gyp_string = gyp_string.replace('##', '$')
+
+  if gyp_string.startswith('['):
+    return ParseGnList(gyp_string)
+  return shlex.split(gyp_string)
+
+
+def CheckOptions(options, parser, required=None):
+  if not required:
+    return
+  for option_name in required:
+    if getattr(options, option_name) is None:
+      parser.error('--%s is required' % option_name.replace('_', '-'))
+
+
+def WriteJson(obj, path, only_if_changed=False):
+  old_dump = None
+  if os.path.exists(path):
+    with open(path, 'r') as oldfile:
+      old_dump = oldfile.read()
+
+  new_dump = json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': '))
+
+  if not only_if_changed or old_dump != new_dump:
+    with open(path, 'w') as outfile:
+      outfile.write(new_dump)
+
+
+def ReadJson(path):
+  with open(path, 'r') as jsonfile:
+    return json.load(jsonfile)
+
+
+class CalledProcessError(Exception):
+  """This exception is raised when the process run by CheckOutput
+  exits with a non-zero exit code."""
+
+  def __init__(self, cwd, args, output):
+    super(CalledProcessError, self).__init__()
+    self.cwd = cwd
+    self.args = args
+    self.output = output
+
+  def __str__(self):
+    # A user should be able to simply copy and paste the command that failed
+    # into their shell.
+    copyable_command = '( cd {}; {} )'.format(os.path.abspath(self.cwd),
+        ' '.join(map(pipes.quote, self.args)))
+    return 'Command failed: {}\n{}'.format(copyable_command, self.output)
+
+
+# This can be used in most cases like subprocess.check_output(). The output,
+# particularly when the command fails, better highlights the command's failure.
+# If the command fails, raises a build_utils.CalledProcessError.
+def CheckOutput(args, cwd=None,
+                print_stdout=False, print_stderr=True,
+                stdout_filter=None,
+                stderr_filter=None,
+                fail_func=lambda returncode, stderr: returncode != 0):
+  if not cwd:
+    cwd = os.getcwd()
+
+  child = subprocess.Popen(args,
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
+  stdout, stderr = child.communicate()
+
+  if stdout_filter is not None:
+    stdout = stdout_filter(stdout)
+
+  if stderr_filter is not None:
+    stderr = stderr_filter(stderr)
+
+  if fail_func(child.returncode, stderr):
+    raise CalledProcessError(cwd, args, stdout + stderr)
+
+  if print_stdout:
+    sys.stdout.write(stdout)
+  if print_stderr:
+    sys.stderr.write(stderr)
+
+  return stdout
+
+
+def GetModifiedTime(path):
+  # For a symlink, the modified time should be the greater of the link's
+  # modified time and the modified time of the target.
+  return max(os.lstat(path).st_mtime, os.stat(path).st_mtime)
+
+
+def IsTimeStale(output, inputs):
+  if not os.path.exists(output):
+    return True
+
+  output_time = GetModifiedTime(output)
+  for i in inputs:
+    if GetModifiedTime(i) > output_time:
+      return True
+  return False
+
+
+def IsDeviceReady():
+  device_state = CheckOutput(['adb', 'get-state'])
+  return device_state.strip() == 'device'
+
+
+def CheckZipPath(name):
+  if os.path.normpath(name) != name:
+    raise Exception('Non-canonical zip path: %s' % name)
+  if os.path.isabs(name):
+    raise Exception('Absolute zip path: %s' % name)
+
+
+def ExtractAll(zip_path, path=None, no_clobber=True, pattern=None):
+  if path is None:
+    path = os.getcwd()
+  elif not os.path.exists(path):
+    MakeDirectory(path)
+
+  with zipfile.ZipFile(zip_path) as z:
+    for name in z.namelist():
+      if name.endswith('/'):
+        continue
+      if pattern is not None:
+        if not fnmatch.fnmatch(name, pattern):
+          continue
+      CheckZipPath(name)
+      if no_clobber:
+        output_path = os.path.join(path, name)
+        if os.path.exists(output_path):
+          raise Exception(
+              'Path already exists from zip: %s %s %s'
+              % (zip_path, name, output_path))
+      z.extract(name, path)
+
+
+def DoZip(inputs, output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for f in inputs:
+      CheckZipPath(os.path.relpath(f, base_dir))
+      outfile.write(f, os.path.relpath(f, base_dir))
+
+
+def ZipDir(output, base_dir):
+  with zipfile.ZipFile(output, 'w') as outfile:
+    for root, _, files in os.walk(base_dir):
+      for f in files:
+        path = os.path.join(root, f)
+        archive_path = os.path.relpath(path, base_dir)
+        CheckZipPath(archive_path)
+        outfile.write(path, archive_path)
+
+
+def MergeZips(output, inputs, exclude_patterns=None):
+  added_names = set()
+  def Allow(name):
+    if exclude_patterns is not None:
+      for p in exclude_patterns:
+        if fnmatch.fnmatch(name, p):
+          return False
+    return True
+
+  with zipfile.ZipFile(output, 'w') as out_zip:
+    for in_file in inputs:
+      with zipfile.ZipFile(in_file, 'r') as in_zip:
+        for name in in_zip.namelist():
+          if name not in added_names and Allow(name):
+            out_zip.writestr(name, in_zip.read(name))
+            added_names.add(name)
+
+
+def PrintWarning(message):
+  print 'WARNING: ' + message
+
+
+def PrintBigWarning(message):
+  print '*****     ' * 8
+  PrintWarning(message)
+  print '*****     ' * 8
+
+
+def GetSortedTransitiveDependencies(top, deps_func):
+  """Gets the list of all transitive dependencies in sorted order.
+
+  There should be no cycles in the dependency graph.
+
+  Args:
+    top: a list of the top level nodes
+    deps_func: A function that takes a node and returns its direct dependencies.
+  Returns:
+    A list of all transitive dependencies of nodes in top, in order (a node will
+    appear in the list at a higher index than all of its dependencies).
+  """
+  def Node(dep):
+    return (dep, deps_func(dep))
+
+  # First: find all deps
+  unchecked_deps = list(top)
+  all_deps = set(top)
+  while unchecked_deps:
+    dep = unchecked_deps.pop()
+    new_deps = deps_func(dep).difference(all_deps)
+    unchecked_deps.extend(new_deps)
+    all_deps = all_deps.union(new_deps)
+
+  # Then: simple, slow topological sort.
+  sorted_deps = []
+  unsorted_deps = dict(map(Node, all_deps))
+  while unsorted_deps:
+    for library, dependencies in unsorted_deps.items():
+      if not dependencies.intersection(unsorted_deps.keys()):
+        sorted_deps.append(library)
+        del unsorted_deps[library]
+
+  return sorted_deps
+
+
+def GetPythonDependencies():
+  """Gets the paths of imported non-system python modules.
+
+  A path is assumed to be a "system" import if it is outside of chromium's
+  src/. The paths will be relative to the current directory.
+  """
+  module_paths = (m.__file__ for m in sys.modules.itervalues()
+                  if m is not None and hasattr(m, '__file__'))
+
+  abs_module_paths = map(os.path.abspath, module_paths)
+
+  non_system_module_paths = [
+      p for p in abs_module_paths if p.startswith(CHROMIUM_SRC)]
+  def ConvertPycToPy(s):
+    if s.endswith('.pyc'):
+      return s[:-1]
+    return s
+
+  non_system_module_paths = map(ConvertPycToPy, non_system_module_paths)
+  non_system_module_paths = map(os.path.relpath, non_system_module_paths)
+  return sorted(set(non_system_module_paths))
+
+
+def AddDepfileOption(parser):
+  parser.add_option('--depfile',
+                    help='Path to depfile. This must be specified as the '
+                    'action\'s first output.')
+
+
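+# Writes a Makefile-style depfile ('<target>: dep1 dep2 ...'). The depfile is
+# declared as the action's first output, so its own path is used as the target.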
+def WriteDepfile(path, dependencies):
+  with open(path, 'w') as depfile:
+    depfile.write(path)
+    depfile.write(': ')
+    depfile.write(' '.join(dependencies))
+    depfile.write('\n')
+
+
+def ExpandFileArgs(args):
+  """Replaces file-arg placeholders in args.
+
+  These placeholders have the form:
+    @FileArg(filename:key1:key2:...:keyn)
+
+  The value of such a placeholder is calculated by reading 'filename' as json
+  and then extracting the value at [key1][key2]...[keyn].
+
+  Note: This intentionally does not return the list of files that appear in such
+  placeholders. An action that uses file-args *must* know the paths of those
+  files prior to the parsing of the arguments (typically by explicitly listing
+  them in the action's inputs in build files).
+  """
+  new_args = list(args)
+  file_jsons = dict()
+  r = re.compile('@FileArg\((.*?)\)')
+  for i, arg in enumerate(args):
+    match = r.search(arg)
+    if not match:
+      continue
+
+    if match.end() != len(arg):
+      raise Exception('Unexpected characters after FileArg: ' + arg)
+
+    lookup_path = match.group(1).split(':')
+    file_path = lookup_path[0]
+    if not file_path in file_jsons:
+      file_jsons[file_path] = ReadJson(file_path)
+
+    expansion = file_jsons[file_path]
+    for k in lookup_path[1:]:
+      expansion = expansion[k]
+
+    new_args[i] = arg[:match.start()] + str(expansion)
+
+  return new_args
+
diff --git a/build/android/gyp/util/md5_check.py b/build/android/gyp/util/md5_check.py
new file mode 100644
index 0000000..9f365aa
--- /dev/null
+++ b/build/android/gyp/util/md5_check.py
@@ -0,0 +1,86 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import hashlib
+import os
+
+
+def CallAndRecordIfStale(
+    function, record_path=None, input_paths=None, input_strings=None,
+    force=False):
+  """Calls function if the md5sum of the input paths/strings has changed.
+
+  The md5sum of the inputs is compared with the one stored in record_path. If
+  this has changed (or the record doesn't exist), function will be called and
+  the new md5sum will be recorded.
+
+  If force is True, the function will be called regardless of whether the
+  md5sum is out of date.
+  """
+  if not input_paths:
+    input_paths = []
+  if not input_strings:
+    input_strings = []
+  md5_checker = _Md5Checker(
+      record_path=record_path,
+      input_paths=input_paths,
+      input_strings=input_strings)
+  if force or md5_checker.IsStale():
+    function()
+    md5_checker.Write()
+
+
+def _UpdateMd5ForFile(md5, path, block_size=2**16):
+  with open(path, 'rb') as infile:
+    while True:
+      data = infile.read(block_size)
+      if not data:
+        break
+      md5.update(data)
+
+
+def _UpdateMd5ForDirectory(md5, dir_path):
+  for root, _, files in os.walk(dir_path):
+    for f in files:
+      _UpdateMd5ForFile(md5, os.path.join(root, f))
+
+
+def _UpdateMd5ForPath(md5, path):
+  if os.path.isdir(path):
+    _UpdateMd5ForDirectory(md5, path)
+  else:
+    _UpdateMd5ForFile(md5, path)
+
+
+class _Md5Checker(object):
+  def __init__(self, record_path=None, input_paths=None, input_strings=None):
+    if not input_paths:
+      input_paths = []
+    if not input_strings:
+      input_strings = []
+
+    assert record_path.endswith('.stamp'), (
+        'record paths must end in \'.stamp\' so that they are easy to find '
+        'and delete')
+
+    self.record_path = record_path
+
+    md5 = hashlib.md5()
+    for i in sorted(input_paths):
+      _UpdateMd5ForPath(md5, i)
+    for s in input_strings:
+      md5.update(s)
+    self.new_digest = md5.hexdigest()
+
+    self.old_digest = ''
+    if os.path.exists(self.record_path):
+      with open(self.record_path, 'r') as old_record:
+        self.old_digest = old_record.read()
+
+  def IsStale(self):
+    return self.old_digest != self.new_digest
+
+  def Write(self):
+    with open(self.record_path, 'w') as new_record:
+      new_record.write(self.new_digest)
diff --git a/build/android/gyp/util/md5_check_test.py b/build/android/gyp/util/md5_check_test.py
new file mode 100644
index 0000000..4f89fc2
--- /dev/null
+++ b/build/android/gyp/util/md5_check_test.py
@@ -0,0 +1,72 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import tempfile
+import unittest
+
+import md5_check # pylint: disable=W0403
+
+
+class TestMd5Check(unittest.TestCase):
+  def setUp(self):
+    self.called = False
+
+  def testCallAndRecordIfStale(self):
+    input_strings = ['string1', 'string2']
+    input_file1 = tempfile.NamedTemporaryFile()
+    input_file2 = tempfile.NamedTemporaryFile()
+    file1_contents = 'input file 1'
+    file2_contents = 'input file 2'
+    input_file1.write(file1_contents)
+    input_file1.flush()
+    input_file2.write(file2_contents)
+    input_file2.flush()
+    input_files = [input_file1.name, input_file2.name]
+
+    record_path = tempfile.NamedTemporaryFile(suffix='.stamp')
+
+    def CheckCallAndRecord(should_call, message, force=False):
+      self.called = False
+      def MarkCalled():
+        self.called = True
+      md5_check.CallAndRecordIfStale(
+          MarkCalled,
+          record_path=record_path.name,
+          input_paths=input_files,
+          input_strings=input_strings,
+          force=force)
+      self.failUnlessEqual(should_call, self.called, message)
+
+    CheckCallAndRecord(True, 'should call when record doesn\'t exist')
+    CheckCallAndRecord(False, 'should not call when nothing changed')
+    CheckCallAndRecord(True, force=True, message='should call when forced')
+
+    input_file1.write('some more input')
+    input_file1.flush()
+    CheckCallAndRecord(True, 'changed input file should trigger call')
+
+    input_files = input_files[::-1]
+    CheckCallAndRecord(False, 'reordering of inputs shouldn\'t trigger call')
+
+    input_files = input_files[:1]
+    CheckCallAndRecord(True, 'removing file should trigger call')
+
+    input_files.append(input_file2.name)
+    CheckCallAndRecord(True, 'added input file should trigger call')
+
+    input_strings[0] = input_strings[0] + ' a bit longer'
+    CheckCallAndRecord(True, 'changed input string should trigger call')
+
+    input_strings = input_strings[::-1]
+    CheckCallAndRecord(True, 'reordering of string inputs should trigger call')
+
+    input_strings = input_strings[:1]
+    CheckCallAndRecord(True, 'removing a string should trigger call')
+
+    input_strings.append('a brand new string')
+    CheckCallAndRecord(True, 'added input string should trigger call')
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/gyp/util/proguard_util.py b/build/android/gyp/util/proguard_util.py
new file mode 100644
index 0000000..901cd9f
--- /dev/null
+++ b/build/android/gyp/util/proguard_util.py
@@ -0,0 +1,128 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+from util import build_utils
+
+def FilterProguardOutput(output):
+  '''ProGuard outputs boring stuff to stdout (proguard version, jar path, etc)
+  as well as interesting stuff (notes, warnings, etc). If stdout is entirely
+  boring, this method suppresses the output.
+  '''
+  ignore_patterns = [
+    'ProGuard, version ',
+    'Reading program jar [',
+    'Reading library jar [',
+    'Preparing output jar [',
+    '  Copying resources from program jar [',
+  ]
+  for line in output.splitlines():
+    for pattern in ignore_patterns:
+      if line.startswith(pattern):
+        break
+    else:
+      # line doesn't match any of the patterns; it's probably something worth
+      # printing out.
+      return output
+  return ''
+
+
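+# Illustrative usage (paths are hypothetical):
+#   proguard = ProguardCmdBuilder('third_party/proguard/proguard.jar')
+#   proguard.injars(['obj/foo.pre.jar'])
+#   proguard.configs(['foo/proguard.flags'])
+#   proguard.outjar('obj/foo.jar')
+#   proguard.CheckOutput()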
+class ProguardCmdBuilder(object):
+  def __init__(self, proguard_jar):
+    assert os.path.exists(proguard_jar)
+    self._proguard_jar_path = proguard_jar
+    self._test = None
+    self._mapping = None
+    self._libraries = None
+    self._injars = None
+    self._configs = None
+    self._outjar = None
+
+  def outjar(self, path):
+    assert self._outjar is None
+    self._outjar = path
+
+  def is_test(self, enable):
+    assert self._test is None
+    self._test = enable
+
+  def mapping(self, path):
+    assert self._mapping is None
+    assert os.path.exists(path), path
+    self._mapping = path
+
+  def libraryjars(self, paths):
+    assert self._libraries is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._libraries = paths
+
+  def injars(self, paths):
+    assert self._injars is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._injars = paths
+
+  def configs(self, paths):
+    assert self._configs is None
+    for p in paths:
+      assert os.path.exists(p), p
+    self._configs = paths
+
+  def build(self):
+    assert self._injars is not None
+    assert self._outjar is not None
+    assert self._configs is not None
+    cmd = [
+      'java', '-jar', self._proguard_jar_path,
+      '-forceprocessing',
+    ]
+    if self._test:
+      cmd += [
+        '-dontobfuscate',
+        '-dontoptimize',
+        '-dontshrink',
+        '-dontskipnonpubliclibraryclassmembers',
+      ]
+
+    if self._mapping:
+      cmd += [
+        '-applymapping', self._mapping,
+      ]
+
+    if self._libraries:
+      cmd += [
+        '-libraryjars', ':'.join(self._libraries),
+      ]
+
+    cmd += [
+      '-injars', ':'.join(self._injars)
+    ]
+
+    for config_file in self._configs:
+      cmd += ['-include', config_file]
+
+    # The output jar must be specified after inputs.
+    cmd += [
+      '-outjars', self._outjar,
+      '-dump', self._outjar + '.dump',
+      '-printseeds', self._outjar + '.seeds',
+      '-printusage', self._outjar + '.usage',
+      '-printmapping', self._outjar + '.mapping',
+    ]
+    return cmd
+
+  def GetInputs(self):
+    inputs = [self._proguard_jar_path] + self._configs + self._injars
+    if self._mapping:
+      inputs.append(self._mapping)
+    if self._libraries:
+      inputs += self._libraries
+    return inputs
+
+
+  def CheckOutput(self):
+    build_utils.CheckOutput(self.build(), print_stdout=True,
+                            stdout_filter=FilterProguardOutput)
+
diff --git a/build/android/gyp/write_build_config.py b/build/android/gyp/write_build_config.py
new file mode 100755
index 0000000..3773e98
--- /dev/null
+++ b/build/android/gyp/write_build_config.py
@@ -0,0 +1,357 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes a build_config file.
+
+The build_config file for a target is a json file containing information about
+how to build that target based on the target's dependencies. This includes
+things like: the javac classpath, the list of android resources dependencies,
+etc. It also includes the information needed to create the build_config for
+other targets that depend on that one.
+
+Android build scripts should not refer to the build_config directly, and the
+build specification should instead pass information in using the special
+file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
+of values in a json dict in a file and looks like this:
+  --python-arg=@FileArg(build_config_path:javac:classpath)
+
+Note: If paths to input files are passed in this way, it is important that:
+  1. inputs/deps of the action ensure that the files are available the first
+  time the action runs.
+  2. Either (a) or (b)
+    a. inputs/deps ensure that the action runs whenever one of the files changes
+    b. the files are added to the action's depfile
+"""
+
+import optparse
+import os
+import sys
+import xml.dom.minidom
+
+from util import build_utils
+
+import write_ordered_libraries
+
+class AndroidManifest(object):
+  def __init__(self, path):
+    self.path = path
+    dom = xml.dom.minidom.parse(path)
+    manifests = dom.getElementsByTagName('manifest')
+    assert len(manifests) == 1
+    self.manifest = manifests[0]
+
+  def GetInstrumentation(self):
+    instrumentation_els = self.manifest.getElementsByTagName('instrumentation')
+    if len(instrumentation_els) == 0:
+      return None
+    if len(instrumentation_els) != 1:
+      raise Exception(
+          'More than one <instrumentation> element found in %s' % self.path)
+    return instrumentation_els[0]
+
+  def CheckInstrumentation(self, expected_package):
+    instr = self.GetInstrumentation()
+    if not instr:
+      raise Exception('No <instrumentation> elements found in %s' % self.path)
+    instrumented_package = instr.getAttributeNS(
+        'http://schemas.android.com/apk/res/android', 'targetPackage')
+    if instrumented_package != expected_package:
+      raise Exception(
+          'Wrong instrumented package. Expected %s, got %s'
+          % (expected_package, instrumented_package))
+
+  def GetPackageName(self):
+    return self.manifest.getAttribute('package')
+
+
+dep_config_cache = {}
+def GetDepConfig(path):
+  if not path in dep_config_cache:
+    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
+  return dep_config_cache[path]
+
+
+def DepsOfType(wanted_type, configs):
+  return [c for c in configs if c['type'] == wanted_type]
+
+
+def GetAllDepsConfigsInOrder(deps_config_paths):
+  def GetDeps(path):
+    return set(GetDepConfig(path)['deps_configs'])
+  return build_utils.GetSortedTransitiveDependencies(deps_config_paths, GetDeps)
+
+
+class Deps(object):
+  def __init__(self, direct_deps_config_paths):
+    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
+        direct_deps_config_paths)
+    self.direct_deps_configs = [
+        GetDepConfig(p) for p in direct_deps_config_paths]
+    self.all_deps_configs = [
+        GetDepConfig(p) for p in self.all_deps_config_paths]
+
+  def All(self, wanted_type=None):
+    if wanted_type is None:
+      return self.all_deps_configs
+    return DepsOfType(wanted_type, self.all_deps_configs)
+
+  def Direct(self, wanted_type=None):
+    if wanted_type is None:
+      return self.direct_deps_configs
+    return DepsOfType(wanted_type, self.direct_deps_configs)
+
+  def AllConfigPaths(self):
+    return self.all_deps_config_paths
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option('--build-config', help='Path to build_config output.')
+  parser.add_option(
+      '--type',
+      help='Type of this target (e.g. android_library).')
+  parser.add_option(
+      '--possible-deps-configs',
+      help='List of paths for dependency\'s build_config files. Some '
+      'dependencies may not write build_config files. Missing build_config '
+      'files are handled differently based on the type of this target.')
+
+  # android_resources options
+  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
+  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
+  parser.add_option('--r-text', help='Path to target\'s R.txt file.')
+  parser.add_option('--package-name',
+      help='Java package name for these resources.')
+  parser.add_option('--android-manifest', help='Path to android manifest.')
+
+  # java library options
+  parser.add_option('--jar-path', help='Path to target\'s jar output.')
+  parser.add_option('--supports-android', action='store_true',
+      help='Whether this library supports running on the Android platform.')
+  parser.add_option('--requires-android', action='store_true',
+      help='Whether this library requires running on the Android platform.')
+  parser.add_option('--bypass-platform-checks', action='store_true',
+      help='Bypass checks for support/require Android platform.')
+
+  # android library options
+  parser.add_option('--dex-path', help='Path to target\'s dex output.')
+
+  # native library options
+  parser.add_option('--native-libs', help='List of top-level native libs.')
+  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
+
+  parser.add_option('--tested-apk-config',
+      help='Path to the build config of the tested apk (for an instrumentation '
+      'test apk).')
+
+  options, args = parser.parse_args(argv)
+
+  if args:
+    parser.error('No positional arguments should be given.')
+
+
+  if not options.type in [
+      'java_library', 'android_resources', 'android_apk', 'deps_dex']:
+    raise Exception('Unknown type: <%s>' % options.type)
+
+  required_options = ['build_config'] + {
+      'java_library': ['jar_path'],
+      'android_resources': ['resources_zip'],
+      'android_apk': ['jar_path', 'dex_path', 'resources_zip'],
+      'deps_dex': ['dex_path']
+    }[options.type]
+
+  if options.native_libs:
+    required_options.append('readelf_path')
+
+  build_utils.CheckOptions(options, parser, required_options)
+
+  if options.type == 'java_library':
+    if options.supports_android and not options.dex_path:
+      raise Exception('java_library that supports Android requires a dex path.')
+
+    if options.requires_android and not options.supports_android:
+      raise Exception(
+          '--supports-android is required when using --requires-android')
+
+  possible_deps_config_paths = build_utils.ParseGypList(
+      options.possible_deps_configs)
+
+  allow_unknown_deps = (options.type == 'android_apk' or
+                        options.type == 'android_resources')
+  unknown_deps = [
+      c for c in possible_deps_config_paths if not os.path.exists(c)]
+  if unknown_deps and not allow_unknown_deps:
+    raise Exception('Unknown deps: ' + str(unknown_deps))
+
+  direct_deps_config_paths = [
+      c for c in possible_deps_config_paths if not c in unknown_deps]
+
+  deps = Deps(direct_deps_config_paths)
+  direct_library_deps = deps.Direct('java_library')
+  all_library_deps = deps.All('java_library')
+
+  direct_resources_deps = deps.Direct('android_resources')
+  all_resources_deps = deps.All('android_resources')
+  # Resources should be ordered with the highest-level dependency first so that
+  # overrides are done correctly.
+  all_resources_deps.reverse()
+
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
+    all_resources_deps = [
+        d for d in all_resources_deps if not d in tested_apk_resources_deps]
+
+  # Initialize some common config.
+  config = {
+    'deps_info': {
+      'name': os.path.basename(options.build_config),
+      'path': options.build_config,
+      'type': options.type,
+      'deps_configs': direct_deps_config_paths,
+    }
+  }
+  deps_info = config['deps_info']
+
+  if options.type == 'java_library' and not options.bypass_platform_checks:
+    deps_info['requires_android'] = options.requires_android
+    deps_info['supports_android'] = options.supports_android
+
+    deps_require_android = (all_resources_deps +
+        [d['name'] for d in all_library_deps if d['requires_android']])
+    deps_not_support_android = (
+        [d['name'] for d in all_library_deps if not d['supports_android']])
+
+    if deps_require_android and not options.requires_android:
+      raise Exception('Some deps require building for the Android platform: ' +
+          str(deps_require_android))
+
+    if deps_not_support_android and options.supports_android:
+      raise Exception('Not all deps support the Android platform: ' +
+          str(deps_not_support_android))
+
+  if options.type in ['java_library', 'android_apk']:
+    javac_classpath = [c['jar_path'] for c in direct_library_deps]
+    java_full_classpath = [c['jar_path'] for c in all_library_deps]
+    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
+    deps_info['jar_path'] = options.jar_path
+    if options.type == 'android_apk' or options.supports_android:
+      deps_info['dex_path'] = options.dex_path
+    config['javac'] = {
+      'classpath': javac_classpath,
+    }
+    config['java'] = {
+      'full_classpath': java_full_classpath
+    }
+
+  if options.type == 'java_library':
+    # Only resources might have srcjars (normal srcjar targets are listed in
+    # srcjar_deps). A resource's srcjar contains the R.java file for those
+    # resources, and (like Android's default build system) we allow a library to
+    # refer to the resources in any of its dependencies.
+    config['javac']['srcjars'] = [
+        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
+
+  if options.type == 'android_apk':
+    # Apks will get their resources srcjar explicitly passed to the java step.
+    config['javac']['srcjars'] = []
+
+  if options.type == 'android_resources':
+    deps_info['resources_zip'] = options.resources_zip
+    if options.srcjar:
+      deps_info['srcjar'] = options.srcjar
+    if options.android_manifest:
+      manifest = AndroidManifest(options.android_manifest)
+      deps_info['package_name'] = manifest.GetPackageName()
+    if options.package_name:
+      deps_info['package_name'] = options.package_name
+    if options.r_text:
+      deps_info['r_text'] = options.r_text
+
+  if options.type == 'android_resources' or options.type == 'android_apk':
+    config['resources'] = {}
+    config['resources']['dependency_zips'] = [
+        c['resources_zip'] for c in all_resources_deps]
+    config['resources']['extra_package_names'] = []
+    config['resources']['extra_r_text_files'] = []
+
+  if options.type == 'android_apk':
+    config['resources']['extra_package_names'] = [
+        c['package_name'] for c in all_resources_deps if 'package_name' in c]
+    config['resources']['extra_r_text_files'] = [
+        c['r_text'] for c in all_resources_deps if 'r_text' in c]
+
+  if options.type in ['android_apk', 'deps_dex']:
+    deps_dex_files = [c['dex_path'] for c in all_library_deps]
+
+  # An instrumentation test apk should exclude the dex files that are in the apk
+  # under test.
+  if options.type == 'android_apk' and options.tested_apk_config:
+    tested_apk_deps = Deps([options.tested_apk_config])
+    tested_apk_library_deps = tested_apk_deps.All('java_library')
+    tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
+    deps_dex_files = [
+        p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
+
+    tested_apk_config = GetDepConfig(options.tested_apk_config)
+    expected_tested_package = tested_apk_config['package_name']
+    AndroidManifest(options.android_manifest).CheckInstrumentation(
+        expected_tested_package)
+
+  # Dependencies for the final dex file of an apk or a 'deps_dex'.
+  if options.type in ['android_apk', 'deps_dex']:
+    config['final_dex'] = {}
+    dex_config = config['final_dex']
+    # TODO(cjhopman): proguard version
+    dex_config['dependency_dex_files'] = deps_dex_files
+
+  if options.type == 'android_apk':
+    config['dist_jar'] = {
+      'dependency_jars': [
+        c['jar_path'] for c in all_library_deps
+      ]
+    }
+    manifest = AndroidManifest(options.android_manifest)
+    deps_info['package_name'] = manifest.GetPackageName()
+    if not options.tested_apk_config and manifest.GetInstrumentation():
+      # This must then have instrumentation only for itself.
+      manifest.CheckInstrumentation(manifest.GetPackageName())
+
+    library_paths = []
+    java_libraries_list = []
+    if options.native_libs:
+      libraries = build_utils.ParseGypList(options.native_libs)
+      if libraries:
+        libraries_dir = os.path.dirname(libraries[0])
+        write_ordered_libraries.SetReadelfPath(options.readelf_path)
+        write_ordered_libraries.SetLibraryDirs([libraries_dir])
+        all_native_library_deps = (
+            write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
+                libraries))
+        # Create a java literal array with the "base" library names:
+        # e.g. libfoo.so -> foo
+        java_libraries_list = '{%s}' % ','.join(
+            ['"%s"' % s[3:-3] for s in all_native_library_deps])
+        library_paths = map(
+            write_ordered_libraries.FullLibraryPath, all_native_library_deps)
+
+      config['native'] = {
+        'libraries': library_paths,
+        'java_libraries_list': java_libraries_list
+      }
+
+  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        deps.AllConfigPaths() + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/android/gyp/write_ordered_libraries.py b/build/android/gyp/write_ordered_libraries.py
new file mode 100755
index 0000000..0fc9a8c
--- /dev/null
+++ b/build/android/gyp/write_ordered_libraries.py
@@ -0,0 +1,144 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Writes dependency ordered list of native libraries.
+
+The list excludes any Android system libraries, as those are not bundled with
+the APK.
+
+This list of libraries is used for several steps of building an APK.
+In the component build, --input-libraries only needs to list the top-level
+library (e.g. libcontent_shell_content_view). The script then uses readelf to
+inspect the shared libraries and determine the full list of (non-system)
+libraries that should be included in the APK.
+"""
+
+# TODO(cjhopman): See if we can expose the list of library dependencies from
+# gyp, rather than calculating it ourselves.
+# http://crbug.com/225558
+
+import optparse
+import os
+import re
+import sys
+
+from util import build_utils
+
+_readelf = None
+_library_dirs = None
+
+_library_re = re.compile(
+    '.*NEEDED.*Shared library: \[(?P<library_name>.+)\]')
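+# The regex matches 'readelf -d' output lines such as:
+#   0x00000001 (NEEDED)                     Shared library: [libc.so]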
+
+
+def SetReadelfPath(path):
+  global _readelf
+  _readelf = path
+
+
+def SetLibraryDirs(dirs):
+  global _library_dirs
+  _library_dirs = dirs
+
+
+def FullLibraryPath(library_name):
+  assert _library_dirs is not None
+  for directory in _library_dirs:
+    path = '%s/%s' % (directory, library_name)
+    if os.path.exists(path):
+      return path
+  return library_name
+
+
+def IsSystemLibrary(library_name):
+  # If the library doesn't exist in the libraries directory, assume that it is
+  # an Android system library.
+  return not os.path.exists(FullLibraryPath(library_name))
+
+
+def CallReadElf(library_or_executable):
+  assert _readelf is not None
+  readelf_cmd = [_readelf,
+                 '-d',
+                 FullLibraryPath(library_or_executable)]
+  return build_utils.CheckOutput(readelf_cmd)
+
+
+def GetDependencies(library_or_executable):
+  elf = CallReadElf(library_or_executable)
+  return set(_library_re.findall(elf))
+
+
+def GetNonSystemDependencies(library_name):
+  all_deps = GetDependencies(library_name)
+  return set((lib for lib in all_deps if not IsSystemLibrary(lib)))
+
+
+def GetSortedTransitiveDependencies(libraries):
+  """Returns all transitive library dependencies in dependency order."""
+  return build_utils.GetSortedTransitiveDependencies(
+      libraries, GetNonSystemDependencies)
+
+
+def GetSortedTransitiveDependenciesForBinaries(binaries):
+  if binaries[0].endswith('.so'):
+    libraries = [os.path.basename(lib) for lib in binaries]
+  else:
+    assert len(binaries) == 1
+    all_deps = GetDependencies(binaries[0])
+    libraries = [lib for lib in all_deps if not IsSystemLibrary(lib)]
+
+  return GetSortedTransitiveDependencies(libraries)
+
+
+def main():
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+
+  parser.add_option('--input-libraries',
+      help='A list of top-level input libraries.')
+  parser.add_option('--libraries-dir',
+      help='The directory which contains shared libraries.')
+  parser.add_option('--readelf', help='Path to the readelf binary.')
+  parser.add_option('--output', help='Path to the generated .json file.')
+  parser.add_option('--stamp', help='Path to touch on success.')
+
+  options, _ = parser.parse_args()
+
+  SetReadelfPath(options.readelf)
+  SetLibraryDirs(options.libraries_dir.split(','))
+
+  libraries = build_utils.ParseGypList(options.input_libraries)
+  if len(libraries):
+    libraries = GetSortedTransitiveDependenciesForBinaries(libraries)
+
+  # Convert to "base" library names: e.g. libfoo.so -> foo
+  java_libraries_list = (
+      '{%s}' % ','.join(['"%s"' % s[3:-3] for s in libraries]))
+
+  out_json = {
+      'libraries': libraries,
+      'lib_paths': [FullLibraryPath(l) for l in libraries],
+      'java_libraries_list': java_libraries_list
+      }
+  build_utils.WriteJson(
+      out_json,
+      options.output,
+      only_if_changed=True)
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        libraries + build_utils.GetPythonDependencies())
+
+
+if __name__ == '__main__':
+  sys.exit(main())
+
+
diff --git a/build/android/gyp/zip.py b/build/android/gyp/zip.py
new file mode 100755
index 0000000..51322df
--- /dev/null
+++ b/build/android/gyp/zip.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+#
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Archives a set of files.
+"""
+
+import optparse
+import sys
+
+from util import build_utils
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('--input-dir', help='Directory of files to archive.')
+  parser.add_option('--output', help='Path to output archive.')
+  options, _ = parser.parse_args()
+
+  inputs = build_utils.FindInDirectory(options.input_dir, '*')
+  build_utils.DoZip(inputs, options.output, options.input_dir)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/host_heartbeat.py b/build/android/host_heartbeat.py
new file mode 100755
index 0000000..6a7cdd1
--- /dev/null
+++ b/build/android/host_heartbeat.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Sends a heart beat pulse to the currently online Android devices.
+This heart beat lets the devices know that they are connected to a host.
+"""
+# pylint: disable=W0702
+
+import sys
+import time
+
+from pylib.device import device_utils
+
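+# Seconds to wait between heartbeat pulses.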
+PULSE_PERIOD = 20
+
+def main():
+  while True:
+    try:
+      devices = device_utils.DeviceUtils.HealthyDevices()
+      for d in devices:
+        d.RunShellCommand(['touch', '/sdcard/host_heartbeat'],
+                          check_return=True)
+    except:
+      # Keep the heartbeat running, ignoring all errors.
+      pass
+    time.sleep(PULSE_PERIOD)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/increase_size_for_speed.gypi b/build/android/increase_size_for_speed.gypi
new file mode 100644
index 0000000..48d17f5
--- /dev/null
+++ b/build/android/increase_size_for_speed.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to optimize a target for speed
+# rather than for size on Android.
+# This is used in some carefully tailored targets and is not meant
+# to be included everywhere. Before adding the template to another target,
+# please ask in chromium-dev@. See crbug.com/411909
+
+{
+  'configurations': {
+    'Release': {
+      'target_conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['OS=="android"', {
+              'cflags!': ['-Os'],
+              'cflags': ['-O2'],
+            }],
+            # Do not merge -Os and -O2 in LTO.
+            # LTO merges all optimization options at link-time. -O2 takes
+            # precedence over -Os. Avoid using LTO simultaneously
+            # on -Os and -O2 parts for that reason.
+            ['OS=="android" and use_lto==1', {
+              'cflags!': [
+                '-flto',
+                '-ffat-lto-objects',
+              ],
+            }],
+            ['OS=="android" and use_lto_o2==1', {
+              'cflags': [
+                '-flto',
+                '-ffat-lto-objects',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+  },
+}
diff --git a/build/android/insert_chromium_version.gypi b/build/android/insert_chromium_version.gypi
new file mode 100644
index 0000000..a6ff908
--- /dev/null
+++ b/build/android/insert_chromium_version.gypi
@@ -0,0 +1,53 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# inserts a chromium version string into native libraries.
+#
+# To use this, add an action of the following form to a gyp target:
+#  {
+#    'action_name': 'insert_chromium_version',
+#    'variables': {
+#      'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#      'stripped_libraries_dir': 'the directory containing the native libraries',
+#      'input_paths': 'files to be added to the list of inputs',
+#      'stamp': 'file to touch when the action is complete',
+#      'version_string': 'chromium version string to be inserted',
+#    },
+#    'includes': [ '../../build/android/insert_chromium_version.gypi' ],
+#  },
+#
+
+{
+  'message': 'Inserting chromium version string into native libraries',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/insert_chromium_version.py',
+    '--android-objcopy=<(android_objcopy)',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--version-string=<(version_string)',
+    '--stamp=<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+}
diff --git a/build/android/install_emulator_deps.py b/build/android/install_emulator_deps.py
new file mode 100755
index 0000000..82d1c75
--- /dev/null
+++ b/build/android/install_emulator_deps.py
@@ -0,0 +1,277 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Installs deps for using SDK emulator for testing.
+
+The script will download the SDK and system images, if they are not present, and
+install and enable KVM, if virtualization has been enabled in the BIOS.
+"""
+
+
+import logging
+import optparse
+import os
+import re
+import shutil
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.utils import run_tests_helper
+
+# Android API level
+DEFAULT_ANDROID_API_LEVEL = constants.ANDROID_SDK_VERSION
+
+# From the Android Developer's website.
+# Keep this up to date; the user can install older API levels as necessary.
+SDK_BASE_URL = 'http://dl.google.com/android/adt'
+SDK_ZIP = 'adt-bundle-linux-x86_64-20131030.zip'
+
+# pylint: disable=line-too-long
+# Android x86 system image from the Intel website:
+# http://software.intel.com/en-us/articles/intel-eula-x86-android-4-2-jelly-bean-bin
+# These don't exist prior to Android-15.
+# (Android-19 was not yet available as of 08 Nov 2013, but has since been added.)
+X86_IMG_URLS = {
+  15: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-15_r01.zip',
+  16: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-16_r01.zip',
+  17: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-17_r01.zip',
+  18: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-18_r01.zip',
+  19: 'https://software.intel.com/sites/landingpage/android/sysimg_x86-19_r01.zip'}
+#pylint: enable=line-too-long
+
+def CheckSDK():
+  """Check if SDK is already installed.
+
+  Returns:
+    True if the emulator SDK directory (src/android_emulator_sdk/) exists.
+  """
+  return os.path.exists(constants.EMULATOR_SDK_ROOT)
+
+
+def CheckSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Check if the "SDK Platform" for the specified API level is installed.
+     This is necessary in order for the emulator to run when the target
+     is specified.
+
+  Args:
+    api_level: the Android API level to check; defaults to the latest API.
+
+  Returns:
+    True if the platform is already installed.
+  """
+  android_binary = os.path.join(constants.EMULATOR_SDK_ROOT,
+                                'sdk', 'tools', 'android')
+  pattern = re.compile('id: [0-9]+ or "android-%d"' % api_level)
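+  # Example of a matching line in the 'android list' output:
+  #   id: 1 or "android-19"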
+  try:
+    exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+        [android_binary, 'list'])
+    if exit_code != 0:
+      raise Exception('\'android list\' command failed')
+    for line in stdout.split('\n'):
+      if pattern.match(line):
+        return True
+    return False
+  except OSError:
+    logging.exception('Unable to execute \'android list\'')
+    return False
+
+
+def CheckX86Image(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Check if Android system images have been installed.
+
+  Args:
+    api_level: the Android API level to check for; defaults to the latest API.
+
+  Returns:
+    True if sdk/system-images/android-<api_level>/x86 exists inside
+    EMULATOR_SDK_ROOT.
+  """
+  api_target = 'android-%d' % api_level
+  return os.path.exists(os.path.join(constants.EMULATOR_SDK_ROOT,
+                                     'sdk', 'system-images',
+                                     api_target, 'x86'))
+
+
+def CheckKVM():
+  """Quickly check whether KVM is enabled.
+
+  Returns:
+    True iff /dev/kvm exists (Linux only).
+  """
+  return os.path.exists('/dev/kvm')
+
+
+def RunKvmOk():
+  """Run kvm-ok as root to check that KVM is properly enabled after installation
+     of the required packages.
+
+  Returns:
+    True iff KVM is enabled (/dev/kvm exists). On failure, returns False
+    but also prints detailed information explaining why KVM isn't enabled
+    (e.g. CPU doesn't support it, or BIOS disabled it).
+  """
+  try:
+    # Note: kvm-ok is in /usr/sbin, so always use 'sudo' to run it.
+    return not cmd_helper.RunCmd(['sudo', 'kvm-ok'])
+  except OSError:
+    logging.info('kvm-ok not installed')
+    return False
+
+
+def GetSDK():
+  """Download the SDK and unzip it into EMULATOR_SDK_ROOT."""
+  logging.info('Download Android SDK.')
+  sdk_url = '%s/%s' % (SDK_BASE_URL, SDK_ZIP)
+  try:
+    cmd_helper.RunCmd(['curl', '-o', '/tmp/sdk.zip', sdk_url])
+    print 'Downloaded SDK zip; unzipping...'
+    rc = cmd_helper.RunCmd(['unzip', '-o', '/tmp/sdk.zip', '-d', '/tmp/'])
+    if rc:
+      raise Exception('ERROR: could not download/unzip Android SDK.')
+    # Get the name of the sub-directory that everything will be extracted to.
+    dirname, _ = os.path.splitext(SDK_ZIP)
+    zip_dir = '/tmp/%s' % dirname
+    # Move the extracted directory to EMULATOR_SDK_ROOT
+    shutil.move(zip_dir, constants.EMULATOR_SDK_ROOT)
+  finally:
+    os.unlink('/tmp/sdk.zip')
+
+
+def InstallKVM():
+  """Installs KVM packages."""
+  rc = cmd_helper.RunCmd(['sudo', 'apt-get', 'install', 'kvm'])
+  if rc:
+    logging.critical('ERROR: Did not install KVM. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+  # TODO(navabi): Use modprobe kvm-amd on AMD processors.
+  rc = cmd_helper.RunCmd(['sudo', 'modprobe', 'kvm-intel'])
+  if rc:
+    logging.critical('ERROR: Did not add KVM module to Linux Kernel. Make sure '
+                     'hardware virtualization is enabled in BIOS.')
+  # Now check to ensure KVM acceleration can be used.
+  if not RunKvmOk():
+    logging.critical('ERROR: Can not use KVM acceleration. Make sure hardware '
+                     'virtualization is enabled in BIOS (i.e. Intel VT-x or '
+                     'AMD SVM).')
+
+
+def GetX86Image(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Download x86 system image from Intel's website.
+
+  Args:
+    api_level: the Android API level to download for.
+  """
+  logging.info('Download x86 system image directory into sdk directory.')
+  # TODO(andrewhayden): Use python tempfile lib instead
+  temp_file = '/tmp/x86_img_android-%d.zip' % api_level
+  if api_level not in X86_IMG_URLS:
+    raise Exception('ERROR: no URL known for x86 image for android-%s' %
+                    api_level)
+  try:
+    cmd_helper.RunCmd(['curl', '-o', temp_file, X86_IMG_URLS[api_level]])
+    rc = cmd_helper.RunCmd(['unzip', '-o', temp_file, '-d', '/tmp/'])
+    if rc:
+      raise Exception('ERROR: Could not download/unzip image zip.')
+    api_target = 'android-%d' % api_level
+    sys_imgs = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk',
+                            'system-images', api_target, 'x86')
+    logging.info('Deploying system image to %s' % sys_imgs)
+    shutil.move('/tmp/x86', sys_imgs)
+  finally:
+    os.unlink(temp_file)
+
+
+def GetSDKPlatform(api_level=DEFAULT_ANDROID_API_LEVEL):
+  """Update the SDK to include the platform specified.
+
+  Args:
+    api_level: the Android API level to download
+  """
+  android_binary = os.path.join(constants.EMULATOR_SDK_ROOT,
+                                'sdk', 'tools', 'android')
+  pattern = re.compile(
+      r'\s*([0-9]+)- SDK Platform Android [\.,0-9]+, API %d.*' % api_level)
+  # Example:
+  #   2- SDK Platform Android 4.3, API 18, revision 2
+  exit_code, stdout = cmd_helper.GetCmdStatusAndOutput(
+      [android_binary, 'list', 'sdk'])
+  if exit_code != 0:
+    raise Exception('\'android list sdk\' command returned %d' % exit_code)
+  for line in stdout.split('\n'):
+    match = pattern.match(line)
+    if match:
+      index = match.group(1)
+      print 'package %s corresponds to platform level %d' % (index, api_level)
+      # update sdk --no-ui --filter $INDEX
+      update_command = [android_binary,
+                        'update', 'sdk', '--no-ui', '--filter', index]
+      update_command_str = ' '.join(update_command)
+      logging.info('running update command: %s' % update_command_str)
+      update_process = pexpect.spawn(update_command_str)
+      # TODO(andrewhayden): Do we need to bug the user about this?
+      if update_process.expect('Do you accept the license') != 0:
+        raise Exception('License agreement check failed')
+      update_process.sendline('y')
+      if update_process.expect('Done. 1 package installed.') == 0:
+        print 'Successfully installed platform for API level %d' % api_level
+        return
+      else:
+        raise Exception('Failed to install platform update')
+  raise Exception('Could not find android-%d update for the SDK!' % api_level)
+
+
+def main(argv):
+  opt_parser = optparse.OptionParser(
+      description='Install dependencies for running the Android emulator')
+  opt_parser.add_option('--api-level', dest='api_level',
+      help='The API level (e.g., 19 for Android 4.4) to ensure is available',
+      type='int', default=DEFAULT_ANDROID_API_LEVEL)
+  opt_parser.add_option('-v', dest='verbose', action='store_true',
+      help='enable verbose logging')
+  options, _ = opt_parser.parse_args(argv[1:])
+
+  # run_tests_helper will set logging to INFO or DEBUG
+  # We achieve verbose output by configuring it with 2 (==DEBUG)
+  verbosity = 1
+  if options.verbose:
+    verbosity = 2
+  logging.basicConfig(level=logging.INFO,
+                      format='# %(asctime)-15s: %(message)s')
+  run_tests_helper.SetLogLevel(verbose_count=verbosity)
+
+  # Calls below will download emulator SDK and/or system images only if needed.
+  if CheckSDK():
+    logging.info('android_emulator_sdk/ already exists, skipping download.')
+  else:
+    GetSDK()
+
+  # Check target. The target has to be installed in order to run the emulator.
+  if CheckSDKPlatform(options.api_level):
+    logging.info('SDK platform android-%d already present, skipping.' %
+                 options.api_level)
+  else:
+    logging.info('SDK platform android-%d not present, installing.' %
+                 options.api_level)
+    GetSDKPlatform(options.api_level)
+
+  # Download the x86 system image only if needed.
+  if CheckX86Image(options.api_level):
+    logging.info('x86 image for android-%d already present, skipping.' %
+                 options.api_level)
+  else:
+    GetX86Image(options.api_level)
+
+  # Make sure KVM packages are installed and enabled.
+  if CheckKVM():
+    logging.info('KVM already installed and enabled.')
+  else:
+    InstallKVM()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/instr_action.gypi b/build/android/instr_action.gypi
new file mode 100644
index 0000000..fa6d062
--- /dev/null
+++ b/build/android/instr_action.gypi
@@ -0,0 +1,53 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# instruments either java class files, or jars.
+
+{
+  'variables': {
+    'instr_type%': 'jar',
+    'input_path%': '',
+    'output_path%': '',
+    'stamp_path%': '',
+    'extra_instr_args': [
+      '--coverage-file=<(_target_name).em',
+      '--sources-file=<(_target_name)_sources.txt',
+    ],
+    'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar',
+    'conditions': [
+      ['emma_instrument != 0', {
+        'extra_instr_args': [
+          '--sources=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)',
+          '--src-root=<(DEPTH)',
+          '--emma-jar=<(emma_jar)',
+          '--filter-string=<(emma_filter)',
+        ],
+        'conditions': [
+          ['instr_type == "jar"', {
+            'instr_action': 'instrument_jar',
+          }, {
+            'instr_action': 'instrument_classes',
+          }]
+        ],
+      }, {
+        'instr_action': 'copy',
+        'extra_instr_args': [],
+      }]
+    ]
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/emma_instr.py',
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/pylib/utils/command_option_parser.py',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/emma_instr.py',
+    '<(instr_action)',
+    '--input-path=<(input_path)',
+    '--output-path=<(output_path)',
+    '--stamp=<(stamp_path)',
+    '<@(extra_instr_args)',
+  ]
+}
diff --git a/build/android/java_cpp_enum.gypi b/build/android/java_cpp_enum.gypi
new file mode 100644
index 0000000..d4abafa
--- /dev/null
+++ b/build/android/java_cpp_enum.gypi
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide an action
+# to generate Java source files from a C++ header file containing annotated
+# enum definitions using a Python script.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'bitmap_format_java',
+#    'type': 'none',
+#    'variables': {
+#      'source_file': 'ui/android/bitmap_format.h',
+#    },
+#    'includes': [ '../build/android/java_cpp_enum.gypi' ],
+#  },
+#
+# Then have the gyp target which compiles the java code depend on the newly
+# created target.
+
+{
+  'variables': {
+    # Location where all generated Java sources will be placed.
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/enums/<(_target_name)',
+    'generator_path': '<(DEPTH)/build/android/gyp/java_cpp_enum.py',
+    'generator_args': '<(output_dir) <(source_file)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure that the targets depending on this one are rebuilt if the sources
+      # of this one are modified.
+      'additional_input_paths': [
+        '<(source_file)',
+      ],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_java_constants',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(generator_path)',
+        '<(source_file)',
+      ],
+      'outputs': [
+        # This is the main reason this is an action and not a rule. Gyp doesn't
+        # properly expand RULE_INPUT_PATH here and so it's impossible to
+        # calculate the list of outputs.
+        '<!@pymod_do_main(java_cpp_enum --print_output_only '
+            '<@(generator_args))',
+      ],
+      'action': [
+        'python', '<(generator_path)', '<@(generator_args)'
+      ],
+      'message': 'Generating Java from cpp header <(source_file)',
+    },
+  ],
+}
diff --git a/build/android/java_cpp_template.gypi b/build/android/java_cpp_template.gypi
new file mode 100644
index 0000000..3296659
--- /dev/null
+++ b/build/android/java_cpp_template.gypi
@@ -0,0 +1,81 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate Java source files from templates that are processed
+# through the host C pre-processor.
+#
+# NOTE: For generating Java counterparts to enums prefer using the java_cpp_enum
+#       rule instead.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'android_net_java_constants',
+#    'type': 'none',
+#    'sources': [
+#      'net/android/NetError.template',
+#    ],
+#    'variables': {
+#      'package_name': 'org/chromium/net',
+#      'template_deps': ['base/net_error_list.h'],
+#    },
+#    'includes': [ '../build/android/java_cpp_template.gypi' ],
+#  },
+#
+# The 'sources' entry should only list the template file. The template file
+# itself should use the 'ClassName.template' format, and will generate
+# 'gen/templates/<target-name>/<package-name>/ClassName.java'. The files the
+# template depends on (typically included by the template) should be listed
+# in the template_deps variable. Any change to them will force a rebuild of
+# the template, and hence of any source that depends on it.
+#
+
+{
+  # Location where all generated Java sources will be placed.
+  'variables': {
+    'include_path%': '<(DEPTH)',
+    'output_dir': '<(SHARED_INTERMEDIATE_DIR)/templates/<(_target_name)/<(package_name)',
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      # Ensure that the output directory is used in the class path
+      # when building targets that depend on this one.
+      'generated_src_dirs': [
+        '<(output_dir)/',
+      ],
+      # Ensure dependents are rebuilt when sources for this rule change.
+      'additional_input_paths': [
+        '<@(_sources)',
+        '<@(template_deps)',
+      ],
+    },
+  },
+  # Define a single rule that will be applied to each .template file
+  # listed in 'sources'.
+  'rules': [
+    {
+      'rule_name': 'generate_java_constants',
+      'extension': 'template',
+      # Set template_deps as additional dependencies.
+      'variables': {
+        'output_path': '<(output_dir)/<(RULE_INPUT_ROOT).java',
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '<@(template_deps)'
+      ],
+      'outputs': [
+        '<(output_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+        '--include-path=<(include_path)',
+        '--output=<(output_path)',
+        '--template=<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating Java from cpp template <(RULE_INPUT_PATH)',
+    }
+  ],
+}
diff --git a/build/android/jinja_template.gypi b/build/android/jinja_template.gypi
new file mode 100644
index 0000000..9c49360
--- /dev/null
+++ b/build/android/jinja_template.gypi
@@ -0,0 +1,85 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to process one or more
+# Jinja templates.
+#
+# To process a single template file, create a gyp target with the following
+# form:
+#  {
+#    'target_name': 'chrome_shell_manifest',
+#    'type': 'none',
+#    'variables': {
+#      'jinja_inputs': ['android/shell/java/AndroidManifest.xml'],
+#      'jinja_output': '<(SHARED_INTERMEDIATE_DIR)/chrome_shell_manifest/AndroidManifest.xml',
+#      'jinja_variables': ['app_name=ChromeShell'],
+#    },
+#    'includes': [ '../build/android/jinja_template.gypi' ],
+#  },
+#
+# To process multiple template files and package the results into a zip file,
+# create a gyp target with the following form:
+#  {
+#    'target_name': 'chrome_template_resources',
+#    'type': 'none',
+#    'variables': {
+#       'jinja_inputs_base_dir': 'android/shell/java/res_template',
+#       'jinja_inputs': [
+#         '<(jinja_inputs_base_dir)/xml/searchable.xml',
+#         '<(jinja_inputs_base_dir)/xml/syncadapter.xml',
+#       ],
+#       'jinja_outputs_zip': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+#       'jinja_variables': ['app_name=ChromeShell'],
+#     },
+#     'includes': [ '../build/android/jinja_template.gypi' ],
+#   },
+#
+
+{
+  'actions': [
+    {
+      'action_name': '<(_target_name)_jinja_template',
+      'message': 'processing jinja template',
+      'variables': {
+        'jinja_output%': '',
+        'jinja_outputs_zip%': '',
+        'jinja_inputs_base_dir%': '',
+        'jinja_includes%': [],
+        'jinja_variables%': [],
+        'jinja_args': [],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/jinja_template.py',
+        '<@(jinja_inputs)',
+        '<@(jinja_includes)',
+      ],
+      'conditions': [
+        ['jinja_output != ""', {
+          'outputs': [ '<(jinja_output)' ],
+          'variables': {
+            'jinja_args': ['--output', '<(jinja_output)'],
+          },
+        }],
+        ['jinja_outputs_zip != ""', {
+          'outputs': [ '<(jinja_outputs_zip)' ],
+          'variables': {
+            'jinja_args': ['--outputs-zip', '<(jinja_outputs_zip)'],
+          },
+        }],
+        ['jinja_inputs_base_dir != ""', {
+          'variables': {
+            'jinja_args': ['--inputs-base-dir', '<(jinja_inputs_base_dir)'],
+          },
+        }],
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/jinja_template.py',
+        '--inputs', '<(jinja_inputs)',
+        '--variables', '<(jinja_variables)',
+        '<@(jinja_args)',
+      ],
+    },
+  ],
+}
diff --git a/build/android/lighttpd_server.py b/build/android/lighttpd_server.py
new file mode 100755
index 0000000..a5195ac
--- /dev/null
+++ b/build/android/lighttpd_server.py
@@ -0,0 +1,256 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a convenient wrapper for spawning a test lighttpd instance.
+
+Usage:
+  lighttpd_server PATH_TO_DOC_ROOT
+"""
+
+import codecs
+import contextlib
+import httplib
+import os
+import random
+import shutil
+import socket
+import subprocess
+import sys
+import tempfile
+import time
+
+from pylib import constants
+from pylib import pexpect
+
+class LighttpdServer(object):
+  """Wraps lighttpd server, providing robust startup.
+
+  Args:
+    document_root: Path to root of this server's hosted files.
+    port: TCP port on the _host_ machine that the server will listen on. If
+        omitted it will attempt to use 9000, or if unavailable it will find
+        a free port from 8001 - 8999.
+    lighttpd_path, lighttpd_module_path: Optional paths to lighttpd binaries.
+    base_config_path: If supplied this file will replace the built-in default
+        lighttpd config file.
+    extra_config_contents: If specified, this string will be appended to the
+        base config (default built-in, or from base_config_path).
+    config_path, error_log, access_log: Optional paths where the class should
+        place temporary files for this session.
+  """
+
+  def __init__(self, document_root, port=None,
+               lighttpd_path=None, lighttpd_module_path=None,
+               base_config_path=None, extra_config_contents=None,
+               config_path=None, error_log=None, access_log=None):
+    self.temp_dir = tempfile.mkdtemp(prefix='lighttpd_for_chrome_android')
+    self.document_root = os.path.abspath(document_root)
+    self.fixed_port = port
+    self.port = port or constants.LIGHTTPD_DEFAULT_PORT
+    self.server_tag = 'LightTPD ' + str(random.randint(111111, 999999))
+    self.lighttpd_path = lighttpd_path or '/usr/sbin/lighttpd'
+    self.lighttpd_module_path = lighttpd_module_path or '/usr/lib/lighttpd'
+    self.base_config_path = base_config_path
+    self.extra_config_contents = extra_config_contents
+    self.config_path = config_path or self._Mktmp('config')
+    self.error_log = error_log or self._Mktmp('error_log')
+    self.access_log = access_log or self._Mktmp('access_log')
+    self.pid_file = self._Mktmp('pid_file')
+    self.process = None
+
+  def _Mktmp(self, name):
+    return os.path.join(self.temp_dir, name)
+
+  @staticmethod
+  def _GetRandomPort():
+    # The port range for the test server is defined in constants.py.
+    return random.randint(constants.LIGHTTPD_RANDOM_PORT_FIRST,
+                          constants.LIGHTTPD_RANDOM_PORT_LAST)
+
+  def StartupHttpServer(self):
+    """Starts up a http server with specified document root and port."""
+    # If we want a specific port, make sure no one else is listening on it.
+    if self.fixed_port:
+      self._KillProcessListeningOnPort(self.fixed_port)
+    while True:
+      if self.base_config_path:
+        # Read the config
+        with codecs.open(self.base_config_path, 'r', 'utf-8') as f:
+          config_contents = f.read()
+      else:
+        config_contents = self._GetDefaultBaseConfig()
+      if self.extra_config_contents:
+        config_contents += self.extra_config_contents
+      # Write out the config, filling in placeholders from the members of |self|
+      with codecs.open(self.config_path, 'w', 'utf-8') as f:
+        f.write(config_contents % self.__dict__)
+      if (not os.path.exists(self.lighttpd_path) or
+          not os.access(self.lighttpd_path, os.X_OK)):
+        raise EnvironmentError(
+            'Could not find lighttpd at %s.\n'
+            'It may need to be installed (e.g. sudo apt-get install lighttpd)'
+            % self.lighttpd_path)
+      self.process = pexpect.spawn(self.lighttpd_path,
+                                   ['-D', '-f', self.config_path,
+                                    '-m', self.lighttpd_module_path],
+                                   cwd=self.temp_dir)
+      client_error, server_error = self._TestServerConnection()
+      if not client_error:
+        assert int(open(self.pid_file, 'r').read()) == self.process.pid
+        break
+      self.process.close()
+
+      if self.fixed_port or 'in use' not in server_error:
+        print 'Client error:', client_error
+        print 'Server error:', server_error
+        return False
+      self.port = self._GetRandomPort()
+    return True
+
+  def ShutdownHttpServer(self):
+    """Shuts down our lighttpd processes."""
+    if self.process:
+      self.process.terminate()
+    shutil.rmtree(self.temp_dir, ignore_errors=True)
+
+  def _TestServerConnection(self):
+    # Wait for server to start
+    server_msg = ''
+    for timeout in xrange(1, 5):
+      client_error = None
+      try:
+        with contextlib.closing(httplib.HTTPConnection(
+            '127.0.0.1', self.port, timeout=timeout)) as http:
+          http.set_debuglevel(timeout > 3)
+          http.request('HEAD', '/')
+          r = http.getresponse()
+          r.read()
+          if (r.status == 200 and r.reason == 'OK' and
+              r.getheader('Server') == self.server_tag):
+            return (None, server_msg)
+          client_error = ('Bad response: %s %s version %s\n  ' %
+                          (r.status, r.reason, r.version) +
+                          '\n  '.join([': '.join(h) for h in r.getheaders()]))
+      except (httplib.HTTPException, socket.error) as client_error:
+        pass  # Probably too quick connecting: try again
+      # Check for server startup error messages
+      ix = self.process.expect([pexpect.TIMEOUT, pexpect.EOF, '.+'],
+                               timeout=timeout)
+      if ix == 2:  # stdout spew from the server
+        server_msg += self.process.match.group(0)
+      elif ix == 1:  # EOF -- server has quit so give up.
+        client_error = client_error or 'Server exited'
+        break
+    return (client_error or 'Timeout', server_msg)
+
+  @staticmethod
+  def _KillProcessListeningOnPort(port):
+    """Checks if there is a process listening on port number |port| and
+    terminates it if found.
+
+    Args:
+      port: Port number to check.
+    """
+    if subprocess.call(['fuser', '-kv', '%d/tcp' % port]) == 0:
+      # Give the process some time to terminate and check that it is gone.
+      time.sleep(2)
+      assert subprocess.call(['fuser', '-v', '%d/tcp' % port]) != 0, \
+          'Unable to kill process listening on port %d.' % port
+
+  @staticmethod
+  def _GetDefaultBaseConfig():
+    return """server.tag                  = "%(server_tag)s"
+server.modules              = ( "mod_access",
+                                "mod_accesslog",
+                                "mod_alias",
+                                "mod_cgi",
+                                "mod_rewrite" )
+
+# default document root required
+#server.document-root = "."
+
+# files to check for if .../ is requested
+index-file.names            = ( "index.php", "index.pl", "index.cgi",
+                                "index.html", "index.htm", "default.htm" )
+# mimetype mapping
+mimetype.assign             = (
+  ".gif"          =>      "image/gif",
+  ".jpg"          =>      "image/jpeg",
+  ".jpeg"         =>      "image/jpeg",
+  ".png"          =>      "image/png",
+  ".svg"          =>      "image/svg+xml",
+  ".css"          =>      "text/css",
+  ".html"         =>      "text/html",
+  ".htm"          =>      "text/html",
+  ".xhtml"        =>      "application/xhtml+xml",
+  ".xhtmlmp"      =>      "application/vnd.wap.xhtml+xml",
+  ".js"           =>      "application/x-javascript",
+  ".log"          =>      "text/plain",
+  ".conf"         =>      "text/plain",
+  ".text"         =>      "text/plain",
+  ".txt"          =>      "text/plain",
+  ".dtd"          =>      "text/xml",
+  ".xml"          =>      "text/xml",
+  ".manifest"     =>      "text/cache-manifest",
+ )
+
+# Use the "Content-Type" extended attribute to obtain mime type if possible
+mimetype.use-xattr          = "enable"
+
+##
+# which extensions should not be handle via static-file transfer
+#
+# .php, .pl, .fcgi are most often handled by mod_fastcgi or mod_cgi
+static-file.exclude-extensions = ( ".php", ".pl", ".cgi" )
+
+server.bind = "127.0.0.1"
+server.port = %(port)s
+
+## virtual directory listings
+dir-listing.activate        = "enable"
+#dir-listing.encoding       = "iso-8859-2"
+#dir-listing.external-css   = "style/oldstyle.css"
+
+## enable debugging
+#debug.log-request-header   = "enable"
+#debug.log-response-header  = "enable"
+#debug.log-request-handling = "enable"
+#debug.log-file-not-found   = "enable"
+
+#### SSL engine
+#ssl.engine                 = "enable"
+#ssl.pemfile                = "server.pem"
+
+# Autogenerated test-specific config follows.
+
+cgi.assign = ( ".cgi"  => "/usr/bin/env",
+               ".pl"   => "/usr/bin/env",
+               ".asis" => "/bin/cat",
+               ".php"  => "/usr/bin/php-cgi" )
+
+server.errorlog = "%(error_log)s"
+accesslog.filename = "%(access_log)s"
+server.upload-dirs = ( "/tmp" )
+server.pid-file = "%(pid_file)s"
+server.document-root = "%(document_root)s"
+
+"""
+
+
+def main(argv):
+  server = LighttpdServer(*argv[1:])
+  try:
+    if server.StartupHttpServer():
+      raw_input('Server running at http://127.0.0.1:%s -'
+                ' press Enter to exit it.' % server.port)
+    else:
+      print 'Server exit code:', server.process.exitstatus
+  finally:
+    server.ShutdownHttpServer()
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/android/lint/OWNERS b/build/android/lint/OWNERS
new file mode 100644
index 0000000..cd396e7
--- /dev/null
+++ b/build/android/lint/OWNERS
@@ -0,0 +1,2 @@
+newt@chromium.org
+aurimas@chromium.org
diff --git a/build/android/lint/suppress.py b/build/android/lint/suppress.py
new file mode 100755
index 0000000..52d7579
--- /dev/null
+++ b/build/android/lint/suppress.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Add all generated lint_result.xml files to suppressions.xml"""
+
+
+import collections
+import optparse
+import os
+import sys
+from xml.dom import minidom
+
+_BUILD_ANDROID_DIR = os.path.join(os.path.dirname(__file__), '..')
+sys.path.append(_BUILD_ANDROID_DIR)
+
+from pylib import constants
+
+
+_THIS_FILE = os.path.abspath(__file__)
+_CONFIG_PATH = os.path.join(os.path.dirname(_THIS_FILE), 'suppressions.xml')
+_DOC = (
+    '\nSTOP! It looks like you want to suppress some lint errors:\n'
+    '- Have you tried identifying the offending patch?\n'
+    '  Ask the author for a fix and/or revert the patch.\n'
+    '- It is preferred to add suppressions in the code instead of\n'
+    '  sweeping it under the rug here. See:\n\n'
+    '    http://developer.android.com/tools/debugging/improving-w-lint.html\n'
+    '\n'
+    'Still reading?\n'
+    '- You can edit this file manually to suppress an issue\n'
+    '  globally if it is not applicable to the project.\n'
+    '- You can also automatically add issues found so far in the\n'
+    '  build process by running:\n\n'
+    '    ' + os.path.relpath(_THIS_FILE, constants.DIR_SOURCE_ROOT) + '\n\n'
+    '  which will generate this file (Comments are not preserved).\n'
+    '  Note: PRODUCT_DIR will be substituted at run-time with actual\n'
+    '  directory path (e.g. out/Debug)\n'
+)
+
+
+_Issue = collections.namedtuple('Issue', ['severity', 'paths'])
+
+
+def _ParseConfigFile(config_path):
+  print 'Parsing %s' % config_path
+  issues_dict = {}
+  dom = minidom.parse(config_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.getAttribute('severity')
+    paths = set(
+        [p.attributes['path'].value for p in
+         issue.getElementsByTagName('ignore')])
+    issues_dict[issue_id] = _Issue(severity, paths)
+  return issues_dict
+
+
+def _ParseAndMergeResultFile(result_path, issues_dict):
+  print 'Parsing and merging %s' % result_path
+  dom = minidom.parse(result_path)
+  for issue in dom.getElementsByTagName('issue'):
+    issue_id = issue.attributes['id'].value
+    severity = issue.attributes['severity'].value
+    path = issue.getElementsByTagName('location')[0].attributes['file'].value
+    if issue_id not in issues_dict:
+      issues_dict[issue_id] = _Issue(severity, set())
+    issues_dict[issue_id].paths.add(path)
+
+
+def _WriteConfigFile(config_path, issues_dict):
+  new_dom = minidom.getDOMImplementation().createDocument(None, 'lint', None)
+  top_element = new_dom.documentElement
+  top_element.appendChild(new_dom.createComment(_DOC))
+  for issue_id in sorted(issues_dict.keys()):
+    severity = issues_dict[issue_id].severity
+    paths = issues_dict[issue_id].paths
+    issue = new_dom.createElement('issue')
+    issue.attributes['id'] = issue_id
+    if severity:
+      issue.attributes['severity'] = severity
+    if severity == 'ignore':
+      print 'Warning: [%s] is suppressed globally.' % issue_id
+    else:
+      for path in sorted(paths):
+        ignore = new_dom.createElement('ignore')
+        ignore.attributes['path'] = path
+        issue.appendChild(ignore)
+    top_element.appendChild(issue)
+
+  with open(config_path, 'w') as f:
+    f.write(new_dom.toprettyxml(indent='  ', encoding='utf-8'))
+  print 'Updated %s' % config_path
+
+
+def _Suppress(config_path, result_path):
+  issues_dict = _ParseConfigFile(config_path)
+  _ParseAndMergeResultFile(result_path, issues_dict)
+  _WriteConfigFile(config_path, issues_dict)
+
+
+def main():
+  parser = optparse.OptionParser(usage='%prog RESULT-FILE')
+  _, args = parser.parse_args()
+
+  if len(args) != 1 or not os.path.exists(args[0]):
+    parser.error('Must provide RESULT-FILE')
+
+  _Suppress(_CONFIG_PATH, args[0])
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/lint/suppressions.xml b/build/android/lint/suppressions.xml
new file mode 100644
index 0000000..cb77c1f
--- /dev/null
+++ b/build/android/lint/suppressions.xml
@@ -0,0 +1,111 @@
+<?xml version="1.0" encoding="utf-8"?>
+<lint>
+  <!--
+STOP! It looks like you want to suppress some lint errors:
+- Have you tried identifying the offending patch?
+  Ask the author for a fix and/or revert the patch.
+- It is preferred to add suppressions in the code instead of
+  sweeping it under the rug here. See:
+
+    http://developer.android.com/tools/debugging/improving-w-lint.html
+
+Still reading?
+- You can edit this file manually to suppress an issue
+  globally if it is not applicable to the project.
+- You can also automatically add issues found so far in the
+  build process by running:
+
+    build/android/lint/suppress.py
+
+  which will generate this file (Comments are not preserved).
+  Note: PRODUCT_DIR will be substituted at run-time with actual
+  directory path (e.g. out/Debug)
+-->
+  <issue id="AllowBackup">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="Assert" severity="ignore"/>
+  <issue id="CommitPrefEdits">
+    <ignore path="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/ticl/android2/channel/AndroidChannelPreferences.java"/>
+  </issue>
+  <issue id="DefaultLocale">
+    <ignore path="third_party/cacheinvalidation/src/java/com/google/ipc/invalidation/external/client/contrib/AndroidListenerState.java"/>
+  </issue>
+  <issue id="DrawAllocation">
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/ContentViewRenderView.java"/>
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/PopupZoomer.java"/>
+  </issue>
+  <issue id="ExportedContentProvider">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="HandlerLeak">
+    <ignore path="remoting/android/java/src/org/chromium/chromoting/TapGestureDetector.java"/>
+  </issue>
+  <issue id="HardcodedDebugMode" severity="Fatal">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="IconDensities">
+    <!-- crbug.com/457918 is tracking missing assets -->
+    <ignore path="components/web_contents_delegate_android/android/java/res/drawable-xxhdpi"/>
+    <ignore path="components/web_contents_delegate_android/android/java/res/drawable-xxxhdpi"/>
+    <ignore path="content/public/android/java/res/drawable-xxhdpi"/>
+    <ignore path="content/public/android/java/res/drawable-xxxhdpi"/>
+    <ignore path="chrome/android/java/res/drawable-xxhdpi"/>
+    <ignore path="chrome/android/java/res/drawable-xxxhdpi"/>
+    <ignore path="ui/android/java/res/drawable-xxhdpi"/>
+    <ignore path="ui/android/java/res/drawable-xxxhdpi"/>
+    <ignore regexp=".*: reader_mode_bar_background.9.png, tabs_moved_htc.png, tabs_moved_nexus.png, tabs_moved_samsung.png$"/>
+  </issue>
+  <issue id="IconLocation">
+    <!-- It is OK for content_shell_apk and chrome_shell_apk to have missing assets. -->
+    <ignore path="content/shell/android/java/res/"/>
+    <ignore path="chrome/android/shell/res/"/>
+    <!-- Suppression for chrome/test/chromedriver/test/webview_shell/java/res/drawable/icon.png -->
+    <ignore path="res/drawable/icon.png"/>
+    <!-- TODO(lambroslambrou) remove this once crbug.com/502030 is fixed. -->
+    <ignore path="remoting/android/java/res"/>
+  </issue>
+  <issue id="InconsistentLayout" severity="ignore"/>
+  <issue id="InflateParams" severity="ignore"/>
+  <issue id="MissingApplicationIcon" severity="ignore"/>
+  <issue id="MissingRegistered" severity="ignore"/>
+  <issue id="MissingVersion">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="InlinedApi" severity="ignore"/>
+  <issue id="NewApi">
+    <ignore regexp="Attribute `paddingStart` referenced here can result in a crash on some specific devices older than API 17"/>
+    <ignore path="org/chromium/base/AnimationFrameTimeHistogram$Recorder.class"/>
+    <ignore path="org/chromium/base/JavaHandlerThread.class"/>
+    <ignore path="org/chromium/base/SysUtils.class"/>
+    <ignore path="org/chromium/chrome/browser/TtsPlatformImpl.class"/>
+    <ignore path="org/chromium/chrome/browser/TtsPlatformImpl$*.class"/>
+    <ignore path="chrome/android/java/res/values-v17/styles.xml"/>
+  </issue>
+  <issue id="OldTargetApi">
+    <ignore path="AndroidManifest.xml"/>
+  </issue>
+  <issue id="Overdraw" severity="ignore"/>
+  <issue id="Recycle" severity="ignore"/>
+  <issue id="Registered" severity="ignore"/>
+  <issue id="RtlCompat" severity="ignore"/>
+  <issue id="RtlEnabled" severity="ignore"/>
+  <issue id="RtlSymmetry" severity="ignore"/>
+  <issue id="SdCardPath">
+    <ignore path="content/public/android/java/src/org/chromium/content/browser/MediaResourceGetter.java"/>
+  </issue>
+  <issue id="SetJavaScriptEnabled" severity="ignore"/>
+  <issue id="UnusedResources">
+    <!-- Used by chrome/android/java/AndroidManifest.xml -->
+    <ignore path="chrome/android/java/res/drawable/window_background.xml" />
+    <ignore path="chrome/android/java/res/xml/bookmark_thumbnail_widget_info.xml" />
+    <ignore path="chrome/android/java/res/xml/file_paths.xml" />
+
+    <ignore path="content/shell/android/shell_apk/res/layout/content_shell_activity.xml" />
+    <ignore path="content/shell/android/shell_apk/res/values/strings.xml" />
+  </issue>
+  <issue id="SignatureOrSystemPermissions" severity="ignore"/>
+  <issue id="UnusedAttribute" severity="ignore"/>
+  <issue id="ViewConstructor" severity="ignore"/>
+  <issue id="WrongCall" severity="ignore"/>
+</lint>
diff --git a/build/android/lint_action.gypi b/build/android/lint_action.gypi
new file mode 100644
index 0000000..e1adf1f
--- /dev/null
+++ b/build/android/lint_action.gypi
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule to
+# run lint on java/class files.
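+#
+# A minimal, illustrative sketch of an including action (the variable values
+# below are assumptions for illustration, not taken from a real target):
+#  {
+#    'variables': {
+#      'config_path': '<(intermediate_dir)/lint_config.xml',
+#      'result_path': '<(intermediate_dir)/lint_result.xml',
+#      'stamp_path': '<(intermediate_dir)/lint.stamp',
+#      'lint_jar_path': '<(jar_path)',
+#      'src_dirs': ['<(java_in_dir)/src'],
+#    },
+#    'includes': [ '../../build/android/lint_action.gypi' ],
+#  },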
+
+{
+  'action_name': 'lint_<(_target_name)',
+  'message': 'Linting <(_target_name)',
+  'variables': {
+    'conditions': [
+      ['chromium_code != 0 and android_lint != 0 and never_lint == 0', {
+        'is_enabled': '--enable',
+      }, {
+        'is_enabled': '',
+      }]
+    ],
+    'android_manifest_path%': '<(DEPTH)/build/android/AndroidManifest.xml',
+    'resource_dir%': '<(DEPTH)/build/android/ant/empty/res',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/lint.py',
+    '<(DEPTH)/build/android/lint/suppressions.xml',
+    '<(lint_jar_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/lint.py',
+    '--lint-path=<(android_sdk_root)/tools/lint',
+    '--config-path=<(DEPTH)/build/android/lint/suppressions.xml',
+    '--processed-config-path=<(config_path)',
+    '--manifest-path=<(android_manifest_path)',
+    '--result-path=<(result_path)',
+    '--resource-dir=<(resource_dir)',
+    '--product-dir=<(PRODUCT_DIR)',
+    '--src-dirs=>(src_dirs)',
+    '--jar-path=<(lint_jar_path)',
+    '--can-fail-build',
+    '--stamp=<(stamp_path)',
+    '<(is_enabled)',
+  ],
+}
diff --git a/build/android/locale_pak_resources.gypi b/build/android/locale_pak_resources.gypi
new file mode 100644
index 0000000..6f8e56f
--- /dev/null
+++ b/build/android/locale_pak_resources.gypi
@@ -0,0 +1,52 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Creates a resources.zip with locale.pak files placed into appropriate
+# resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). Also generates
+# a locale_paks TypedArray so that resource files can be enumerated at runtime.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables:
+#   locale_pak_files - List of .pak files to process.
+#     Names must be of the form "en.pak" or "en-US.pak".
+#
+# Example
+#  {
+#    'target_name': 'my_locale_resources',
+#    'type': 'none',
+#    'variables': {
+#      'locale_pak_files': ['path1/fr.pak'],
+#    },
+#    'includes': [ '../build/android/locale_pak_resources.gypi' ],
+#  },
+#
+{
+  'variables': {
+    'resources_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_input_paths': ['<(resources_zip_path)'],
+      'dependencies_res_zip_paths': ['<(resources_zip_path)'],
+    },
+  },
+  'actions': [{
+    'action_name': '<(_target_name)_locale_pak_resources',
+    'inputs': [
+      '<(DEPTH)/build/android/gyp/util/build_utils.py',
+      '<(DEPTH)/build/android/gyp/locale_pak_resources.py',
+      '<@(locale_pak_files)',
+    ],
+    'outputs': [
+      '<(resources_zip_path)',
+    ],
+    'action': [
+      'python', '<(DEPTH)/build/android/gyp/locale_pak_resources.py',
+      '--locale-paks', '<(locale_pak_files)',
+      '--resources-zip', '<(resources_zip_path)',
+    ],
+  }],
+}
diff --git a/build/android/method_count.py b/build/android/method_count.py
new file mode 100755
index 0000000..93250b5
--- /dev/null
+++ b/build/android/method_count.py
@@ -0,0 +1,55 @@
+#! /usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import re
+import sys
+
+from pylib import constants
+from pylib.sdk import dexdump
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper
+
+
+_METHOD_IDS_SIZE_RE = re.compile(r'^method_ids_size +: +(\d+)$')
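+# The regex above matches the file-summary line printed by dexdump, e.g.
+# (illustrative): "method_ids_size         : 35342"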
+
+def MethodCount(dexfile):
+  for line in dexdump.DexDump(dexfile, file_summary=True):
+    m = _METHOD_IDS_SIZE_RE.match(line)
+    if m:
+      return m.group(1)
+  raise Exception('"method_ids_size" not found in dex dump of %s' % dexfile)
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument(
+      '--apk-name', help='Name of the APK to which the dexfile corresponds.')
+  parser.add_argument('dexfile')
+
+  args = parser.parse_args()
+
+  if not args.apk_name:
+    dirname, basename = os.path.split(args.dexfile)
+    while basename:
+      if 'apk' in basename:
+        args.apk_name = basename
+        break
+      dirname, basename = os.path.split(dirname)
+    else:
+      parser.error(
+          'Unable to determine apk name from %s, '
+          'and --apk-name was not provided.' % args.dexfile)
+
+  method_count = MethodCount(args.dexfile)
+  perf_tests_results_helper.PrintPerfResult(
+      '%s_methods' % args.apk_name, 'total', [method_count], 'methods')
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
+
diff --git a/build/android/native_app_dependencies.gypi b/build/android/native_app_dependencies.gypi
new file mode 100644
index 0000000..6032274
--- /dev/null
+++ b/build/android/native_app_dependencies.gypi
@@ -0,0 +1,67 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to strip and place dependent shared libraries required by a native binary in a
+# single folder that can later be pushed to the device.
+#
+# NOTE: consider packaging your binary as an apk instead of running a native
+# library.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'target_that_depends_on_my_binary',
+#    'type': 'none',
+#    'dependencies': [
+#      'my_binary',
+#    ],
+#    'variables': {
+#      'native_binary': '<(PRODUCT_DIR)/my_binary',
+#      'output_dir': 'location to place binary and dependent libraries'
+#    },
+#    'includes': [ '../../build/android/native_app_dependencies.gypi' ],
+#  },
+#
+
+{
+  'variables': {
+    'include_main_binary%': 1,
+  },
+  'conditions': [
+      ['component == "shared_library"', {
+        'dependencies': [
+          '<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+        ],
+        'variables': {
+          'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)',
+          'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json',
+        },
+        'actions': [
+          {
+            'variables': {
+              'input_libraries': ['<(native_binary)'],
+            },
+            'includes': ['../../build/android/write_ordered_libraries.gypi'],
+          },
+          {
+            'action_name': 'stripping native libraries',
+            'variables': {
+              'stripped_libraries_dir%': '<(output_dir)',
+              'input_paths': ['<(native_binary)'],
+              'stamp': '<(intermediate_dir)/strip.stamp',
+            },
+            'includes': ['../../build/android/strip_native_libraries.gypi'],
+          },
+        ],
+      }],
+      ['include_main_binary==1', {
+        'copies': [
+          {
+            'destination': '<(output_dir)',
+            'files': [ '<(native_binary)' ],
+          }
+        ],
+      }],
+  ],
+}
diff --git a/build/android/ndk.gyp b/build/android/ndk.gyp
new file mode 100644
index 0000000..2838a98
--- /dev/null
+++ b/build/android/ndk.gyp
@@ -0,0 +1,20 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'cpu_features',
+      'type': 'static_library',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(android_ndk_root)/sources/android/cpufeatures',
+        ],
+      },
+      'sources': [
+        '<(android_ndk_root)/sources/android/cpufeatures/cpu-features.c',
+      ],
+    },
+  ],
+}
diff --git a/build/android/pack_relocations.gypi b/build/android/pack_relocations.gypi
new file mode 100644
index 0000000..8567fa6
--- /dev/null
+++ b/build/android/pack_relocations.gypi
@@ -0,0 +1,77 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# packs relocations in Release builds of native libraries.
+#
+# To use this, create a gyp action with the following form:
+#  {
+#    'action_name': 'pack_relocations',
+#    'variables': {
+#      'enable_packing': 'pack relocations if 1, plain file copy if 0',
+#      'exclude_packing_list': 'names of libraries explicitly not packed',
+#      'ordered_libraries_file': 'file generated by write_ordered_libraries',
+#      'input_paths': 'files to be added to the list of inputs',
+#      'stamp': 'file to touch when the action is complete',
+#      'stripped_libraries_dir': 'directory holding stripped libraries',
+#      'packed_libraries_dir': 'directory holding packed libraries',
+#    },
+#    'includes': [ '../../build/android/pack_relocations.gypi' ],
+#  },
+#
+
+{
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/pack_relocations.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['enable_packing == 1', {
+      'message': 'Packing relocations for <(_target_name)',
+      'dependencies': [
+        '<(DEPTH)/third_party/android_platform/relocation_packer.gyp:android_relocation_packer#host',
+      ],
+      'inputs': [
+        '<(PRODUCT_DIR)/android_relocation_packer',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=1',
+        '--exclude-packing-list=<@(exclude_packing_list)',
+        '--android-pack-relocations=<(PRODUCT_DIR)/android_relocation_packer',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }, {
+      'message': 'Copying libraries (no relocation packing) for <(_target_name)',
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/pack_relocations.py',
+        '--configuration-name=<(CONFIGURATION_NAME)',
+        '--enable-packing=0',
+        '--stripped-libraries-dir=<(stripped_libraries_dir)',
+        '--packed-libraries-dir=<(packed_libraries_dir)',
+        '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+        '--stamp=<(stamp)',
+      ],
+    }],
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+}
diff --git a/build/android/package_resources_action.gypi b/build/android/package_resources_action.gypi
new file mode 100644
index 0000000..eb60871
--- /dev/null
+++ b/build/android/package_resources_action.gypi
@@ -0,0 +1,97 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is a helper to java_apk.gypi. It should be used to create an
+# action that runs ApkBuilder via ANT.
+#
+# Required variables:
+#  apk_name - File name (minus path & extension) of the output apk.
+#  android_manifest_path - Path to AndroidManifest.xml.
+#  app_manifest_version_name - set the app's 'human readable' version number.
+#  app_manifest_version_code - set the app's version number.
+# Optional variables:
+#  asset_location - The directory where assets are located (if any).
+#  create_density_splits - Whether to create density-based apk splits. Splits
+#    are supported only for minSdkVersion >= 21.
+#  language_splits - List of languages to create apk splits for.
+#  resource_zips - List of paths to resource zip files.
+#  shared_resources - Make a resource package that can be loaded by a different
+#    application at runtime to access the package's resources.
+#  extensions_to_not_compress - E.g.: 'pak,dat,bin'
+#  extra_inputs - List of extra action inputs.
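+#
+# A minimal, illustrative sketch of an including action (all values below are
+# assumptions for illustration, not copied from a real target):
+#  {
+#    'variables': {
+#      'apk_name': 'MyShell',
+#      'intermediate_dir': '<(PRODUCT_DIR)/myshell',
+#      'android_manifest_path': 'shell/AndroidManifest.xml',
+#      'app_manifest_version_name': 'Developer Build',
+#      'app_manifest_version_code': '1',
+#      'language_splits': [],
+#    },
+#    'includes': [ '../../build/android/package_resources_action.gypi' ],
+#  },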
+{
+  'variables': {
+    'asset_location%': '',
+    'create_density_splits%': 0,
+    'resource_zips%': [],
+    'shared_resources%': 0,
+    'extensions_to_not_compress%': '',
+    'extra_inputs%': [],
+    'resource_packaged_apk_name': '<(apk_name)-resources.ap_',
+    'resource_packaged_apk_path': '<(intermediate_dir)/<(resource_packaged_apk_name)',
+  },
+  'action_name': 'package_resources_<(apk_name)',
+  'message': 'packaging resources for <(apk_name)',
+  'inputs': [
+    # TODO: This isn't always rerun correctly, http://crbug.com/351928
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/package_resources.py',
+    '<(android_manifest_path)',
+    '<@(extra_inputs)',
+  ],
+  'outputs': [
+    '<(resource_packaged_apk_path)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/package_resources.py',
+    '--android-sdk', '<(android_sdk)',
+    '--aapt-path', '<(android_aapt_path)',
+    '--configuration-name', '<(CONFIGURATION_NAME)',
+    '--android-manifest', '<(android_manifest_path)',
+    '--version-code', '<(app_manifest_version_code)',
+    '--version-name', '<(app_manifest_version_name)',
+    '--no-compress', '<(extensions_to_not_compress)',
+    '--apk-path', '<(resource_packaged_apk_path)',
+  ],
+  'conditions': [
+    ['shared_resources == 1', {
+      'action': [
+        '--shared-resources',
+      ],
+    }],
+    ['asset_location != ""', {
+      'action': [
+        '--asset-dir', '<(asset_location)',
+      ],
+    }],
+    ['create_density_splits == 1', {
+      'action': [
+        '--create-density-splits',
+      ],
+      'outputs': [
+        '<(resource_packaged_apk_path)_hdpi',
+        '<(resource_packaged_apk_path)_xhdpi',
+        '<(resource_packaged_apk_path)_xxhdpi',
+        '<(resource_packaged_apk_path)_xxxhdpi',
+        '<(resource_packaged_apk_path)_tvdpi',
+      ],
+    }],
+    ['language_splits != []', {
+      'action': [
+        '--language-splits=<(language_splits)',
+      ],
+      'outputs': [
+        "<!@(python <(DEPTH)/build/apply_locales.py '<(resource_packaged_apk_path)_ZZLOCALE' <(language_splits))",
+      ],
+    }],
+    ['resource_zips != []', {
+      'action': [
+        '--resource-zips', '>(resource_zips)',
+      ],
+      'inputs': [
+        '>@(resource_zips)',
+      ],
+    }],
+  ],
+}
diff --git a/build/android/preprocess_google_play_services.config.json b/build/android/preprocess_google_play_services.config.json
new file mode 100644
index 0000000..8b3198b
--- /dev/null
+++ b/build/android/preprocess_google_play_services.config.json
@@ -0,0 +1,90 @@
+{
+  "lib_version": "7.3.0",
+  "clients": [
+    "play-services-base",
+    "play-services-cast",
+    "play-services-identity"
+  ],
+  "client_filter": [
+    "res/drawable*",
+    "res/values-af",
+    "res/values-az",
+    "res/values-be",
+    "res/values-bn",
+    "res/values-bn-rBD",
+    "res/values-de-rAT",
+    "res/values-de-rCH",
+    "res/values-en-rIE",
+    "res/values-en-rIN",
+    "res/values-en-rSG",
+    "res/values-en-rZA",
+    "res/values-es-rAR",
+    "res/values-es-rBO",
+    "res/values-es-rCL",
+    "res/values-es-rCO",
+    "res/values-es-rCR",
+    "res/values-es-rDO",
+    "res/values-es-rEC",
+    "res/values-es-rGT",
+    "res/values-es-rHN",
+    "res/values-es-rMX",
+    "res/values-es-rNI",
+    "res/values-es-rPA",
+    "res/values-es-rPE",
+    "res/values-es-rPR",
+    "res/values-es-rPY",
+    "res/values-es-rSV",
+    "res/values-es-rUS",
+    "res/values-es-rUY",
+    "res/values-es-rVE",
+    "res/values-eu-rES",
+    "res/values-fr-rCA",
+    "res/values-fr-rCH",
+    "res/values-gl",
+    "res/values-gl-rES",
+    "res/values-gu",
+    "res/values-he",
+    "res/values-hy",
+    "res/values-hy-rAM",
+    "res/values-in",
+    "res/values-is",
+    "res/values-is-rIS",
+    "res/values-ka",
+    "res/values-ka-rGE",
+    "res/values-kk-rKZ",
+    "res/values-km",
+    "res/values-km-rKH",
+    "res/values-kn",
+    "res/values-kn-rIN",
+    "res/values-ky",
+    "res/values-ky-rKG",
+    "res/values-lo",
+    "res/values-lo-rLA",
+    "res/values-mk-rMK",
+    "res/values-ml",
+    "res/values-ml-rIN",
+    "res/values-mn",
+    "res/values-mn-rMN",
+    "res/values-mo",
+    "res/values-mr",
+    "res/values-mr-rIN",
+    "res/values-ms",
+    "res/values-ms-rMY",
+    "res/values-my-rMM",
+    "res/values-nb",
+    "res/values-ne",
+    "res/values-ne-rNP",
+    "res/values-si",
+    "res/values-si-rLK",
+    "res/values-ta",
+    "res/values-ta-rIN",
+    "res/values-te",
+    "res/values-te-rIN",
+    "res/values-tl",
+    "res/values-ur-rPK",
+    "res/values-uz-rUZ",
+    "res/values-zh",
+    "res/values-zh-rHK",
+    "res/values-zu"
+  ]
+}
diff --git a/build/android/preprocess_google_play_services.py b/build/android/preprocess_google_play_services.py
new file mode 100755
index 0000000..85d239a
--- /dev/null
+++ b/build/android/preprocess_google_play_services.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python
+#
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Prepares the Google Play services split client libraries before usage by
+Chrome's build system.
+
+We need to preprocess Google Play services before using it in Chrome
+builds for 2 main reasons:
+
+- Getting rid of unused resources: unsupported languages, unused
+drawables, etc.
+
+- Merging the different jars so that they can be proguarded more
+easily. This is necessary since debug and test apks get very close
+to the dex limit.
+
+The script is supposed to be used with the maven repository that can be obtained
+by downloading the "extra-google-m2repository" from the Android SDK Manager. It
+also supports importing from already extracted AAR files using the
+--is-extracted-repo flag.
+
+The json config (see the -c argument) file should provide the following fields:
+
+- lib_version: String. Used when building from the maven repository. It should
+  be the package's version (e.g. "7.3.0")
+
+- clients: String array. List of clients to pick. For example, when building
+  from the maven repository, it's the artifactId (e.g. "play-services-base") of
+  each client.
+
+- client_filter: String array. Pattern of files to prune from the clients once
+  extracted. Metacharacters are allowed. (e.g. "res/drawable*")
+
+The output is a directory with the following structure:
+
+    OUT_DIR
+    +-- google-play-services.jar
+    +-- res
+    |   +-- CLIENT_1
+    |   |   +-- color
+    |   |   +-- values
+    |   |   +-- etc.
+    |   +-- CLIENT_2
+    |       +-- ...
+    +-- stub
+        +-- res/[.git-keep-directory]
+        +-- src/android/UnusedStub.java
+
+Requires the `jar` utility in the path.
+
+'''
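+
+# Illustrative invocation (the repository and output paths below are
+# assumptions, not canonical locations):
+#   build/android/preprocess_google_play_services.py \
+#       -r third_party/android_tools/sdk/extras/google/m2repository \
+#       -o out/google_play_services \
+#       -c build/android/preprocess_google_play_services.config.json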
+
+import argparse
+import glob
+import itertools
+import json
+import os
+import shutil
+import stat
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'android', 'gyp'))
+from util import build_utils
+
+
+M2_PKG_PATH = os.path.join('com', 'google', 'android', 'gms')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=("Prepares the Google Play "
+      "services split client libraries before usage by Chrome's build system"))
+  parser.add_argument('-r',
+                      '--repository',
+                      help='The Google Play services repository location',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('-o',
+                      '--out-dir',
+                      help='The output directory',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('-c',
+                      '--config-file',
+                      help='Config file path',
+                      required=True,
+                      metavar='FILE')
+  parser.add_argument('-g',
+                      '--git-friendly',
+                      action='store_true',
+                      default=False,
+                      help='Add a .gitkeep file to the empty directories')
+  parser.add_argument('-x',
+                      '--is-extracted-repo',
+                      action='store_true',
+                      default=False,
+                      help='The provided repository is not made of AAR files.')
+
+  args = parser.parse_args()
+
+  ProcessGooglePlayServices(args.repository,
+                            args.out_dir,
+                            args.config_file,
+                            args.git_friendly,
+                            args.is_extracted_repo)
+
+
+def ProcessGooglePlayServices(repo, out_dir, config_path, git_friendly,
+                              is_extracted_repo):
+  with open(config_path, 'r') as json_file:
+    config = json.load(json_file)
+
+  with build_utils.TempDir() as tmp_root:
+    tmp_paths = _SetupTempDir(tmp_root)
+
+    if is_extracted_repo:
+      _ImportFromExtractedRepo(config, tmp_paths, repo)
+    else:
+      _ImportFromAars(config, tmp_paths, repo)
+
+    _GenerateCombinedJar(tmp_paths)
+    _ProcessResources(config, tmp_paths)
+    _BuildOutput(config, tmp_paths, out_dir, git_friendly)
+
+
+def _SetupTempDir(tmp_root):
+  tmp_paths = {
+    'root': tmp_root,
+    'imported_clients': os.path.join(tmp_root, 'imported_clients'),
+    'extracted_jars': os.path.join(tmp_root, 'jar'),
+    'combined_jar': os.path.join(tmp_root, 'google-play-services.jar'),
+  }
+  os.mkdir(tmp_paths['imported_clients'])
+  os.mkdir(tmp_paths['extracted_jars'])
+
+  return tmp_paths
+
+
+def _SetupOutputDir(out_dir):
+  out_paths = {
+    'root': out_dir,
+    'res': os.path.join(out_dir, 'res'),
+    'jar': os.path.join(out_dir, 'google-play-services.jar'),
+    'stub': os.path.join(out_dir, 'stub'),
+  }
+
+  shutil.rmtree(out_paths['jar'], ignore_errors=True)
+  shutil.rmtree(out_paths['res'], ignore_errors=True)
+  shutil.rmtree(out_paths['stub'], ignore_errors=True)
+
+  return out_paths
+
+
+def _MakeWritable(dir_path):
+  for root, dirs, files in os.walk(dir_path):
+    for path in itertools.chain(dirs, files):
+      st = os.stat(os.path.join(root, path))
+      os.chmod(os.path.join(root, path), st.st_mode | stat.S_IWUSR)
+
+
+def _ImportFromAars(config, tmp_paths, repo):
+  for client in config['clients']:
+    aar_name = '%s-%s.aar' % (client, config['lib_version'])
+    aar_path = os.path.join(repo, M2_PKG_PATH, client,
+                            config['lib_version'], aar_name)
+    aar_out_path = os.path.join(tmp_paths['imported_clients'], client)
+    build_utils.ExtractAll(aar_path, aar_out_path)
+
+    client_jar_path = os.path.join(aar_out_path, 'classes.jar')
+    build_utils.ExtractAll(client_jar_path, tmp_paths['extracted_jars'],
+                           no_clobber=False)
+
+
+def _ImportFromExtractedRepo(config, tmp_paths, repo):
+  # Import the clients
+  try:
+    for client in config['clients']:
+      client_out_dir = os.path.join(tmp_paths['imported_clients'], client)
+      shutil.copytree(os.path.join(repo, client), client_out_dir)
+
+      client_jar_path = os.path.join(client_out_dir, 'classes.jar')
+      build_utils.ExtractAll(client_jar_path, tmp_paths['extracted_jars'],
+                             no_clobber=False)
+  finally:
+    _MakeWritable(tmp_paths['imported_clients'])
+
+
+def _GenerateCombinedJar(tmp_paths):
+  out_file_name = tmp_paths['combined_jar']
+  working_dir = tmp_paths['extracted_jars']
+  cmd_helper.Call(['jar', '-cf', out_file_name, '-C', working_dir, '.'])
+
+
+def _ProcessResources(config, tmp_paths):
+  # Prune unused resources
+  for res_filter in config['client_filter']:
+    glob_pattern = os.path.join(tmp_paths['imported_clients'], '*', res_filter)
+    for prune_target in glob.glob(glob_pattern):
+      shutil.rmtree(prune_target)
+
+
+def _BuildOutput(config, tmp_paths, out_dir, git_friendly):
+  out_paths = _SetupOutputDir(out_dir)
+
+  # Copy the resources to the output dir
+  for client in config['clients']:
+    res_in_tmp_dir = os.path.join(tmp_paths['imported_clients'], client, 'res')
+    if os.path.isdir(res_in_tmp_dir) and os.listdir(res_in_tmp_dir):
+      res_in_final_dir = os.path.join(out_paths['res'], client)
+      shutil.copytree(res_in_tmp_dir, res_in_final_dir)
+
+  # Copy the jar
+  shutil.copyfile(tmp_paths['combined_jar'], out_paths['jar'])
+
+  # Write the java dummy stub. Needed for gyp to create the resource jar
+  stub_location = os.path.join(out_paths['stub'], 'src', 'android')
+  os.makedirs(stub_location)
+  with open(os.path.join(stub_location, 'UnusedStub.java'), 'w') as stub:
+    stub.write('package android;'
+               'public final class UnusedStub {'
+               '    private UnusedStub() {}'
+               '}')
+
+  # Create the main res directory. Will be empty but is needed by gyp
+  stub_res_location = os.path.join(out_paths['stub'], 'res')
+  os.makedirs(stub_res_location)
+  if git_friendly:
+    build_utils.Touch(os.path.join(stub_res_location, '.git-keep-directory'))
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/provision_devices.py b/build/android/provision_devices.py
new file mode 100755
index 0000000..a5f8fc6
--- /dev/null
+++ b/build/android/provision_devices.py
@@ -0,0 +1,349 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provisions Android devices with settings required for bots.
+
+Usage:
+  ./provision_devices.py [-d <device serial number>]
+"""
+
+import argparse
+import json
+import logging
+import os
+import posixpath
+import re
+import subprocess
+import sys
+import time
+
+from pylib import constants
+from pylib import device_settings
+from pylib.device import battery_utils
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+from pylib.utils import timeout_retry
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT,
+                             'third_party', 'android_testrunner'))
+import errors
+
+
+class _DEFAULT_TIMEOUTS(object):
+  # L can take a while to reboot after a wipe.
+  LOLLIPOP = 600
+  PRE_LOLLIPOP = 180
+
+  HELP_TEXT = '{}s on L, {}s on pre-L'.format(LOLLIPOP, PRE_LOLLIPOP)
+
+
+class _PHASES(object):
+  WIPE = 'wipe'
+  PROPERTIES = 'properties'
+  FINISH = 'finish'
+
+  ALL = [WIPE, PROPERTIES, FINISH]
+
+
+def ProvisionDevices(options):
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if options.device:
+    devices = [d for d in devices if d == options.device]
+    if not devices:
+      raise device_errors.DeviceUnreachableError(options.device)
+
+  parallel_devices = device_utils.DeviceUtils.parallel(devices)
+  parallel_devices.pMap(ProvisionDevice, options)
+  if options.auto_reconnect:
+    _LaunchHostHeartbeat()
+  blacklist = device_blacklist.ReadBlacklist()
+  if options.output_device_blacklist:
+    with open(options.output_device_blacklist, 'w') as f:
+      json.dump(blacklist, f)
+  if all(d in blacklist for d in devices):
+    raise device_errors.NoDevicesError
+  return 0
+
+
+def ProvisionDevice(device, options):
+  if options.reboot_timeout:
+    reboot_timeout = options.reboot_timeout
+  elif (device.build_version_sdk >=
+        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
+    reboot_timeout = _DEFAULT_TIMEOUTS.LOLLIPOP
+  else:
+    reboot_timeout = _DEFAULT_TIMEOUTS.PRE_LOLLIPOP
+
+  def should_run_phase(phase_name):
+    return not options.phases or phase_name in options.phases
+
+  def run_phase(phase_func, reboot=True):
+    try:
+      device.WaitUntilFullyBooted(timeout=reboot_timeout, retries=0)
+    except device_errors.CommandTimeoutError:
+      logging.error('Device did not finish booting. Will try to reboot.')
+      device.Reboot(timeout=reboot_timeout)
+    phase_func(device, options)
+    if reboot:
+      device.Reboot(False, retries=0)
+      device.adb.WaitForDevice()
+
+  try:
+    if should_run_phase(_PHASES.WIPE):
+      run_phase(WipeDevice)
+
+    if should_run_phase(_PHASES.PROPERTIES):
+      run_phase(SetProperties)
+
+    if should_run_phase(_PHASES.FINISH):
+      run_phase(FinishProvisioning, reboot=False)
+
+  except (errors.WaitForResponseTimedOutError,
+          device_errors.CommandTimeoutError):
+    logging.exception('Timed out waiting for device %s. Adding to blacklist.',
+                      str(device))
+    device_blacklist.ExtendBlacklist([str(device)])
+
+  except device_errors.CommandFailedError:
+    logging.exception('Failed to provision device %s. Adding to blacklist.',
+                      str(device))
+    device_blacklist.ExtendBlacklist([str(device)])
+
+
+def WipeDevice(device, options):
+  """Wipes data from device, keeping only the adb_keys for authorization.
+
+  After wiping data on a device that has been authorized, adb can still
+  communicate with the device, but after reboot the device will need to be
+  re-authorized because the adb keys file is stored in /data/misc/adb/.
+  Thus, the adb_keys file is rewritten so the device does not need to be
+  re-authorized.
+
+  Arguments:
+    device: the device to wipe
+    options: the parsed command-line options (skip_wipe, adb_key_files)
+  """
+  if options.skip_wipe:
+    return
+
+  try:
+    device.EnableRoot()
+    device_authorized = device.FileExists(constants.ADB_KEYS_FILE)
+    if device_authorized:
+      adb_keys = device.ReadFile(constants.ADB_KEYS_FILE,
+                                 as_root=True).splitlines()
+    device.RunShellCommand(['wipe', 'data'],
+                           as_root=True, check_return=True)
+    device.adb.WaitForDevice()
+
+    if device_authorized:
+      adb_keys_set = set(adb_keys)
+      for adb_key_file in options.adb_key_files or []:
+        try:
+          with open(adb_key_file, 'r') as f:
+            adb_public_keys = f.readlines()
+          adb_keys_set.update(adb_public_keys)
+        except IOError:
+          logging.warning('Unable to find adb keys file %s.' % adb_key_file)
+      _WriteAdbKeysFile(device, '\n'.join(adb_keys_set))
+  except device_errors.CommandFailedError:
+    logging.exception('Possible failure while wiping the device. '
+                      'Attempting to continue.')
+
+
+def _WriteAdbKeysFile(device, adb_keys_string):
+  dir_path = posixpath.dirname(constants.ADB_KEYS_FILE)
+  device.RunShellCommand(['mkdir', '-p', dir_path],
+                         as_root=True, check_return=True)
+  device.RunShellCommand(['restorecon', dir_path],
+                         as_root=True, check_return=True)
+  device.WriteFile(constants.ADB_KEYS_FILE, adb_keys_string, as_root=True)
+  device.RunShellCommand(['restorecon', constants.ADB_KEYS_FILE],
+                         as_root=True, check_return=True)
+
+
+def SetProperties(device, options):
+  try:
+    device.EnableRoot()
+  except device_errors.CommandFailedError as e:
+    logging.warning(str(e))
+
+  _ConfigureLocalProperties(device, options.enable_java_debug)
+  device_settings.ConfigureContentSettings(
+      device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)
+  if options.disable_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_LOCATION_SETTINGS)
+
+  if options.disable_mock_location:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.DISABLE_MOCK_LOCATION_SETTINGS)
+  else:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.ENABLE_MOCK_LOCATION_SETTINGS)
+
+  device_settings.SetLockScreenSettings(device)
+  if options.disable_network:
+    device_settings.ConfigureContentSettings(
+        device, device_settings.NETWORK_DISABLED_SETTINGS)
+
+def _ConfigureLocalProperties(device, java_debug=True):
+  """Set standard readonly testing device properties prior to reboot."""
+  local_props = [
+      'persist.sys.usb.config=adb',
+      'ro.monkey=1',
+      'ro.test_harness=1',
+      'ro.audio.silent=1',
+      'ro.setupwizard.mode=DISABLED',
+      ]
+  if java_debug:
+    local_props.append(
+        '%s=all' % device_utils.DeviceUtils.JAVA_ASSERT_PROPERTY)
+    local_props.append('debug.checkjni=1')
+  try:
+    device.WriteFile(
+        constants.DEVICE_LOCAL_PROPERTIES_PATH,
+        '\n'.join(local_props), as_root=True)
+    # Android will not respect the local props file if it is world writable.
+    device.RunShellCommand(
+        ['chmod', '644', constants.DEVICE_LOCAL_PROPERTIES_PATH],
+        as_root=True, check_return=True)
+  except device_errors.CommandFailedError:
+    logging.exception('Failed to configure local properties.')
+
+
+def FinishProvisioning(device, options):
+  if options.min_battery_level is not None:
+    try:
+      battery = battery_utils.BatteryUtils(device)
+      battery.ChargeDeviceToLevel(options.min_battery_level)
+    except device_errors.CommandFailedError:
+      logging.exception('Unable to charge device to specified level.')
+
+  if options.max_battery_temp is not None:
+    try:
+      battery = battery_utils.BatteryUtils(device)
+      battery.LetBatteryCoolToTemperature(options.max_battery_temp)
+    except device_errors.CommandFailedError:
+      logging.exception('Unable to let battery cool to specified temperature.')
+
+  device.RunShellCommand(
+      ['date', '-s', time.strftime('%Y%m%d.%H%M%S', time.gmtime())],
+      as_root=True, check_return=True)
+  props = device.RunShellCommand('getprop', check_return=True)
+  for prop in props:
+    logging.info('  %s' % prop)
+  if options.auto_reconnect:
+    _PushAndLaunchAdbReboot(device, options.target)
+
+
+def _PushAndLaunchAdbReboot(device, target):
+  """Pushes and launches the adb_reboot binary on the device.
+
+  Arguments:
+    device: The DeviceUtils instance for the device to which the adb_reboot
+            binary should be pushed.
+    target: The build target (example, Debug or Release) which helps in
+            locating the adb_reboot binary.
+  """
+  logging.info('Will push and launch adb_reboot on %s' % str(device))
+  # Kill if adb_reboot is already running.
+  device.KillAll('adb_reboot', blocking=True, timeout=2, quiet=True)
+  # Push adb_reboot
+  logging.info('  Pushing adb_reboot ...')
+  adb_reboot = os.path.join(constants.DIR_SOURCE_ROOT,
+                            'out/%s/adb_reboot' % target)
+  device.PushChangedFiles([(adb_reboot, '/data/local/tmp/')])
+  # Launch adb_reboot
+  logging.info('  Launching adb_reboot ...')
+  device.RunShellCommand(
+      ['/data/local/tmp/adb_reboot'],
+      check_return=True)
+
+
+def _LaunchHostHeartbeat():
+  # Kill if existing host_heartbeat
+  KillHostHeartbeat()
+  # Launch a new host_heartbeat
+  logging.info('Spawning host heartbeat...')
+  subprocess.Popen([os.path.join(constants.DIR_SOURCE_ROOT,
+                                 'build/android/host_heartbeat.py')])
+
+
+def KillHostHeartbeat():
+  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
+  stdout, _ = ps.communicate()
+  matches = re.findall('\\n.*host_heartbeat.*', stdout)
+  for match in matches:
+    logging.info('An instance of host heartbeat is running; killing it.')
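+    # The second whitespace-delimited field of a 'ps aux' line is the PID.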
+    pid = re.findall(r'(\S+)', match)[1]
+    subprocess.call(['kill', str(pid)])
+
+
+def main():
+  # Recommended options on perf bots:
+  # --disable-network
+  #     TODO(tonyg): We eventually want network on. However, currently radios
+  #     can cause perfbots to drain faster than they charge.
+  # --min-battery-level 95
+  #     Some perf bots run benchmarks with USB charging disabled which leads
+  #     to gradual draining of the battery. We must wait for a full charge
+  #     before starting a run in order to keep the devices online.
+
+  parser = argparse.ArgumentParser(
+      description='Provision Android devices with settings required for bots.')
+  parser.add_argument('-d', '--device', metavar='SERIAL',
+                      help='the serial number of the device to be provisioned'
+                      ' (the default is to provision all devices attached)')
+  parser.add_argument('--phase', action='append', choices=_PHASES.ALL,
+                      dest='phases',
+                      help='Phases of provisioning to run. '
+                           '(If omitted, all phases will be run.)')
+  parser.add_argument('--skip-wipe', action='store_true', default=False,
+                      help="don't wipe device data during provisioning")
+  parser.add_argument('--reboot-timeout', metavar='SECS', type=int,
+                      help='when wiping the device, max number of seconds to'
+                      ' wait after each reboot '
+                      '(default: %s)' % _DEFAULT_TIMEOUTS.HELP_TEXT)
+  parser.add_argument('--min-battery-level', type=int, metavar='NUM',
+                      help='wait for the device to reach this minimum battery'
+                      ' level before trying to continue')
+  parser.add_argument('--disable-location', action='store_true',
+                      help='disable Google location services on devices')
+  parser.add_argument('--disable-mock-location', action='store_true',
+                      default=False, help='Set ALLOW_MOCK_LOCATION to false')
+  parser.add_argument('--disable-network', action='store_true',
+                      help='disable network access on devices')
+  parser.add_argument('--disable-java-debug', action='store_false',
+                      dest='enable_java_debug', default=True,
+                      help='disable Java property asserts and JNI checking')
+  parser.add_argument('-t', '--target', default='Debug',
+                      help='the build target (default: %(default)s)')
+  parser.add_argument('-r', '--auto-reconnect', action='store_true',
+                      help='push binary which will reboot the device on adb'
+                      ' disconnections')
+  parser.add_argument('--adb-key-files', type=str, nargs='+',
+                      help='list of adb keys to push to device')
+  parser.add_argument('-v', '--verbose', action='count', default=1,
+                      help='Log more information.')
+  parser.add_argument('--max-battery-temp', type=int, metavar='NUM',
+                      help='Wait for the battery to have this temp or lower.')
+  parser.add_argument('--output-device-blacklist',
+                      help='Json file to output the device blacklist.')
+  args = parser.parse_args()
+  constants.SetBuildType(args.target)
+
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  return ProvisionDevices(args)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/push_libraries.gypi b/build/android/push_libraries.gypi
new file mode 100644
index 0000000..773c44f
--- /dev/null
+++ b/build/android/push_libraries.gypi
@@ -0,0 +1,49 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# pushes stripped shared libraries to the attached Android device. This should
+# only be used with the gyp_managed_install flag set.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries'
+#        'strip_stamp': 'stamp from strip action to block on'
+#        'libraries_source_dir': 'location where stripped libraries are stored'
+#        'device_library_dir': 'location on the device where to put pushed libraries',
+#        'push_stamp': 'file to touch when the action is complete'
+#        'configuration_name': 'The build CONFIGURATION_NAME'
+#      },
+#      'includes': [ '../../build/android/push_libraries.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'action_name': 'push_libraries_<(_target_name)',
+  'message': 'Pushing libraries to device for <(_target_name)',
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/util/md5_check.py',
+    '<(DEPTH)/build/android/gyp/push_libraries.py',
+    '<(strip_stamp)',
+    '<(strip_additional_stamp)',
+    '<(build_device_config_path)',
+    '<(pack_relocations_stamp)',
+  ],
+  'outputs': [
+    '<(push_stamp)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/push_libraries.py',
+    '--build-device-configuration=<(build_device_config_path)',
+    '--libraries-dir=<(libraries_source_dir)',
+    '--device-dir=<(device_library_dir)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(push_stamp)',
+    '--configuration-name=<(configuration_name)',
+  ],
+}
diff --git a/build/android/pylib/OWNERS b/build/android/pylib/OWNERS
new file mode 100644
index 0000000..dbbbba7
--- /dev/null
+++ b/build/android/pylib/OWNERS
@@ -0,0 +1,4 @@
+jbudorick@chromium.org
+klundberg@chromium.org
+navabi@chromium.org
+skyostil@chromium.org
diff --git a/build/android/pylib/__init__.py b/build/android/pylib/__init__.py
new file mode 100644
index 0000000..96196cf
--- /dev/null
+++ b/build/android/pylib/__init__.py
@@ -0,0 +1,3 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/android_commands.py b/build/android/pylib/android_commands.py
new file mode 100644
index 0000000..f7191f7
--- /dev/null
+++ b/build/android/pylib/android_commands.py
@@ -0,0 +1,1976 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to communicate with the device via the adb command.
+
+Assumes adb binary is currently on system path.
+
+Note that this module is deprecated.
+"""
+# TODO(jbudorick): Delete this file once no clients use it.
+
+# pylint: skip-file
+
+import collections
+import datetime
+import inspect
+import logging
+import os
+import random
+import re
+import shlex
+import signal
+import subprocess
+import sys
+import tempfile
+import time
+
+import cmd_helper
+import constants
+import system_properties
+from utils import host_utils
+
+try:
+  from pylib import pexpect
+except ImportError:
+  pexpect = None
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+import adb_interface
+import am_instrument_parser
+import errors
+
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+
+# Pattern to search for the next whole line of pexpect output and capture it
+# into a match group. We can't use ^ and $ for line start and end with pexpect;
+# see http://www.noah.org/python/pexpect/#doc for an explanation why.
+PEXPECT_LINE_RE = re.compile('\n([^\r]*)\r')
+
+# Set the adb shell prompt to be a unique marker that will [hopefully] not
+# appear at the start of any line of a command's output.
+SHELL_PROMPT = '~+~PQ\x17RS~+~'
+
+# Java properties file
+LOCAL_PROPERTIES_PATH = constants.DEVICE_LOCAL_PROPERTIES_PATH
+
+# Property in /data/local.prop that controls Java assertions.
+JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+# Keycode "enum" suitable for passing to AndroidCommands.SendKey().
+KEYCODE_HOME = 3
+KEYCODE_BACK = 4
+KEYCODE_DPAD_UP = 19
+KEYCODE_DPAD_DOWN = 20
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
+MD5SUM_DEVICE_FOLDER = constants.TEST_EXECUTABLE_DIR + '/md5sum/'
+MD5SUM_DEVICE_PATH = MD5SUM_DEVICE_FOLDER + 'md5sum_bin'
+
+PIE_WRAPPER_PATH = constants.TEST_EXECUTABLE_DIR + '/run_pie'
+
+CONTROL_USB_CHARGING_COMMANDS = [
+  {
+    # Nexus 4
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': 'echo 0 > /sys/module/pm8921_charger/parameters/disabled',
+    'disable_command':
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled',
+  },
+  {
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+  },
+]
+
+class DeviceTempFile(object):
+  def __init__(self, android_commands, prefix='temp_file', suffix=''):
+    """Find an unused temporary file path in the devices external directory.
+
+    When this object is closed, the file will be deleted on the device.
+    """
+    self.android_commands = android_commands
+    while True:
+      # TODO(cjhopman): This could actually return the same file in multiple
+      # calls if the caller doesn't write to the files immediately. This is
+      # expected to never happen.
+      i = random.randint(0, 1000000)
+      self.name = '%s/%s-%d-%010d%s' % (
+          android_commands.GetExternalStorage(),
+          prefix, int(time.time()), i, suffix)
+      if not android_commands.FileExistsOnDevice(self.name):
+        break
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, traceback):
+    self.close()
+
+  def close(self):
+    self.android_commands.RunShellCommand('rm ' + self.name)
+
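+# Example usage of DeviceTempFile (an illustrative sketch; `ac` is assumed to
+# be an AndroidCommands instance, defined later in this module):
+#
+#   with DeviceTempFile(ac, suffix='.sh') as f:
+#     ac.SetFileContents(f.name, 'echo hello')
+#     ac.RunShellCommand('. %s' % f.name)
+#
+# The temporary file is removed from the device when the with block exits.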
+
+def GetAVDs():
+  """Returns a list of AVDs."""
+  re_avd = re.compile('^[ ]+Name: ([a-zA-Z0-9_:.-]+)', re.MULTILINE)
+  avds = re_avd.findall(cmd_helper.GetCmdOutput(['android', 'list', 'avd']))
+  return avds
+
+def ResetBadDevices():
+  """Removes the blacklist that keeps track of bad devices for a current
+     build.
+  """
+  device_blacklist.ResetBlacklist()
+
+def ExtendBadDevices(devices):
+  """Adds devices to the blacklist that keeps track of bad devices for a
+     current build.
+
+  The devices listed in the bad devices file will not be returned by
+  GetAttachedDevices.
+
+  Args:
+    devices: list of bad devices to be added to the bad devices file.
+  """
+  device_blacklist.ExtendBlacklist(devices)
+
+
+def GetAttachedDevices(hardware=True, emulator=True, offline=False):
+  """Returns a list of attached, android devices and emulators.
+
+  If a preferred device has been set with ANDROID_SERIAL, it will be first in
+  the returned list. The arguments specify what devices to include in the list.
+
+  Example output:
+
+    * daemon not running. starting it now on port 5037 *
+    * daemon started successfully *
+    List of devices attached
+    027c10494100b4d7        device
+    emulator-5554   offline
+
+  Args:
+    hardware: Include attached actual devices that are online.
+    emulator: Include emulators (i.e. AVDs) currently on the host.
+    offline: Include devices and emulators that are offline.
+
+  Returns: List of devices.
+  """
+  adb_devices_output = cmd_helper.GetCmdOutput([constants.GetAdbPath(),
+                                                'devices'])
+
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\tdevice$', re.MULTILINE)
+  online_devices = re_device.findall(adb_devices_output)
+
+  re_device = re.compile('^(emulator-[0-9]+)\tdevice', re.MULTILINE)
+  emulator_devices = re_device.findall(adb_devices_output)
+
+  re_device = re.compile('^([a-zA-Z0-9_:.-]+)\t(?:offline|unauthorized)$',
+                         re.MULTILINE)
+  offline_devices = re_device.findall(adb_devices_output)
+
+  devices = []
+  # First determine list of online devices (e.g. hardware and/or emulator).
+  if hardware and emulator:
+    devices = online_devices
+  elif hardware:
+    devices = [device for device in online_devices
+               if device not in emulator_devices]
+  elif emulator:
+    devices = emulator_devices
+
+  # Now add offline devices if offline is true
+  if offline:
+    devices = devices + offline_devices
+
+  # Remove any devices in the blacklist.
+  blacklist = device_blacklist.ReadBlacklist()
+  if len(blacklist):
+    logging.info('Avoiding bad devices %s', ' '.join(blacklist))
+    devices = [device for device in devices if device not in blacklist]
+
+  preferred_device = os.environ.get('ANDROID_SERIAL')
+  if preferred_device in devices:
+    devices.remove(preferred_device)
+    devices.insert(0, preferred_device)
+  return devices
+
+
+def IsDeviceAttached(device):
+  """Return true if the device is attached and online."""
+  return device in GetAttachedDevices()
+
+
+def _GetFilesFromRecursiveLsOutput(path, ls_output, re_file, utc_offset=None):
+  """Gets a list of files from `ls` command output.
+
+  Python's os.walk isn't used because it doesn't work over adb shell.
+
+  Args:
+    path: The path to list.
+    ls_output: A list of lines returned by an `ls -lR` command.
+    re_file: A compiled regular expression which parses a line into named groups
+        consisting of at minimum "filename", "date", "time", "size" and
+        optionally "timezone".
+    utc_offset: A 5-character string of the form +HHMM or -HHMM, where HH is a
+        2-digit string giving the number of UTC offset hours, and MM is a
+        2-digit string giving the number of UTC offset minutes. If the input
+        utc_offset is None, will try to look for the value of "timezone" if it
+        is specified in re_file.
+
+  Returns:
+    A dict of {"name": (size, lastmod), ...} where:
+      name: The file name relative to |path|'s directory.
+      size: The file size in bytes (0 for directories).
+      lastmod: The file last modification date in UTC.
+  """
+  re_directory = re.compile('^%s/(?P<dir>[^:]+):$' % re.escape(path))
+  path_dir = os.path.dirname(path)
+
+  current_dir = ''
+  files = {}
+  for line in ls_output:
+    directory_match = re_directory.match(line)
+    if directory_match:
+      current_dir = directory_match.group('dir')
+      continue
+    file_match = re_file.match(line)
+    if file_match:
+      filename = os.path.join(current_dir, file_match.group('filename'))
+      if filename.startswith(path_dir):
+        filename = filename[len(path_dir) + 1:]
+      lastmod = datetime.datetime.strptime(
+          file_match.group('date') + ' ' + file_match.group('time')[:5],
+          '%Y-%m-%d %H:%M')
+      if not utc_offset and 'timezone' in re_file.groupindex:
+        utc_offset = file_match.group('timezone')
+      if isinstance(utc_offset, str) and len(utc_offset) == 5:
+        utc_delta = datetime.timedelta(hours=int(utc_offset[1:3]),
+                                       minutes=int(utc_offset[3:5]))
+        if utc_offset[0:1] == '-':
+          utc_delta = -utc_delta
+        lastmod -= utc_delta
+      files[filename] = (int(file_match.group('size')), lastmod)
+  return files
+
+
+def _ParseMd5SumOutput(md5sum_output):
+  """Returns a list of tuples from the provided md5sum output.
+
+  Args:
+    md5sum_output: output directly from md5sum binary.
+
+  Returns:
+    List of namedtuples with attributes |hash| and |path|, where |path| is the
+    absolute path to the file with an Md5Sum of |hash|.
+  """
+  HashAndPath = collections.namedtuple('HashAndPath', ['hash', 'path'])
+  split_lines = [line.split('  ') for line in md5sum_output]
+  return [HashAndPath._make(s) for s in split_lines if len(s) == 2]
+
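+# Illustrative example: an md5sum output line such as
+#   'd41d8cd98f00b204e9800998ecf8427e  /data/local/tmp/foo'
+# (hash and path separated by two spaces) is parsed into
+#   HashAndPath(hash='d41d8cd9...', path='/data/local/tmp/foo').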
+
+def _HasAdbPushSucceeded(command_output):
+  """Returns whether adb push has succeeded from the provided output."""
+  # TODO(frankf): We should look at the return code instead of the command
+  # output for many of the commands in this file.
+  if not command_output:
+    return True
+  # Success looks like this: "3035 KB/s (12512056 bytes in 4.025s)"
+  # Errors look like this: "failed to copy  ... "
+  if not re.search('^[0-9]', command_output.splitlines()[-1]):
+    logging.critical('PUSH FAILED: ' + command_output)
+    return False
+  return True
+
+
+def GetLogTimestamp(log_line, year):
+  """Returns the timestamp of the given |log_line| in the given year."""
+  try:
+    return datetime.datetime.strptime('%s-%s' % (year, log_line[:18]),
+                                      '%Y-%m-%d %H:%M:%S.%f')
+  except (ValueError, IndexError):
+    logging.critical('Error reading timestamp from ' + log_line)
+    return None
+
+
+class AndroidCommands(object):
+  """Helper class for communicating with Android device via adb."""
+
+  def __init__(self, device=None):
+    """Constructor.
+
+    Args:
+      device: If given, adb commands are only sent to the device with this
+          serial. Otherwise commands are sent to all attached devices.
+    """
+    self._adb = adb_interface.AdbInterface(constants.GetAdbPath())
+    if device:
+      self._adb.SetTargetSerial(device)
+    self._device = device
+    self._logcat = None
+    self.logcat_process = None
+    self._logcat_tmpoutfile = None
+    self._pushed_files = []
+    self._device_utc_offset = None
+    self._potential_push_size = 0
+    self._actual_push_size = 0
+    self._external_storage = ''
+    self._util_wrapper = ''
+    self._system_properties = system_properties.SystemProperties(self.Adb())
+    self._push_if_needed_cache = {}
+    self._control_usb_charging_command = {
+        'command': None,
+        'cached': False,
+    }
+    self._protected_file_access_method_initialized = None
+    self._privileged_command_runner = None
+    self._pie_wrapper = None
+
+  @property
+  def system_properties(self):
+    return self._system_properties
+
+  def _LogShell(self, cmd):
+    """Logs the adb shell command."""
+    if self._device:
+      device_repr = self._device[-4:]
+    else:
+      device_repr = '????'
+    logging.info('[%s]> %s', device_repr, cmd)
+
+  def Adb(self):
+    """Returns our AdbInterface to avoid us wrapping all its methods."""
+    # TODO(tonyg): Goal should be to get rid of this method by making this API
+    # complete and alleviating the need.
+    return self._adb
+
+  def GetDevice(self):
+    """Returns the device serial."""
+    return self._device
+
+  def IsOnline(self):
+    """Checks whether the device is online.
+
+    Returns:
+      True if device is in 'device' mode, False otherwise.
+    """
+    # TODO(aurimas): revert to using adb get-state when android L adb is fixed.
+    #out = self._adb.SendCommand('get-state')
+    #return out.strip() == 'device'
+
+    out = self._adb.SendCommand('devices')
+    for line in out.split('\n'):
+      if self._device in line and 'device' in line:
+        return True
+    return False
+
+  def IsRootEnabled(self):
+    """Checks if root is enabled on the device."""
+    root_test_output = self.RunShellCommand('ls /root') or ['']
+    return 'Permission denied' not in root_test_output[0]
+
+  def EnableAdbRoot(self):
+    """Enables adb root on the device.
+
+    Returns:
+      True: if output from executing adb root was as expected.
+      False: otherwise.
+    """
+    if self.GetBuildType() == 'user':
+      logging.warning("Can't enable root in production builds with type user")
+      return False
+    else:
+      return_value = self._adb.EnableAdbRoot()
+      # EnableAdbRoot inserts a call for wait-for-device only when adb logcat
+      # output matches what is expected. Just to be safe add a call to
+      # wait-for-device.
+      self._adb.SendCommand('wait-for-device')
+      return return_value
+
+  def GetDeviceYear(self):
+    """Returns the year information of the date on device."""
+    return self.RunShellCommand('date +%Y')[0]
+
+  def GetExternalStorage(self):
+    if not self._external_storage:
+      self._external_storage = self.RunShellCommand('echo $EXTERNAL_STORAGE')[0]
+      if not self._external_storage:
+        raise device_errors.CommandFailedError(
+            ['shell', "'echo $EXTERNAL_STORAGE'"],
+            'Unable to find $EXTERNAL_STORAGE')
+    return self._external_storage
+
+  def WaitForDevicePm(self, timeout=120):
+    """Blocks until the device's package manager is available.
+
+    To work around http://b/5201039, we restart the shell and retry if the
+    package manager isn't back after 120 seconds.
+
+    Raises:
+      errors.WaitForResponseTimedOutError after max retries reached.
+    """
+    last_err = None
+    retries = 3
+    while retries:
+      try:
+        self._adb.WaitForDevicePm(wait_time=timeout)
+        return  # Success
+      except errors.WaitForResponseTimedOutError as e:
+        last_err = e
+        logging.warning('Restarting and retrying after timeout: %s', e)
+        retries -= 1
+        self.RestartShell()
+    raise last_err  # Only reached after max retries; re-raise the last error.
+
+  def RestartShell(self):
+    """Restarts the shell on the device. Does not block for it to return."""
+    self.RunShellCommand('stop')
+    self.RunShellCommand('start')
+
+  def Reboot(self, full_reboot=True):
+    """Reboots the device and waits for the package manager to return.
+
+    Args:
+      full_reboot: Whether to fully reboot the device or just restart the shell.
+    """
+    # TODO(torne): hive can't reboot the device either way without breaking the
+    # connection; work out if we can handle this better
+    if os.environ.get('USING_HIVE'):
+      logging.warning('Ignoring reboot request as we are on hive')
+      return
+    if full_reboot or not self.IsRootEnabled():
+      self._adb.SendCommand('reboot')
+      self._system_properties = system_properties.SystemProperties(self.Adb())
+      timeout = 300
+      retries = 1
+      # Wait for the device to disappear.
+      while retries < 10 and self.IsOnline():
+        time.sleep(1)
+        retries += 1
+    else:
+      self.RestartShell()
+      timeout = 120
+    # To run tests we need at least the package manager and the sd card (or
+    # other external storage) to be ready.
+    self.WaitForDevicePm(timeout)
+    self.WaitForSdCardReady(timeout)
+
+  def Shutdown(self):
+    """Shuts down the device."""
+    self._adb.SendCommand('reboot -p')
+    self._system_properties = system_properties.SystemProperties(self.Adb())
+
+  def Uninstall(self, package):
+    """Uninstalls the specified package from the device.
+
+    Args:
+      package: Name of the package to remove.
+
+    Returns:
+      A status string returned by adb uninstall
+    """
+    uninstall_command = 'uninstall %s' % package
+
+    self._LogShell(uninstall_command)
+    return self._adb.SendCommand(uninstall_command, timeout_time=60)
+
+  def Install(self, package_file_path, reinstall=False):
+    """Installs the specified package to the device.
+
+    Args:
+      package_file_path: Path to .apk file to install.
+      reinstall: Reinstall an existing apk, keeping the data.
+
+    Returns:
+      A status string returned by adb install
+    """
+    assert os.path.isfile(package_file_path), ('<%s> is not a file' %
+                                               package_file_path)
+
+    install_cmd = ['install']
+
+    if reinstall:
+      install_cmd.append('-r')
+
+    install_cmd.append(package_file_path)
+    install_cmd = ' '.join(install_cmd)
+
+    self._LogShell(install_cmd)
+    return self._adb.SendCommand(install_cmd,
+                                 timeout_time=2 * 60,
+                                 retry_count=0)
+
+  def ManagedInstall(self, apk_path, keep_data=False, package_name=None,
+                     reboots_on_timeout=2):
+    """Installs specified package and reboots device on timeouts.
+
+    If package_name is supplied, checks if the package is already installed and
+    doesn't reinstall if the apk md5sums match.
+
+    Args:
+      apk_path: Path to .apk file to install.
+      keep_data: Reinstalls instead of uninstalling first, preserving the
+        application data.
+      package_name: Package name (only needed if keep_data=False).
+      reboots_on_timeout: number of times to reboot if the package manager is
+        frozen.
+    """
+    # Check if package is already installed and up to date.
+    if package_name:
+      installed_apk_path = self.GetApplicationPath(package_name)
+      if (installed_apk_path and
+          not self.GetFilesChanged(apk_path, installed_apk_path,
+                                   ignore_filenames=True)):
+        logging.info('Skipped install: identical %s APK already installed' %
+            package_name)
+        return
+    # Install.
+    reboots_left = reboots_on_timeout
+    while True:
+      try:
+        if not keep_data:
+          assert package_name
+          self.Uninstall(package_name)
+        install_status = self.Install(apk_path, reinstall=keep_data)
+        if 'Success' in install_status:
+          return
+        else:
+          raise Exception('Install failure: %s' % install_status)
+      except errors.WaitForResponseTimedOutError:
+        print '@@@STEP_WARNINGS@@@'
+        logging.info('Timeout on installing %s on device %s', apk_path,
+                     self._device)
+
+        if reboots_left <= 0:
+          raise Exception('Install timed out')
+
+        # Force a hard reboot on last attempt
+        self.Reboot(full_reboot=(reboots_left == 1))
+        reboots_left -= 1
+
+  def MakeSystemFolderWritable(self):
+    """Remounts the /system folder rw."""
+    out = self._adb.SendCommand('remount')
+    if out.strip() != 'remount succeeded':
+      raise errors.MsgException('Remount failed: %s' % out)
+
+  def RestartAdbdOnDevice(self):
+    logging.info('Restarting adbd on the device...')
+    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      'build',
+                                      'android',
+                                      'pylib',
+                                      'restart_adbd.sh')
+      self._adb.Push(host_script_path, temp_script_file.name)
+      self.RunShellCommand('. %s' % temp_script_file.name)
+      self._adb.SendCommand('wait-for-device')
+
+  def RestartAdbServer(self):
+    """Restart the adb server."""
+    ret = self.KillAdbServer()
+    if ret != 0:
+      raise errors.MsgException('KillAdbServer: %d' % ret)
+
+    ret = self.StartAdbServer()
+    if ret != 0:
+      raise errors.MsgException('StartAdbServer: %d' % ret)
+
+  @staticmethod
+  def KillAdbServer():
+    """Kill adb server."""
+    adb_cmd = [constants.GetAdbPath(), 'kill-server']
+    ret = cmd_helper.RunCmd(adb_cmd)
+    retry = 0
+    while retry < 3:
+      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+      if ret != 0:
+        # pgrep didn't find adb, kill-server succeeded.
+        return 0
+      retry += 1
+      time.sleep(retry)
+    return ret
+
+  def StartAdbServer(self):
+    """Start adb server."""
+    adb_cmd = ['taskset', '-c', '0', constants.GetAdbPath(), 'start-server']
+    ret, _ = cmd_helper.GetCmdStatusAndOutput(adb_cmd)
+    retry = 0
+    while retry < 3:
+      ret, _ = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+      if ret == 0:
+        # pgrep found adb, start-server succeeded.
+        # Waiting for device to reconnect before returning success.
+        self._adb.SendCommand('wait-for-device')
+        return 0
+      retry += 1
+      time.sleep(retry)
+    return ret
+
+  def WaitForSystemBootCompleted(self, wait_time):
+    """Waits for targeted system's boot_completed flag to be set.
+
+    Args:
+      wait_time: time in seconds to wait
+
+    Raises:
+      WaitForResponseTimedOutError if wait_time elapses and flag still not
+      set.
+    """
+    logging.info('Waiting for system boot completed...')
+    self._adb.SendCommand('wait-for-device')
+    # Now the device is present, but the system may not have finished booting.
+    # Query the sys.boot_completed flag with a basic command.
+    boot_completed = False
+    attempts = 0
+    wait_period = 5
+    while not boot_completed and (attempts * wait_period) < wait_time:
+      output = self.system_properties['sys.boot_completed']
+      output = output.strip()
+      if output == '1':
+        boot_completed = True
+      else:
+        # If 'error: xxx' is returned when querying the flag, the adb server
+        # has lost the connection to the emulator, so restart the adb server.
+        if 'error:' in output:
+          self.RestartAdbServer()
+        time.sleep(wait_period)
+        attempts += 1
+    if not boot_completed:
+      raise errors.WaitForResponseTimedOutError(
+          'sys.boot_completed flag was not set after %s seconds' % wait_time)
+
+  def WaitForSdCardReady(self, timeout_time):
+    """Wait for the SD card ready before pushing data into it."""
+    logging.info('Waiting for SD card ready...')
+    sdcard_ready = False
+    attempts = 0
+    wait_period = 5
+    external_storage = self.GetExternalStorage()
+    while not sdcard_ready and attempts * wait_period < timeout_time:
+      output = self.RunShellCommand('ls ' + external_storage)
+      if output:
+        sdcard_ready = True
+      else:
+        time.sleep(wait_period)
+        attempts += 1
+    if not sdcard_ready:
+      raise errors.WaitForResponseTimedOutError(
+          'SD card not ready after %s seconds' % timeout_time)
+
+  def GetAndroidToolStatusAndOutput(self, command, lib_path=None, *args, **kw):
+    """Runs a native Android binary, wrapping the command as necessary.
+
+    This is a specialization of GetShellCommandStatusAndOutput, which is meant
+    for running tools/android/ binaries and handles properly: (1) setting the
+    lib path (for component=shared_library), (2) using the PIE wrapper on ICS.
+    See crbug.com/373219 for more context.
+
+    Args:
+      command: String containing the command to send.
+      lib_path: (optional) path to the folder containing the dependent libs.
+      Other arguments are the same as for GetCmdStatusAndOutput.
+    """
+    # The first time this command is run, the device is inspected to check
+    # whether a wrapper for running PIE executables is needed (only on Android
+    # ICS). The result is cached, so the wrapper is pushed only once.
+    if self._pie_wrapper is None:
+      # None: did not check; '': did check and not needed; '/path': use /path.
+      self._pie_wrapper = ''
+      if self.GetBuildId().startswith('I'):  # Ixxxx = Android ICS.
+        run_pie_dist_path = os.path.join(constants.GetOutDirectory(), 'run_pie')
+        assert os.path.exists(run_pie_dist_path), 'Please build run_pie'
+        # The PIE loader must be pushed manually (i.e. no PushIfNeeded) because
+        # PushIfNeeded requires md5sum and md5sum requires the wrapper as well.
+        adb_command = 'push %s %s' % (run_pie_dist_path, PIE_WRAPPER_PATH)
+        assert _HasAdbPushSucceeded(self._adb.SendCommand(adb_command))
+        self._pie_wrapper = PIE_WRAPPER_PATH
+
+    if self._pie_wrapper:
+      command = '%s %s' % (self._pie_wrapper, command)
+    if lib_path:
+      command = 'LD_LIBRARY_PATH=%s %s' % (lib_path, command)
+    return self.GetShellCommandStatusAndOutput(command, *args, **kw)
+
+  # It is tempting to turn this function into a generator, however this is not
+  # possible without using a private (local) adb_shell instance (to ensure no
+  # other command interleaves usage of it), which would defeat the main aim of
+  # being able to reuse the adb shell instance across commands.
+  def RunShellCommand(self, command, timeout_time=20, log_result=False):
+    """Send a command to the adb shell and return the result.
+
+    Args:
+      command: String containing the shell command to send.
+      timeout_time: Number of seconds to wait for command to respond before
+        retrying, used by AdbInterface.SendShellCommand.
+      log_result: Boolean to indicate whether we should log the result of the
+                  shell command.
+
+    Returns:
+      list containing the lines of output received from running the command
+    """
+    self._LogShell(command)
+    if "'" in command:
+      command = command.replace('\'', '\'\\\'\'')
+    result = self._adb.SendShellCommand(
+        "'%s'" % command, timeout_time).splitlines()
+    # TODO(b.kelemen): we should really be able to drop the stderr of the
+    # command or raise an exception based on what the caller wants.
+    result = [l for l in result if not l.startswith('WARNING')]
+    if ['error: device not found'] == result:
+      raise errors.DeviceUnresponsiveError('device not found')
+    if log_result:
+      self._LogShell('\n'.join(result))
+    return result
+
+  def GetShellCommandStatusAndOutput(self, command, timeout_time=20,
+                                     log_result=False):
+    """See RunShellCommand() above.
+
+    Returns:
+      The tuple (exit code, list of output lines).
+    """
+    lines = self.RunShellCommand(
+        command + '; echo %$?', timeout_time, log_result)
+    last_line = lines[-1]
+    status_pos = last_line.rfind('%')
+    assert status_pos >= 0
+    status = int(last_line[status_pos + 1:])
+    if status_pos == 0:
+      lines = lines[:-1]
+    else:
+      lines = lines[:-1] + [last_line[:status_pos]]
+    return (status, lines)
+
+  def KillAll(self, process, signum=9, with_su=False):
+    """Android version of killall, connected via adb.
+
+    Args:
+      process: name of the process to kill off.
+      signum: signal to use, 9 (SIGKILL) by default.
+      with_su: whether or not to use su to kill the processes.
+
+    Returns:
+      the number of processes killed
+    """
+    pids = self.ExtractPid(process)
+    if pids:
+      cmd = 'kill -%d %s' % (signum, ' '.join(pids))
+      if with_su:
+        self.RunShellCommandWithSU(cmd)
+      else:
+        self.RunShellCommand(cmd)
+    return len(pids)
+
+  def KillAllBlocking(self, process, timeout_sec, signum=9, with_su=False):
+    """Blocking version of killall, connected via adb.
+
+    This waits until no process matching the corresponding name appears in ps'
+    output anymore.
+
+    Args:
+      process: name of the process to kill off
+      timeout_sec: the timeout in seconds
+      signum: same as |KillAll|
+      with_su: same as |KillAll|
+    Returns:
+      the number of processes killed
+    """
+    processes_killed = self.KillAll(process, signum=signum, with_su=with_su)
+    if processes_killed:
+      elapsed = 0
+      wait_period = 0.1
+      # Note that this doesn't take into account the time spent in ExtractPid().
+      while self.ExtractPid(process) and elapsed < timeout_sec:
+        time.sleep(wait_period)
+        elapsed += wait_period
+      if elapsed >= timeout_sec:
+        return processes_killed - len(self.ExtractPid(process))
+    return processes_killed
+
+  @staticmethod
+  def _GetActivityCommand(package, activity, wait_for_completion, action,
+                          category, data, extras, trace_file_name, force_stop,
+                          flags):
+    """Creates command to start |package|'s activity on the device.
+
+    Args - as for StartActivity
+
+    Returns:
+      the command to run on the target to start the activity
+    """
+    cmd = 'am start -a %s' % action
+    if force_stop:
+      cmd += ' -S'
+    if wait_for_completion:
+      cmd += ' -W'
+    if category:
+      cmd += ' -c %s' % category
+    if package and activity:
+      cmd += ' -n %s/%s' % (package, activity)
+    if data:
+      cmd += ' -d "%s"' % data
+    if extras:
+      for key in extras:
+        value = extras[key]
+        if isinstance(value, str):
+          cmd += ' --es'
+        elif isinstance(value, bool):
+          cmd += ' --ez'
+        elif isinstance(value, int):
+          cmd += ' --ei'
+        else:
+          raise NotImplementedError(
+              'Need to teach StartActivity how to pass %s extras' % type(value))
+        cmd += ' %s %s' % (key, value)
+    if trace_file_name:
+      cmd += ' --start-profiler ' + trace_file_name
+    if flags:
+      cmd += ' -f %s' % flags
+    return cmd
+
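+  # For example (illustrative values only), StartActivity('com.example.app',
+  # '.Main', data='http://example.com/') builds and runs roughly:
+  #   am start -a android.intent.action.VIEW -n com.example.app/.Main \
+  #       -d "http://example.com/"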
+  def StartActivity(self, package, activity, wait_for_completion=False,
+                    action='android.intent.action.VIEW',
+                    category=None, data=None,
+                    extras=None, trace_file_name=None,
+                    force_stop=False, flags=None):
+    """Starts |package|'s activity on the device.
+
+    Args:
+      package: Name of package to start (e.g. 'com.google.android.apps.chrome').
+      activity: Name of activity (e.g. '.Main' or
+        'com.google.android.apps.chrome.Main').
+      wait_for_completion: wait for the activity to finish launching (-W flag).
+      action: string (e.g. "android.intent.action.MAIN"). Default is VIEW.
+      category: string (e.g. "android.intent.category.HOME")
+      data: Data string to pass to activity (e.g. 'http://www.example.com/').
+      extras: Dict of extras to pass to activity. Values are significant.
+      trace_file_name: If used, turns on and saves the trace to this file name.
+      force_stop: force stop the target app before starting the activity (-S
+        flag).
+    Returns:
+      The output of the underlying command as a list of lines.
+    """
+    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
+                                   action, category, data, extras,
+                                   trace_file_name, force_stop, flags)
+    return self.RunShellCommand(cmd)
+
+  def StartActivityTimed(self, package, activity, wait_for_completion=False,
+                         action='android.intent.action.VIEW',
+                         category=None, data=None,
+                         extras=None, trace_file_name=None,
+                         force_stop=False, flags=None):
+    """Starts |package|'s activity on the device, returning the start time
+
+    Args - as for StartActivity
+
+    Returns:
+      A tuple containing:
+        - the output of the underlying command as a list of lines, and
+        - a timestamp string for the time at which the activity started
+    """
+    cmd = self._GetActivityCommand(package, activity, wait_for_completion,
+                                   action, category, data, extras,
+                                   trace_file_name, force_stop, flags)
+    self.StartMonitoringLogcat()
+    out = self.RunShellCommand('log starting activity; ' + cmd)
+    activity_started_re = re.compile('.*starting activity.*')
+    m = self.WaitForLogMatch(activity_started_re, None)
+    assert m
+    start_line = m.group(0)
+    return (out, GetLogTimestamp(start_line, self.GetDeviceYear()))
+
+  def StartCrashUploadService(self, package):
+    # TODO(frankf): We really need a python wrapper around Intent
+    # to be shared with StartActivity/BroadcastIntent.
+    cmd = (
+      'am startservice -a %s.crash.ACTION_FIND_ALL -n '
+      '%s/%s.crash.MinidumpUploadService' %
+      (constants.PACKAGE_INFO['chrome'].package,
+       package,
+       constants.PACKAGE_INFO['chrome'].package))
+    am_output = self.RunShellCommandWithSU(cmd)
+    assert am_output and 'Starting' in am_output[-1], (
+        'Service failed to start: %s' % am_output)
+    time.sleep(15)
+
+  def BroadcastIntent(self, package, intent, *args):
+    """Send a broadcast intent.
+
+    Args:
+      package: Name of package containing the intent.
+      intent: Name of the intent.
+      args: Optional extra arguments for the intent.
+    """
+    cmd = 'am broadcast -a %s.%s %s' % (package, intent, ' '.join(args))
+    self.RunShellCommand(cmd)
+
+  def GoHome(self):
+    """Tell the device to return to the home screen. Blocks until completion."""
+    self.RunShellCommand('am start -W '
+        '-a android.intent.action.MAIN -c android.intent.category.HOME')
+
+  def CloseApplication(self, package):
+    """Attempt to close down the application, using increasing violence.
+
+    Args:
+      package: Name of the process to kill off, e.g.
+      com.google.android.apps.chrome
+    """
+    self.RunShellCommand('am force-stop ' + package)
+
+  def GetApplicationPath(self, package):
+    """Get the installed apk path on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      Path to the apk on the device if it exists, None otherwise.
+    """
+    pm_path_output = self.RunShellCommand('pm path ' + package)
+    # The path output is non-empty if and only if the package exists.
+    if pm_path_output:
+      # pm_path_output is of the form: "package:/path/to/foo.apk"
+      return pm_path_output[0].split(':')[1]
+    else:
+      return None
+
+  def ClearApplicationState(self, package):
+    """Closes and clears all state for the given |package|."""
+    # Check that the package exists before clearing it. Necessary because
+    # calling pm clear on a package that doesn't exist may never return.
+    pm_path_output = self.RunShellCommand('pm path ' + package)
+    # The path output is non-empty if and only if the package exists.
+    if pm_path_output:
+      self.RunShellCommand('pm clear ' + package)
+
+  def SendKeyEvent(self, keycode):
+    """Sends keycode to the device.
+
+    Args:
+      keycode: Numeric keycode to send (see "enum" at top of file).
+    """
+    self.RunShellCommand('input keyevent %d' % keycode)
+
+  def _RunMd5Sum(self, host_path, device_path):
+    """Gets the md5sum of a host path and device path.
+
+    Args:
+      host_path: Path (file or directory) on the host.
+      device_path: Path on the device.
+
+    Returns:
+      A tuple containing lists of the host and device md5sum results as
+      created by _ParseMd5SumOutput().
+    """
+    md5sum_dist_path = os.path.join(constants.GetOutDirectory(),
+                                    'md5sum_dist')
+    assert os.path.exists(md5sum_dist_path), 'Please build md5sum.'
+    md5sum_dist_mtime = os.stat(md5sum_dist_path).st_mtime
+    if (md5sum_dist_path not in self._push_if_needed_cache or
+        self._push_if_needed_cache[md5sum_dist_path] != md5sum_dist_mtime):
+      command = 'push %s %s' % (md5sum_dist_path, MD5SUM_DEVICE_FOLDER)
+      assert _HasAdbPushSucceeded(self._adb.SendCommand(command))
+      self._push_if_needed_cache[md5sum_dist_path] = md5sum_dist_mtime
+
+    (_, md5_device_output) = self.GetAndroidToolStatusAndOutput(
+        self._util_wrapper + ' ' + MD5SUM_DEVICE_PATH + ' ' + device_path,
+        lib_path=MD5SUM_DEVICE_FOLDER,
+        timeout_time=2 * 60)
+    device_hash_tuples = _ParseMd5SumOutput(md5_device_output)
+    assert os.path.exists(host_path), 'Local path not found %s' % host_path
+    md5sum_output = cmd_helper.GetCmdOutput(
+        [os.path.join(constants.GetOutDirectory(), 'md5sum_bin_host'),
+         host_path])
+    host_hash_tuples = _ParseMd5SumOutput(md5sum_output.splitlines())
+    return (host_hash_tuples, device_hash_tuples)
+
+  def GetFilesChanged(self, host_path, device_path, ignore_filenames=False):
+    """Compares the md5sum of a host path against a device path.
+
+    Note: Ignores extra files on the device.
+
+    Args:
+      host_path: Path (file or directory) on the host.
+      device_path: Path on the device.
+      ignore_filenames: If True only the file contents are considered when
+          checking whether a file has changed, otherwise the relative path
+          must also match.
+
+    Returns:
+      A list of tuples of the form (host_path, device_path) for files whose
+      md5sums do not match.
+    """
+
+    # Md5Sum resolves symbolic links in path names, so the calculation of
+    # relative path names from its output needs the real path names of the
+    # base directories. Having calculated these, they are used throughout the
+    # function, which makes us less subject to any future changes to Md5Sum.
+    real_host_path = os.path.realpath(host_path)
+    real_device_path = self.RunShellCommand('realpath "%s"' % device_path)[0]
+
+    host_hash_tuples, device_hash_tuples = self._RunMd5Sum(
+        real_host_path, real_device_path)
+
+    if len(host_hash_tuples) > len(device_hash_tuples):
+      logging.info('%s files do not exist on the device' %
+                   (len(host_hash_tuples) - len(device_hash_tuples)))
+
+    host_rel = [(os.path.relpath(os.path.normpath(t.path), real_host_path),
+                 t.hash)
+                for t in host_hash_tuples]
+
+    if os.path.isdir(real_host_path):
+      def RelToRealPaths(rel_path):
+        return (os.path.join(real_host_path, rel_path),
+                os.path.join(real_device_path, rel_path))
+    else:
+      assert len(host_rel) == 1
+      def RelToRealPaths(_):
+        return (real_host_path, real_device_path)
+
+    if ignore_filenames:
+      # If we are ignoring file names, then we want to push any file for which
+      # a file with an equivalent MD5 sum does not exist on the device.
+      device_hashes = set([h.hash for h in device_hash_tuples])
+      ShouldPush = lambda p, h: h not in device_hashes
+    else:
+      # Otherwise, we want to push any file on the host for which a file with
+      # an equivalent MD5 sum does not exist at the same relative path on the
+      # device.
+      device_rel = dict([(os.path.relpath(os.path.normpath(t.path),
+                                          real_device_path),
+                          t.hash)
+                         for t in device_hash_tuples])
+      ShouldPush = lambda p, h: p not in device_rel or h != device_rel[p]
+
+    return [RelToRealPaths(path) for path, host_hash in host_rel
+            if ShouldPush(path, host_hash)]
+
+  def PushIfNeeded(self, host_path, device_path):
+    """Pushes |host_path| to |device_path|.
+
+    Works for files and directories. This method skips copying anything under
+    |host_path| that already exists on the device with the same hash.
+
+    All pushed files can be removed by calling RemovePushedFiles().
+    """
+    MAX_INDIVIDUAL_PUSHES = 50
+    if not os.path.exists(host_path):
+      raise device_errors.CommandFailedError(
+          'Local path not found %s' % host_path, device=str(self))
+
+    # See if the file on the host changed since the last push (if any) and
+    # return early if it didn't. Note that this shortcut assumes that the tests
+    # on the device don't modify the files.
+    if not os.path.isdir(host_path):
+      if host_path in self._push_if_needed_cache:
+        host_path_mtime = self._push_if_needed_cache[host_path]
+        if host_path_mtime == os.stat(host_path).st_mtime:
+          return
+
+    size = host_utils.GetRecursiveDiskUsage(host_path)
+    self._pushed_files.append(device_path)
+    self._potential_push_size += size
+
+    if os.path.isdir(host_path):
+      self.RunShellCommand('mkdir -p "%s"' % device_path)
+
+    changed_files = self.GetFilesChanged(host_path, device_path)
+    logging.info('Found %d files that need to be pushed to %s',
+        len(changed_files), device_path)
+    if not changed_files:
+      return
+
+    def Push(host, device):
+      # NOTE: We can't use adb_interface.Push() because it hardcodes a timeout
+      # of 60 seconds which isn't sufficient for a lot of users of this method.
+      push_command = 'push %s %s' % (host, device)
+      self._LogShell(push_command)
+
+      # Retry push with increasing backoff if the device is busy.
+      retry = 0
+      while True:
+        output = self._adb.SendCommand(push_command, timeout_time=30 * 60)
+        if _HasAdbPushSucceeded(output):
+          if not os.path.isdir(host_path):
+            self._push_if_needed_cache[host] = os.stat(host).st_mtime
+          return
+        if retry < 3:
+          retry += 1
+          wait_time = 5 * retry
+          logging.error('Push failed, retrying in %d seconds: %s' %
+                        (wait_time, output))
+          time.sleep(wait_time)
+        else:
+          raise Exception('Push failed: %s' % output)
+
+    diff_size = 0
+    if len(changed_files) <= MAX_INDIVIDUAL_PUSHES:
+      diff_size = sum(host_utils.GetRecursiveDiskUsage(f[0])
+                      for f in changed_files)
+
+    # TODO(craigdh): Replace this educated guess with a heuristic that
+    # approximates the push time for each method.
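+    # If many files changed, or the changed files account for more than half
+    # of the total size, pushing the whole tree at once is assumed to be
+    # cheaper than pushing the changed files individually.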
+    if len(changed_files) > MAX_INDIVIDUAL_PUSHES or diff_size > 0.5 * size:
+      self._actual_push_size += size
+      Push(host_path, device_path)
+    else:
+      for f in changed_files:
+        Push(f[0], f[1])
+      self._actual_push_size += diff_size
+
+  def GetPushSizeInfo(self):
+    """Get total size of pushes to the device done via PushIfNeeded()
+
+    Returns:
+      A tuple:
+        1. Total size of push requests to PushIfNeeded (MB)
+        2. Total size that was actually pushed (MB)
+    """
+    return (self._potential_push_size, self._actual_push_size)
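+    # Illustrative usage (an assumption, not part of the original change;
+    # |ac| is a hypothetical AndroidCommands instance):
+    #   potential_mb, actual_mb = ac.GetPushSizeInfo()
+    #   logging.info('Pushed %.1f MB of %.1f MB requested',
+    #                actual_mb, potential_mb)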
+
+  def GetFileContents(self, filename, log_result=False):
+    """Gets contents from the file specified by |filename|."""
+    return self.RunShellCommand('cat "%s" 2>/dev/null' % filename,
+                                log_result=log_result)
+
+  def SetFileContents(self, filename, contents):
+    """Writes |contents| to the file specified by |filename|."""
+    with tempfile.NamedTemporaryFile() as f:
+      f.write(contents)
+      f.flush()
+      self._adb.Push(f.name, filename)
+
+  def RunShellCommandWithSU(self, command, timeout_time=20, log_result=False):
+    return self.RunShellCommand('su -c %s' % command, timeout_time, log_result)
+
+  def CanAccessProtectedFileContents(self):
+    """Returns True if Get/SetProtectedFileContents would work via "su" or adb
+    shell running as root.
+
+    Devices running user builds don't have adb root, but may provide "su" which
+    can be used for accessing protected files.
+    """
+    return (self._GetProtectedFileCommandRunner() != None)
+
+  def _GetProtectedFileCommandRunner(self):
+    """Finds the best method to access protected files on the device.
+
+    Returns:
+      1. None when privileged files cannot be accessed on the device.
+      2. Otherwise: A function taking a single parameter: a string with command
+         line arguments. Running that function executes the command with
+         the appropriate method.
+    """
+    if self._protected_file_access_method_initialized:
+      return self._privileged_command_runner
+
+    self._privileged_command_runner = None
+    self._protected_file_access_method_initialized = True
+
+    for cmd in [self.RunShellCommand, self.RunShellCommandWithSU]:
+      # Get contents of the auxv vector for the init(8) process from a small
+      # binary file that always exists on linux and is always read-protected.
+      contents = cmd('cat /proc/1/auxv')
+      # The leading 4 or 8 bytes of the auxv vector hold a_type. There are not
+      # many reserved a_type values, hence byte 2 must always be '\0' for a
+      # realistic auxv. See /usr/include/elf.h.
+      if len(contents) > 0 and (contents[0][2] == '\0'):
+        self._privileged_command_runner = cmd
+        break
+    return self._privileged_command_runner
+
+  def GetProtectedFileContents(self, filename):
+    """Gets contents from the protected file specified by |filename|.
+
+    This is potentially less efficient than GetFileContents.
+    """
+    command = 'cat "%s" 2> /dev/null' % filename
+    command_runner = self._GetProtectedFileCommandRunner()
+    if command_runner:
+      return command_runner(command)
+    else:
+      logging.warning('Could not access protected file: %s' % filename)
+      return []
+
+  def SetProtectedFileContents(self, filename, contents):
+    """Writes |contents| to the protected file specified by |filename|.
+
+    This is less efficient than SetFileContents.
+    """
+    with DeviceTempFile(self) as temp_file:
+      with DeviceTempFile(self, suffix=".sh") as temp_script:
+        # Put the contents in a temporary file
+        self.SetFileContents(temp_file.name, contents)
+        # Create a script to copy the file contents to its final destination
+        self.SetFileContents(temp_script.name,
+                             'cat %s > %s' % (temp_file.name, filename))
+
+        command = 'sh %s' % temp_script.name
+        command_runner = self._GetProtectedFileCommandRunner()
+        if command_runner:
+          return command_runner(command)
+        else:
+          logging.warning(
+              'Could not set contents of protected file: %s' % filename)
+
+
+  def RemovePushedFiles(self):
+    """Removes all files pushed with PushIfNeeded() from the device."""
+    for p in self._pushed_files:
+      self.RunShellCommand('rm -r %s' % p, timeout_time=2 * 60)
+
+  def ListPathContents(self, path):
+    """Lists files in all subdirectories of |path|.
+
+    Args:
+      path: The path to list.
+
+    Returns:
+      A dict of {"name": (size, lastmod), ...}.
+    """
+    # Example output:
+    # /foo/bar:
+    # -rw-r----- user group   102 2011-05-12 12:29:54.131623387 +0100 baz.txt
+    re_file = re.compile('^-(?P<perms>[^\s]+)\s+'
+                         '(?P<user>[^\s]+)\s+'
+                         '(?P<group>[^\s]+)\s+'
+                         '(?P<size>[^\s]+)\s+'
+                         '(?P<date>[^\s]+)\s+'
+                         '(?P<time>[^\s]+)\s+'
+                         '(?P<filename>[^\s]+)$')
+    return _GetFilesFromRecursiveLsOutput(
+        path, self.RunShellCommand('ls -lR %s' % path), re_file,
+        self.GetUtcOffset())
+
+  def GetUtcOffset(self):
+    if not self._device_utc_offset:
+      self._device_utc_offset = self.RunShellCommand('date +%z')[0]
+    return self._device_utc_offset
+
+  def SetJavaAssertsEnabled(self, enable):
+    """Sets or removes the device java assertions property.
+
+    Args:
+      enable: If True the property will be set.
+
+    Returns:
+      True if the file was modified (reboot is required for it to take effect).
+    """
+    # First ensure the desired property is persisted.
+    temp_props_file = tempfile.NamedTemporaryFile()
+    properties = ''
+    if self._adb.Pull(LOCAL_PROPERTIES_PATH, temp_props_file.name):
+      with open(temp_props_file.name) as f:
+        properties = f.read()
+    re_search = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                           r'\s*=\s*all\s*$', re.MULTILINE)
+    if enable != bool(re.search(re_search, properties)):
+      re_replace = re.compile(r'^\s*' + re.escape(JAVA_ASSERT_PROPERTY) +
+                              r'\s*=\s*\w+\s*$', re.MULTILINE)
+      properties = re.sub(re_replace, '', properties)
+      if enable:
+        properties += '\n%s=all\n' % JAVA_ASSERT_PROPERTY
+
+      file(temp_props_file.name, 'w').write(properties)
+      self._adb.Push(temp_props_file.name, LOCAL_PROPERTIES_PATH)
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    was_set = 'all' in self.system_properties[JAVA_ASSERT_PROPERTY]
+    if was_set == enable:
+      return False
+    self.system_properties[JAVA_ASSERT_PROPERTY] = enable and 'all' or ''
+    return True
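+    # Illustrative usage (an assumption, not part of the original change;
+    # |ac| is a hypothetical AndroidCommands instance):
+    #   if ac.SetJavaAssertsEnabled(True):
+    #     <reboot the device so the property takes effect>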
+
+  def GetBuildId(self):
+    """Returns the build ID of the system (e.g. JRM79C)."""
+    build_id = self.system_properties['ro.build.id']
+    assert build_id
+    return build_id
+
+  def GetBuildType(self):
+    """Returns the build type of the system (e.g. eng)."""
+    build_type = self.system_properties['ro.build.type']
+    assert build_type
+    return build_type
+
+  def GetBuildProduct(self):
+    """Returns the build product of the device (e.g. maguro)."""
+    build_product = self.system_properties['ro.build.product']
+    assert build_product
+    return build_product
+
+  def GetProductName(self):
+    """Returns the product name of the device (e.g. takju)."""
+    name = self.system_properties['ro.product.name']
+    assert name
+    return name
+
+  def GetBuildFingerprint(self):
+    """Returns the build fingerprint of the device."""
+    build_fingerprint = self.system_properties['ro.build.fingerprint']
+    assert build_fingerprint
+    return build_fingerprint
+
+  def GetDescription(self):
+    """Returns the description of the system.
+
+    For example, "yakju-userdebug 4.1 JRN54F 364167 dev-keys".
+    """
+    description = self.system_properties['ro.build.description']
+    assert description
+    return description
+
+  def GetProductModel(self):
+    """Returns the name of the product model (e.g. "Galaxy Nexus") """
+    model = self.system_properties['ro.product.model']
+    assert model
+    return model
+
+  def GetWifiIP(self):
+    """Returns the wifi IP on the device."""
+    wifi_ip = self.system_properties['dhcp.wlan0.ipaddress']
+    # Do not assert here. Devices (e.g. emulators) may not have a WifiIP.
+    return wifi_ip
+
+  def GetSubscriberInfo(self):
+    """Returns the device subscriber info (e.g. GSM and device ID) as string."""
+    iphone_sub = self.RunShellCommand('dumpsys iphonesubinfo')
+    # Do not assert here. Devices (e.g. Nakasi on K) may not have iphonesubinfo.
+    return '\n'.join(iphone_sub)
+
+  def GetBatteryInfo(self):
+    """Returns a {str: str} dict of battery info (e.g. status, level, etc)."""
+    battery = self.RunShellCommand('dumpsys battery')
+    assert battery
+    battery_info = {}
+    for line in battery[1:]:
+      k, _, v = line.partition(': ')
+      battery_info[k.strip()] = v.strip()
+    return battery_info
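+    # Illustrative shape of the returned dict (values are an assumed example,
+    # not part of the original change):
+    #   {'AC powered': 'false', 'USB powered': 'true', 'level': '97',
+    #    'status': '5', 'temperature': '250', ...}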
+
+  def GetSetupWizardStatus(self):
+    """Returns the status of the device setup wizard (e.g. DISABLED)."""
+    status = self.system_properties['ro.setupwizard.mode']
+    # On some devices, the status is empty if not otherwise set. In such cases
+    # the caller should expect an empty string to be returned.
+    return status
+
+  def StartMonitoringLogcat(self, clear=True, logfile=None, filters=None):
+    """Starts monitoring the output of logcat, for use with WaitForLogMatch.
+
+    Args:
+      clear: If True the existing logcat output will be cleared, to avoid
+             matching historical output lurking in the log.
+      logfile: Optional file object to which the logcat output is also
+               written.
+      filters: A list of logcat filters to be used.
+    """
+    if clear:
+      self.RunShellCommand('logcat -c')
+    args = []
+    if self._adb._target_arg:
+      args += shlex.split(self._adb._target_arg)
+    args += ['logcat', '-v', 'threadtime']
+    if filters:
+      args.extend(filters)
+    else:
+      args.append('*:v')
+
+    if logfile:
+      logfile = NewLineNormalizer(logfile)
+
+    # Spawn logcat and synchronize with it.
+    for _ in range(4):
+      self._logcat = pexpect.spawn(constants.GetAdbPath(), args, timeout=10,
+                                   logfile=logfile)
+      if not clear or self.SyncLogCat():
+        break
+      self._logcat.close(force=True)
+    else:
+      logging.critical('Error reading from logcat: ' + str(self._logcat.match))
+      sys.exit(1)
+
+  def SyncLogCat(self):
+    """Synchronize with logcat.
+
+    Synchronize with the monitored logcat so that WaitForLogMatch will only
+    consider new messages that are received after this point in time.
+
+    Returns:
+      True if the synchronization succeeded.
+    """
+    assert self._logcat
+    tag = 'logcat_sync_%s' % time.time()
+    self.RunShellCommand('log ' + tag)
+    return self._logcat.expect([tag, pexpect.EOF, pexpect.TIMEOUT]) == 0
+
+  def GetMonitoredLogCat(self):
+    """Returns an "adb logcat" command as created by pexpected.spawn."""
+    if not self._logcat:
+      self.StartMonitoringLogcat(clear=False)
+    return self._logcat
+
+  def WaitForLogMatch(self, success_re, error_re, clear=False, timeout=10):
+    """Blocks until a matching line is logged or a timeout occurs.
+
+    Args:
+      success_re: A compiled re to search each line for.
+      error_re: A compiled re which, if found, terminates the search for
+          |success_re|. If None is given, no error condition will be detected.
+      clear: If True the existing logcat output will be cleared, defaults to
+          false.
+      timeout: Timeout in seconds to wait for a log match.
+
+    Raises:
+      pexpect.TIMEOUT after |timeout| seconds without a match for |success_re|
+      or |error_re|.
+
+    Returns:
+      The re match object if |success_re| is matched first or None if |error_re|
+      is matched first.
+    """
+    logging.info('<<< Waiting for logcat:' + str(success_re.pattern))
+    t0 = time.time()
+    while True:
+      if not self._logcat:
+        self.StartMonitoringLogcat(clear)
+      try:
+        while True:
+          # Note this will block for up to the timeout _per log line_, so we
+          # need to calculate the overall timeout remaining since t0.
+          time_remaining = t0 + timeout - time.time()
+          if time_remaining < 0:
+            raise pexpect.TIMEOUT(self._logcat)
+          self._logcat.expect(PEXPECT_LINE_RE, timeout=time_remaining)
+          line = self._logcat.match.group(1)
+          if error_re:
+            error_match = error_re.search(line)
+            if error_match:
+              return None
+          success_match = success_re.search(line)
+          if success_match:
+            return success_match
+          logging.info('<<< Skipped Logcat Line:' + str(line))
+      except pexpect.TIMEOUT:
+        raise pexpect.TIMEOUT(
+            'Timeout (%ds) exceeded waiting for pattern "%s" (tip: use -vv '
+            'to debug)' %
+            (timeout, success_re.pattern))
+      except pexpect.EOF:
+        # It seems that sometimes logcat can end unexpectedly. This seems
+        # to happen during Chrome startup after a reboot followed by a cache
+        # clean. I don't understand why this happens, but this code deals with
+        # getting EOF in logcat.
+        logging.critical('Found EOF in adb logcat. Restarting...')
+        # Rerun spawn with original arguments. Note that self._logcat.args[0] is
+        # the path of adb, so we don't want it in the arguments.
+        self._logcat = pexpect.spawn(constants.GetAdbPath(),
+                                     self._logcat.args[1:],
+                                     timeout=self._logcat.timeout,
+                                     logfile=self._logcat.logfile)
+
+  def StartRecordingLogcat(self, clear=True, filters=None):
+    """Starts recording logcat output to eventually be saved as a string.
+
+    This call should come before some series of tests are run, with either
+    StopRecordingLogcat or SearchLogcatRecord following the tests.
+
+    Args:
+      clear: True if existing log output should be cleared.
+      filters: A list of logcat filters to be used.
+    """
+    if not filters:
+      filters = ['*:v']
+    if clear:
+      self._adb.SendCommand('logcat -c')
+    logcat_command = 'adb %s logcat -v threadtime %s' % (self._adb._target_arg,
+                                                         ' '.join(filters))
+    self._logcat_tmpoutfile = tempfile.NamedTemporaryFile(bufsize=0)
+    self.logcat_process = subprocess.Popen(logcat_command, shell=True,
+                                           stdout=self._logcat_tmpoutfile)
+
+  def GetCurrentRecordedLogcat(self):
+    """Return the current content of the logcat being recorded.
+       Call this after StartRecordingLogcat() and before StopRecordingLogcat().
+       This can be useful to perform timed polling/parsing.
+    Returns:
+       Current logcat output as a single string, or None if
+       StopRecordingLogcat() was already called.
+    """
+    if not self._logcat_tmpoutfile:
+      return None
+
+    with open(self._logcat_tmpoutfile.name) as f:
+      return f.read()
+
+  def StopRecordingLogcat(self):
+    """Stops an existing logcat recording subprocess and returns output.
+
+    Returns:
+      The logcat output as a string or an empty string if logcat was not
+      being recorded at the time.
+    """
+    if not self.logcat_process:
+      return ''
+    # Cannot evaluate the poll() result directly, as 0 is a possible value.
+    # Better to read the recorded logcat output before killing the process;
+    # otherwise the read may return incomplete output due to a broken pipe.
+    if self.logcat_process.poll() is None:
+      self.logcat_process.kill()
+    self.logcat_process.wait()
+    self.logcat_process = None
+    self._logcat_tmpoutfile.seek(0)
+    output = self._logcat_tmpoutfile.read()
+    self._logcat_tmpoutfile.close()
+    self._logcat_tmpoutfile = None
+    return output
+
+  @staticmethod
+  def SearchLogcatRecord(record, message, thread_id=None, proc_id=None,
+                         log_level=None, component=None):
+    """Searches the specified logcat output and returns results.
+
+    This method searches through the logcat output specified by record for a
+    certain message, narrowing results by matching them against any other
+    specified criteria.  It returns all matching lines as described below.
+
+    Args:
+      record: A string generated by Start/StopRecordingLogcat to search.
+      message: An output string to search for.
+      thread_id: The thread id that is the origin of the message.
+      proc_id: The process that is the origin of the message.
+      log_level: The log level of the message.
+      component: The name of the component that would create the message.
+
+    Returns:
+      A list of dictionaries representing matching entries, each containing keys
+      thread_id, proc_id, log_level, component, and message.
+    """
+    if thread_id:
+      thread_id = str(thread_id)
+    if proc_id:
+      proc_id = str(proc_id)
+    results = []
+    reg = re.compile('(\d+)\s+(\d+)\s+([A-Z])\s+([A-Za-z]+)\s*:(.*)$',
+                     re.MULTILINE)
+    log_list = reg.findall(record)
+    for (tid, pid, log_lev, comp, msg) in log_list:
+      if ((not thread_id or thread_id == tid) and
+          (not proc_id or proc_id == pid) and
+          (not log_level or log_level == log_lev) and
+          (not component or component == comp) and msg.find(message) > -1):
+        match = dict({'thread_id': tid, 'proc_id': pid,
+                      'log_level': log_lev, 'component': comp,
+                      'message': msg})
+        results.append(match)
+    return results
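+    # Illustrative use (an assumption, not part of the original change):
+    #   record = ac.StopRecordingLogcat()
+    #   hits = AndroidCommands.SearchLogcatRecord(
+    #       record, 'Displayed', component='ActivityManager')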
+
+  def ExtractPid(self, process_name):
+    """Extracts Process Ids for a given process name from Android Shell.
+
+    Args:
+      process_name: name of the process on the device.
+
+    Returns:
+      List of all the process ids (as strings) that match the given name.
+      If the name of a process exactly matches the given name, the pid of
+      that process will be inserted at the front of the pid list.
+    """
+    pids = []
+    for line in self.RunShellCommand('ps', log_result=False):
+      data = line.split()
+      try:
+        if process_name in data[-1]:  # name is in the last column
+          if process_name == data[-1]:
+            pids.insert(0, data[1])  # PID is in the second column
+          else:
+            pids.append(data[1])
+      except IndexError:
+        pass
+    return pids
+
+  def GetIoStats(self):
+    """Gets cumulative disk IO stats since boot (for all processes).
+
+    Returns:
+      Dict of {num_reads, num_writes, read_ms, write_ms} or None if there
+      was an error.
+    """
+    IoStats = collections.namedtuple(
+        'IoStats',
+        ['device',
+         'num_reads_issued',
+         'num_reads_merged',
+         'num_sectors_read',
+         'ms_spent_reading',
+         'num_writes_completed',
+         'num_writes_merged',
+         'num_sectors_written',
+         'ms_spent_writing',
+         'num_ios_in_progress',
+         'ms_spent_doing_io',
+         'ms_spent_doing_io_weighted',
+        ])
+
+    for line in self.GetFileContents('/proc/diskstats', log_result=False):
+      fields = line.split()
+      stats = IoStats._make([fields[2]] + [int(f) for f in fields[3:]])
+      if stats.device == 'mmcblk0':
+        return {
+            'num_reads': stats.num_reads_issued,
+            'num_writes': stats.num_writes_completed,
+            'read_ms': stats.ms_spent_reading,
+            'write_ms': stats.ms_spent_writing,
+        }
+    logging.warning('Could not find disk IO stats.')
+    return None
+
+  def GetMemoryUsageForPid(self, pid):
+    """Returns the memory usage for given pid.
+
+    Args:
+      pid: The pid number of the specific process running on device.
+
+    Returns:
+      Dict of {metric:usage_kb}, for the process which has specified pid.
+      The metric keys which may be included are: Size, Rss, Pss, Shared_Clean,
+      Shared_Dirty, Private_Clean, Private_Dirty, VmHWM.
+    """
+    showmap = self.RunShellCommand('showmap %d' % pid)
+    if not showmap or not showmap[-1].endswith('TOTAL'):
+      logging.warning('Invalid output for showmap %s', str(showmap))
+      return {}
+    items = showmap[-1].split()
+    if len(items) != 9:
+      logging.warning('Invalid TOTAL for showmap %s', str(items))
+      return {}
+    usage_dict = collections.defaultdict(int)
+    usage_dict.update({
+        'Size': int(items[0].strip()),
+        'Rss': int(items[1].strip()),
+        'Pss': int(items[2].strip()),
+        'Shared_Clean': int(items[3].strip()),
+        'Shared_Dirty': int(items[4].strip()),
+        'Private_Clean': int(items[5].strip()),
+        'Private_Dirty': int(items[6].strip()),
+    })
+    peak_value_kb = 0
+    for line in self.GetProtectedFileContents('/proc/%s/status' % pid):
+      if not line.startswith('VmHWM:'):  # Format: 'VmHWM: +[0-9]+ kB'
+        continue
+      peak_value_kb = int(line.split(':')[1].strip().split(' ')[0])
+      break
+    usage_dict['VmHWM'] = peak_value_kb
+    if not peak_value_kb:
+      logging.warning('Could not find memory peak value for pid ' + str(pid))
+
+    return usage_dict
+
+  def ProcessesUsingDevicePort(self, device_port):
+    """Lists processes using the specified device port on loopback interface.
+
+    Args:
+      device_port: Port on device we want to check.
+
+    Returns:
+      A list of (pid, process_name) tuples using the specified port.
+    """
+    tcp_results = self.RunShellCommand('cat /proc/net/tcp', log_result=False)
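+    # /proc/net/tcp shows the local address as <ip-hex>:<port-hex>; on the
+    # little-endian devices targeted here, 127.0.0.1 appears as 0100007F.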
+    tcp_address = '0100007F:%04X' % device_port
+    pids = []
+    for single_connect in tcp_results:
+      connect_results = single_connect.split()
+      # Column 1 is the TCP port, and Column 9 is the inode of the socket
+      if connect_results[1] == tcp_address:
+        socket_inode = connect_results[9]
+        socket_name = 'socket:[%s]' % socket_inode
+        lsof_results = self.RunShellCommand('lsof', log_result=False)
+        for single_process in lsof_results:
+          process_results = single_process.split()
+          # Ignore the line if it has less than nine columns in it, which may
+          # be the case when a process stops while lsof is executing.
+          if len(process_results) <= 8:
+            continue
+          # Column 0 is the executable name
+          # Column 1 is the pid
+          # Column 8 is the Inode in use
+          if process_results[8] == socket_name:
+            pids.append((int(process_results[1]), process_results[0]))
+        break
+    logging.info('PidsUsingDevicePort: %s', pids)
+    return pids
+
+  def FileExistsOnDevice(self, file_name):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      file_name: Full path of file to check.
+
+    Returns:
+      True if the file exists, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -e "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+
+      status = self._adb.SendShellCommand(
+          '\'ls "%s" >/dev/null 2>&1; echo $?\'' % (file_name))
+      return int(status) == 0
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  def IsFileWritableOnDevice(self, file_name):
+    """Checks whether the given file (or directory) is writable on the device.
+
+    Args:
+      file_name: Full path of file/directory to check.
+
+    Returns:
+      True if writable, False otherwise.
+    """
+    assert '"' not in file_name, 'file_name cannot contain double quotes'
+    try:
+      status = self._adb.SendShellCommand(
+          '\'test -w "%s"; echo $?\'' % (file_name))
+      if 'test: not found' not in status:
+        return int(status) == 0
+      raise errors.AbortError('"test" binary not found. OS too old.')
+
+    except ValueError:
+      if IsDeviceAttached(self._device):
+        raise errors.DeviceUnresponsiveError('Device may be offline.')
+
+      return False
+
+  @staticmethod
+  def GetTimestamp():
+    return time.strftime('%Y-%m-%d-%H%M%S', time.localtime())
+
+  @staticmethod
+  def EnsureHostDirectory(host_file):
+    host_dir = os.path.dirname(os.path.abspath(host_file))
+    if not os.path.exists(host_dir):
+      os.makedirs(host_dir)
+
+  def TakeScreenshot(self, host_file=None):
+    """Saves a screenshot image to |host_file| on the host.
+
+    Args:
+      host_file: Absolute path to the image file to store on the host or None to
+                 use an autogenerated file name.
+
+    Returns:
+      Resulting host file name of the screenshot.
+    """
+    host_file = os.path.abspath(host_file or
+                                'screenshot-%s.png' % self.GetTimestamp())
+    self.EnsureHostDirectory(host_file)
+    device_file = '%s/screenshot.png' % self.GetExternalStorage()
+    self.RunShellCommand(
+        '/system/bin/screencap -p %s' % device_file)
+    self.PullFileFromDevice(device_file, host_file)
+    self.RunShellCommand('rm -f "%s"' % device_file)
+    return host_file
+
+  def PullFileFromDevice(self, device_file, host_file):
+    """Download |device_file| on the device from to |host_file| on the host.
+
+    Args:
+      device_file: Absolute path to the file to retrieve from the device.
+      host_file: Absolute path to the file to store on the host.
+    """
+    if not self._adb.Pull(device_file, host_file):
+      raise device_errors.AdbCommandFailedError(
+          ['pull', device_file, host_file], 'Failed to pull file from device.')
+    assert os.path.exists(host_file)
+
+  def SetUtilWrapper(self, util_wrapper):
+    """Sets a wrapper prefix to be used when running a locally-built
+    binary on the device (ex.: md5sum_bin).
+    """
+    self._util_wrapper = util_wrapper
+
+  def RunUIAutomatorTest(self, test, test_package, timeout):
+    """Runs a single uiautomator test.
+
+    Args:
+      test: Test class/method.
+      test_package: Name of the test jar.
+      timeout: Timeout time in seconds.
+
+    Returns:
+      An instance of am_instrument_parser.TestResult object.
+    """
+    cmd = 'uiautomator runtest %s -e class %s' % (test_package, test)
+    self._LogShell(cmd)
+    output = self._adb.SendShellCommand(cmd, timeout_time=timeout)
+    # uiautomator doesn't fully conform to the instrumentation test runner
+    # convention and doesn't terminate with INSTRUMENTATION_CODE.
+    # Just assume the first result is valid.
+    (test_results, _) = am_instrument_parser.ParseAmInstrumentOutput(output)
+    if not test_results:
+      raise errors.InstrumentationError(
+          'no test results... device setup correctly?')
+    return test_results[0]
+
+  def DismissCrashDialogIfNeeded(self):
+    """Dismiss the error/ANR dialog if present.
+
+    Returns: Name of the crashed package if a dialog is focused,
+             None otherwise.
+    """
+    re_focus = re.compile(
+        r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+    def _FindFocusedWindow():
+      match = None
+      for line in self.RunShellCommand('dumpsys window windows'):
+        match = re.match(re_focus, line)
+        if match:
+          break
+      return match
+
+    match = _FindFocusedWindow()
+    if not match:
+      return
+    package = match.group(2)
+    logging.warning('Trying to dismiss %s dialog for %s' % match.groups())
+    self.SendKeyEvent(KEYCODE_DPAD_RIGHT)
+    self.SendKeyEvent(KEYCODE_DPAD_RIGHT)
+    self.SendKeyEvent(KEYCODE_ENTER)
+    match = _FindFocusedWindow()
+    if match:
+      logging.error('Still showing a %s dialog for %s' % match.groups())
+    return package
+
+  def EfficientDeviceDirectoryCopy(self, source, dest):
+    """ Copy a directory efficiently on the device
+
+    Uses a shell script running on the target to copy new and changed files the
+    source directory to the destination directory and remove added files. This
+    is in some cases much faster than cp -r.
+
+    Args:
+      source: absolute path of source directory
+      dest: absolute path of destination directory
+    """
+    logging.info('In EfficientDeviceDirectoryCopy %s %s', source, dest)
+    with DeviceTempFile(self, suffix=".sh") as temp_script_file:
+      host_script_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      'build',
+                                      'android',
+                                      'pylib',
+                                      'efficient_android_directory_copy.sh')
+      self._adb.Push(host_script_path, temp_script_file.name)
+      out = self.RunShellCommand(
+          'sh %s %s %s' % (temp_script_file.name, source, dest),
+          timeout_time=120)
+      if self._device:
+        device_repr = self._device[-4:]
+      else:
+        device_repr = '????'
+      for line in out:
+        logging.info('[%s]> %s', device_repr, line)
+
+  def _GetControlUsbChargingCommand(self):
+    if self._control_usb_charging_command['cached']:
+      return self._control_usb_charging_command['command']
+    self._control_usb_charging_command['cached'] = True
+    if not self.IsRootEnabled():
+      return None
+    for command in CONTROL_USB_CHARGING_COMMANDS:
+      # Assert command is valid.
+      assert 'disable_command' in command
+      assert 'enable_command' in command
+      assert 'witness_file' in command
+      witness_file = command['witness_file']
+      if self.FileExistsOnDevice(witness_file):
+        self._control_usb_charging_command['command'] = command
+        return command
+    return None
+
+  def CanControlUsbCharging(self):
+    return self._GetControlUsbChargingCommand() is not None
+
+  def DisableUsbCharging(self, timeout=10):
+    command = self._GetControlUsbChargingCommand()
+    if not command:
+      raise Exception('Unable to act on usb charging.')
+    disable_command = command['disable_command']
+    t0 = time.time()
+    # Do not loop directly on self.IsDeviceCharging to cut the number of calls
+    # to the device.
+    while True:
+      if t0 + timeout - time.time() < 0:
+        raise pexpect.TIMEOUT('Unable to disable USB charging in time: %s' % (
+            self.GetBatteryInfo()))
+      self.RunShellCommand(disable_command)
+      if not self.IsDeviceCharging():
+        break
+
+  def EnableUsbCharging(self, timeout=10):
+    command = self._GetControlUsbChargingCommand()
+    if not command:
+      raise Exception('Unable to act on usb charging.')
+    enable_command = command['enable_command']
+    t0 = time.time()
+    # Do not loop directly on self.IsDeviceCharging to cut the number of calls
+    # to the device.
+    while True:
+      if t0 + timeout - time.time() < 0:
+        raise pexpect.TIMEOUT('Unable to enable USB charging in time.')
+      self.RunShellCommand(enable_command)
+      if self.IsDeviceCharging():
+        break
+
+  def IsDeviceCharging(self):
+    for line in self.RunShellCommand('dumpsys battery'):
+      if 'powered: ' in line:
+        if line.split('powered: ')[1] == 'true':
+          return True
+
+
+class NewLineNormalizer(object):
+  """A file-like object to normalize EOLs to '\n'.
+
+  Pexpect runs adb within a pseudo-tty device (see
+  http://www.noah.org/wiki/pexpect), so any '\n' printed by adb is written
+  as '\r\n' to the logfile. Since adb already uses '\r\n' to terminate
+  lines, the log ends up having '\r\r\n' at the end of each line. This
+  filter replaces the above with a single '\n' in the data stream.
+  """
+  def __init__(self, output):
+    self._output = output
+
+  def write(self, data):
+    data = data.replace('\r\r\n', '\n')
+    self._output.write(data)
+
+  def flush(self):
+    self._output.flush()
diff --git a/build/android/pylib/android_commands_unittest.py b/build/android/pylib/android_commands_unittest.py
new file mode 100644
index 0000000..21c34f9
--- /dev/null
+++ b/build/android/pylib/android_commands_unittest.py
@@ -0,0 +1,191 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import shutil
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir))
+
+from pylib import android_commands
+
+# pylint: disable=W0212,W0702
+
+class TestDeviceTempFile(unittest.TestCase):
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self.ac = android_commands.AndroidCommands(device=devices[0])
+
+  def testTempFileDeleted(self):
+    """Tests that DeviceTempFile deletes files when closed."""
+    temp_file = android_commands.DeviceTempFile(self.ac)
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+    self.ac.SetFileContents(temp_file.name, "contents")
+    self.assertTrue(self.ac.FileExistsOnDevice(temp_file.name))
+    temp_file.close()
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+
+    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
+      self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+      self.ac.SetFileContents(with_temp_file.name, "contents")
+      self.assertTrue(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+  def testTempFileNotWritten(self):
+    """Tests that device temp files work successfully even if not written to."""
+    temp_file = android_commands.DeviceTempFile(self.ac)
+    temp_file.close()
+    self.assertFalse(self.ac.FileExistsOnDevice(temp_file.name))
+
+    with android_commands.DeviceTempFile(self.ac) as with_temp_file:
+      pass
+    self.assertFalse(self.ac.FileExistsOnDevice(with_temp_file.name))
+
+  def testNaming(self):
+    """Tests that returned filenames are as requested."""
+    temp_file = android_commands.DeviceTempFile(self.ac, prefix="cat")
+    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
+
+    temp_file = android_commands.DeviceTempFile(self.ac, suffix="dog")
+    self.assertTrue(temp_file.name.endswith("dog"))
+
+    temp_file = android_commands.DeviceTempFile(
+        self.ac, prefix="cat", suffix="dog")
+    self.assertTrue(os.path.basename(temp_file.name).startswith("cat"))
+    self.assertTrue(temp_file.name.endswith("dog"))
+
+
+class TestGetFilesChanged(unittest.TestCase):
+
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = android_commands.GetAttachedDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self.ac = android_commands.AndroidCommands(device=devices[0])
+    self.host_data_dir = os.path.realpath('test_push_data')
+    self.device_data_dir = '%s/test_push_data' % (
+        self.ac.RunShellCommand('realpath %s' %
+            self.ac.GetExternalStorage())[0])
+
+    os.mkdir(self.host_data_dir)
+    for i in xrange(1, 10):
+      with open('%s/%d.txt' % (self.host_data_dir, i), 'w') as f:
+        f.write('file #%d' % i)
+
+    self.ac.RunShellCommand('mkdir %s' % self.device_data_dir)
+
+  def testGetFilesChangedAllNeeded(self):
+    """ Tests GetFilesChanged when none of the files are on the device.
+    """
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 10)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedSomeIdentical(self):
+    """ Tests GetFilesChanged when some of the files are on the device.
+    """
+    for i in xrange(1, 5):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(5, 10)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedAllIdentical(self):
+    """ Tests GetFilesChanged when all of the files are on the device.
+    """
+    for i in xrange(1, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    expected = []
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedRename(self):
+    """ Tests GetFilesChanged when one of the files has been renamed.
+
+        This tests both with and without the ignore_filenames flag set.
+    """
+    for i in xrange(5, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    os.rename('%s/5.txt' % (self.host_data_dir),
+              '%s/99.txt' % (self.host_data_dir))
+
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 5)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
+                                     ignore_filenames=True)
+    self.assertSequenceEqual(expected, actual)
+
+    expected.append(('%s/99.txt' % self.host_data_dir,
+                     '%s/99.txt' % self.device_data_dir))
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedCopy(self):
+    """ Tests GetFilesChanged when one of the files has been copied.
+
+        This tests both with and without the ignore_filenames flag set.
+    """
+    for i in xrange(5, 10):
+      self.ac._adb.Push('%s/%d.txt' % (self.host_data_dir, i),
+                        self.device_data_dir)
+    shutil.copy('%s/5.txt' % self.host_data_dir,
+                '%s/99.txt' % self.host_data_dir)
+
+    expected = [('%s/%d.txt' % (self.host_data_dir, i),
+                 '%s/%d.txt' % (self.device_data_dir, i))
+                for i in xrange(1, 5)]
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir,
+                                     ignore_filenames=True)
+    self.assertSequenceEqual(expected, actual)
+
+    expected.append(('%s/99.txt' % self.host_data_dir,
+                     '%s/99.txt' % self.device_data_dir))
+    actual = self.ac.GetFilesChanged(self.host_data_dir, self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedIndividual(self):
+    """ Tests GetFilesChanged when provided one file.
+    """
+    expected = [('%s/1.txt' % self.host_data_dir,
+                 '%s/1.txt' % self.device_data_dir)]
+    actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
+                                     '%s/1.txt' % self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def testGetFilesChangedFileToDirectory(self):
+    """ Tests GetFilesChanged when provided a file from the host and a
+        directory on the device.
+    """
+    expected = [('%s/1.txt' % self.host_data_dir,
+                 '%s' % self.device_data_dir)]
+    actual = self.ac.GetFilesChanged('%s/1.txt' % self.host_data_dir,
+                                     '%s' % self.device_data_dir)
+    self.assertSequenceEqual(expected, actual)
+
+  def tearDown(self):
+    try:
+      shutil.rmtree(self.host_data_dir)
+      self.ac.RunShellCommand('rm -rf %s' % self.device_data_dir)
+    except:
+      pass
+
+if __name__ == '__main__':
+  unittest.main()
+
diff --git a/build/android/pylib/base/__init__.py b/build/android/pylib/base/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/base/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/base/base_setup.py b/build/android/pylib/base/base_setup.py
new file mode 100644
index 0000000..a416380
--- /dev/null
+++ b/build/android/pylib/base/base_setup.py
@@ -0,0 +1,63 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base script for doing test setup."""
+
+import logging
+import os
+
+from pylib import constants
+from pylib import valgrind_tools
+from pylib.utils import isolator
+
+def GenerateDepsDirUsingIsolate(suite_name, isolate_file_path,
+                                isolate_file_paths, deps_exclusion_list):
+  """Generate the dependency dir for the test suite using isolate.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+    isolate_file_path: .isolate file path to use. If there is a default .isolate
+                       file path for the suite_name, this will override it.
+    isolate_file_paths: Dictionary with the default .isolate file paths for
+                        the test suites.
+    deps_exclusion_list: A list of files that are listed as dependencies in the
+                         .isolate files but should not be pushed to the device.
+  Returns:
+    The Isolator instance used to remap the dependencies, or None.
+  """
+  if isolate_file_path:
+    if os.path.isabs(isolate_file_path):
+      isolate_abs_path = isolate_file_path
+    else:
+      isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT,
+                                      isolate_file_path)
+  else:
+    isolate_rel_path = isolate_file_paths.get(suite_name)
+    if not isolate_rel_path:
+      logging.info('Did not find an isolate file for the test suite.')
+      return
+    isolate_abs_path = os.path.join(constants.DIR_SOURCE_ROOT, isolate_rel_path)
+
+  isolated_abs_path = os.path.join(
+      constants.GetOutDirectory(), '%s.isolated' % suite_name)
+  assert os.path.exists(isolate_abs_path), 'Cannot find %s' % isolate_abs_path
+
+  i = isolator.Isolator(constants.ISOLATE_DEPS_DIR)
+  i.Clear()
+  i.Remap(isolate_abs_path, isolated_abs_path)
+  # We're relying on the fact that timestamps are preserved
+  # by the remap command (hardlinked). Otherwise, all the data
+  # will be pushed to the device once we move to using time diff
+  # instead of md5sum. Perform a sanity check here.
+  i.VerifyHardlinks()
+  i.PurgeExcluded(deps_exclusion_list)
+  i.MoveOutputDeps()
+  return i
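+# Illustrative call (the suite name and .isolate path below are assumptions,
+# not part of the original change):
+#   isolator_obj = GenerateDepsDirUsingIsolate(
+#       'base_unittests', None,
+#       {'base_unittests': 'base/base_unittests.isolate'}, [])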
+
+
+def PushDataDeps(device, device_dir, test_options):
+  valgrind_tools.PushFilesForTool(test_options.tool, device)
+  if os.path.exists(constants.ISOLATE_DEPS_DIR):
+    device.PushChangedFiles([(constants.ISOLATE_DEPS_DIR, device_dir)],
+                            delete_device_stale=test_options.delete_stale_data)
diff --git a/build/android/pylib/base/base_test_result.py b/build/android/pylib/base/base_test_result.py
new file mode 100644
index 0000000..58200f6
--- /dev/null
+++ b/build/android/pylib/base/base_test_result.py
@@ -0,0 +1,216 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing base test results classes."""
+
+class ResultType(object):
+  """Class enumerating test types."""
+  PASS = 'PASS'
+  SKIP = 'SKIP'
+  FAIL = 'FAIL'
+  CRASH = 'CRASH'
+  TIMEOUT = 'TIMEOUT'
+  UNKNOWN = 'UNKNOWN'
+
+  @staticmethod
+  def GetTypes():
+    """Get a list of all test types."""
+    return [ResultType.PASS, ResultType.SKIP, ResultType.FAIL,
+            ResultType.CRASH, ResultType.TIMEOUT, ResultType.UNKNOWN]
+
+
+class BaseTestResult(object):
+  """Base class for a single test result."""
+
+  def __init__(self, name, test_type, duration=0, log=''):
+    """Construct a BaseTestResult.
+
+    Args:
+      name: Name of the test which defines uniqueness.
+      test_type: Type of the test result as defined in ResultType.
+      duration: Time it took for the test to run in milliseconds.
+      log: An optional string listing any errors.
+    """
+    assert name
+    assert test_type in ResultType.GetTypes()
+    self._name = name
+    self._test_type = test_type
+    self._duration = duration
+    self._log = log
+
+  def __str__(self):
+    return self._name
+
+  def __repr__(self):
+    return self._name
+
+  def __cmp__(self, other):
+    # pylint: disable=W0212
+    return cmp(self._name, other._name)
+
+  def __hash__(self):
+    return hash(self._name)
+
+  def SetName(self, name):
+    """Set the test name.
+
+    Because we're putting this into a set, this should only be used if moving
+    this test result into another set.
+    """
+    self._name = name
+
+  def GetName(self):
+    """Get the test name."""
+    return self._name
+
+  def SetType(self, test_type):
+    """Set the test result type."""
+    assert test_type in ResultType.GetTypes()
+    self._test_type = test_type
+
+  def GetType(self):
+    """Get the test result type."""
+    return self._test_type
+
+  def GetDuration(self):
+    """Get the test duration."""
+    return self._duration
+
+  def SetLog(self, log):
+    """Set the test log."""
+    self._log = log
+
+  def GetLog(self):
+    """Get the test log."""
+    return self._log
+
+
+class TestRunResults(object):
+  """Set of results for a test run."""
+
+  def __init__(self):
+    self._results = set()
+
+  def GetLogs(self):
+    """Get the string representation of all test logs."""
+    s = []
+    for test_type in ResultType.GetTypes():
+      if test_type != ResultType.PASS:
+        for t in sorted(self._GetType(test_type)):
+          log = t.GetLog()
+          if log:
+            s.append('[%s] %s:' % (test_type, t))
+            s.append(log)
+    return '\n'.join(s)
+
+  def GetGtestForm(self):
+    """Get the gtest string representation of this object."""
+    s = []
+    plural = lambda n, s, p: '%d %s' % (n, p if n != 1 else s)
+    tests = lambda n: plural(n, 'test', 'tests')
+
+    s.append('[==========] %s ran.' % (tests(len(self.GetAll()))))
+    s.append('[  PASSED  ] %s.' % (tests(len(self.GetPass()))))
+
+    skipped = self.GetSkip()
+    if skipped:
+      s.append('[  SKIPPED ] Skipped %s, listed below:' % tests(len(skipped)))
+      for t in sorted(skipped):
+        s.append('[  SKIPPED ] %s' % str(t))
+
+    all_failures = self.GetFail().union(self.GetCrash(), self.GetTimeout(),
+        self.GetUnknown())
+    if all_failures:
+      s.append('[  FAILED  ] %s, listed below:' % tests(len(all_failures)))
+      for t in sorted(self.GetFail()):
+        s.append('[  FAILED  ] %s' % str(t))
+      for t in sorted(self.GetCrash()):
+        s.append('[  FAILED  ] %s (CRASHED)' % str(t))
+      for t in sorted(self.GetTimeout()):
+        s.append('[  FAILED  ] %s (TIMEOUT)' % str(t))
+      for t in sorted(self.GetUnknown()):
+        s.append('[  FAILED  ] %s (UNKNOWN)' % str(t))
+      s.append('')
+      s.append(plural(len(all_failures), 'FAILED TEST', 'FAILED TESTS'))
+    return '\n'.join(s)
+
+  def GetShortForm(self):
+    """Get the short string representation of this object."""
+    s = []
+    s.append('ALL: %d' % len(self._results))
+    for test_type in ResultType.GetTypes():
+      s.append('%s: %d' % (test_type, len(self._GetType(test_type))))
+    return ''.join([x.ljust(15) for x in s])
+
+  def __str__(self):
+    return self.GetLongForm()
+
+  def AddResult(self, result):
+    """Add |result| to the set.
+
+    Args:
+      result: An instance of BaseTestResult.
+    """
+    assert isinstance(result, BaseTestResult)
+    self._results.add(result)
+
+  def AddResults(self, results):
+    """Add |results| to the set.
+
+    Args:
+      results: An iterable of BaseTestResult objects.
+    """
+    for t in results:
+      self.AddResult(t)
+
+  def AddTestRunResults(self, results):
+    """Add the set of test results from |results|.
+
+    Args:
+      results: An instance of TestRunResults.
+    """
+    assert isinstance(results, TestRunResults)
+    # pylint: disable=W0212
+    self._results.update(results._results)
+
+  def GetAll(self):
+    """Get the set of all test results."""
+    return self._results.copy()
+
+  def _GetType(self, test_type):
+    """Get the set of test results with the given test type."""
+    return set(t for t in self._results if t.GetType() == test_type)
+
+  def GetPass(self):
+    """Get the set of all passed test results."""
+    return self._GetType(ResultType.PASS)
+
+  def GetSkip(self):
+    """Get the set of all skipped test results."""
+    return self._GetType(ResultType.SKIP)
+
+  def GetFail(self):
+    """Get the set of all failed test results."""
+    return self._GetType(ResultType.FAIL)
+
+  def GetCrash(self):
+    """Get the set of all crashed test results."""
+    return self._GetType(ResultType.CRASH)
+
+  def GetTimeout(self):
+    """Get the set of all timed out test results."""
+    return self._GetType(ResultType.TIMEOUT)
+
+  def GetUnknown(self):
+    """Get the set of all unknown test results."""
+    return self._GetType(ResultType.UNKNOWN)
+
+  def GetNotPass(self):
+    """Get the set of all non-passed test results."""
+    return self.GetAll() - self.GetPass()
+
+  def DidRunPass(self):
+    """Return whether the test run was successful."""
+    return not self.GetNotPass() - self.GetSkip()
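+# Illustrative use of the classes above (an assumption, not part of the
+# original change):
+#   run = TestRunResults()
+#   run.AddResult(BaseTestResult('MyTest.testFoo', ResultType.PASS))
+#   run.AddResult(BaseTestResult('MyTest.testBar', ResultType.FAIL,
+#                                log='assertion failed'))
+#   print run.GetGtestForm()  # DidRunPass() would be False here.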
+
diff --git a/build/android/pylib/base/base_test_result_unittest.py b/build/android/pylib/base/base_test_result_unittest.py
new file mode 100644
index 0000000..6f0cba7
--- /dev/null
+++ b/build/android/pylib/base/base_test_result_unittest.py
@@ -0,0 +1,82 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for TestRunResults."""
+
+import unittest
+
+from pylib.base.base_test_result import BaseTestResult
+from pylib.base.base_test_result import TestRunResults
+from pylib.base.base_test_result import ResultType
+
+
+class TestTestRunResults(unittest.TestCase):
+  def setUp(self):
+    self.p1 = BaseTestResult('p1', ResultType.PASS, log='pass1')
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    self.p2 = BaseTestResult('p2', ResultType.PASS)
+    self.f1 = BaseTestResult('f1', ResultType.FAIL, log='failure1')
+    self.c1 = BaseTestResult('c1', ResultType.CRASH, log='crash1')
+    self.u1 = BaseTestResult('u1', ResultType.UNKNOWN)
+    self.tr = TestRunResults()
+    self.tr.AddResult(self.p1)
+    self.tr.AddResult(other_p1)
+    self.tr.AddResult(self.p2)
+    self.tr.AddResults(set([self.f1, self.c1, self.u1]))
+
+  def testGetAll(self):
+    self.assertFalse(
+        self.tr.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1]))
+
+  def testGetPass(self):
+    self.assertFalse(self.tr.GetPass().symmetric_difference(
+        [self.p1, self.p2]))
+
+  def testGetNotPass(self):
+    self.assertFalse(self.tr.GetNotPass().symmetric_difference(
+        [self.f1, self.c1, self.u1]))
+
+  def testGetAddTestRunResults(self):
+    tr2 = TestRunResults()
+    other_p1 = BaseTestResult('p1', ResultType.PASS)
+    f2 = BaseTestResult('f2', ResultType.FAIL)
+    tr2.AddResult(other_p1)
+    tr2.AddResult(f2)
+    tr2.AddTestRunResults(self.tr)
+    self.assertFalse(
+        tr2.GetAll().symmetric_difference(
+            [self.p1, self.p2, self.f1, self.c1, self.u1, f2]))
+
+  def testGetLogs(self):
+    log_print = ('[FAIL] f1:\n'
+                 'failure1\n'
+                 '[CRASH] c1:\n'
+                 'crash1')
+    self.assertEqual(self.tr.GetLogs(), log_print)
+
+  def testGetShortForm(self):
+    short_print = ('ALL: 5         PASS: 2        FAIL: 1        '
+                   'CRASH: 1       TIMEOUT: 0     UNKNOWN: 1     ')
+    self.assertEqual(self.tr.GetShortForm(), short_print)
+
+  def testGetGtestForm(self):
+    gtest_print = ('[==========] 5 tests ran.\n'
+                   '[  PASSED  ] 2 tests.\n'
+                   '[  FAILED  ] 3 tests, listed below:\n'
+                   '[  FAILED  ] f1\n'
+                   '[  FAILED  ] c1 (CRASHED)\n'
+                   '[  FAILED  ] u1 (UNKNOWN)\n'
+                   '\n'
+                   '3 FAILED TESTS')
+    self.assertEqual(gtest_print, self.tr.GetGtestForm())
+
+  def testRunPassed(self):
+    self.assertFalse(self.tr.DidRunPass())
+    tr2 = TestRunResults()
+    self.assertTrue(tr2.DidRunPass())
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/base/base_test_runner.py b/build/android/pylib/base/base_test_runner.py
new file mode 100644
index 0000000..2a7fdd3
--- /dev/null
+++ b/build/android/pylib/base/base_test_runner.py
@@ -0,0 +1,138 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for running tests on a single device."""
+
+# TODO(jbudorick) Deprecate and remove this class and all subclasses after
+# any relevant parts have been ported to the new environment + test instance
+# model.
+
+import logging
+
+from pylib import ports
+from pylib.device import device_utils
+from pylib.forwarder import Forwarder
+from pylib.valgrind_tools import CreateTool
+# TODO(frankf): Move this to pylib/utils
+import lighttpd_server
+
+
+# A file on device to store ports of net test server. The format of the file is
+# test-spawner-server-port:test-server-port
+NET_TEST_SERVER_PORT_INFO_FILE = 'net-test-server-ports'
+
+
+class BaseTestRunner(object):
+  """Base class for running tests on a single device."""
+
+  def __init__(self, device, tool):
+    """
+      Args:
+        device: An instance of DeviceUtils that the tests will run on.
+        tool: Name of the Valgrind tool.
+    """
+    assert isinstance(device, device_utils.DeviceUtils)
+    self.device = device
+    self.device_serial = self.device.adb.GetDeviceSerial()
+    self.tool = CreateTool(tool, self.device)
+    self._http_server = None
+    self._forwarder_device_port = 8000
+    self.forwarder_base_url = ('http://localhost:%d' %
+        self._forwarder_device_port)
+    # We will allocate port for test server spawner when calling method
+    # LaunchChromeTestServerSpawner and allocate port for test server when
+    # starting it in TestServerThread.
+    self.test_server_spawner_port = 0
+    self.test_server_port = 0
+
+  def _PushTestServerPortInfoToDevice(self):
+    """Pushes the latest port information to device."""
+    self.device.WriteFile(
+        self.device.GetExternalStoragePath() + '/' +
+            NET_TEST_SERVER_PORT_INFO_FILE,
+        '%d:%d' % (self.test_server_spawner_port, self.test_server_port))
+
+  def RunTest(self, test):
+    """Runs a test. Needs to be overridden.
+
+    Args:
+      test: A test to run.
+
+    Returns:
+      Tuple containing:
+        (base_test_result.TestRunResults, tests to rerun or None)
+    """
+    raise NotImplementedError
+
+  def InstallTestPackage(self):
+    """Installs the test package once before all tests are run."""
+    pass
+
+  def SetUp(self):
+    """Run once before all tests are run."""
+    self.InstallTestPackage()
+
+  def TearDown(self):
+    """Run once after all tests are run."""
+    self.ShutdownHelperToolsForTestSuite()
+
+  def LaunchTestHttpServer(self, document_root, port=None,
+                           extra_config_contents=None):
+    """Launches an HTTP server to serve HTTP tests.
+
+    Args:
+      document_root: Document root of the HTTP server.
+      port: Port on which we want the http server to bind.
+      extra_config_contents: Extra config contents for the HTTP server.
+    """
+    self._http_server = lighttpd_server.LighttpdServer(
+        document_root, port=port, extra_config_contents=extra_config_contents)
+    if self._http_server.StartupHttpServer():
+      logging.info('http server started: http://localhost:%s',
+                   self._http_server.port)
+    else:
+      logging.critical('Failed to start http server')
+    self._ForwardPortsForHttpServer()
+    return (self._forwarder_device_port, self._http_server.port)
+
+  def _ForwardPorts(self, port_pairs):
+    """Forwards a port."""
+    Forwarder.Map(port_pairs, self.device, self.tool)
+
+  def _UnmapPorts(self, port_pairs):
+    """Unmap previously forwarded ports."""
+    for (device_port, _) in port_pairs:
+      Forwarder.UnmapDevicePort(device_port, self.device)
+
+  # Deprecated: use _ForwardPorts instead.
+  def StartForwarder(self, port_pairs):
+    """Starts TCP traffic forwarding for the given |port_pairs|.
+
+    Args:
+      port_pairs: A list of (device_port, local_port) tuples to forward.
+    """
+    self._ForwardPorts(port_pairs)
+
+  def _ForwardPortsForHttpServer(self):
+    """Starts a forwarder for the HTTP server.
+
+    The forwarder forwards HTTP requests and responses between host and device.
+    """
+    self._ForwardPorts([(self._forwarder_device_port, self._http_server.port)])
+
+  def _RestartHttpServerForwarderIfNecessary(self):
+    """Restarts the forwarder if it's not open."""
+    # Check whether the http server's device port is in use. If it is not,
+    # set up the forwarding again.
+    # TODO(dtrainor): This is not always reliable because sometimes the port
+    # will be left open even after the forwarder has been killed.
+    if not ports.IsDevicePortUsed(self.device, self._forwarder_device_port):
+      self._ForwardPortsForHttpServer()
+
+  def ShutdownHelperToolsForTestSuite(self):
+    """Shuts down the server and the forwarder."""
+    if self._http_server:
+      self._UnmapPorts([(self._forwarder_device_port, self._http_server.port)])
+      self._http_server.ShutdownHttpServer()
+
diff --git a/build/android/pylib/base/environment.py b/build/android/pylib/base/environment.py
new file mode 100644
index 0000000..3f49f41
--- /dev/null
+++ b/build/android/pylib/base/environment.py
@@ -0,0 +1,34 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class Environment(object):
+  """An environment in which tests can be run.
+
+  This is expected to handle all logic that is applicable to an entire specific
+  environment but is independent of the test type.
+
+  Examples include:
+    - The local device environment, for running tests on devices attached to
+      the local machine.
+    - The local machine environment, for running tests directly on the local
+      machine.
+  """
+
+  def __init__(self):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
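
A minimal sketch, assuming a hypothetical LoggingEnvironment subclass, of how the __enter__/__exit__ hooks tie SetUp() and TearDown() to a with block:

import logging

from pylib.base import environment


class LoggingEnvironment(environment.Environment):
  """Hypothetical environment that only logs its lifecycle."""

  def SetUp(self):
    logging.info('environment ready')

  def TearDown(self):
    logging.info('environment torn down')


with LoggingEnvironment() as env:
  logging.info('tests would run here using %s', env)
# TearDown() has run by this point, even if the block raised.
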
diff --git a/build/android/pylib/base/environment_factory.py b/build/android/pylib/base/environment_factory.py
new file mode 100644
index 0000000..31b4952
--- /dev/null
+++ b/build/android/pylib/base/environment_factory.py
@@ -0,0 +1,18 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.local.device import local_device_environment
+from pylib.remote.device import remote_device_environment
+
+def CreateEnvironment(args, error_func):
+
+  if args.environment == 'local':
+    if args.command not in constants.LOCAL_MACHINE_TESTS:
+      return local_device_environment.LocalDeviceEnvironment(args, error_func)
+    # TODO(jbudorick) Add local machine environment.
+  if args.environment == 'remote_device':
+    return remote_device_environment.RemoteDeviceEnvironment(args,
+                                                             error_func)
+  error_func('Unable to create %s environment.' % args.environment)
diff --git a/build/android/pylib/base/test_collection.py b/build/android/pylib/base/test_collection.py
new file mode 100644
index 0000000..de51027
--- /dev/null
+++ b/build/android/pylib/base/test_collection.py
@@ -0,0 +1,80 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import threading
+
+class TestCollection(object):
+  """A threadsafe collection of tests.
+
+  Args:
+    tests: List of tests to put in the collection.
+  """
+
+  def __init__(self, tests=None):
+    if not tests:
+      tests = []
+    self._lock = threading.Lock()
+    self._tests = []
+    self._tests_in_progress = 0
+    # Used to signal that an item is available or all items have been handled.
+    self._item_available_or_all_done = threading.Event()
+    for t in tests:
+      self.add(t)
+
+  def _pop(self):
+    """Pop a test from the collection.
+
+    Waits until a test is available or all tests have been handled.
+
+    Returns:
+      A test or None if all tests have been handled.
+    """
+    while True:
+      # Wait for a test to be available or all tests to have been handled.
+      self._item_available_or_all_done.wait()
+      with self._lock:
+        # Check which of the two conditions triggered the signal.
+        if self._tests_in_progress == 0:
+          return None
+        try:
+          return self._tests.pop(0)
+        except IndexError:
+          # Another thread beat us to the available test, wait again.
+          self._item_available_or_all_done.clear()
+
+  def add(self, test):
+    """Add a test to the collection.
+
+    Args:
+      test: A test to add.
+    """
+    with self._lock:
+      self._tests.append(test)
+      self._item_available_or_all_done.set()
+      self._tests_in_progress += 1
+
+  def test_completed(self):
+    """Indicate that a test has been fully handled."""
+    with self._lock:
+      self._tests_in_progress -= 1
+      if self._tests_in_progress == 0:
+        # All tests have been handled, signal all waiting threads.
+        self._item_available_or_all_done.set()
+
+  def __iter__(self):
+    """Iterate through tests in the collection until all have been handled."""
+    while True:
+      r = self._pop()
+      if r is None:
+        break
+      yield r
+
+  def __len__(self):
+    """Return the number of tests currently in the collection."""
+    return len(self._tests)
+
+  def test_names(self):
+    """Return a list of the names of the tests currently in the collection."""
+    with self._lock:
+      return list(t.test for t in self._tests)
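
A short sketch of how several worker threads can drain one shared collection; the worker body is hypothetical. Every popped test must be reported back via test_completed(), otherwise threads blocked in _pop() are never released.

import threading

from pylib.base import test_collection

tests = test_collection.TestCollection(['test_a', 'test_b', 'test_c'])

def Worker():
  for t in tests:  # Blocks until a test is available or all are handled.
    try:
      pass  # A real worker would run |t| here.
    finally:
      tests.test_completed()

workers = [threading.Thread(target=Worker) for _ in xrange(2)]
for w in workers:
  w.start()
for w in workers:
  w.join()
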
diff --git a/build/android/pylib/base/test_dispatcher.py b/build/android/pylib/base/test_dispatcher.py
new file mode 100644
index 0000000..f919965
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher.py
@@ -0,0 +1,332 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Dispatches tests, either sharding or replicating them.
+
+Performs the following steps:
+* Create a test collection factory, using the given tests
+  - If sharding: test collection factory returns the same shared test collection
+    to all test runners
+  - If replicating: test collection factory returns a unique test collection to
+    each test runner, with the same set of tests in each.
+* Create a test runner for each device.
+* Run each test runner in its own thread, grabbing tests from the test
+  collection until there are no tests left.
+"""
+
+# TODO(jbudorick) Deprecate and remove this module after any relevant parts
+# have been ported to the new environment / test instance model.
+
+import logging
+import threading
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_collection
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+DEFAULT_TIMEOUT = 7 * 60  # seven minutes
+
+
+class _ThreadSafeCounter(object):
+  """A threadsafe counter."""
+
+  def __init__(self):
+    self._lock = threading.Lock()
+    self._value = 0
+
+  def GetAndIncrement(self):
+    """Get the current value and increment it atomically.
+
+    Returns:
+      The value before incrementing.
+    """
+    with self._lock:
+      pre_increment = self._value
+      self._value += 1
+      return pre_increment
+
+
+class _Test(object):
+  """Holds a test with additional metadata."""
+
+  def __init__(self, test, tries=0):
+    """Initializes the _Test object.
+
+    Args:
+      test: The test.
+      tries: Number of tries so far.
+    """
+    self.test = test
+    self.tries = tries
+
+
+def _RunTestsFromQueue(runner, collection, out_results, watcher,
+                       num_retries, tag_results_with_device=False):
+  """Runs tests from the collection until empty using the given runner.
+
+  Adds TestRunResults objects to the out_results list and may add tests to the
+  out_retry list.
+
+  Args:
+    runner: A TestRunner object used to run the tests.
+    collection: A TestCollection from which to get _Test objects to run.
+    out_results: A list to add TestRunResults to.
+    watcher: A watchdog_timer.WatchdogTimer object, used as a shared timeout.
+    num_retries: Number of retries for a test.
+    tag_results_with_device: If True, appends the name of the device on which
+        the test was run to the test name. Used when replicating to identify
+        which device ran each copy of the test, and to ensure each copy of the
+        test is recorded separately.
+  """
+
+  def TagTestRunResults(test_run_results):
+    """Tags all results with the last 4 digits of the device id.
+
+    Used when replicating tests to distinguish the same tests run on different
+    devices. We use a set to store test results, so the hash (generated from
+    name and tag) must be unique to be considered different results.
+    """
+    new_test_run_results = base_test_result.TestRunResults()
+    for test_result in test_run_results.GetAll():
+      test_result.SetName('%s_%s' % (runner.device_serial[-4:],
+                                     test_result.GetName()))
+      new_test_run_results.AddResult(test_result)
+    return new_test_run_results
+
+  for test in collection:
+    watcher.Reset()
+    try:
+      if not runner.device.IsOnline():
+        # Device is unresponsive, stop handling tests on this device.
+        msg = 'Device %s is unresponsive.' % runner.device_serial
+        logging.warning(msg)
+        raise device_errors.DeviceUnreachableError(msg)
+      result, retry = runner.RunTest(test.test)
+      if tag_results_with_device:
+        result = TagTestRunResults(result)
+      test.tries += 1
+      if retry and test.tries <= num_retries:
+        # Retry non-passing results, only record passing results.
+        pass_results = base_test_result.TestRunResults()
+        pass_results.AddResults(result.GetPass())
+        out_results.append(pass_results)
+        logging.warning('Will retry test %s, try #%s.', retry, test.tries)
+        collection.add(_Test(test=retry, tries=test.tries))
+      else:
+        # All tests passed or retry limit reached. Either way, record results.
+        out_results.append(result)
+    except:
+      # An unhandleable exception, ensure tests get run by another device and
+      # reraise this exception on the main thread.
+      collection.add(test)
+      raise
+    finally:
+      # Retries count as separate tasks so always mark the popped test as done.
+      collection.test_completed()
+
+
+def _SetUp(runner_factory, device, out_runners, threadsafe_counter):
+  """Creates a test runner for each device and calls SetUp() in parallel.
+
+  Note: if a device is unresponsive the corresponding TestRunner will not be
+    added to out_runners.
+
+  Args:
+    runner_factory: Callable that takes a device and index and returns a
+      TestRunner object.
+    device: The device serial number to set up.
+    out_runners: List to which the successfully set-up TestRunner object is
+      added.
+    threadsafe_counter: A _ThreadSafeCounter object used to get shard indices.
+  """
+  try:
+    index = threadsafe_counter.GetAndIncrement()
+    logging.warning('Creating shard %s for device %s.', index, device)
+    runner = runner_factory(device, index)
+    runner.SetUp()
+    out_runners.append(runner)
+  except device_errors.DeviceUnreachableError as e:
+    logging.warning('Failed to create shard for %s: [%s]', device, e)
+
+
+def _RunAllTests(runners, test_collection_factory, num_retries, timeout=None,
+                 tag_results_with_device=False):
+  """Run all tests using the given TestRunners.
+
+  Args:
+    runners: A list of TestRunner objects.
+    test_collection_factory: A callable to generate a TestCollection object for
+        each test runner.
+    num_retries: Number of retries for a test.
+    timeout: Watchdog timeout in seconds.
+    tag_results_with_device: If True, appends the name of the device on which
+        the test was run to the test name. Used when replicating to identify
+        which device ran each copy of the test, and to ensure each copy of the
+        test is recorded separately.
+
+  Returns:
+    A tuple of (TestRunResults object, exit code)
+  """
+  logging.warning('Running tests with %d test runners.', len(runners))
+  results = []
+  exit_code = 0
+  run_results = base_test_result.TestRunResults()
+  watcher = watchdog_timer.WatchdogTimer(timeout)
+  test_collections = [test_collection_factory() for _ in runners]
+
+  threads = [
+      reraiser_thread.ReraiserThread(
+          _RunTestsFromQueue,
+          [r, tc, results, watcher, num_retries, tag_results_with_device],
+          name=r.device_serial[-4:])
+      for r, tc in zip(runners, test_collections)]
+
+  workers = reraiser_thread.ReraiserThreadGroup(threads)
+  workers.StartAll()
+
+  # Catch DeviceUnreachableErrors; tests that never ran are recorded as
+  # UNKNOWN below.
+  try:
+    workers.JoinAll(watcher)
+  except device_errors.DeviceUnreachableError as e:
+    logging.error(e)
+
+  if not all((len(tc) == 0 for tc in test_collections)):
+    logging.error('Only ran %d tests (all devices are likely offline).',
+                  len(results))
+    for tc in test_collections:
+      run_results.AddResults(base_test_result.BaseTestResult(
+          t, base_test_result.ResultType.UNKNOWN) for t in tc.test_names())
+
+  for r in results:
+    run_results.AddTestRunResults(r)
+  if not run_results.DidRunPass():
+    exit_code = constants.ERROR_EXIT_CODE
+  return (run_results, exit_code)
+
+
+def _CreateRunners(runner_factory, devices, timeout=None):
+  """Creates a test runner for each device and calls SetUp() in parallel.
+
+  Note: if a device is unresponsive the corresponding TestRunner will not be
+    included in the returned list.
+
+  Args:
+    runner_factory: Callable that takes a device and index and returns a
+      TestRunner object.
+    devices: List of device serial numbers as strings.
+    timeout: Watchdog timeout in seconds, defaults to the default timeout.
+
+  Returns:
+    A list of TestRunner objects.
+  """
+  logging.warning('Creating %d test runners.', len(devices))
+  runners = []
+  counter = _ThreadSafeCounter()
+  threads = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(_SetUp,
+                                      [runner_factory, d, runners, counter],
+                                      name=str(d)[-4:])
+       for d in devices])
+  threads.StartAll()
+  threads.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+  return runners
+
+
+def _TearDownRunners(runners, timeout=None):
+  """Calls TearDown() for each test runner in parallel.
+
+  Args:
+    runners: A list of TestRunner objects.
+    timeout: Watchdog timeout in seconds, defaults to the default timeout.
+  """
+  threads = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(r.TearDown, name=r.device_serial[-4:])
+       for r in runners])
+  threads.StartAll()
+  threads.JoinAll(watchdog_timer.WatchdogTimer(timeout))
+
+
+def ApplyMaxPerRun(tests, max_per_run):
+  """Rearrange the tests so that no group contains more than max_per_run tests.
+
+  Args:
+    tests: A list of tests, where string entries are colon-separated groups of
+        test names that may be split up.
+    max_per_run: The maximum number of tests allowed in any one group.
+
+  Returns:
+    A list of tests with no more than max_per_run per run.
+  """
+  tests_expanded = []
+  for test_group in tests:
+    if not isinstance(test_group, str):
+      # Do not split test objects which are not strings.
+      tests_expanded.append(test_group)
+    else:
+      test_split = test_group.split(':')
+      for i in range(0, len(test_split), max_per_run):
+        tests_expanded.append(':'.join(test_split[i:i+max_per_run]))
+  return tests_expanded
+
+
+def RunTests(tests, runner_factory, devices, shard=True,
+             test_timeout=DEFAULT_TIMEOUT, setup_timeout=DEFAULT_TIMEOUT,
+             num_retries=2, max_per_run=256):
+  """Run all tests on attached devices, retrying tests that don't pass.
+
+  Args:
+    tests: List of tests to run.
+    runner_factory: Callable that takes a device and index and returns a
+        TestRunner object.
+    devices: List of attached devices.
+    shard: True if we should shard, False if we should replicate tests.
+      - Sharding tests will distribute tests across all test runners through a
+        shared test collection.
+      - Replicating tests will copy all tests to each test runner through a
+        unique test collection for each test runner.
+    test_timeout: Watchdog timeout in seconds for running tests.
+    setup_timeout: Watchdog timeout in seconds for creating and cleaning up
+        test runners.
+    num_retries: Number of retries for a test.
+    max_per_run: Maximum number of tests to run in any group.
+
+  Returns:
+    A tuple of (base_test_result.TestRunResults object, exit code).
+  """
+  if not tests:
+    logging.critical('No tests to run.')
+    return (base_test_result.TestRunResults(), constants.ERROR_EXIT_CODE)
+
+  tests_expanded = ApplyMaxPerRun(tests, max_per_run)
+  if shard:
+    # Generate a shared TestCollection object for all test runners, so they
+    # draw from a common pool of tests.
+    shared_test_collection = test_collection.TestCollection(
+        [_Test(t) for t in tests_expanded])
+    test_collection_factory = lambda: shared_test_collection
+    tag_results_with_device = False
+    log_string = 'sharded across devices'
+  else:
+    # Generate a unique TestCollection object for each test runner, but use
+    # the same set of tests.
+    test_collection_factory = lambda: test_collection.TestCollection(
+        [_Test(t) for t in tests_expanded])
+    tag_results_with_device = True
+    log_string = 'replicated on each device'
+
+  logging.info('Will run %d tests (%s): %s',
+               len(tests_expanded), log_string, str(tests_expanded))
+  runners = _CreateRunners(runner_factory, devices, setup_timeout)
+  try:
+    return _RunAllTests(runners, test_collection_factory,
+                        num_retries, test_timeout, tag_results_with_device)
+  finally:
+    try:
+      _TearDownRunners(runners, setup_timeout)
+    except device_errors.DeviceUnreachableError as e:
+      logging.warning('Device unresponsive during TearDown: [%s]', e)
+    except Exception as e:
+      logging.error('Unexpected exception caught during TearDown: %s', e)
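
A hedged sketch of driving the dispatcher, reusing the hypothetical ExampleTestRunner from the base_test_runner sketch above; attached_devices and the test names are placeholders supplied by the caller.

from pylib.base import test_dispatcher


def _RunnerFactory(device, _shard_index):
  # An empty tool name is assumed to select the default no-op tool.
  return ExampleTestRunner(device, tool='')

run_results, exit_code = test_dispatcher.RunTests(
    tests=['FooTest.testBar', 'FooTest.testBaz'],
    runner_factory=_RunnerFactory,
    devices=attached_devices,
    shard=True)  # shard=False would replicate every test on every device.
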
diff --git a/build/android/pylib/base/test_dispatcher_unittest.py b/build/android/pylib/base/test_dispatcher_unittest.py
new file mode 100755
index 0000000..cace9a6
--- /dev/null
+++ b/build/android/pylib/base/test_dispatcher_unittest.py
@@ -0,0 +1,241 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for test_dispatcher.py."""
+# pylint: disable=R0201
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_collection
+from pylib.base import test_dispatcher
+from pylib.device import adb_wrapper
+from pylib.device import device_utils
+from pylib.utils import watchdog_timer
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock
+
+
+class TestException(Exception):
+  pass
+
+
+def _MockDevice(serial):
+  d = mock.MagicMock(spec=device_utils.DeviceUtils)
+  d.__str__.return_value = serial
+  d.adb = mock.MagicMock(spec=adb_wrapper.AdbWrapper)
+  d.adb.GetDeviceSerial = mock.MagicMock(return_value=serial)
+  d.IsOnline = mock.MagicMock(return_value=True)
+  return d
+
+
+class MockRunner(object):
+  """A mock TestRunner."""
+  def __init__(self, device=None, shard_index=0):
+    self.device = device or _MockDevice('0')
+    self.device_serial = self.device.adb.GetDeviceSerial()
+    self.shard_index = shard_index
+    self.setups = 0
+    self.teardowns = 0
+
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(test, base_test_result.ResultType.PASS))
+    return (results, None)
+
+  def SetUp(self):
+    self.setups += 1
+
+  def TearDown(self):
+    self.teardowns += 1
+
+
+class MockRunnerFail(MockRunner):
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(test, base_test_result.ResultType.FAIL))
+    return (results, test)
+
+
+class MockRunnerFailTwice(MockRunner):
+  def __init__(self, device=None, shard_index=0):
+    super(MockRunnerFailTwice, self).__init__(device, shard_index)
+    self._fails = 0
+
+  def RunTest(self, test):
+    self._fails += 1
+    results = base_test_result.TestRunResults()
+    if self._fails <= 2:
+      results.AddResult(base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.FAIL))
+      return (results, test)
+    else:
+      results.AddResult(base_test_result.BaseTestResult(
+          test, base_test_result.ResultType.PASS))
+      return (results, None)
+
+
+class MockRunnerException(MockRunner):
+  def RunTest(self, test):
+    raise TestException
+
+
+class TestFunctions(unittest.TestCase):
+  """Tests test_dispatcher._RunTestsFromQueue."""
+  @staticmethod
+  def _RunTests(mock_runner, tests):
+    results = []
+    tests = test_collection.TestCollection(
+        [test_dispatcher._Test(t) for t in tests])
+    test_dispatcher._RunTestsFromQueue(mock_runner, tests, results,
+                                       watchdog_timer.WatchdogTimer(None), 2)
+    run_results = base_test_result.TestRunResults()
+    for r in results:
+      run_results.AddTestRunResults(r)
+    return run_results
+
+  def testRunTestsFromQueue(self):
+    results = TestFunctions._RunTests(MockRunner(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 2)
+    self.assertEqual(len(results.GetNotPass()), 0)
+
+  def testRunTestsFromQueueRetry(self):
+    results = TestFunctions._RunTests(MockRunnerFail(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 2)
+
+  def testRunTestsFromQueueFailTwice(self):
+    results = TestFunctions._RunTests(MockRunnerFailTwice(), ['a', 'b'])
+    self.assertEqual(len(results.GetPass()), 2)
+    self.assertEqual(len(results.GetNotPass()), 0)
+
+  def testSetUp(self):
+    runners = []
+    counter = test_dispatcher._ThreadSafeCounter()
+    test_dispatcher._SetUp(MockRunner, _MockDevice('0'), runners, counter)
+    self.assertEqual(len(runners), 1)
+    self.assertEqual(runners[0].setups, 1)
+
+  def testThreadSafeCounter(self):
+    counter = test_dispatcher._ThreadSafeCounter()
+    for i in xrange(5):
+      self.assertEqual(counter.GetAndIncrement(), i)
+
+  def testApplyMaxPerRun(self):
+    self.assertEqual(
+        ['A:B', 'C:D', 'E', 'F:G', 'H:I'],
+        test_dispatcher.ApplyMaxPerRun(['A:B', 'C:D:E', 'F:G:H:I'], 2))
+
+
+class TestThreadGroupFunctions(unittest.TestCase):
+  """Tests test_dispatcher._RunAllTests and test_dispatcher._CreateRunners."""
+  def setUp(self):
+    self.tests = ['a', 'b', 'c', 'd', 'e', 'f', 'g']
+    shared_test_collection = test_collection.TestCollection(
+        [test_dispatcher._Test(t) for t in self.tests])
+    self.test_collection_factory = lambda: shared_test_collection
+
+  def testCreate(self):
+    runners = test_dispatcher._CreateRunners(
+        MockRunner, [_MockDevice('0'), _MockDevice('1')])
+    for runner in runners:
+      self.assertEqual(runner.setups, 1)
+    self.assertEqual(set([r.device_serial for r in runners]),
+                     set(['0', '1']))
+    self.assertEqual(set([r.shard_index for r in runners]),
+                     set([0, 1]))
+
+  def testRun(self):
+    runners = [MockRunner(_MockDevice('0')), MockRunner(_MockDevice('1'))]
+    results, exit_code = test_dispatcher._RunAllTests(
+        runners, self.test_collection_factory, 0)
+    self.assertEqual(len(results.GetPass()), len(self.tests))
+    self.assertEqual(exit_code, 0)
+
+  def testTearDown(self):
+    runners = [MockRunner(_MockDevice('0')), MockRunner(_MockDevice('1'))]
+    test_dispatcher._TearDownRunners(runners)
+    for runner in runners:
+      self.assertEqual(runner.teardowns, 1)
+
+  def testRetry(self):
+    runners = test_dispatcher._CreateRunners(
+        MockRunnerFail, [_MockDevice('0'), _MockDevice('1')])
+    results, exit_code = test_dispatcher._RunAllTests(
+        runners, self.test_collection_factory, 0)
+    self.assertEqual(len(results.GetFail()), len(self.tests))
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testReraise(self):
+    runners = test_dispatcher._CreateRunners(
+        MockRunnerException, [_MockDevice('0'), _MockDevice('1')])
+    with self.assertRaises(TestException):
+      test_dispatcher._RunAllTests(runners, self.test_collection_factory, 0)
+
+
+class TestShard(unittest.TestCase):
+  """Tests test_dispatcher.RunTests with sharding."""
+  @staticmethod
+  def _RunShard(runner_factory):
+    return test_dispatcher.RunTests(
+        ['a', 'b', 'c'], runner_factory, [_MockDevice('0'), _MockDevice('1')],
+        shard=True)
+
+  def testShard(self):
+    results, exit_code = TestShard._RunShard(MockRunner)
+    self.assertEqual(len(results.GetPass()), 3)
+    self.assertEqual(exit_code, 0)
+
+  def testFailing(self):
+    results, exit_code = TestShard._RunShard(MockRunnerFail)
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 3)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testNoTests(self):
+    results, exit_code = test_dispatcher.RunTests(
+        [], MockRunner, [_MockDevice('0'), _MockDevice('1')], shard=True)
+    self.assertEqual(len(results.GetAll()), 0)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+
+class TestReplicate(unittest.TestCase):
+  """Tests test_dispatcher.RunTests with replication."""
+  @staticmethod
+  def _RunReplicate(runner_factory):
+    return test_dispatcher.RunTests(
+        ['a', 'b', 'c'], runner_factory, [_MockDevice('0'), _MockDevice('1')],
+        shard=False)
+
+  def testReplicate(self):
+    results, exit_code = TestReplicate._RunReplicate(MockRunner)
+    # We expect 6 results since each test should have been run on every device
+    self.assertEqual(len(results.GetPass()), 6)
+    self.assertEqual(exit_code, 0)
+
+  def testFailing(self):
+    results, exit_code = TestReplicate._RunReplicate(MockRunnerFail)
+    self.assertEqual(len(results.GetPass()), 0)
+    self.assertEqual(len(results.GetFail()), 6)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+  def testNoTests(self):
+    results, exit_code = test_dispatcher.RunTests(
+        [], MockRunner, [_MockDevice('0'), _MockDevice('1')], shard=False)
+    self.assertEqual(len(results.GetAll()), 0)
+    self.assertEqual(exit_code, constants.ERROR_EXIT_CODE)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/base/test_instance.py b/build/android/pylib/base/test_instance.py
new file mode 100644
index 0000000..cdf678f
--- /dev/null
+++ b/build/android/pylib/base/test_instance.py
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestInstance(object):
+  """A type of test.
+
+  This is expected to handle all logic that is test-type specific but
+  independent of the environment or device.
+
+  Examples include:
+    - gtests
+    - instrumentation tests
+  """
+
+  def __init__(self):
+    pass
+
+  def TestType(self):
+    raise NotImplementedError
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, _exc_type, _exc_val, _exc_tb):
+    self.TearDown()
+
diff --git a/build/android/pylib/base/test_instance_factory.py b/build/android/pylib/base/test_instance_factory.py
new file mode 100644
index 0000000..7e7cb0c
--- /dev/null
+++ b/build/android/pylib/base/test_instance_factory.py
@@ -0,0 +1,24 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.gtest import gtest_test_instance
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.utils import isolator
+from pylib.uirobot import uirobot_test_instance
+
+
+
+def CreateTestInstance(args, error_func):
+
+  if args.command == 'gtest':
+    return gtest_test_instance.GtestTestInstance(
+        args, isolator.Isolator(constants.ISOLATE_DEPS_DIR), error_func)
+  elif args.command == 'instrumentation':
+    return instrumentation_test_instance.InstrumentationTestInstance(
+        args, isolator.Isolator(constants.ISOLATE_DEPS_DIR), error_func)
+  elif args.command == 'uirobot':
+    return uirobot_test_instance.UirobotTestInstance(args, error_func)
+
+  error_func('Unable to create %s test instance.' % args.command)
diff --git a/build/android/pylib/base/test_run.py b/build/android/pylib/base/test_run.py
new file mode 100644
index 0000000..7380e78
--- /dev/null
+++ b/build/android/pylib/base/test_run.py
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class TestRun(object):
+  """An execution of a particular test on a particular device.
+
+  This is expected to handle all logic that is specific to the combination of
+  environment and test type.
+
+  Examples include:
+    - local gtests
+    - local instrumentation tests
+  """
+
+  def __init__(self, env, test_instance):
+    self._env = env
+    self._test_instance = test_instance
+
+  def TestPackage(self):
+    raise NotImplementedError
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def RunTests(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
+  def __enter__(self):
+    self.SetUp()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    self.TearDown()
+
diff --git a/build/android/pylib/base/test_run_factory.py b/build/android/pylib/base/test_run_factory.py
new file mode 100644
index 0000000..8c71ebbd
--- /dev/null
+++ b/build/android/pylib/base/test_run_factory.py
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import local_device_gtest_run
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_instrumentation_test_run
+from pylib.remote.device import remote_device_environment
+from pylib.remote.device import remote_device_gtest_run
+from pylib.remote.device import remote_device_instrumentation_test_run
+from pylib.remote.device import remote_device_uirobot_test_run
+from pylib.uirobot import uirobot_test_instance
+
+
+def CreateTestRun(_args, env, test_instance, error_func):
+  if isinstance(env, local_device_environment.LocalDeviceEnvironment):
+    if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+      return local_device_gtest_run.LocalDeviceGtestRun(env, test_instance)
+    if isinstance(test_instance,
+                  instrumentation_test_instance.InstrumentationTestInstance):
+      return (local_device_instrumentation_test_run
+              .LocalDeviceInstrumentationTestRun(env, test_instance))
+
+  if isinstance(env, remote_device_environment.RemoteDeviceEnvironment):
+    if isinstance(test_instance, gtest_test_instance.GtestTestInstance):
+      return remote_device_gtest_run.RemoteDeviceGtestTestRun(
+          env, test_instance)
+    if isinstance(test_instance,
+                  instrumentation_test_instance.InstrumentationTestInstance):
+      return (remote_device_instrumentation_test_run
+              .RemoteDeviceInstrumentationTestRun(env, test_instance))
+    if isinstance(test_instance, uirobot_test_instance.UirobotTestInstance):
+      return remote_device_uirobot_test_run.RemoteDeviceUirobotTestRun(
+          env, test_instance)
+
+
+  error_func('Unable to create test run for %s tests in %s environment'
+             % (str(test_instance), str(env)))
+
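
A hedged sketch of how the three factories are intended to compose under the new environment / test instance model; argument parsing and error handling are elided, and args is assumed to carry the fields the factories read (command, environment, and so on).

from pylib.base import environment_factory
from pylib.base import test_instance_factory
from pylib.base import test_run_factory


def RunTestsInPlatformMode(args, error_func):
  with environment_factory.CreateEnvironment(args, error_func) as env:
    with test_instance_factory.CreateTestInstance(args, error_func) as test:
      with test_run_factory.CreateTestRun(args, env, test, error_func) as run:
        return run.RunTests()
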
diff --git a/build/android/pylib/base/test_server.py b/build/android/pylib/base/test_server.py
new file mode 100644
index 0000000..085a51e
--- /dev/null
+++ b/build/android/pylib/base/test_server.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+class TestServer(object):
+  """Base class for any server that needs to be set up for the tests."""
+
+  def __init__(self, *args, **kwargs):
+    pass
+
+  def SetUp(self):
+    raise NotImplementedError
+
+  def Reset(self):
+    raise NotImplementedError
+
+  def TearDown(self):
+    raise NotImplementedError
+
diff --git a/build/android/pylib/chrome_test_server_spawner.py b/build/android/pylib/chrome_test_server_spawner.py
new file mode 100644
index 0000000..052c2fd
--- /dev/null
+++ b/build/android/pylib/chrome_test_server_spawner.py
@@ -0,0 +1,422 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A "Test Server Spawner" that handles killing/stopping per-test test servers.
+
+It's used to accept requests from the device to spawn and kill instances of the
+chrome test server on the host.
+"""
+# pylint: disable=W0702
+
+import BaseHTTPServer
+import json
+import logging
+import os
+import select
+import struct
+import subprocess
+import sys
+import threading
+import time
+import urlparse
+
+from pylib import constants
+from pylib import ports
+
+from pylib.forwarder import Forwarder
+
+
+# Paths needed to import the necessary modules when launching a testserver.
+os.environ['PYTHONPATH'] = os.environ.get('PYTHONPATH', '') + (':%s:%s:%s:%s:%s'
+    % (os.path.join(constants.DIR_SOURCE_ROOT, 'third_party'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'tlslite'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pyftpdlib',
+                    'src'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'net', 'tools', 'testserver'),
+       os.path.join(constants.DIR_SOURCE_ROOT, 'sync', 'tools', 'testserver')))
+
+
+SERVER_TYPES = {
+    'http': '',
+    'ftp': '-f',
+    'sync': '',  # Sync uses its own script, and doesn't take a server type arg.
+    'tcpecho': '--tcp-echo',
+    'udpecho': '--udp-echo',
+}
+
+
+# The timeout (in seconds) of starting up the Python test server.
+TEST_SERVER_STARTUP_TIMEOUT = 10
+
+def _WaitUntil(predicate, max_attempts=5):
+  """Blocks until the provided predicate (function) is true.
+
+  Returns:
+    Whether the provided predicate was satisfied once (before the timeout).
+  """
+  sleep_time_sec = 0.025
+  for _ in xrange(1, max_attempts):
+    if predicate():
+      return True
+    time.sleep(sleep_time_sec)
+    sleep_time_sec = min(1, sleep_time_sec * 2)  # Don't wait more than 1 sec.
+  return False
+
+
+def _CheckPortAvailable(port):
+  """Returns True if |port| is available."""
+  return _WaitUntil(lambda: ports.IsHostPortAvailable(port))
+
+
+def _CheckPortNotAvailable(port):
+  """Returns True if |port| is not available."""
+  return _WaitUntil(lambda: not ports.IsHostPortAvailable(port))
+
+
+def _CheckDevicePortStatus(device, port):
+  """Returns whether the provided port is used."""
+  return _WaitUntil(lambda: ports.IsDevicePortUsed(device, port))
+
+
+def _GetServerTypeCommandLine(server_type):
+  """Returns the command-line by the given server type.
+
+  Args:
+    server_type: the server type to be used (e.g. 'http').
+
+  Returns:
+    A string containing the command-line argument.
+  """
+  if server_type not in SERVER_TYPES:
+    raise NotImplementedError('Unknown server type: %s' % server_type)
+  if server_type == 'udpecho':
+    raise Exception('Please do not run UDP echo tests because we do not have '
+                    'a UDP forwarder tool.')
+  return SERVER_TYPES[server_type]
+
+
+class TestServerThread(threading.Thread):
+  """A thread to run the test server in a separate process."""
+
+  def __init__(self, ready_event, arguments, device, tool):
+    """Initialize TestServerThread with the following argument.
+
+    Args:
+      ready_event: event which will be set when the test server is ready.
+      arguments: dictionary of arguments to run the test server.
+      device: An instance of DeviceUtils.
+      tool: instance of runtime error detection tool.
+    """
+    threading.Thread.__init__(self)
+    self.wait_event = threading.Event()
+    self.stop_flag = False
+    self.ready_event = ready_event
+    self.ready_event.clear()
+    self.arguments = arguments
+    self.device = device
+    self.tool = tool
+    self.test_server_process = None
+    self.is_ready = False
+    self.host_port = self.arguments['port']
+    assert isinstance(self.host_port, int)
+    # The forwarder device port now is dynamically allocated.
+    self.forwarder_device_port = 0
+    # Anonymous pipe in order to get port info from test server.
+    self.pipe_in = None
+    self.pipe_out = None
+    self.process = None
+    self.command_line = []
+
+  def _WaitToStartAndGetPortFromTestServer(self):
+    """Waits for the Python test server to start and gets the port it is using.
+
+    The port information is passed by the Python test server with a pipe given
+    by self.pipe_out. It is written as a result to |self.host_port|.
+
+    Returns:
+      Whether the port used by the test server was successfully fetched.
+    """
+    assert self.host_port == 0 and self.pipe_out and self.pipe_in
+    (in_fds, _, _) = select.select([self.pipe_in, ], [], [],
+                                   TEST_SERVER_STARTUP_TIMEOUT)
+    if len(in_fds) == 0:
+      logging.error('Timed out waiting for the Python test server to start.')
+      return False
+    # First read the data length as an unsigned 4-byte value.  This
+    # is _not_ using network byte ordering since the Python test server packs
+    # size as native byte order and all Chromium platforms so far are
+    # configured to use little-endian.
+    # TODO(jnd): Change the Python test server and local_test_server_*.cc to
+    # use a unified byte order (either big-endian or little-endian).
+    data_length = os.read(self.pipe_in, struct.calcsize('=L'))
+    if data_length:
+      (data_length,) = struct.unpack('=L', data_length)
+      assert data_length
+    if not data_length:
+      logging.error('Failed to get length of server data.')
+      return False
+    port_json = os.read(self.pipe_in, data_length)
+    if not port_json:
+      logging.error('Failed to get server data.')
+      return False
+    logging.info('Got port json data: %s', port_json)
+    port_json = json.loads(port_json)
+    if 'port' in port_json and isinstance(port_json['port'], int):
+      self.host_port = port_json['port']
+      return _CheckPortNotAvailable(self.host_port)
+    logging.error('Failed to get port information from the server data.')
+    return False
+
+  def _GenerateCommandLineArguments(self):
+    """Generates the command line to run the test server.
+
+    Note that all options are processed by following the definitions in
+    testserver.py.
+    """
+    if self.command_line:
+      return
+
+    args_copy = dict(self.arguments)
+
+    # Translate the server type.
+    type_cmd = _GetServerTypeCommandLine(args_copy.pop('server-type'))
+    if type_cmd:
+      self.command_line.append(type_cmd)
+
+    # Use a pipe to get the port given by the instance of Python test server
+    # if the test does not specify the port.
+    assert self.host_port == args_copy['port']
+    if self.host_port == 0:
+      (self.pipe_in, self.pipe_out) = os.pipe()
+      self.command_line.append('--startup-pipe=%d' % self.pipe_out)
+
+    # Pass the remaining arguments as-is.
+    for key, values in args_copy.iteritems():
+      if not isinstance(values, list):
+        values = [values]
+      for value in values:
+        if value is None:
+          self.command_line.append('--%s' % key)
+        else:
+          self.command_line.append('--%s=%s' % (key, value))
+
+  def _CloseUnnecessaryFDsForTestServerProcess(self):
+    # This is required to avoid subtle deadlocks that could be caused by the
+    # test server child process inheriting undesirable file descriptors such as
+    # file lock file descriptors.
+    for fd in xrange(0, 1024):
+      if fd != self.pipe_out:
+        try:
+          os.close(fd)
+        except:
+          pass
+
+  def run(self):
+    logging.info('Start running the thread!')
+    self.wait_event.clear()
+    self._GenerateCommandLineArguments()
+    command = constants.DIR_SOURCE_ROOT
+    if self.arguments['server-type'] == 'sync':
+      command = [os.path.join(command, 'sync', 'tools', 'testserver',
+                              'sync_testserver.py')] + self.command_line
+    else:
+      command = [os.path.join(command, 'net', 'tools', 'testserver',
+                              'testserver.py')] + self.command_line
+    logging.info('Running: %s', command)
+    # Pass DIR_SOURCE_ROOT as the child's working directory so that relative
+    # paths in the arguments are resolved correctly.
+    self.process = subprocess.Popen(
+        command, preexec_fn=self._CloseUnnecessaryFDsForTestServerProcess,
+        cwd=constants.DIR_SOURCE_ROOT)
+    if self.process:
+      if self.pipe_out:
+        self.is_ready = self._WaitToStartAndGetPortFromTestServer()
+      else:
+        self.is_ready = _CheckPortNotAvailable(self.host_port)
+    if self.is_ready:
+      Forwarder.Map([(0, self.host_port)], self.device, self.tool)
+      # Check whether the forwarder is ready on the device.
+      self.is_ready = False
+      device_port = Forwarder.DevicePortForHostPort(self.host_port)
+      if device_port and _CheckDevicePortStatus(self.device, device_port):
+        self.is_ready = True
+        self.forwarder_device_port = device_port
+    # Wake up the request handler thread.
+    self.ready_event.set()
+    # Keep thread running until Stop() gets called.
+    _WaitUntil(lambda: self.stop_flag, max_attempts=sys.maxint)
+    if self.process.poll() is None:
+      self.process.kill()
+    Forwarder.UnmapDevicePort(self.forwarder_device_port, self.device)
+    self.process = None
+    self.is_ready = False
+    if self.pipe_out:
+      os.close(self.pipe_in)
+      os.close(self.pipe_out)
+      self.pipe_in = None
+      self.pipe_out = None
+    logging.info('Test-server has died.')
+    self.wait_event.set()
+
+  def Stop(self):
+    """Blocks until the loop has finished.
+
+    Note that this must be called in another thread.
+    """
+    if not self.process:
+      return
+    self.stop_flag = True
+    self.wait_event.wait()
+
+
+class SpawningServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+  """A handler used to process http GET/POST request."""
+
+  def _SendResponse(self, response_code, response_reason, additional_headers,
+                    contents):
+    """Generates a response sent to the client from the provided parameters.
+
+    Args:
+      response_code: number of the response status.
+      response_reason: string of reason description of the response.
+      additional_headers: dict of additional headers. Each key is the name of
+                          the header, each value is the content of the header.
+      contents: string of the contents we want to send to client.
+    """
+    self.send_response(response_code, response_reason)
+    self.send_header('Content-Type', 'text/html')
+    # Specify the content-length; without it the http(s) response will not be
+    # completed properly (and the browser keeps expecting data).
+    self.send_header('Content-Length', len(contents))
+    for header_name in additional_headers:
+      self.send_header(header_name, additional_headers[header_name])
+    self.end_headers()
+    self.wfile.write(contents)
+    self.wfile.flush()
+
+  def _StartTestServer(self):
+    """Starts the test server thread."""
+    logging.info('Handling request to spawn a test server.')
+    content_type = self.headers.getheader('content-type')
+    if content_type != 'application/json':
+      raise Exception('Bad content-type for start request.')
+    content_length = self.headers.getheader('content-length')
+    if not content_length:
+      content_length = 0
+    try:
+      content_length = int(content_length)
+    except:
+      raise Exception('Bad content-length for start request.')
+    logging.info(content_length)
+    test_server_argument_json = self.rfile.read(content_length)
+    logging.info(test_server_argument_json)
+    assert not self.server.test_server_instance
+    ready_event = threading.Event()
+    self.server.test_server_instance = TestServerThread(
+        ready_event,
+        json.loads(test_server_argument_json),
+        self.server.device,
+        self.server.tool)
+    self.server.test_server_instance.setDaemon(True)
+    self.server.test_server_instance.start()
+    ready_event.wait()
+    if self.server.test_server_instance.is_ready:
+      self._SendResponse(200, 'OK', {}, json.dumps(
+          {'port': self.server.test_server_instance.forwarder_device_port,
+           'message': 'started'}))
+      logging.info('Test server is running on port: %d.',
+                   self.server.test_server_instance.host_port)
+    else:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while starting the test server.')
+
+  def _KillTestServer(self):
+    """Stops the test server instance."""
+    # There should only ever be one test server at a time. This may do the
+    # wrong thing if we try to start multiple test servers.
+    if not self.server.test_server_instance:
+      return
+    port = self.server.test_server_instance.host_port
+    logging.info('Handling request to kill a test server on port: %d.', port)
+    self.server.test_server_instance.Stop()
+    # Make sure the status of test server is correct before sending response.
+    if _CheckPortAvailable(port):
+      self._SendResponse(200, 'OK', {}, 'killed')
+      logging.info('Test server on port %d was killed.', port)
+    else:
+      self._SendResponse(500, 'Test Server Error.', {}, '')
+      logging.info('Encountered a problem while killing the test server.')
+    self.server.test_server_instance = None
+
+  def do_POST(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    logging.info('Action for POST method is: %s.', action)
+    if action == '/start':
+      self._StartTestServer()
+    else:
+      self._SendResponse(400, 'Unknown request.', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+  def do_GET(self):
+    parsed_path = urlparse.urlparse(self.path)
+    action = parsed_path.path
+    params = urlparse.parse_qs(parsed_path.query, keep_blank_values=1)
+    logging.info('Action for GET method is: %s.', action)
+    for param in params:
+      logging.info('%s=%s', param, params[param][0])
+    if action == '/kill':
+      self._KillTestServer()
+    elif action == '/ping':
+      # The ping handler is used to check whether the spawner server is ready
+      # to serve the requests. We don't need to test the status of the test
+      # server when handling ping request.
+      self._SendResponse(200, 'OK', {}, 'ready')
+      logging.info('Handled ping request and sent response.')
+    else:
+      self._SendResponse(400, 'Unknown request', {}, '')
+      logging.info('Encountered unknown request: %s.', action)
+
+
+class SpawningServer(object):
+  """The class used to start/stop a http server."""
+
+  def __init__(self, test_server_spawner_port, device, tool):
+    logging.info('Creating new spawner on port: %d.', test_server_spawner_port)
+    self.server = BaseHTTPServer.HTTPServer(('', test_server_spawner_port),
+                                            SpawningServerRequestHandler)
+    self.server.device = device
+    self.server.tool = tool
+    self.server.test_server_instance = None
+    self.server.build_type = constants.GetBuildType()
+
+  def _Listen(self):
+    logging.info('Starting test server spawner')
+    self.server.serve_forever()
+
+  def Start(self):
+    """Starts the test server spawner."""
+    listener_thread = threading.Thread(target=self._Listen)
+    listener_thread.setDaemon(True)
+    listener_thread.start()
+
+  def Stop(self):
+    """Stops the test server spawner.
+
+    Also cleans the server state.
+    """
+    self.CleanupState()
+    self.server.shutdown()
+
+  def CleanupState(self):
+    """Cleans up the spawning server state.
+
+    This should be called if the test server spawner is reused,
+    to avoid sharing the test server instance.
+    """
+    if self.server.test_server_instance:
+      self.server.test_server_instance.Stop()
+      self.server.test_server_instance = None
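
A hedged sketch of the spawner's host-side lifecycle; the port, device, and tool values are placeholders provided by the caller (compare BaseTestRunner, which records the spawner port and pushes it to the device).

from pylib import chrome_test_server_spawner

spawner = chrome_test_server_spawner.SpawningServer(
    test_server_spawner_port, device, tool)
spawner.Start()
# Device-side test code now POSTs JSON to /start (and later GETs /kill) via
# the forwarded spawner port; /ping reports whether the spawner is ready.
# ... run the tests ...
spawner.Stop()
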
diff --git a/build/android/pylib/cmd_helper.py b/build/android/pylib/cmd_helper.py
new file mode 100644
index 0000000..f881553
--- /dev/null
+++ b/build/android/pylib/cmd_helper.py
@@ -0,0 +1,261 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A wrapper for subprocess to make calling shell commands easier."""
+
+import logging
+import os
+import pipes
+import select
+import signal
+import string
+import StringIO
+import subprocess
+import time
+
+# fcntl is not available on Windows.
+try:
+  import fcntl
+except ImportError:
+  fcntl = None
+
+_SafeShellChars = frozenset(string.ascii_letters + string.digits + '@%_-+=:,./')
+
+def SingleQuote(s):
+  """Return an shell-escaped version of the string using single quotes.
+
+  Reliably quote a string which may contain unsafe characters (e.g. space,
+  quote, or other special characters such as '$').
+
+  The returned value can be used in a shell command line as one token that is
+  interpreted literally.
+
+  Args:
+    s: The string to quote.
+
+  Returns:
+    The string quoted using single quotes.
+  """
+  return pipes.quote(s)
+
+def DoubleQuote(s):
+  """Return an shell-escaped version of the string using double quotes.
+
+  Reliably quote a string which may contain unsafe characters (e.g. space
+  or quote characters), while retaining some shell features such as variable
+  interpolation.
+
+  The returned value can be used in a shell command line as one token that is
+  further interpreted by the shell.
+
+  The set of characters that retain their special meaning may depend on the
+  shell implementation. This set usually includes: '$', '`', '\\', '!', '*',
+  and '@'.
+
+  Args:
+    s: The string to quote.
+
+  Returns:
+    The string quoted using double quotes.
+  """
+  if not s:
+    return '""'
+  elif all(c in _SafeShellChars for c in s):
+    return s
+  else:
+    return '"' + s.replace('"', '\\"') + '"'
+
+
+def Popen(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
+  return subprocess.Popen(
+      args=args, cwd=cwd, stdout=stdout, stderr=stderr,
+      shell=shell, close_fds=True, env=env,
+      preexec_fn=lambda: signal.signal(signal.SIGPIPE, signal.SIG_DFL))
+
+
+def Call(args, stdout=None, stderr=None, shell=None, cwd=None, env=None):
+  pipe = Popen(args, stdout=stdout, stderr=stderr, shell=shell, cwd=cwd,
+               env=env)
+  pipe.communicate()
+  return pipe.wait()
+
+
+def RunCmd(args, cwd=None):
+  """Opens a subprocess to execute a program and returns its return value.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+
+  Returns:
+    Return code from the command execution.
+  """
+  logging.info(str(args) + ' ' + (cwd or ''))
+  return Call(args, cwd=cwd)
+
+
+def GetCmdOutput(args, cwd=None, shell=False):
+  """Open a subprocess to execute a program and returns its output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command.
+
+  Returns:
+    Captures and returns the command's stdout.
+    Prints the command's stderr to logger (which defaults to stdout).
+  """
+  (_, output) = GetCmdStatusAndOutput(args, cwd, shell)
+  return output
+
+
+def _ValidateAndLogCommand(args, cwd, shell):
+  if isinstance(args, basestring):
+    if not shell:
+      raise Exception('string args must be run with shell=True')
+  else:
+    if shell:
+      raise Exception('array args must be run with shell=False')
+    args = ' '.join(SingleQuote(c) for c in args)
+  if cwd is None:
+    cwd = ''
+  else:
+    cwd = ':' + cwd
+  logging.info('[host]%s> %s', cwd, args)
+  return args
+
+
+def GetCmdStatusAndOutput(args, cwd=None, shell=False):
+  """Executes a subprocess and returns its exit code and output.
+
+  Args:
+    args: A string or a sequence of program arguments. The program to execute is
+      the string or the first item in the args sequence.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command. Must be True if args
+      is a string and False if args is a sequence.
+
+  Returns:
+    The 2-tuple (exit code, output).
+  """
+  _ValidateAndLogCommand(args, cwd, shell)
+  pipe = Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
+               shell=shell, cwd=cwd)
+  stdout, stderr = pipe.communicate()
+
+  if stderr:
+    logging.critical(stderr)
+  if len(stdout) > 4096:
+    logging.debug('Truncated output:')
+  logging.debug(stdout[:4096])
+  return (pipe.returncode, stdout)
+
+
+class TimeoutError(Exception):
+  """Module-specific timeout exception."""
+  pass
+
+
+def _IterProcessStdout(process, timeout=None, buffer_size=4096,
+                       poll_interval=1):
+  assert fcntl, 'fcntl module is required'
+  try:
+    # Enable non-blocking reads from the child's stdout.
+    child_fd = process.stdout.fileno()
+    fl = fcntl.fcntl(child_fd, fcntl.F_GETFL)
+    fcntl.fcntl(child_fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
+
+    end_time = (time.time() + timeout) if timeout else None
+    while True:
+      if end_time and time.time() > end_time:
+        raise TimeoutError
+      read_fds, _, _ = select.select([child_fd], [], [], poll_interval)
+      if child_fd in read_fds:
+        data = os.read(child_fd, buffer_size)
+        if not data:
+          break
+        yield data
+      if process.poll() is not None:
+        break
+  finally:
+    try:
+      # Make sure the process doesn't stick around if we fail with an
+      # exception.
+      process.kill()
+    except OSError:
+      pass
+    process.wait()
+
+
+def GetCmdStatusAndOutputWithTimeout(args, timeout, cwd=None, shell=False,
+                                     logfile=None):
+  """Executes a subprocess with a timeout.
+
+  Args:
+    args: List of arguments to the program; the program to execute is the
+      first element.
+    timeout: The timeout in seconds, or None to wait forever.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command. Must be True if args
+      is a string and False if args is a sequence.
+    logfile: Optional file-like object that will receive output from the
+      command as it is running.
+
+  Returns:
+    The 2-tuple (exit code, output).
+  """
+  _ValidateAndLogCommand(args, cwd, shell)
+  output = StringIO.StringIO()
+  process = Popen(args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,
+                  stderr=subprocess.STDOUT)
+  for data in _IterProcessStdout(process, timeout=timeout):
+    if logfile:
+      logfile.write(data)
+    output.write(data)
+  return process.returncode, output.getvalue()
+
+
+def IterCmdOutputLines(args, timeout=None, cwd=None, shell=False,
+                       check_status=True):
+  """Executes a subprocess and continuously yields lines from its output.
+
+  Args:
+    args: List of arguments to the program; the program to execute is the
+      first element.
+    timeout: The timeout in seconds, or None to wait forever.
+    cwd: If not None, the subprocess's current directory will be changed to
+      |cwd| before it's executed.
+    shell: Whether to execute args as a shell command. Must be True if args
+      is a string and False if args is a sequence.
+    check_status: A boolean indicating whether to check the exit status of the
+      process after all output has been read.
+
+  Yields:
+    The output of the subprocess, line by line.
+
+  Raises:
+    CalledProcessError if check_status is True and the process exited with a
+      non-zero exit status.
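+
+  Example (illustrative; assumes a POSIX host with a 'seq' binary):
+    for line in IterCmdOutputLines(['seq', '3']):
+      print line  # prints '1', '2', '3'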
+  """
+  cmd = _ValidateAndLogCommand(args, cwd, shell)
+  process = Popen(args, cwd=cwd, shell=shell, stdout=subprocess.PIPE,
+                  stderr=subprocess.STDOUT)
+  buffer_output = ''
+  for data in _IterProcessStdout(process, timeout=timeout):
+    buffer_output += data
+    has_incomplete_line = buffer_output[-1] not in '\r\n'
+    lines = buffer_output.splitlines()
+    buffer_output = lines.pop() if has_incomplete_line else ''
+    for line in lines:
+      yield line
+  if buffer_output:
+    yield buffer_output
+  if check_status and process.returncode:
+    raise subprocess.CalledProcessError(process.returncode, cmd)
diff --git a/build/android/pylib/cmd_helper_test.py b/build/android/pylib/cmd_helper_test.py
new file mode 100644
index 0000000..5155cea
--- /dev/null
+++ b/build/android/pylib/cmd_helper_test.py
@@ -0,0 +1,83 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the cmd_helper module."""
+
+import subprocess
+import unittest
+
+from pylib import cmd_helper
+
+
+class CmdHelperSingleQuoteTest(unittest.TestCase):
+
+  def testSingleQuote_basic(self):
+    self.assertEquals('hello',
+                      cmd_helper.SingleQuote('hello'))
+
+  def testSingleQuote_withSpaces(self):
+    self.assertEquals("'hello world'",
+                      cmd_helper.SingleQuote('hello world'))
+
+  def testSingleQuote_withUnsafeChars(self):
+    self.assertEquals("""'hello'"'"'; rm -rf /'""",
+                      cmd_helper.SingleQuote("hello'; rm -rf /"))
+
+  def testSingleQuote_dontExpand(self):
+    test_string = 'hello $TEST_VAR'
+    cmd = 'TEST_VAR=world; echo %s' % cmd_helper.SingleQuote(test_string)
+    self.assertEquals(test_string,
+                      cmd_helper.GetCmdOutput(cmd, shell=True).rstrip())
+
+
+class CmdHelperDoubleQuoteTest(unittest.TestCase):
+
+  def testDoubleQuote_basic(self):
+    self.assertEquals('hello',
+                      cmd_helper.DoubleQuote('hello'))
+
+  def testDoubleQuote_withSpaces(self):
+    self.assertEquals('"hello world"',
+                      cmd_helper.DoubleQuote('hello world'))
+
+  def testDoubleQuote_withUnsafeChars(self):
+    self.assertEquals('''"hello\\"; rm -rf /"''',
+                      cmd_helper.DoubleQuote('hello"; rm -rf /'))
+
+  def testDoubleQuote_doExpand(self):
+    test_string = 'hello $TEST_VAR'
+    cmd = 'TEST_VAR=world; echo %s' % cmd_helper.DoubleQuote(test_string)
+    self.assertEquals('hello world',
+                      cmd_helper.GetCmdOutput(cmd, shell=True).rstrip())
+
+
+class CmdHelperIterCmdOutputLinesTest(unittest.TestCase):
+  """Test IterCmdOutputLines with some calls to the unix 'seq' command."""
+
+  def testIterCmdOutputLines_success(self):
+    for num, line in enumerate(
+        cmd_helper.IterCmdOutputLines(['seq', '10']), 1):
+      self.assertEquals(num, int(line))
+
+  def testIterCmdOutputLines_exitStatusFail(self):
+    with self.assertRaises(subprocess.CalledProcessError):
+      for num, line in enumerate(
+          cmd_helper.IterCmdOutputLines('seq 10 && false', shell=True), 1):
+        self.assertEquals(num, int(line))
+      # after reading all the output we get an exit status of 1
+
+  def testIterCmdOutputLines_exitStatusIgnored(self):
+    for num, line in enumerate(
+        cmd_helper.IterCmdOutputLines('seq 10 && false', shell=True,
+                                      check_status=False), 1):
+      self.assertEquals(num, int(line))
+
+  def testIterCmdOutputLines_exitStatusSkipped(self):
+    for num, line in enumerate(
+        cmd_helper.IterCmdOutputLines('seq 10 && false', shell=True), 1):
+      self.assertEquals(num, int(line))
+      # no exception will be raised because we don't attempt to read past
+      # the end of the output and, thus, the status never gets checked
+      if num == 10:
+        break
diff --git a/build/android/pylib/constants/__init__.py b/build/android/pylib/constants/__init__.py
new file mode 100644
index 0000000..8821f97
--- /dev/null
+++ b/build/android/pylib/constants/__init__.py
@@ -0,0 +1,308 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants shared by test runners and other scripts."""
+
+# TODO(jbudorick): Split these constants into coherent modules.
+
+# pylint: disable=W0212
+
+import collections
+import logging
+import os
+import subprocess
+
+
+DIR_SOURCE_ROOT = os.environ.get('CHECKOUT_SOURCE_ROOT',
+    os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                 os.pardir, os.pardir, os.pardir, os.pardir)))
+ISOLATE_DEPS_DIR = os.path.join(DIR_SOURCE_ROOT, 'isolate_deps_dir')
+
+CHROME_SHELL_HOST_DRIVEN_DIR = os.path.join(
+    DIR_SOURCE_ROOT, 'chrome', 'android')
+
+
+PackageInfo = collections.namedtuple('PackageInfo',
+    ['package', 'activity', 'cmdline_file', 'devtools_socket',
+     'test_package'])
+
+PACKAGE_INFO = {
+    'chrome_document': PackageInfo(
+        'com.google.android.apps.chrome.document',
+        'com.google.android.apps.chrome.document.ChromeLauncherActivity',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome': PackageInfo(
+        'com.google.android.apps.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        'com.google.android.apps.chrome.tests'),
+    'chrome_beta': PackageInfo(
+        'com.chrome.beta',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_stable': PackageInfo(
+        'com.android.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_dev': PackageInfo(
+        'com.chrome.dev',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_canary': PackageInfo(
+        'com.chrome.canary',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chrome_work': PackageInfo(
+        'com.chrome.work',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'chromium': PackageInfo(
+        'org.chromium.chrome',
+        'com.google.android.apps.chrome.Main',
+        '/data/local/chrome-command-line',
+        'chrome_devtools_remote',
+        None),
+    'legacy_browser': PackageInfo(
+        'com.google.android.browser',
+        'com.android.browser.BrowserActivity',
+        None,
+        None,
+        None),
+    'chromecast_shell': PackageInfo(
+        'com.google.android.apps.mediashell',
+        'com.google.android.apps.mediashell.MediaShellActivity',
+        '/data/local/tmp/castshell-command-line',
+        None,
+        None),
+    'content_shell': PackageInfo(
+        'org.chromium.content_shell_apk',
+        'org.chromium.content_shell_apk.ContentShellActivity',
+        '/data/local/tmp/content-shell-command-line',
+        None,
+        'org.chromium.content_shell_apk.tests'),
+    'chrome_shell': PackageInfo(
+        'org.chromium.chrome.shell',
+        'org.chromium.chrome.shell.ChromeShellActivity',
+        '/data/local/tmp/chrome-shell-command-line',
+        'chrome_shell_devtools_remote',
+        'org.chromium.chrome.shell.tests'),
+    'android_webview_shell': PackageInfo(
+        'org.chromium.android_webview.shell',
+        'org.chromium.android_webview.shell.AwShellActivity',
+        '/data/local/tmp/android-webview-command-line',
+        None,
+        'org.chromium.android_webview.test'),
+    'gtest': PackageInfo(
+        'org.chromium.native_test',
+        'org.chromium.native_test.NativeUnitTestActivity',
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'components_browsertests': PackageInfo(
+        'org.chromium.components_browsertests_apk',
+        ('org.chromium.components_browsertests_apk' +
+         '.ComponentsBrowserTestsActivity'),
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'content_browsertests': PackageInfo(
+        'org.chromium.content_browsertests_apk',
+        'org.chromium.content_browsertests_apk.ContentBrowserTestsActivity',
+        '/data/local/tmp/chrome-native-tests-command-line',
+        None,
+        None),
+    'chromedriver_webview_shell': PackageInfo(
+        'org.chromium.chromedriver_webview_shell',
+        'org.chromium.chromedriver_webview_shell.Main',
+        None,
+        None,
+        None),
+}
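+
+# Example lookup (illustrative):
+#   PACKAGE_INFO['chrome_shell'].package == 'org.chromium.chrome.shell'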
+
+
+# Ports arrangement for various test servers used in Chrome for Android.
+# The lighttpd server will attempt to use 9000 as its default port; if that is
+# unavailable, it will find a free port in the range 8001-8999.
+LIGHTTPD_DEFAULT_PORT = 9000
+LIGHTTPD_RANDOM_PORT_FIRST = 8001
+LIGHTTPD_RANDOM_PORT_LAST = 8999
+TEST_SYNC_SERVER_PORT = 9031
+TEST_SEARCH_BY_IMAGE_SERVER_PORT = 9041
+TEST_POLICY_SERVER_PORT = 9051
+
+# The net test server is started from port 10201.
+# TODO(pliard): http://crbug.com/239014. Remove this dirty workaround once
+# http://crbug.com/239014 is fixed properly.
+TEST_SERVER_PORT_FIRST = 10201
+TEST_SERVER_PORT_LAST = 30000
+# A file to record next valid port of test server.
+TEST_SERVER_PORT_FILE = '/tmp/test_server_port'
+TEST_SERVER_PORT_LOCKFILE = '/tmp/test_server_port.lock'
+
+TEST_EXECUTABLE_DIR = '/data/local/tmp'
+# Directories for common java libraries for SDK build.
+# These constants are defined in build/android/ant/common.xml
+SDK_BUILD_JAVALIB_DIR = 'lib.java'
+SDK_BUILD_TEST_JAVALIB_DIR = 'test.lib.java'
+SDK_BUILD_APKS_DIR = 'apks'
+
+ADB_KEYS_FILE = '/data/misc/adb/adb_keys'
+
+PERF_OUTPUT_DIR = os.path.join(DIR_SOURCE_ROOT, 'out', 'step_results')
+# The directory on the device where perf test output gets saved to.
+DEVICE_PERF_OUTPUT_DIR = (
+    '/data/data/' + PACKAGE_INFO['chrome'].package + '/files')
+
+SCREENSHOTS_DIR = os.path.join(DIR_SOURCE_ROOT, 'out_screenshots')
+
+class ANDROID_SDK_VERSION_CODES(object):
+  """Android SDK version codes.
+
+  http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
+  """
+
+  JELLY_BEAN = 16
+  JELLY_BEAN_MR1 = 17
+  JELLY_BEAN_MR2 = 18
+  KITKAT = 19
+  KITKAT_WATCH = 20
+  LOLLIPOP = 21
+  LOLLIPOP_MR1 = 22
+
+ANDROID_SDK_VERSION = ANDROID_SDK_VERSION_CODES.LOLLIPOP_MR1
+ANDROID_SDK_BUILD_TOOLS_VERSION = '22.0.1'
+ANDROID_SDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party/android_tools/sdk')
+ANDROID_SDK_TOOLS = os.path.join(ANDROID_SDK_ROOT,
+                                 'build-tools', ANDROID_SDK_BUILD_TOOLS_VERSION)
+ANDROID_NDK_ROOT = os.path.join(DIR_SOURCE_ROOT,
+                                'third_party/android_tools/ndk')
+
+EMULATOR_SDK_ROOT = os.environ.get('ANDROID_EMULATOR_SDK_ROOT',
+                                   os.path.join(DIR_SOURCE_ROOT,
+                                                'android_emulator_sdk'))
+
+BAD_DEVICES_JSON = os.path.join(DIR_SOURCE_ROOT,
+                                os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                                'bad_devices.json')
+
+UPSTREAM_FLAKINESS_SERVER = 'test-results.appspot.com'
+
+DEVICE_LOCAL_PROPERTIES_PATH = '/data/local.prop'
+
+PYTHON_UNIT_TEST_SUITES = {
+  'pylib_py_unittests': {
+    'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android'),
+    'test_modules': [
+      'pylib.cmd_helper_test',
+      'pylib.device.device_utils_test',
+      'pylib.results.json_results_test',
+      'pylib.utils.md5sum_test',
+    ]
+  },
+  'gyp_py_unittests': {
+    'path': os.path.join(DIR_SOURCE_ROOT, 'build', 'android', 'gyp'),
+    'test_modules': [
+      'java_cpp_enum_tests',
+    ]
+  },
+}
+
+LOCAL_MACHINE_TESTS = ['junit', 'python']
+VALID_ENVIRONMENTS = ['local', 'remote_device']
+VALID_TEST_TYPES = ['gtest', 'instrumentation', 'junit', 'linker', 'monkey',
+                    'perf', 'python', 'uiautomator', 'uirobot']
+VALID_DEVICE_TYPES = ['Android', 'iOS']
+
+
+def GetBuildType():
+  try:
+    return os.environ['BUILDTYPE']
+  except KeyError:
+    raise EnvironmentError(
+        'The BUILDTYPE environment variable has not been set')
+
+
+def SetBuildType(build_type):
+  os.environ['BUILDTYPE'] = build_type
+
+
+def SetBuildDirectory(build_directory):
+  os.environ['CHROMIUM_OUT_DIR'] = build_directory
+
+
+def SetOutputDirectory(output_directory):
+  os.environ['CHROMIUM_OUTPUT_DIR'] = output_directory
+
+
+def GetOutDirectory(build_type=None):
+  """Returns the out directory where the output binaries are built.
+
+  Args:
+    build_type: Build type, generally 'Debug' or 'Release'. Defaults to the
+      globally set build type environment variable BUILDTYPE.
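+
+  Example (illustrative): with CHROMIUM_OUTPUT_DIR and CHROMIUM_OUT_DIR unset,
+  GetOutDirectory('Release') returns <DIR_SOURCE_ROOT>/out/Release.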
+  """
+  if 'CHROMIUM_OUTPUT_DIR' in os.environ:
+    return os.path.abspath(os.path.join(
+        DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUTPUT_DIR')))
+
+  return os.path.abspath(os.path.join(
+      DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+      GetBuildType() if build_type is None else build_type))
+
+
+def _Memoize(func):
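+  """Memoizes |func|, caching its result on the function object.
+
+  Intended for zero-argument functions. Illustrative sketch (the decorated
+  function below is hypothetical):
+
+    @_Memoize
+    def _ExpensiveLookup():
+      return DoExpensiveWork()  # runs only on the first call
+  """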
+  def Wrapper():
+    try:
+      return func._result
+    except AttributeError:
+      func._result = func()
+      return func._result
+  return Wrapper
+
+
+def SetAdbPath(adb_path):
+  os.environ['ADB_PATH'] = adb_path
+
+
+def GetAdbPath():
+  # Check if a custom adb path has been set. If not, try to find adb
+  # on the system.
+  if os.environ.get('ADB_PATH'):
+    return os.environ.get('ADB_PATH')
+  else:
+    return _FindAdbPath()
+
+
+@_Memoize
+def _FindAdbPath():
+  if os.environ.get('ANDROID_SDK_ROOT'):
+    return 'adb'
+  # If envsetup.sh hasn't been sourced, check whether adb is on the PATH;
+  # otherwise fall back to the checked-in binary.
+  try:
+    with open(os.devnull, 'w') as devnull:
+      subprocess.call(['adb', 'version'], stdout=devnull, stderr=devnull)
+    return 'adb'
+  except OSError:
+    logging.debug('No adb found in $PATH, fallback to checked in binary.')
+    return os.path.join(ANDROID_SDK_ROOT, 'platform-tools', 'adb')
+
+# Exit codes
+ERROR_EXIT_CODE = 1
+INFRA_EXIT_CODE = 87
+WARNING_EXIT_CODE = 88
diff --git a/build/android/pylib/constants/keyevent.py b/build/android/pylib/constants/keyevent.py
new file mode 100644
index 0000000..06736b3
--- /dev/null
+++ b/build/android/pylib/constants/keyevent.py
@@ -0,0 +1,14 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Android KeyEvent constants.
+
+http://developer.android.com/reference/android/view/KeyEvent.html
+"""
+
+KEYCODE_BACK = 4
+KEYCODE_DPAD_RIGHT = 22
+KEYCODE_ENTER = 66
+KEYCODE_MENU = 82
+
diff --git a/build/android/pylib/content_settings.py b/build/android/pylib/content_settings.py
new file mode 100644
index 0000000..8594140
--- /dev/null
+++ b/build/android/pylib/content_settings.py
@@ -0,0 +1,82 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+
+
+class ContentSettings(dict):
+
+  """A dict interface to interact with device content settings.
+
+  Content settings are key/value pairs as exposed by adb shell content.
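+
+  Illustrative sketch (assumes |device| is a DeviceUtils-like object and that
+  a 'settings/system' content provider table exists on the device):
+
+    settings = ContentSettings('settings/system', device)
+    settings['some_setting'] = 1   # inserts or updates the row
+    del settings['some_setting']   # deletes the row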
+  """
+
+  def __init__(self, table, device):
+    super(ContentSettings, self).__init__()
+    self._table = table
+    self._device = device
+
+  @staticmethod
+  def _GetTypeBinding(value):
+    if isinstance(value, bool):
+      return 'b'
+    if isinstance(value, float):
+      return 'f'
+    if isinstance(value, int):
+      return 'i'
+    if isinstance(value, long):
+      return 'l'
+    if isinstance(value, str):
+      return 's'
+    raise ValueError('Unsupported type %s' % type(value))
+
+  def iteritems(self):
+    # Example row:
+    # 'Row: 0 _id=13, name=logging_id2, value=-1fccbaa546705b05'
+    for row in self._device.RunShellCommand(
+        'content query --uri content://%s' % self._table, as_root=True):
+      fields = row.split(', ')
+      key = None
+      value = None
+      for field in fields:
+        k, _, v = field.partition('=')
+        if k == 'name':
+          key = v
+        elif k == 'value':
+          value = v
+      if not key:
+        continue
+      if not value:
+        value = ''
+      yield key, value
+
+  def __getitem__(self, key):
+    return self._device.RunShellCommand(
+        'content query --uri content://%s --where "name=\'%s\'" '
+        '--projection value' % (self._table, key), as_root=True).strip()
+
+  def __setitem__(self, key, value):
+    if key in self:
+      self._device.RunShellCommand(
+          'content update --uri content://%s '
+          '--bind value:%s:%s --where "name=\'%s\'"' % (
+              self._table,
+              self._GetTypeBinding(value), value, key),
+          as_root=True)
+    else:
+      self._device.RunShellCommand(
+          'content insert --uri content://%s '
+          '--bind name:%s:%s --bind value:%s:%s' % (
+              self._table,
+              self._GetTypeBinding(key), key,
+              self._GetTypeBinding(value), value),
+          as_root=True)
+
+  def __delitem__(self, key):
+    self._device.RunShellCommand(
+        'content delete --uri content://%s '
+        '--bind name:%s:%s' % (
+            self._table,
+            self._GetTypeBinding(key), key),
+        as_root=True)
diff --git a/build/android/pylib/device/OWNERS b/build/android/pylib/device/OWNERS
new file mode 100644
index 0000000..c35d7ac
--- /dev/null
+++ b/build/android/pylib/device/OWNERS
@@ -0,0 +1,2 @@
+jbudorick@chromium.org
+perezju@chromium.org
diff --git a/build/android/pylib/device/__init__.py b/build/android/pylib/device/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/device/__init__.py
diff --git a/build/android/pylib/device/adb_wrapper.py b/build/android/pylib/device/adb_wrapper.py
new file mode 100644
index 0000000..e897326
--- /dev/null
+++ b/build/android/pylib/device/adb_wrapper.py
@@ -0,0 +1,608 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps Android's adb tool.
+
+This is a thin wrapper around the adb interface. Any additional complexity
+should be delegated to a higher level (e.g. DeviceUtils).
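+
+Illustrative usage (the serial number below is hypothetical):
+
+  adb = AdbWrapper('0123456789abcdef')
+  adb.WaitForDevice()
+  print adb.Shell('getprop ro.product.model')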
+"""
+
+import collections
+import errno
+import logging
+import os
+import re
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.utils import timeout_retry
+
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 2
+
+_EMULATOR_RE = re.compile(r'^emulator-[0-9]+$')
+
+_READY_STATE = 'device'
+
+
+def _VerifyLocalFileExists(path):
+  """Verifies a local file exists.
+
+  Args:
+    path: Path to the local file.
+
+  Raises:
+    IOError: If the file doesn't exist.
+  """
+  if not os.path.exists(path):
+    raise IOError(errno.ENOENT, os.strerror(errno.ENOENT), path)
+
+
+DeviceStat = collections.namedtuple('DeviceStat',
+                                    ['st_mode', 'st_size', 'st_time'])
+
+
+class AdbWrapper(object):
+  """A wrapper around a local Android Debug Bridge executable."""
+
+  def __init__(self, device_serial):
+    """Initializes the AdbWrapper.
+
+    Args:
+      device_serial: The device serial number as a string.
+    """
+    if not device_serial:
+      raise ValueError('A device serial must be specified')
+    self._device_serial = str(device_serial)
+
+  # pylint: disable=unused-argument
+  @classmethod
+  def _BuildAdbCmd(cls, args, device_serial, cpu_affinity=None):
+    if cpu_affinity is not None:
+      cmd = ['taskset', '-c', str(cpu_affinity)]
+    else:
+      cmd = []
+    cmd.append(constants.GetAdbPath())
+    if device_serial is not None:
+      cmd.extend(['-s', device_serial])
+    cmd.extend(args)
+    return cmd
+  # pylint: enable=unused-argument
+
+  # pylint: disable=unused-argument
+  @classmethod
+  @decorators.WithTimeoutAndRetries
+  def _RunAdbCmd(cls, args, timeout=None, retries=None, device_serial=None,
+                 check_error=True, cpu_affinity=None):
+    status, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+        cls._BuildAdbCmd(args, device_serial, cpu_affinity=cpu_affinity),
+        timeout_retry.CurrentTimeoutThread().GetRemainingTime())
+    if status != 0:
+      raise device_errors.AdbCommandFailedError(
+          args, output, status, device_serial)
+    # This catches some errors, including when the device drops offline;
+    # unfortunately adb is very inconsistent with error reporting so many
+    # command failures present differently.
+    if check_error and output.startswith('error:'):
+      raise device_errors.AdbCommandFailedError(args, output)
+    return output
+  # pylint: enable=unused-argument
+
+  def _RunDeviceAdbCmd(self, args, timeout, retries, check_error=True):
+    """Runs an adb command on the device associated with this object.
+
+    Args:
+      args: A list of arguments to adb.
+      timeout: Timeout in seconds.
+      retries: Number of retries.
+      check_error: Check that the command doesn't return an error message. This
+        does NOT check the exit status of shell commands.
+
+    Returns:
+      The output of the command.
+    """
+    return self._RunAdbCmd(args, timeout=timeout, retries=retries,
+                           device_serial=self._device_serial,
+                           check_error=check_error)
+
+  def _IterRunDeviceAdbCmd(self, args, timeout):
+    """Runs an adb command and returns an iterator over its output lines.
+
+    Args:
+      args: A list of arguments to adb.
+      timeout: Timeout in seconds.
+
+    Yields:
+      The output of the command line by line.
+    """
+    return cmd_helper.IterCmdOutputLines(
+      self._BuildAdbCmd(args, self._device_serial), timeout=timeout)
+
+  def __eq__(self, other):
+    """Consider instances equal if they refer to the same device.
+
+    Args:
+      other: The instance to compare equality with.
+
+    Returns:
+      True if the instances are considered equal, false otherwise.
+    """
+    return self._device_serial == str(other)
+
+  def __str__(self):
+    """The string representation of an instance.
+
+    Returns:
+      The device serial number as a string.
+    """
+    return self._device_serial
+
+  def __repr__(self):
+    return '%s(\'%s\')' % (self.__class__.__name__, self)
+
+  # pylint: disable=unused-argument
+  @classmethod
+  def IsServerOnline(cls):
+    status, output = cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb'])
+    output = [int(x) for x in output.split()]
+    logging.info('PIDs for adb found: %r', output)
+    return status == 0
+  # pylint: enable=unused-argument
+
+  @classmethod
+  def KillServer(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    cls._RunAdbCmd(['kill-server'], timeout=timeout, retries=retries)
+
+  @classmethod
+  def StartServer(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    # CPU affinity is used to reduce adb instability http://crbug.com/268450
+    cls._RunAdbCmd(['start-server'], timeout=timeout, retries=retries,
+                   cpu_affinity=0)
+
+  @classmethod
+  def GetDevices(cls, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """DEPRECATED. Refer to Devices(...) below."""
+    # TODO(jbudorick): Remove this function once no more clients are using it.
+    return cls.Devices(timeout=timeout, retries=retries)
+
+  @classmethod
+  def Devices(cls, is_ready=True, timeout=_DEFAULT_TIMEOUT,
+              retries=_DEFAULT_RETRIES):
+    """Get the list of active attached devices.
+
+    Args:
+      is_ready: Whether the devices should be limited to only those that are
+        ready for use.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of AdbWrapper instances, one per matching attached device.
+    """
+    output = cls._RunAdbCmd(['devices'], timeout=timeout, retries=retries)
+    lines = (line.split() for line in output.splitlines())
+    return [AdbWrapper(line[0]) for line in lines
+            if len(line) == 2 and (not is_ready or line[1] == _READY_STATE)]
+
+  def GetDeviceSerial(self):
+    """Gets the device serial number associated with this object.
+
+    Returns:
+      Device serial number as a string.
+    """
+    return self._device_serial
+
+  def Push(self, local, remote, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Pushes a file from the host to the device.
+
+    Args:
+      local: Path on the host filesystem.
+      remote: Path on the device filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(local)
+    self._RunDeviceAdbCmd(['push', local, remote], timeout, retries)
+
+  def Pull(self, remote, local, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Pulls a file from the device to the host.
+
+    Args:
+      remote: Path on the device filesystem.
+      local: Path on the host filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['pull', remote, local]
+    self._RunDeviceAdbCmd(cmd, timeout, retries)
+    try:
+      _VerifyLocalFileExists(local)
+    except IOError:
+      raise device_errors.AdbCommandFailedError(
+          cmd, 'File not found on host: %s' % local, device_serial=str(self))
+
+  def Shell(self, command, expect_status=0, timeout=_DEFAULT_TIMEOUT,
+            retries=_DEFAULT_RETRIES):
+    """Runs a shell command on the device.
+
+    Args:
+      command: A string with the shell command to run.
+      expect_status: (optional) Check that the command's exit status matches
+        this value. Default is 0. If set to None, the check is skipped.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The output of the shell command as a string.
+
+    Raises:
+      device_errors.AdbCommandFailedError: If the exit status doesn't match
+        |expect_status|.
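+
+    Example (illustrative):
+      adb.Shell('echo hello')               # returns the command's output
+      adb.Shell('exit 1', expect_status=1)  # passes; exit status matches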
+    """
+    if expect_status is None:
+      args = ['shell', command]
+    else:
+      args = ['shell', '%s; echo %%$?;' % command.rstrip()]
+    output = self._RunDeviceAdbCmd(args, timeout, retries, check_error=False)
+    if expect_status is not None:
+      output_end = output.rfind('%')
+      if output_end < 0:
+        # causes the status string to become empty and raise a ValueError
+        output_end = len(output)
+
+      try:
+        status = int(output[output_end+1:])
+      except ValueError:
+        logging.warning('exit status of shell command %r missing.', command)
+        raise device_errors.AdbShellCommandFailedError(
+            command, output, status=None, device_serial=self._device_serial)
+      output = output[:output_end]
+      if status != expect_status:
+        raise device_errors.AdbShellCommandFailedError(
+            command, output, status=status, device_serial=self._device_serial)
+    return output
+
+  def IterShell(self, command, timeout):
+    """Runs a shell command and returns an iterator over its output lines.
+
+    Args:
+      command: A string with the shell command to run.
+      timeout: Timeout in seconds.
+
+    Yields:
+      The output of the command line by line.
+    """
+    args = ['shell', command]
+    return cmd_helper.IterCmdOutputLines(
+      self._BuildAdbCmd(args, self._device_serial), timeout=timeout)
+
+  def Ls(self, path, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """List the contents of a directory on the device.
+
+    Args:
+      path: Path on the device filesystem.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of pairs (filename, stat) for each file found in the directory,
+      where the stat object has the properties: st_mode, st_size, and st_time.
+
+    Raises:
+      AdbCommandFailedError if |path| does not specify a valid and accessible
+          directory on the device.
+    """
+    def ParseLine(line):
+      cols = line.split(None, 3)
+      filename = cols.pop()
+      stat = DeviceStat(*[int(num, base=16) for num in cols])
+      return (filename, stat)
+
+    cmd = ['ls', path]
+    lines = self._RunDeviceAdbCmd(
+        cmd, timeout=timeout, retries=retries).splitlines()
+    if lines:
+      return [ParseLine(line) for line in lines]
+    else:
+      raise device_errors.AdbCommandFailedError(
+          cmd, 'path does not specify an accessible directory in the device',
+          device_serial=self._device_serial)
+
+  def Logcat(self, clear=False, dump=False, filter_specs=None,
+             logcat_format=None, ring_buffer=None, timeout=None,
+             retries=_DEFAULT_RETRIES):
+    """Get an iterable over the logcat output.
+
+    Args:
+      clear: If true, clear the logcat.
+      dump: If true, dump the current logcat contents.
+      filter_specs: If set, a list of specs to filter the logcat.
+      logcat_format: If set, the format in which the logcat should be output.
+        Options include "brief", "process", "tag", "thread", "raw", "time",
+        "threadtime", and "long"
+      ring_buffer: If set, a list of alternate ring buffers to request.
+        Options include "main", "system", "radio", "events", "crash" or "all".
+        The default is equivalent to ["main", "system", "crash"].
+      timeout: (optional) If set, timeout per try in seconds. If clear or dump
+        is set, defaults to _DEFAULT_TIMEOUT.
+      retries: (optional) If clear or dump is set, the number of retries to
+        attempt. Otherwise, does nothing.
+
+    Yields:
+      logcat output line by line.
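+
+    Example (illustrative; the filter spec and ProcessLine are hypothetical):
+      for line in adb.Logcat(filter_specs=['chromium:V', '*:S']):
+        ProcessLine(line)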
+    """
+    cmd = ['logcat']
+    use_iter = True
+    if clear:
+      cmd.append('-c')
+      use_iter = False
+    if dump:
+      cmd.append('-d')
+      use_iter = False
+    if logcat_format:
+      cmd.extend(['-v', logcat_format])
+    if ring_buffer:
+      for buffer_name in ring_buffer:
+        cmd.extend(['-b', buffer_name])
+    if filter_specs:
+      cmd.extend(filter_specs)
+
+    if use_iter:
+      return self._IterRunDeviceAdbCmd(cmd, timeout)
+    else:
+      timeout = timeout if timeout is not None else _DEFAULT_TIMEOUT
+      return self._RunDeviceAdbCmd(cmd, timeout, retries).splitlines()
+
+  def Forward(self, local, remote, timeout=_DEFAULT_TIMEOUT,
+              retries=_DEFAULT_RETRIES):
+    """Forward socket connections from the local socket to the remote socket.
+
+    Sockets are specified by one of:
+      tcp:<port>
+      localabstract:<unix domain socket name>
+      localreserved:<unix domain socket name>
+      localfilesystem:<unix domain socket name>
+      dev:<character device name>
+      jdwp:<process pid> (remote only)
+
+    Args:
+      local: The host socket.
+      remote: The device socket.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._RunDeviceAdbCmd(['forward', str(local), str(remote)], timeout,
+                          retries)
+
+  def JDWP(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """List of PIDs of processes hosting a JDWP transport.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      A list of PIDs as strings.
+    """
+    return [a.strip() for a in
+            self._RunDeviceAdbCmd(['jdwp'], timeout, retries).split('\n')]
+
+  def Install(self, apk_path, forward_lock=False, reinstall=False,
+              sd_card=False, timeout=60*2, retries=_DEFAULT_RETRIES):
+    """Install an apk on the device.
+
+    Args:
+      apk_path: Host path to the APK file.
+      forward_lock: (optional) If set forward-locks the app.
+      reinstall: (optional) If set reinstalls the app, keeping its data.
+      sd_card: (optional) If set installs on the SD card.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(apk_path)
+    cmd = ['install']
+    if forward_lock:
+      cmd.append('-l')
+    if reinstall:
+      cmd.append('-r')
+    if sd_card:
+      cmd.append('-s')
+    cmd.append(apk_path)
+    output = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    if 'Success' not in output:
+      raise device_errors.AdbCommandFailedError(
+          cmd, output, device_serial=self._device_serial)
+
+  def InstallMultiple(self, apk_paths, forward_lock=False, reinstall=False,
+                      sd_card=False, allow_downgrade=False, partial=False,
+                      timeout=60*2, retries=_DEFAULT_RETRIES):
+    """Install an apk with splits on the device.
+
+    Args:
+      apk_paths: List of host paths of the APK and its splits.
+      forward_lock: (optional) If set forward-locks the app.
+      reinstall: (optional) If set reinstalls the app, keeping its data.
+      sd_card: (optional) If set installs on the SD card.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+      allow_downgrade: (optional) Allow versionCode downgrade.
+      partial: (optional) Package ID if apk_paths doesn't include all .apks.
+    """
+    for path in apk_paths:
+      _VerifyLocalFileExists(path)
+    cmd = ['install-multiple']
+    if forward_lock:
+      cmd.append('-l')
+    if reinstall:
+      cmd.append('-r')
+    if sd_card:
+      cmd.append('-s')
+    if allow_downgrade:
+      cmd.append('-d')
+    if partial:
+      cmd.extend(('-p', partial))
+    cmd.extend(apk_paths)
+    output = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    if 'Success' not in output:
+      raise device_errors.AdbCommandFailedError(
+          cmd, output, device_serial=self._device_serial)
+
+  def Uninstall(self, package, keep_data=False, timeout=_DEFAULT_TIMEOUT,
+                retries=_DEFAULT_RETRIES):
+    """Remove the app |package| from the device.
+
+    Args:
+      package: The package to uninstall.
+      keep_data: (optional) If set keep the data and cache directories.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['uninstall']
+    if keep_data:
+      cmd.append('-k')
+    cmd.append(package)
+    output = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    if 'Failure' in output:
+      raise device_errors.AdbCommandFailedError(
+          cmd, output, device_serial=self._device_serial)
+
+  def Backup(self, path, packages=None, apk=False, shared=False,
+             nosystem=True, include_all=False, timeout=_DEFAULT_TIMEOUT,
+             retries=_DEFAULT_RETRIES):
+    """Write an archive of the device's data to |path|.
+
+    Args:
+      path: Local path to store the backup file.
+      packages: List of packages to be backed up.
+      apk: (optional) If set include the .apk files in the archive.
+      shared: (optional) If set back up the device's SD card.
+      nosystem: (optional) If set exclude system applications.
+      include_all: (optional) If set back up all installed applications and
+        |packages| is optional.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    cmd = ['backup', '-f', path]
+    if apk:
+      cmd.append('-apk')
+    if shared:
+      cmd.append('-shared')
+    if nosystem:
+      cmd.append('-nosystem')
+    if include_all:
+      cmd.append('-all')
+    if packages:
+      cmd.extend(packages)
+    assert bool(packages) ^ bool(include_all), (
+        'Provide \'packages\' or set \'include_all\' but not both.')
+    ret = self._RunDeviceAdbCmd(cmd, timeout, retries)
+    _VerifyLocalFileExists(path)
+    return ret
+
+  def Restore(self, path, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Restore device contents from the backup archive.
+
+    Args:
+      path: Host path to the backup archive.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    _VerifyLocalFileExists(path)
+    self._RunDeviceAdbCmd(['restore'] + [path], timeout, retries)
+
+  def WaitForDevice(self, timeout=60*5, retries=_DEFAULT_RETRIES):
+    """Block until the device is online.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    self._RunDeviceAdbCmd(['wait-for-device'], timeout, retries)
+
+  def GetState(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Get device state.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      One of 'offline', 'bootloader', or 'device'.
+    """
+    return self._RunDeviceAdbCmd(['get-state'], timeout, retries).strip()
+
+  def GetDevPath(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Gets the device path.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The device path (e.g. usb:3-4)
+    """
+    return self._RunDeviceAdbCmd(['get-devpath'], timeout, retries)
+
+  def Remount(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Remounts the /system partition on the device read-write."""
+    self._RunDeviceAdbCmd(['remount'], timeout, retries)
+
+  def Reboot(self, to_bootloader=False, timeout=60*5,
+             retries=_DEFAULT_RETRIES):
+    """Reboots the device.
+
+    Args:
+      to_bootloader: (optional) If set reboots to the bootloader.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    if to_bootloader:
+      cmd = ['reboot-bootloader']
+    else:
+      cmd = ['reboot']
+    self._RunDeviceAdbCmd(cmd, timeout, retries)
+
+  def Root(self, timeout=_DEFAULT_TIMEOUT, retries=_DEFAULT_RETRIES):
+    """Restarts the adbd daemon with root permissions, if possible.
+
+    Args:
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+    """
+    output = self._RunDeviceAdbCmd(['root'], timeout, retries)
+    if 'cannot' in output:
+      raise device_errors.AdbCommandFailedError(
+          ['root'], output, device_serial=self._device_serial)
+
+  def Emu(self, cmd, timeout=_DEFAULT_TIMEOUT,
+          retries=_DEFAULT_RETRIES):
+    """Runs an emulator console command.
+
+    See http://developer.android.com/tools/devices/emulator.html#console
+
+    Args:
+      cmd: The command to run on the emulator console.
+      timeout: (optional) Timeout per try in seconds.
+      retries: (optional) Number of retries to attempt.
+
+    Returns:
+      The output of the emulator console command.
+    """
+    if isinstance(cmd, basestring):
+      cmd = [cmd]
+    return self._RunDeviceAdbCmd(['emu'] + cmd, timeout, retries)
+
+  @property
+  def is_emulator(self):
+    return _EMULATOR_RE.match(self._device_serial)
+
+  @property
+  def is_ready(self):
+    try:
+      return self.GetState() == _READY_STATE
+    except device_errors.CommandFailedError:
+      return False
diff --git a/build/android/pylib/device/adb_wrapper_test.py b/build/android/pylib/device/adb_wrapper_test.py
new file mode 100644
index 0000000..5fc9eb6
--- /dev/null
+++ b/build/android/pylib/device/adb_wrapper_test.py
@@ -0,0 +1,96 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Tests for the AdbWrapper class."""
+
+import os
+import tempfile
+import time
+import unittest
+
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+
+
+class TestAdbWrapper(unittest.TestCase):
+
+  def setUp(self):
+    devices = adb_wrapper.AdbWrapper.Devices()
+    assert devices, 'A device must be attached'
+    self._adb = devices[0]
+    self._adb.WaitForDevice()
+
+  @staticmethod
+  def _MakeTempFile(contents):
+    """Make a temporary file with the given contents.
+
+    Args:
+      contents: string to write to the temporary file.
+
+    Returns:
+      The absolute path to the file.
+    """
+    fi, path = tempfile.mkstemp()
+    with os.fdopen(fi, 'wb') as f:
+      f.write(contents)
+    return path
+
+  def testShell(self):
+    output = self._adb.Shell('echo test', expect_status=0)
+    self.assertEqual(output.strip(), 'test')
+    output = self._adb.Shell('echo test')
+    self.assertEqual(output.strip(), 'test')
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Shell('echo test', expect_status=1)
+
+  def testPushLsPull(self):
+    path = self._MakeTempFile('foo')
+    device_path = '/data/local/tmp/testfile.txt'
+    local_tmpdir = os.path.dirname(path)
+    self._adb.Push(path, device_path)
+    files = dict(self._adb.Ls('/data/local/tmp'))
+    self.assertTrue('testfile.txt' in files)
+    self.assertEquals(3, files['testfile.txt'].st_size)
+    self.assertEqual(self._adb.Shell('cat %s' % device_path), 'foo')
+    self._adb.Pull(device_path, local_tmpdir)
+    with open(os.path.join(local_tmpdir, 'testfile.txt'), 'r') as f:
+      self.assertEqual(f.read(), 'foo')
+
+  def testInstall(self):
+    path = self._MakeTempFile('foo')
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Install(path)
+
+  def testForward(self):
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Forward(0, 0)
+
+  def testUninstall(self):
+    with self.assertRaises(device_errors.AdbCommandFailedError):
+      self._adb.Uninstall('some.nonexistant.package')
+
+  def testRebootWaitForDevice(self):
+    self._adb.Reboot()
+    print 'waiting for device to reboot...'
+    while self._adb.GetState() == 'device':
+      time.sleep(1)
+    self._adb.WaitForDevice()
+    self.assertEqual(self._adb.GetState(), 'device')
+    print 'waiting for package manager...'
+    while 'package:' not in self._adb.Shell('pm path android'):
+      time.sleep(1)
+
+  def testRootRemount(self):
+    self._adb.Root()
+    while True:
+      try:
+        self._adb.Shell('start')
+        break
+      except device_errors.AdbCommandFailedError:
+        time.sleep(1)
+    self._adb.Remount()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/device/battery_utils.py b/build/android/pylib/device/battery_utils.py
new file mode 100644
index 0000000..eab558e
--- /dev/null
+++ b/build/android/pylib/device/battery_utils.py
@@ -0,0 +1,593 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a variety of device interactions with power.
+"""
+# pylint: disable=unused-argument
+
+import collections
+import contextlib
+import csv
+import logging
+
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import timeout_retry
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+_DEVICE_PROFILES = [
+  {
+    'name': 'Nexus 4',
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': (
+        'echo 0 > /sys/module/pm8921_charger/parameters/disabled && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': None,
+    'voltage': None,
+    'current': None,
+  },
+  {
+    'name': 'Nexus 5',
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online &&'
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': None,
+    'voltage': None,
+    'current': None,
+  },
+  {
+    'name': 'Nexus 6',
+    'witness_file': None,
+    'enable_command': (
+        'echo 1 > /sys/class/power_supply/battery/charging_enabled && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo 0 > /sys/class/power_supply/battery/charging_enabled && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': (
+        '/sys/class/power_supply/max170xx_battery/charge_counter_ext'),
+    'voltage': '/sys/class/power_supply/max170xx_battery/voltage_now',
+    'current': '/sys/class/power_supply/max170xx_battery/current_now',
+  },
+  {
+    'name': 'Nexus 9',
+    'witness_file': None,
+    'enable_command': (
+        'echo Disconnected > '
+        '/sys/bus/i2c/drivers/bq2419x/0-006b/input_cable_state && '
+        'dumpsys battery reset'),
+    'disable_command': (
+        'echo Connected > '
+        '/sys/bus/i2c/drivers/bq2419x/0-006b/input_cable_state && '
+        'dumpsys battery set ac 0 && dumpsys battery set usb 0'),
+    'charge_counter': '/sys/class/power_supply/battery/charge_counter_ext',
+    'voltage': '/sys/class/power_supply/battery/voltage_now',
+    'current': '/sys/class/power_supply/battery/current_now',
+  },
+  {
+    'name': 'Nexus 10',
+    'witness_file': None,
+    'enable_command': None,
+    'disable_command': None,
+    'charge_counter': None,
+    'voltage': '/sys/class/power_supply/ds2784-fuelgauge/voltage_now',
+    'current': '/sys/class/power_supply/ds2784-fuelgauge/current_now',
+
+  },
+]
+
+# The list of useful dumpsys columns.
+# Index of the column containing the format version.
+_DUMP_VERSION_INDEX = 0
+# Index of the column containing the type of the row.
+_ROW_TYPE_INDEX = 3
+# Index of the column containing the uid.
+_PACKAGE_UID_INDEX = 4
+# Index of the column containing the application package.
+_PACKAGE_NAME_INDEX = 5
+# The column containing the uid of the power data.
+_PWI_UID_INDEX = 1
+# The column containing the type of consumption. Only consumption since the
+# last charge is of interest here.
+_PWI_AGGREGATION_INDEX = 2
+# The column containing the amount of power used, in mAh.
+_PWI_POWER_CONSUMPTION_INDEX = 5
+
+
+class BatteryUtils(object):
+
+  def __init__(self, device, default_timeout=_DEFAULT_TIMEOUT,
+               default_retries=_DEFAULT_RETRIES):
+    """BatteryUtils constructor.
+
+      Args:
+        device: A DeviceUtils instance.
+        default_timeout: An integer containing the default number of seconds to
+                         wait for an operation to complete if no explicit value
+                         is provided.
+        default_retries: An integer containing the default number or times an
+                         operation should be retried on failure if no explicit
+                         value is provided.
+
+      Raises:
+        TypeError: If it is not passed a DeviceUtils instance.
+    """
+    if not isinstance(device, device_utils.DeviceUtils):
+      raise TypeError('Must be initialized with DeviceUtils object.')
+    self._device = device
+    self._cache = device.GetClientCache(self.__class__.__name__)
+    self._default_timeout = default_timeout
+    self._default_retries = default_retries
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SupportsFuelGauge(self, timeout=None, retries=None):
+    """Detect if fuel gauge chip is present.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if known fuel gauge files are present.
+      False otherwise.
+    """
+    self._DiscoverDeviceProfile()
+    return (self._cache['profile']['enable_command'] is not None
+        and self._cache['profile']['charge_counter'] is not None)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetFuelGaugeChargeCounter(self, timeout=None, retries=None):
+    """Get value of charge_counter on fuel gauge chip.
+
+    Device must have charging disabled for this, not just battery updates
+    disabled. The only device that this currently works with is the Nexus 5.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      Value of charge_counter for the fuel gauge chip, in units of nAh.
+
+    Raises:
+      device_errors.CommandFailedError: If fuel gauge chip not found.
+    """
+    if self.SupportsFuelGauge():
+      return int(self._device.ReadFile(
+          self._cache['profile']['charge_counter']))
+    raise device_errors.CommandFailedError(
+        'Unable to find fuel gauge.')
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetNetworkData(self, package, timeout=None, retries=None):
+    """Get network data for specific package.
+
+    Args:
+      package: package name you want network data for.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      Tuple of (sent_data, received_data), or None if no network data was
+      found.
+    """
+    # If device_utils clears cache, cache['uids'] doesn't exist
+    if 'uids' not in self._cache:
+      self._cache['uids'] = {}
+    if package not in self._cache['uids']:
+      self.GetPowerData()
+      if package not in self._cache['uids']:
+        logging.warning('No UID found for %s. Can\'t get network data.',
+                        package)
+        return None
+
+    network_data_path = '/proc/uid_stat/%s/' % self._cache['uids'][package]
+    try:
+      send_data = int(self._device.ReadFile(network_data_path + 'tcp_snd'))
+    # If ReadFile throws exception, it means no network data usage file for
+    # package has been recorded. Return 0 sent and 0 received.
+    except device_errors.AdbShellCommandFailedError:
+      logging.warning('No sent data found for package %s', package)
+      send_data = 0
+    try:
+      recv_data = int(self._device.ReadFile(network_data_path + 'tcp_rcv'))
+    except device_errors.AdbShellCommandFailedError:
+      logging.warning('No received data found for package %s', package)
+      recv_data = 0
+    return (send_data, recv_data)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPowerData(self, timeout=None, retries=None):
+    """Get power data for device.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      Dict of power data, keyed on package names.
+      {
+        package_name: {
+          'uid': uid,
+          'data': [1,2,3]
+        },
+      }
+    """
+    if 'uids' not in self._cache:
+      self._cache['uids'] = {}
+    dumpsys_output = self._device.RunShellCommand(
+        ['dumpsys', 'batterystats', '-c'], check_return=True)
+    csvreader = csv.reader(dumpsys_output)
+    pwi_entries = collections.defaultdict(list)
+    for entry in csvreader:
+      if entry[_DUMP_VERSION_INDEX] not in ['8', '9']:
+        # Wrong dumpsys version.
+        raise device_errors.DeviceVersionError(
+            'Dumpsys version must be 8 or 9. %s found.'
+            % entry[_DUMP_VERSION_INDEX])
+      if _ROW_TYPE_INDEX < len(entry) and entry[_ROW_TYPE_INDEX] == 'uid':
+        current_package = entry[_PACKAGE_NAME_INDEX]
+        if (self._cache['uids'].get(current_package)
+            and self._cache['uids'].get(current_package)
+            != entry[_PACKAGE_UID_INDEX]):
+          raise device_errors.CommandFailedError(
+              'Package %s found multiple times with different UIDs %s and %s'
+              % (current_package, self._cache['uids'][current_package],
+                 entry[_PACKAGE_UID_INDEX]))
+        self._cache['uids'][current_package] = entry[_PACKAGE_UID_INDEX]
+      elif (_PWI_POWER_CONSUMPTION_INDEX < len(entry)
+          and entry[_ROW_TYPE_INDEX] == 'pwi'
+          and entry[_PWI_AGGREGATION_INDEX] == 'l'):
+        pwi_entries[entry[_PWI_UID_INDEX]].append(
+            float(entry[_PWI_POWER_CONSUMPTION_INDEX]))
+
+    return {p: {'uid': uid, 'data': pwi_entries[uid]}
+            for p, uid in self._cache['uids'].iteritems()}
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPackagePowerData(self, package, timeout=None, retries=None):
+    """Get power data for particular package.
+
+    Args:
+      package: Package to get power data on.
+
+    Returns:
+      Dict of UID and power data.
+      {
+        'uid': uid,
+        'data': [1,2,3]
+      }
+      None if the package is not found in the power data.
+    """
+    return self.GetPowerData().get(package)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetBatteryInfo(self, timeout=None, retries=None):
+    """Gets battery info for the device.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    Returns:
+      A dict containing various battery information as reported by dumpsys
+      battery.
+    """
+    result = {}
+    # Skip the first line, which is just a header.
+    for line in self._device.RunShellCommand(
+        ['dumpsys', 'battery'], check_return=True)[1:]:
+      # If usb charging has been disabled, an extra line of header exists.
+      if 'UPDATES STOPPED' in line:
+        logging.warning('Dumpsys battery not receiving updates. '
+                        'Run dumpsys battery reset if this is in error.')
+      elif ':' not in line:
+        logging.warning('Unknown line found in dumpsys battery: "%s"', line)
+      else:
+        k, v = line.split(':', 1)
+        result[k.strip()] = v.strip()
+    return result
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetCharging(self, timeout=None, retries=None):
+    """Gets the charging state of the device.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    Returns:
+      True if the device is charging, false otherwise.
+    """
+    battery_info = self.GetBatteryInfo()
+    for k in ('AC powered', 'USB powered', 'Wireless powered'):
+      if (k in battery_info and
+          battery_info[k].lower() in ('true', '1', 'yes')):
+        return True
+    return False
+
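+  # Illustrative sketch combining GetBatteryInfo() and GetCharging();
+  # `battery` is assumed to be a BatteryUtils instance:
+  #
+  #   info = battery.GetBatteryInfo()
+  #   logging.info('level=%s charging=%s',
+  #                info.get('level'), battery.GetCharging())
+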
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetCharging(self, enabled, timeout=None, retries=None):
+    """Enables or disables charging on the device.
+
+    Args:
+      enabled: A boolean indicating whether charging should be enabled or
+        disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.CommandFailedError: If the commands for controlling
+        charging on the device cannot be determined.
+    """
+    self._DiscoverDeviceProfile()
+    if not self._cache['profile']['enable_command']:
+      raise device_errors.CommandFailedError(
+          'Unable to find charging commands.')
+
+    if enabled:
+      command = self._cache['profile']['enable_command']
+    else:
+      command = self._cache['profile']['disable_command']
+
+    def set_and_verify_charging():
+      self._device.RunShellCommand(command, check_return=True)
+      return self.GetCharging() == enabled
+
+    timeout_retry.WaitFor(set_and_verify_charging, wait_period=1)
+
+  # TODO(rnephew): Make private when all use cases can use the context manager.
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def DisableBatteryUpdates(self, timeout=None, retries=None):
+    """Resets battery data and makes device appear like it is not
+    charging so that it will collect power data since last charge.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.CommandFailedError: When resetting batterystats fails to
+        reset power values.
+      device_errors.DeviceVersionError: If device is not L or higher.
+    """
+    def battery_updates_disabled():
+      return self.GetCharging() is False
+
+    self._ClearPowerData()
+    self._device.RunShellCommand(['dumpsys', 'battery', 'set', 'ac', '0'],
+                                 check_return=True)
+    self._device.RunShellCommand(['dumpsys', 'battery', 'set', 'usb', '0'],
+                                 check_return=True)
+    timeout_retry.WaitFor(battery_updates_disabled, wait_period=1)
+
+  # TODO(rnephew): Make private when all use cases can use the context manager.
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def EnableBatteryUpdates(self, timeout=None, retries=None):
+    """Restarts device charging so that dumpsys no longer collects power data.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.DeviceVersionError: If device is not L or higher.
+    """
+    def battery_updates_enabled():
+      return (self.GetCharging()
+              or not bool('UPDATES STOPPED' in self._device.RunShellCommand(
+                  ['dumpsys', 'battery'], check_return=True)))
+
+    self._device.RunShellCommand(['dumpsys', 'battery', 'reset'],
+                                 check_return=True)
+    timeout_retry.WaitFor(battery_updates_enabled, wait_period=1)
+
+  @contextlib.contextmanager
+  def BatteryMeasurement(self, timeout=None, retries=None):
+    """Context manager that enables battery data collection. It makes
+    the device appear to stop charging so that dumpsys will start collecting
+    power data since last charge. Once the with block is exited, charging is
+    resumed and power data since last charge is no longer collected.
+
+    Only for devices L and higher.
+
+    Example usage:
+      with BatteryMeasurement():
+        browser_actions()
+        get_power_data() # report usage within this block
+      after_measurements() # Anything that runs after power
+                           # measurements are collected
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      device_errors.DeviceVersionError: If device is not L or higher.
+    """
+    if (self._device.build_version_sdk <
+        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
+      raise device_errors.DeviceVersionError('Device must be L or higher.')
+    try:
+      self.DisableBatteryUpdates(timeout=timeout, retries=retries)
+      yield
+    finally:
+      self.EnableBatteryUpdates(timeout=timeout, retries=retries)
+
+  def ChargeDeviceToLevel(self, level, wait_period=60):
+    """Enables charging and waits for device to be charged to given level.
+
+    Args:
+      level: level of charge to wait for.
+      wait_period: time in seconds to wait between checking.
+    """
+    self.SetCharging(True)
+
+    def device_charged():
+      battery_level = self.GetBatteryInfo().get('level')
+      if battery_level is None:
+        logging.warning('Unable to find current battery level.')
+        battery_level = 100
+      else:
+        logging.info('current battery level: %s', battery_level)
+        battery_level = int(battery_level)
+      return battery_level >= level
+
+    timeout_retry.WaitFor(device_charged, wait_period=wait_period)
+
+  def LetBatteryCoolToTemperature(self, target_temp, wait_period=60):
+    """Lets device sit to give battery time to cool down
+    Args:
+      temp: maximum temperature to allow in tenths of degrees c.
+      wait_period: time in seconds to wait between checking.
+    """
+    def cool_device():
+      temp = self.GetBatteryInfo().get('temperature')
+      if temp is None:
+        logging.warning('Unable to find current battery temperature.')
+        temp = 0
+      else:
+        logging.info('Current battery temperature: %s', temp)
+      return int(temp) <= target_temp
+    self.EnableBatteryUpdates()
+    logging.info('Waiting for the device to cool down to %s (0.1 C)',
+                 target_temp)
+    timeout_retry.WaitFor(cool_device, wait_period=wait_period)
+
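+  # Illustrative pre-measurement setup; the level and temperature thresholds
+  # below are arbitrary example values:
+  #
+  #   battery.ChargeDeviceToLevel(95)
+  #   battery.LetBatteryCoolToTemperature(350)  # 35.0 C (tenths of a degree)
+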
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def TieredSetCharging(self, enabled, timeout=None, retries=None):
+    """Enables or disables charging on the device.
+
+    Args:
+      enabled: A boolean indicating whether charging should be enabled or
+        disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+    """
+    if self.GetCharging() == enabled:
+      logging.warning('Device charging already in expected state: %s', enabled)
+      return
+
+    if enabled:
+      try:
+        self.SetCharging(enabled)
+      except device_errors.CommandFailedError:
+        logging.info('Unable to enable charging via hardware.'
+                     ' Falling back to software enabling.')
+        self.EnableBatteryUpdates()
+    else:
+      try:
+        self._ClearPowerData()
+        self.SetCharging(enabled)
+      except device_errors.CommandFailedError:
+        logging.info('Unable to disable charging via hardware.'
+                     ' Falling back to software disabling.')
+        self.DisableBatteryUpdates()
+
+  @contextlib.contextmanager
+  def PowerMeasurement(self, timeout=None, retries=None):
+    """Context manager that enables battery power collection.
+
+    Once the with block is exited, charging is resumed. Will attempt to disable
+    charging at the hardware level, and if that fails will fall back to software
+    disabling of battery updates.
+
+    Only for devices L and higher.
+
+    Example usage:
+      with PowerMeasurement():
+        browser_actions()
+        get_power_data() # report usage within this block
+      after_measurements() # Anything that runs after power
+                           # measurements are collected
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    """
+    try:
+      self.TieredSetCharging(False, timeout=timeout, retries=retries)
+      yield
+    finally:
+      self.TieredSetCharging(True, timeout=timeout, retries=retries)
+
+  def _ClearPowerData(self):
+    """Resets battery data and makes device appear like it is not
+    charging so that it will collect power data since last charge.
+
+    Returns:
+      True if power data cleared.
+      False if power data clearing is not supported (pre-L)
+
+    Raises:
+      device_errors.DeviceVersionError: If power clearing is supported,
+        but fails.
+    """
+    if (self._device.build_version_sdk <
+        constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP):
+      logging.warning('Dumpsys power data only available on 5.0 and above. '
+                      'Cannot clear power data.')
+      return False
+
+    self._device.RunShellCommand(
+        ['dumpsys', 'battery', 'set', 'usb', '1'], check_return=True)
+    self._device.RunShellCommand(
+        ['dumpsys', 'battery', 'set', 'ac', '1'], check_return=True)
+    self._device.RunShellCommand(
+        ['dumpsys', 'batterystats', '--reset'], check_return=True)
+    battery_data = self._device.RunShellCommand(
+        ['dumpsys', 'batterystats', '--charged', '--checkin'],
+        check_return=True, large_output=True)
+    for line in battery_data:
+      l = line.split(',')
+      if (len(l) > _PWI_POWER_CONSUMPTION_INDEX and l[_ROW_TYPE_INDEX] == 'pwi'
+          and float(l[_PWI_POWER_CONSUMPTION_INDEX]) != 0):
+        self._device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True)
+        raise device_errors.CommandFailedError(
+            'Non-zero pwi value found after reset.')
+    self._device.RunShellCommand(
+        ['dumpsys', 'battery', 'reset'], check_return=True)
+    return True
+
+  def _DiscoverDeviceProfile(self):
+    """Checks and caches device information.
+
+    Returns:
+      True if profile is found, false otherwise.
+    """
+
+    if 'profile' in self._cache:
+      return True
+    for profile in _DEVICE_PROFILES:
+      if self._device.product_model == profile['name']:
+        self._cache['profile'] = profile
+        return True
+    self._cache['profile'] = {
+        'name': None,
+        'witness_file': None,
+        'enable_command': None,
+        'disable_command': None,
+        'charge_counter': None,
+        'voltage': None,
+        'current': None,
+    }
+    return False
diff --git a/build/android/pylib/device/battery_utils_test.py b/build/android/pylib/device/battery_utils_test.py
new file mode 100755
index 0000000..b968fa6
--- /dev/null
+++ b/build/android/pylib/device/battery_utils_test.py
@@ -0,0 +1,574 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of battery_utils.py
+"""
+
+# pylint: disable=W0613
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import battery_utils
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.device import device_utils_test
+from pylib.utils import mock_calls
+
+# RunCommand from third_party/android_testrunner/run_command.py is mocked
+# below, so its path needs to be in sys.path.
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+_DUMPSYS_OUTPUT = [
+    '9,0,i,uid,1000,test_package1',
+    '9,0,i,uid,1001,test_package2',
+    '9,1000,l,pwi,uid,1',
+    '9,1001,l,pwi,uid,2'
+]
+
+
+class BatteryUtilsTest(mock_calls.TestCase):
+
+  _NEXUS_5 = {
+    'name': 'Nexus 5',
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+    'charge_counter': None,
+    'voltage': None,
+    'current': None,
+  }
+
+  _NEXUS_6 = {
+    'name': 'Nexus 6',
+    'witness_file': None,
+    'enable_command': None,
+    'disable_command': None,
+    'charge_counter': (
+        '/sys/class/power_supply/max170xx_battery/charge_counter_ext'),
+    'voltage': '/sys/class/power_supply/max170xx_battery/voltage_now',
+    'current': '/sys/class/power_supply/max170xx_battery/current_now',
+  }
+
+  _NEXUS_10 = {
+    'name': 'Nexus 10',
+    'witness_file': None,
+    'enable_command': None,
+    'disable_command': None,
+    'charge_counter': (
+        '/sys/class/power_supply/ds2784-fuelgauge/charge_counter_ext'),
+    'voltage': '/sys/class/power_supply/ds2784-fuelgauge/voltage_now',
+    'current': '/sys/class/power_supply/ds2784-fuelgauge/current_now',
+  }
+
+  def ShellError(self, output=None, status=1):
+    def action(cmd, *args, **kwargs):
+      raise device_errors.AdbShellCommandFailedError(
+          cmd, output, status, str(self.device))
+    if output is None:
+      output = 'Permission denied\n'
+    return action
+
+  def setUp(self):
+    self.adb = device_utils_test._AdbWrapperMock('0123456789abcdef')
+    self.device = device_utils.DeviceUtils(
+        self.adb, default_timeout=10, default_retries=0)
+    self.watchMethodCalls(self.call.adb, ignore=['GetDeviceSerial'])
+    self.battery = battery_utils.BatteryUtils(
+        self.device, default_timeout=10, default_retries=0)
+
+
+class BatteryUtilsInitTest(unittest.TestCase):
+
+  def testInitWithDeviceUtil(self):
+    serial = '0fedcba987654321'
+    d = device_utils.DeviceUtils(serial)
+    b = battery_utils.BatteryUtils(d)
+    self.assertEqual(d, b._device)
+
+  def testInitWithMissing_fails(self):
+    with self.assertRaises(TypeError):
+      battery_utils.BatteryUtils(None)
+    with self.assertRaises(TypeError):
+      battery_utils.BatteryUtils('')
+
+
+class BatteryUtilsSetChargingTest(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testSetCharging_enabled(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), True)):
+      self.battery.SetCharging(True)
+
+  def testSetCharging_alreadyEnabled(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), True)):
+      self.battery.SetCharging(True)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testSetCharging_disabled(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), True),
+        (self.call.device.RunShellCommand(mock.ANY, check_return=True), []),
+        (self.call.battery.GetCharging(), False)):
+      self.battery.SetCharging(False)
+
+
+class BatteryUtilsSetBatteryMeasurementTest(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testBatteryMeasurementWifi(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            mock.ANY, retries=0, single_line=True,
+            timeout=10, check_return=True), '22'),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), [])):
+      with self.battery.BatteryMeasurement():
+        pass
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testBatteryMeasurementUsb(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            mock.ANY, retries=0, single_line=True,
+            timeout=10, check_return=True), '22'),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), True)):
+      with self.battery.BatteryMeasurement():
+        pass
+
+
+class BatteryUtilsGetPowerData(BatteryUtilsTest):
+
+  def testGetPowerData(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      data = self.battery.GetPowerData()
+      check = {
+          'test_package1': {'uid': '1000', 'data': [1.0]},
+          'test_package2': {'uid': '1001', 'data': [2.0]}
+      }
+      self.assertEqual(data, check)
+
+  def testGetPowerData_packageCollisionSame(self):
+    self.battery._cache['uids'] = {'test_package1': '1000'}
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT):
+      data = self.battery.GetPowerData()
+      check = {
+          'test_package1': {'uid': '1000', 'data': [1.0]},
+          'test_package2': {'uid': '1001', 'data': [2.0]}
+      }
+      self.assertEqual(data, check)
+
+  def testGetPowerData_packageCollisionDifferent(self):
+    self.battery._cache['uids'] = {'test_package1': '1'}
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.battery.GetPowerData()
+
+  def testGetPowerData_cacheCleared(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      self.battery._cache.clear()
+      data = self.battery.GetPowerData()
+      check = {
+          'test_package1': {'uid': '1000', 'data': [1.0]},
+          'test_package2': {'uid': '1001', 'data': [2.0]}
+      }
+      self.assertEqual(data, check)
+
+  def testGetPackagePowerData(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      data = self.battery.GetPackagePowerData('test_package2')
+      self.assertEqual(data, {'uid': '1001', 'data': [2.0]})
+
+  def testGetPackagePowerData_badPackage(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT)):
+      data = self.battery.GetPackagePowerData('not_a_package')
+      self.assertEqual(data, None)
+
+
+class BatteryUtilsChargeDevice(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testChargeDeviceToLevel(self):
+    with self.assertCalls(
+        (self.call.battery.SetCharging(True)),
+        (self.call.battery.GetBatteryInfo(), {'level': '50'}),
+        (self.call.battery.GetBatteryInfo(), {'level': '100'})):
+      self.battery.ChargeDeviceToLevel(95)
+
+
+class BatteryUtilsGetBatteryInfoTest(BatteryUtilsTest):
+
+  def testGetBatteryInfo_normal(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True),
+        [
+          'Current Battery Service state:',
+          '  AC powered: false',
+          '  USB powered: true',
+          '  level: 100',
+          '  temperature: 321',
+        ]):
+      self.assertEquals(
+          {
+            'AC powered': 'false',
+            'USB powered': 'true',
+            'level': '100',
+            'temperature': '321',
+          },
+          self.battery.GetBatteryInfo())
+
+  def testGetBatteryInfo_nothing(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), []):
+      self.assertEquals({}, self.battery.GetBatteryInfo())
+
+
+class BatteryUtilsGetChargingTest(BatteryUtilsTest):
+
+  def testGetCharging_usb(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'USB powered': 'true'}):
+      self.assertTrue(self.battery.GetCharging())
+
+  def testGetCharging_usbFalse(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'USB powered': 'false'}):
+      self.assertFalse(self.battery.GetCharging())
+
+  def testGetCharging_ac(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'AC powered': 'true'}):
+      self.assertTrue(self.battery.GetCharging())
+
+  def testGetCharging_wireless(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'Wireless powered': 'true'}):
+      self.assertTrue(self.battery.GetCharging())
+
+  def testGetCharging_unknown(self):
+    with self.assertCall(
+        self.call.battery.GetBatteryInfo(), {'level': '42'}):
+      self.assertFalse(self.battery.GetCharging())
+
+
+class BatteryUtilsGetNetworkDataTest(BatteryUtilsTest):
+
+  def testGetNetworkData_noDataUsage(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'),
+            self.ShellError()),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'),
+            self.ShellError())):
+      self.assertEquals(self.battery.GetNetworkData('test_package1'), (0, 0))
+
+  def testGetNetworkData_badPackage(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT):
+      self.assertEqual(self.battery.GetNetworkData('asdf'), None)
+
+  def testGetNetworkData_packageNotCached(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'), 1),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'), 2)):
+      self.assertEqual(self.battery.GetNetworkData('test_package1'), (1,2))
+
+  def testGetNetworkData_packageCached(self):
+    self.battery._cache['uids'] = {'test_package1': '1000'}
+    with self.assertCalls(
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'), 1),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'), 2)):
+      self.assertEqual(self.battery.GetNetworkData('test_package1'), (1,2))
+
+  def testGetNetworkData_clearedCache(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '-c'], check_return=True),
+            _DUMPSYS_OUTPUT),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_snd'), 1),
+        (self.call.device.ReadFile('/proc/uid_stat/1000/tcp_rcv'), 2)):
+      self.battery._cache.clear()
+      self.assertEqual(self.battery.GetNetworkData('test_package1'), (1,2))
+
+
+class BatteryUtilsLetBatteryCoolToTemperatureTest(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testLetBatteryCoolToTemperature_startUnder(self):
+    with self.assertCalls(
+        (self.call.battery.EnableBatteryUpdates(), []),
+        (self.call.battery.GetBatteryInfo(), {'temperature': '500'})):
+      self.battery.LetBatteryCoolToTemperature(600)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testLetBatteryCoolToTemperature_startOver(self):
+    with self.assertCalls(
+        (self.call.battery.EnableBatteryUpdates(), []),
+        (self.call.battery.GetBatteryInfo(), {'temperature': '500'}),
+        (self.call.battery.GetBatteryInfo(), {'temperature': '400'})):
+      self.battery.LetBatteryCoolToTemperature(400)
+
+
+class BatteryUtilsSupportsFuelGaugeTest(BatteryUtilsTest):
+
+  def testSupportsFuelGauge_false(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    self.assertFalse(self.battery.SupportsFuelGauge())
+
+  def testSupportsFuelGauge_trueMax(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    # TODO(rnephew): Change this to assertTrue when we have support for
+    # disabling hardware charging on nexus 6.
+    self.assertFalse(self.battery.SupportsFuelGauge())
+
+  def testSupportsFuelGauge_trueDS(self):
+    self.battery._cache['profile'] = self._NEXUS_10
+    # TODO(rnephew): Change this to assertTrue when we have support for
+    # disabling hardware charging on nexus 10.
+    self.assertFalse(self.battery.SupportsFuelGauge())
+
+
+class BatteryUtilsGetFuelGaugeChargeCounterTest(BatteryUtilsTest):
+
+  def testGetFuelGaugeChargeCounter_noFuelGauge(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertRaises(device_errors.CommandFailedError):
+      self.battery.GetFuelGaugeChargeCounter()
+
+  def testGetFuelGaugeChargeCounter_fuelGaugePresent(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.SupportsFuelGauge(), True),
+        (self.call.device.ReadFile(mock.ANY), '123')):
+      self.assertEqual(self.battery.GetFuelGaugeChargeCounter(), 123)
+
+
+class BatteryUtilsTieredSetCharging(BatteryUtilsTest):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_softwareSetTrue(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), True)):
+      self.battery.TieredSetCharging(True)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_softwareSetFalse(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False)):
+      self.battery.TieredSetCharging(False)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_hardwareSetTrue(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), False),
+        (self.call.battery.SetCharging(True))):
+      self.battery.TieredSetCharging(True)
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testTieredSetCharging_hardwareSetFalse(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery.SetCharging(False))):
+      self.battery.TieredSetCharging(False)
+
+  def testTieredSetCharging_expectedStateAlreadyTrue(self):
+    with self.assertCalls((self.call.battery.GetCharging(), True)):
+      self.battery.TieredSetCharging(True)
+
+  def testTieredSetCharging_expectedStateAlreadyFalse(self):
+    with self.assertCalls((self.call.battery.GetCharging(), False)):
+      self.battery.TieredSetCharging(False)
+
+
+class BatteryUtilsPowerMeasurement(BatteryUtilsTest):
+
+  def testPowerMeasurement_hardware(self):
+    self.battery._cache['profile'] = self._NEXUS_5
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery.SetCharging(False)),
+        (self.call.battery.GetCharging(), False),
+        (self.call.battery.SetCharging(True))):
+      with self.battery.PowerMeasurement():
+        pass
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testPowerMeasurement_software(self):
+    self.battery._cache['profile'] = self._NEXUS_6
+    with self.assertCalls(
+        (self.call.battery.GetCharging(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.battery._ClearPowerData(), True),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '0'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '0'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), []),
+        (self.call.battery.GetCharging(), False),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery'], check_return=True), ['UPDATES STOPPED']),
+        (self.call.battery.GetCharging(), True)):
+      with self.battery.PowerMeasurement():
+        pass
+
+
+class BatteryUtilsDiscoverDeviceProfile(BatteryUtilsTest):
+
+  def testDiscoverDeviceProfile_known(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.product.model'), "Nexus 4")):
+      self.battery._DiscoverDeviceProfile()
+      self.assertEqual(self.battery._cache['profile']['name'], "Nexus 4")
+
+  def testDiscoverDeviceProfile_unknown(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.product.model'), "Other")):
+      self.battery._DiscoverDeviceProfile()
+      self.assertEqual(self.battery._cache['profile']['name'], None)
+
+
+class BatteryUtilsClearPowerData(BatteryUtilsTest):
+
+  def testClearPowerData_preL(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, retries=0,
+            single_line=True, timeout=10, check_return=True), '20')):
+      self.assertFalse(self.battery._ClearPowerData())
+
+  def testClearPowerData_clearedL(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, retries=0,
+            single_line=True, timeout=10, check_return=True), '22'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--reset'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--charged', '--checkin'],
+            check_return=True, large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), [])):
+      self.assertTrue(self.battery._ClearPowerData())
+
+  def testClearPowerData_notClearedL(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(mock.ANY, retries=0,
+            single_line=True, timeout=10, check_return=True), '22'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'usb', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'set', 'ac', '1'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--reset'], check_return=True), []),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'batterystats', '--charged', '--checkin'],
+            check_return=True, large_output=True),
+            ['9,1000,l,pwi,uid,0.0327']),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'battery', 'reset'], check_return=True), [])):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.battery._ClearPowerData()
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/device/commands/BUILD.gn b/build/android/pylib/device/commands/BUILD.gn
new file mode 100644
index 0000000..66e1010
--- /dev/null
+++ b/build/android/pylib/device/commands/BUILD.gn
@@ -0,0 +1,17 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+group("commands") {
+  datadeps = [
+    ":chromium_commands",
+  ]
+}
+
+# GYP: //build/android/pylib/device/commands/commands.gyp:chromium_commands
+android_library("chromium_commands") {
+  java_files = [ "java/src/org/chromium/android/commands/unzip/Unzip.java" ]
+  dex_path = "$root_build_dir/lib.java/chromium_commands.dex.jar"
+}
diff --git a/build/android/pylib/device/commands/__init__.py b/build/android/pylib/device/commands/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/device/commands/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/device/commands/commands.gyp b/build/android/pylib/device/commands/commands.gyp
new file mode 100644
index 0000000..b5b5bc8
--- /dev/null
+++ b/build/android/pylib/device/commands/commands.gyp
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      # GN version: //build/android/pylib/device/commands:chromium_commands
+      'target_name': 'chromium_commands',
+      'type': 'none',
+      'variables': {
+        'add_to_dependents_classpaths': 0,
+        'java_in_dir': ['java'],
+      },
+      'includes': [
+        '../../../../../build/java.gypi',
+      ],
+    }
+  ],
+}
diff --git a/build/android/pylib/device/commands/install_commands.py b/build/android/pylib/device/commands/install_commands.py
new file mode 100644
index 0000000..58c56cc
--- /dev/null
+++ b/build/android/pylib/device/commands/install_commands.py
@@ -0,0 +1,51 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import constants
+
+BIN_DIR = '%s/bin' % constants.TEST_EXECUTABLE_DIR
+_FRAMEWORK_DIR = '%s/framework' % constants.TEST_EXECUTABLE_DIR
+
+_COMMANDS = {
+  'unzip': 'org.chromium.android.commands.unzip.Unzip',
+}
+
+_SHELL_COMMAND_FORMAT = (
+"""#!/system/bin/sh
+base=%s
+export CLASSPATH=$base/framework/chromium_commands.jar
+exec app_process $base/bin %s $@
+""")
+
+
+def Installed(device):
+  return (all(device.FileExists('%s/%s' % (BIN_DIR, c)) for c in _COMMANDS)
+          and device.FileExists('%s/chromium_commands.jar' % _FRAMEWORK_DIR))
+
+def InstallCommands(device):
+  if device.IsUserBuild():
+    raise Exception('chromium_commands currently requires a userdebug build.')
+
+  chromium_commands_jar_path = os.path.join(
+      constants.GetOutDirectory(), constants.SDK_BUILD_JAVALIB_DIR,
+      'chromium_commands.dex.jar')
+  if not os.path.exists(chromium_commands_jar_path):
+    raise Exception('%s not found. Please build chromium_commands.'
+                    % chromium_commands_jar_path)
+
+  device.RunShellCommand(['mkdir', BIN_DIR, _FRAMEWORK_DIR])
+  for command, main_class in _COMMANDS.iteritems():
+    shell_command = _SHELL_COMMAND_FORMAT % (
+        constants.TEST_EXECUTABLE_DIR, main_class)
+    shell_file = '%s/%s' % (BIN_DIR, command)
+    device.WriteFile(shell_file, shell_command)
+    device.RunShellCommand(
+        ['chmod', '755', shell_file], check_return=True)
+
+  device.adb.Push(
+      chromium_commands_jar_path,
+      '%s/chromium_commands.jar' % _FRAMEWORK_DIR)
+
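+# Illustrative only: a typical caller (with a DeviceUtils instance `device`,
+# a placeholder name) installs the commands only when they are missing:
+#
+#   if not install_commands.Installed(device):
+#     install_commands.InstallCommands(device)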
diff --git a/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
new file mode 100644
index 0000000..7cbbb73
--- /dev/null
+++ b/build/android/pylib/device/commands/java/src/org/chromium/android/commands/unzip/Unzip.java
@@ -0,0 +1,95 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.android.commands.unzip;
+
+import android.util.Log;
+
+import java.io.BufferedInputStream;
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.PrintStream;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+/**
+ *  Minimal implementation of the command-line unzip utility for Android.
+ */
+public class Unzip {
+
+    private static final String TAG = "Unzip";
+
+    public static void main(String[] args) {
+        try {
+            (new Unzip()).run(args);
+        } catch (RuntimeException e) {
+            Log.e(TAG, e.toString());
+            System.exit(1);
+        }
+    }
+
+    private void showUsage(PrintStream s) {
+        s.println("Usage:");
+        s.println("unzip [zipfile]");
+    }
+
+    @SuppressWarnings("Finally")
+    private void unzip(String[] args) {
+        ZipInputStream zis = null;
+        try {
+            String zipfile = args[0];
+            zis = new ZipInputStream(new BufferedInputStream(new FileInputStream(zipfile)));
+            ZipEntry ze = null;
+
+            byte[] bytes = new byte[1024];
+            while ((ze = zis.getNextEntry()) != null) {
+                File outputFile = new File(ze.getName());
+                if (ze.isDirectory()) {
+                    if (!outputFile.exists() && !outputFile.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + outputFile.toString());
+                    }
+                } else {
+                    File parentDir = outputFile.getParentFile();
+                    if (!parentDir.exists() && !parentDir.mkdirs()) {
+                        throw new RuntimeException(
+                                "Failed to create directory: " + parentDir.toString());
+                    }
+                    OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile));
+                    int actual_bytes = 0;
+                    int total_bytes = 0;
+                    while ((actual_bytes = zis.read(bytes)) != -1) {
+                        out.write(bytes, 0, actual_bytes);
+                        total_bytes += actual_bytes;
+                    }
+                    out.close();
+                }
+                zis.closeEntry();
+            }
+
+        } catch (IOException e) {
+            throw new RuntimeException("Error while unzipping: " + e.toString());
+        } finally {
+            try {
+                if (zis != null) zis.close();
+            } catch (IOException e) {
+                throw new RuntimeException("Error while closing zip: " + e.toString());
+            }
+        }
+    }
+
+    public void run(String[] args) {
+        if (args.length != 1) {
+            showUsage(System.err);
+            throw new RuntimeException("Incorrect usage.");
+        }
+
+        unzip(args);
+    }
+}
+
diff --git a/build/android/pylib/device/decorators.py b/build/android/pylib/device/decorators.py
new file mode 100644
index 0000000..73c13da
--- /dev/null
+++ b/build/android/pylib/device/decorators.py
@@ -0,0 +1,157 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Function/method decorators that provide timeout and retry logic.
+"""
+
+import functools
+import os
+import sys
+import threading
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+from pylib.utils import timeout_retry
+
+# TODO(jbudorick) Remove once the DeviceUtils implementations are no longer
+#                 backed by AndroidCommands / android_testrunner.
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                             'android_testrunner'))
+import errors as old_errors
+
+DEFAULT_TIMEOUT_ATTR = '_default_timeout'
+DEFAULT_RETRIES_ATTR = '_default_retries'
+
+
+def _TimeoutRetryWrapper(f, timeout_func, retries_func, pass_values=False):
+  """ Wraps a funcion with timeout and retry handling logic.
+
+  Args:
+    f: The function to wrap.
+    timeout_func: A callable that returns the timeout value.
+    retries_func: A callable that returns the retries value.
+    pass_values: If True, passes the values returned by |timeout_func| and
+                 |retries_func| to the wrapped function as 'timeout' and
+                 'retries' kwargs, respectively.
+  Returns:
+    The wrapped function.
+  """
+  @functools.wraps(f)
+  def TimeoutRetryWrapper(*args, **kwargs):
+    timeout = timeout_func(*args, **kwargs)
+    retries = retries_func(*args, **kwargs)
+    if pass_values:
+      kwargs['timeout'] = timeout
+      kwargs['retries'] = retries
+    def impl():
+      return f(*args, **kwargs)
+    try:
+      if isinstance(threading.current_thread(),
+                    timeout_retry.TimeoutRetryThread):
+        return impl()
+      else:
+        return timeout_retry.Run(impl, timeout, retries)
+    except old_errors.WaitForResponseTimedOutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+          sys.exc_info()[2])
+    except old_errors.DeviceUnresponsiveError as e:
+      raise device_errors.DeviceUnreachableError(str(e)), None, (
+          sys.exc_info()[2])
+    except reraiser_thread.TimeoutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+          sys.exc_info()[2])
+    except cmd_helper.TimeoutError as e:
+      raise device_errors.CommandTimeoutError(str(e)), None, (
+          sys.exc_info()[2])
+  return TimeoutRetryWrapper
+
+
+def WithTimeoutAndRetries(f):
+  """A decorator that handles timeouts and retries.
+
+  'timeout' and 'retries' kwargs must be passed to the function.
+
+  Args:
+    f: The function to decorate.
+  Returns:
+    The decorated function.
+  """
+  get_timeout = lambda *a, **kw: kw['timeout']
+  get_retries = lambda *a, **kw: kw['retries']
+  return _TimeoutRetryWrapper(f, get_timeout, get_retries)
+
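+# Illustrative only (names are placeholders): the base decorator requires the
+# caller to pass 'timeout' and 'retries' on every call.
+#
+#   @WithTimeoutAndRetries
+#   def flaky_operation(timeout=None, retries=None):
+#     pass
+#
+#   flaky_operation(timeout=30, retries=3)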
+
+def WithExplicitTimeoutAndRetries(timeout, retries):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |timeout| and |retries| values are always used.
+
+  Args:
+    timeout: The number of seconds to wait for the decorated function to
+             return. Always used.
+    retries: The number of times the decorated function should be retried on
+             failure. Always used.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    get_timeout = lambda *a, **kw: timeout
+    get_retries = lambda *a, **kw: retries
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries)
+  return decorator
+
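+# Illustrative only (names are placeholders): the explicit decorator always
+# uses the values given at decoration time.
+#
+#   @WithExplicitTimeoutAndRetries(60, 2)
+#   def reboot_and_wait():
+#     pass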
+
+def WithTimeoutAndRetriesDefaults(default_timeout, default_retries):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |default_timeout| and |default_retries| values are used only
+  if timeout and retries values are not provided.
+
+  Args:
+    default_timeout: The number of seconds to wait for the decorated function
+                     to return. Only used if a 'timeout' kwarg is not passed
+                     to the decorated function.
+    default_retries: The number of times the decorated function should be
+                     retried on failure. Only used if a 'retries' kwarg is not
+                     passed to the decorated function.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    get_timeout = lambda *a, **kw: kw.get('timeout', default_timeout)
+    get_retries = lambda *a, **kw: kw.get('retries', default_retries)
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
+  return decorator
+
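+# Illustrative only (names are placeholders): the defaults decorator supplies
+# fallback values that individual calls may override.
+#
+#   @WithTimeoutAndRetriesDefaults(30, 3)
+#   def pull_logs(timeout=None, retries=None):
+#     return timeout
+#
+#   pull_logs()             # timeout=30, retries=3
+#   pull_logs(timeout=120)  # overrides only the timeout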
+
+def WithTimeoutAndRetriesFromInstance(
+    default_timeout_name=DEFAULT_TIMEOUT_ATTR,
+    default_retries_name=DEFAULT_RETRIES_ATTR):
+  """Returns a decorator that handles timeouts and retries.
+
+  The provided |default_timeout_name| and |default_retries_name| are used to
+  get the default timeout value and the default retries value from the object
+  instance if timeout and retries values are not provided.
+
+  Note that this should only be used to decorate methods, not functions.
+
+  Args:
+    default_timeout_name: The name of the default timeout attribute of the
+                          instance.
+    default_retries_name: The name of the default retries attribute of the
+                          instance.
+  Returns:
+    The actual decorator.
+  """
+  def decorator(f):
+    def get_timeout(inst, *_args, **kwargs):
+      return kwargs.get('timeout', getattr(inst, default_timeout_name))
+    def get_retries(inst, *_args, **kwargs):
+      return kwargs.get('retries', getattr(inst, default_retries_name))
+    return _TimeoutRetryWrapper(f, get_timeout, get_retries, pass_values=True)
+  return decorator
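+
+# Illustrative only (ExampleClient is a hypothetical class): the instance
+# decorator reads the default timeout/retries from attributes on `self`.
+#
+#   class ExampleClient(object):
+#     def __init__(self):
+#       self._default_timeout = 30
+#       self._default_retries = 3
+#
+#     @WithTimeoutAndRetriesFromInstance()
+#     def Ping(self, timeout=None, retries=None):
+#       return timeout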
+
diff --git a/build/android/pylib/device/decorators_test.py b/build/android/pylib/device/decorators_test.py
new file mode 100644
index 0000000..b75618b
--- /dev/null
+++ b/build/android/pylib/device/decorators_test.py
@@ -0,0 +1,365 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for decorators.py.
+"""
+
+# pylint: disable=W0613
+
+import os
+import sys
+import time
+import traceback
+import unittest
+
+from pylib import constants
+from pylib.device import decorators
+from pylib.device import device_errors
+from pylib.utils import reraiser_thread
+
+# TODO(jbudorick) Remove once the DeviceUtils implementations are no longer
+#                 backed by AndroidCommands / android_testrunner.
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                             'android_testrunner'))
+import errors as old_errors
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+class DecoratorsTest(unittest.TestCase):
+  _decorated_function_called_count = 0
+
+  def testFunctionDecoratorDoesTimeouts(self):
+    """Tests that the base decorator handles the timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetries
+    def alwaysTimesOut(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut(timeout=1, retries=0)
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testFunctionDecoratorDoesRetries(self):
+    """Tests that the base decorator handles the retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesCommandFailedError(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError(timeout=30, retries=10)
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+  def testFunctionDecoratorRequiresParams(self):
+    """Tests that the base decorator requires timeout and retries params."""
+    @decorators.WithTimeoutAndRetries
+    def requiresExplicitTimeoutAndRetries(timeout=None, retries=None):
+      return (timeout, retries)
+
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries()
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries(timeout=10)
+    with self.assertRaises(KeyError):
+      requiresExplicitTimeoutAndRetries(retries=0)
+    expected_timeout = 10
+    expected_retries = 1
+    (actual_timeout, actual_retries) = (
+        requiresExplicitTimeoutAndRetries(timeout=expected_timeout,
+                                          retries=expected_retries))
+    self.assertEquals(expected_timeout, actual_timeout)
+    self.assertEquals(expected_retries, actual_retries)
+
+  def testFunctionDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testFunctionDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithTimeoutAndRetries
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc),
+          timeout=10, retries=1)
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testDefaultsFunctionDecoratorDoesTimeouts(self):
+    """Tests that the defaults decorator handles timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetriesDefaults(1, 0)
+    def alwaysTimesOut(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut()
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+    DecoratorsTest._decorated_function_called_count = 0
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut(timeout=2)
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 2)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testDefaultsFunctionDecoratorDoesRetries(self):
+    """Tests that the defaults decorator handles retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesCommandFailedError(timeout=None, retries=None):
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError()
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+    DecoratorsTest._decorated_function_called_count = 0
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError(retries=5)
+    self.assertEquals(6, DecoratorsTest._decorated_function_called_count)
+
+  def testDefaultsFunctionDecoratorPassesValues(self):
+    """Tests that the defaults decorator passes timeout and retries kwargs."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysReturnsTimeouts(timeout=None, retries=None):
+      return timeout
+
+    self.assertEquals(30, alwaysReturnsTimeouts())
+    self.assertEquals(120, alwaysReturnsTimeouts(timeout=120))
+
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysReturnsRetries(timeout=None, retries=None):
+      return retries
+
+    self.assertEquals(10, alwaysReturnsRetries())
+    self.assertEquals(1, alwaysReturnsRetries(retries=1))
+
+  def testDefaultsFunctionDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testDefaultsFunctionDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithTimeoutAndRetriesDefaults(30, 10)
+    def alwaysRaisesProvidedException(exception, timeout=None, retries=None):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testExplicitFunctionDecoratorDoesTimeouts(self):
+    """Tests that the explicit decorator handles timeout logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithExplicitTimeoutAndRetries(1, 0)
+    def alwaysTimesOut():
+      DecoratorsTest._decorated_function_called_count += 1
+      time.sleep(100)
+
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      alwaysTimesOut()
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, DecoratorsTest._decorated_function_called_count)
+
+  def testExplicitFunctionDecoratorDoesRetries(self):
+    """Tests that the explicit decorator handles retries logic."""
+    DecoratorsTest._decorated_function_called_count = 0
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesCommandFailedError():
+      DecoratorsTest._decorated_function_called_count += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    with self.assertRaises(device_errors.CommandFailedError):
+      alwaysRaisesCommandFailedError()
+    self.assertEquals(11, DecoratorsTest._decorated_function_called_count)
+
+  def testExplicitDecoratorTranslatesOldExceptions(self):
+    """Tests that the explicit decorator translates old exceptions."""
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesProvidedException(exception):
+      raise exception
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testExplicitDecoratorTranslatesReraiserExceptions(self):
+    """Tests that the explicit decorator translates reraiser exceptions."""
+    @decorators.WithExplicitTimeoutAndRetries(30, 10)
+    def alwaysRaisesProvidedException(exception):
+      raise exception
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  class _MethodDecoratorTestObject(object):
+    """An object suitable for testing the method decorator."""
+
+    def __init__(self, test_case, default_timeout=_DEFAULT_TIMEOUT,
+                 default_retries=_DEFAULT_RETRIES):
+      self._test_case = test_case
+      self.default_timeout = default_timeout
+      self.default_retries = default_retries
+      self.function_call_counters = {
+          'alwaysRaisesCommandFailedError': 0,
+          'alwaysTimesOut': 0,
+          'requiresExplicitTimeoutAndRetries': 0,
+      }
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysTimesOut(self, timeout=None, retries=None):
+      self.function_call_counters['alwaysTimesOut'] += 1
+      time.sleep(100)
+      self._test_case.assertFalse(True, msg='Failed to time out?')
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysRaisesCommandFailedError(self, timeout=None, retries=None):
+      self.function_call_counters['alwaysRaisesCommandFailedError'] += 1
+      raise device_errors.CommandFailedError('testCommand failed')
+
+    # pylint: disable=no-self-use
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysReturnsTimeout(self, timeout=None, retries=None):
+      return timeout
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysReturnsRetries(self, timeout=None, retries=None):
+      return retries
+
+    @decorators.WithTimeoutAndRetriesFromInstance(
+        'default_timeout', 'default_retries')
+    def alwaysRaisesProvidedException(self, exception, timeout=None,
+                                      retries=None):
+      raise exception
+
+    # pylint: enable=no-self-use
+
+
+  def testMethodDecoratorDoesTimeout(self):
+    """Tests that the method decorator handles timeout logic."""
+    test_obj = self._MethodDecoratorTestObject(self)
+    start_time = time.time()
+    with self.assertRaises(device_errors.CommandTimeoutError):
+      try:
+        test_obj.alwaysTimesOut(timeout=1, retries=0)
+      except:
+        traceback.print_exc()
+        raise
+    elapsed_time = time.time() - start_time
+    self.assertTrue(elapsed_time >= 1)
+    self.assertEquals(1, test_obj.function_call_counters['alwaysTimesOut'])
+
+  def testMethodDecoratorDoesRetries(self):
+    """Tests that the method decorator handles retries logic."""
+    test_obj = self._MethodDecoratorTestObject(self)
+    with self.assertRaises(device_errors.CommandFailedError):
+      try:
+        test_obj.alwaysRaisesCommandFailedError(retries=10)
+      except:
+        traceback.print_exc()
+        raise
+    self.assertEquals(
+        11, test_obj.function_call_counters['alwaysRaisesCommandFailedError'])
+
+  def testMethodDecoratorPassesValues(self):
+    """Tests that the method decorator passes timeout and retries kwargs."""
+    test_obj = self._MethodDecoratorTestObject(
+        self, default_timeout=42, default_retries=31)
+    self.assertEquals(42, test_obj.alwaysReturnsTimeout())
+    self.assertEquals(41, test_obj.alwaysReturnsTimeout(timeout=41))
+    self.assertEquals(31, test_obj.alwaysReturnsRetries())
+    self.assertEquals(32, test_obj.alwaysReturnsRetries(retries=32))
+
+  def testMethodDecoratorTranslatesOldExceptions(self):
+    test_obj = self._MethodDecoratorTestObject(self)
+
+    exception_desc = 'Old response timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          old_errors.WaitForResponseTimedOutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+    exception_desc = 'Old device error'
+    with self.assertRaises(device_errors.DeviceUnreachableError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          old_errors.DeviceUnresponsiveError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+  def testMethodDecoratorTranslatesReraiserExceptions(self):
+    test_obj = self._MethodDecoratorTestObject(self)
+
+    exception_desc = 'Reraiser thread timeout error'
+    with self.assertRaises(device_errors.CommandTimeoutError) as e:
+      test_obj.alwaysRaisesProvidedException(
+          reraiser_thread.TimeoutError(exception_desc))
+    self.assertEquals(exception_desc, str(e.exception))
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/device_blacklist.py b/build/android/pylib/device/device_blacklist.py
new file mode 100644
index 0000000..a141d62
--- /dev/null
+++ b/build/android/pylib/device/device_blacklist.py
@@ -0,0 +1,61 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import threading
+
+from pylib import constants
+
+_BLACKLIST_JSON = os.path.join(
+    constants.DIR_SOURCE_ROOT,
+    os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+    'bad_devices.json')
+
+# Note that this only protects against concurrent accesses to the blacklist
+# within a process.
+_blacklist_lock = threading.RLock()
+
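+# Illustrative usage (the serial is hypothetical):
+#   ExtendBlacklist(['0123456789abcdef'])
+#   bad_devices = ReadBlacklist()
+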
+def ReadBlacklist():
+  """Reads the blacklist from the _BLACKLIST_JSON file.
+
+  Returns:
+    A list containing bad devices.
+  """
+  with _blacklist_lock:
+    if not os.path.exists(_BLACKLIST_JSON):
+      return []
+
+    with open(_BLACKLIST_JSON, 'r') as f:
+      return json.load(f)
+
+
+def WriteBlacklist(blacklist):
+  """Writes the provided blacklist to the _BLACKLIST_JSON file.
+
+  Args:
+    blacklist: list of bad devices to write to the _BLACKLIST_JSON file.
+  """
+  with _blacklist_lock:
+    with open(_BLACKLIST_JSON, 'w') as f:
+      json.dump(list(set(blacklist)), f)
+
+
+def ExtendBlacklist(devices):
+  """Adds devices to _BLACKLIST_JSON file.
+
+  Args:
+    devices: list of bad devices to be added to the _BLACKLIST_JSON file.
+  """
+  with _blacklist_lock:
+    blacklist = ReadBlacklist()
+    blacklist.extend(devices)
+    WriteBlacklist(blacklist)
+
+
+def ResetBlacklist():
+  """Erases the _BLACKLIST_JSON file if it exists."""
+  with _blacklist_lock:
+    if os.path.exists(_BLACKLIST_JSON):
+      os.remove(_BLACKLIST_JSON)
+
diff --git a/build/android/pylib/device/device_errors.py b/build/android/pylib/device/device_errors.py
new file mode 100644
index 0000000..2492015
--- /dev/null
+++ b/build/android/pylib/device/device_errors.py
@@ -0,0 +1,89 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Exception classes raised by AdbWrapper and DeviceUtils.
+"""
+
+from pylib import cmd_helper
+from pylib.utils import base_error
+
+
+class CommandFailedError(base_error.BaseError):
+  """Exception for command failures."""
+
+  def __init__(self, message, device_serial=None):
+    if device_serial is not None:
+      message = '(device: %s) %s' % (device_serial, message)
+    self.device_serial = device_serial
+    super(CommandFailedError, self).__init__(message)
+
+
+class AdbCommandFailedError(CommandFailedError):
+  """Exception for adb command failures."""
+
+  def __init__(self, args, output, status=None, device_serial=None,
+               message=None):
+    self.args = args
+    self.output = output
+    self.status = status
+    if not message:
+      adb_cmd = ' '.join(cmd_helper.SingleQuote(arg) for arg in self.args)
+      message = ['adb %s: failed ' % adb_cmd]
+      if status:
+        message.append('with exit status %s ' % self.status)
+      if output:
+        message.append('and output:\n')
+        message.extend('- %s\n' % line for line in output.splitlines())
+      else:
+        message.append('and no output.')
+      message = ''.join(message)
+    super(AdbCommandFailedError, self).__init__(message, device_serial)
+
+
+class DeviceVersionError(CommandFailedError):
+  """Exception for device version failures."""
+
+  def __init__(self, message, device_serial=None):
+    super(DeviceVersionError, self).__init__(message, device_serial)
+
+
+class AdbShellCommandFailedError(AdbCommandFailedError):
+  """Exception for shell command failures run via adb."""
+
+  def __init__(self, command, output, status, device_serial=None):
+    self.command = command
+    message = ['shell command run via adb failed on the device:\n',
+               '  command: %s\n' % command]
+    message.append('  exit status: %s\n' % status)
+    if output:
+      message.append('  output:\n')
+      if isinstance(output, basestring):
+        output_lines = output.splitlines()
+      else:
+        output_lines = output
+      message.extend('  - %s\n' % line for line in output_lines)
+    else:
+      message.append("  output: ''\n")
+    message = ''.join(message)
+    super(AdbShellCommandFailedError, self).__init__(
+      ['shell', command], output, status, device_serial, message)
+
+
+class CommandTimeoutError(base_error.BaseError):
+  """Exception for command timeouts."""
+  pass
+
+
+class DeviceUnreachableError(base_error.BaseError):
+  """Exception for device unreachable failures."""
+  pass
+
+
+class NoDevicesError(base_error.BaseError):
+  """Exception for having no devices attached."""
+
+  def __init__(self):
+    super(NoDevicesError, self).__init__(
+        'No devices attached.', is_infra_error=True)
diff --git a/build/android/pylib/device/device_list.py b/build/android/pylib/device/device_list.py
new file mode 100644
index 0000000..0eb6acb
--- /dev/null
+++ b/build/android/pylib/device/device_list.py
@@ -0,0 +1,30 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A module to keep track of devices across builds."""
+
+import os
+
+LAST_DEVICES_FILENAME = '.last_devices'
+LAST_MISSING_DEVICES_FILENAME = '.last_missing'
+
+
+def GetPersistentDeviceList(file_name):
+  """Returns a list of devices.
+
+  Args:
+    file_name: the file name containing a list of devices.
+
+  Returns:
+    List of device serial numbers that were on the bot.
+  """
+  with open(file_name) as f:
+    return f.read().splitlines()
+
+
+def WritePersistentDeviceList(file_name, device_list):
+  """Writes a de-duplicated device list to |file_name|."""
+  path = os.path.dirname(file_name)
+  if not os.path.exists(path):
+    os.makedirs(path)
+  with open(file_name, 'w') as f:
+    f.write('\n'.join(set(device_list)))
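+
+# Illustrative usage (the serials are hypothetical):
+#   WritePersistentDeviceList(LAST_DEVICES_FILENAME, ['serial_a', 'serial_b'])
+#   serials = GetPersistentDeviceList(LAST_DEVICES_FILENAME)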
diff --git a/build/android/pylib/device/device_utils.py b/build/android/pylib/device/device_utils.py
new file mode 100644
index 0000000..f201ef3
--- /dev/null
+++ b/build/android/pylib/device/device_utils.py
@@ -0,0 +1,1754 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides a variety of device interactions based on adb.
+
+Eventually, this will be based on adb_wrapper.
+"""
+# pylint: disable=unused-argument
+
+import collections
+import contextlib
+import itertools
+import logging
+import multiprocessing
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import time
+import zipfile
+
+import pylib.android_commands
+from pylib import cmd_helper
+from pylib import constants
+from pylib import device_signal
+from pylib.constants import keyevent
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import device_blacklist
+from pylib.device import device_errors
+from pylib.device import intent
+from pylib.device import logcat_monitor
+from pylib.device.commands import install_commands
+from pylib.sdk import split_select
+from pylib.utils import apk_helper
+from pylib.utils import base_error
+from pylib.utils import device_temp_file
+from pylib.utils import host_utils
+from pylib.utils import md5sum
+from pylib.utils import parallelizer
+from pylib.utils import timeout_retry
+from pylib.utils import zip_utils
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+# A sentinel object for default values
+# TODO(jbudorick,perezju): revisit how default values are handled by
+# the timeout_retry decorators.
+DEFAULT = object()
+
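+# Each entry below describes one charging-control interface: |witness_file| is
+# a path whose presence identifies devices that expose the interface, and the
+# enable/disable commands toggle charging through it.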
+_CONTROL_CHARGING_COMMANDS = [
+  {
+    # Nexus 4
+    'witness_file': '/sys/module/pm8921_charger/parameters/disabled',
+    'enable_command': 'echo 0 > /sys/module/pm8921_charger/parameters/disabled',
+    'disable_command':
+        'echo 1 > /sys/module/pm8921_charger/parameters/disabled',
+  },
+  {
+    # Nexus 5
+    # Setting the HIZ bit of the bq24192 causes the charger to actually ignore
+    # energy coming from USB. Setting the power_supply offline just updates the
+    # Android system to reflect that.
+    'witness_file': '/sys/kernel/debug/bq24192/INPUT_SRC_CONT',
+    'enable_command': (
+        'echo 0x4A > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'echo 1 > /sys/class/power_supply/usb/online'),
+    'disable_command': (
+        'echo 0xCA > /sys/kernel/debug/bq24192/INPUT_SRC_CONT && '
+        'chmod 644 /sys/class/power_supply/usb/online && '
+        'echo 0 > /sys/class/power_supply/usb/online'),
+  },
+]
+
+
+@decorators.WithExplicitTimeoutAndRetries(
+    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
+def GetAVDs():
+  """Returns a list of Android Virtual Devices.
+
+  Returns:
+    A list containing the configured AVDs.
+  """
+  lines = cmd_helper.GetCmdOutput([
+      os.path.join(constants.ANDROID_SDK_ROOT, 'tools', 'android'),
+      'list', 'avd']).splitlines()
+  avds = []
+  for line in lines:
+    if 'Name:' not in line:
+      continue
+    key, value = (s.strip() for s in line.split(':', 1))
+    if key == 'Name':
+      avds.append(value)
+  return avds
+
+
+@decorators.WithExplicitTimeoutAndRetries(
+    _DEFAULT_TIMEOUT, _DEFAULT_RETRIES)
+def RestartServer():
+  """Restarts the adb server.
+
+  Raises:
+    CommandFailedError if we fail to kill or restart the server.
+  """
+  def adb_killed():
+    return not adb_wrapper.AdbWrapper.IsServerOnline()
+
+  def adb_started():
+    return adb_wrapper.AdbWrapper.IsServerOnline()
+
+  adb_wrapper.AdbWrapper.KillServer()
+  if not timeout_retry.WaitFor(adb_killed, wait_period=1, max_tries=5):
+    # TODO(perezju): raise an exception after fixing http://crbug.com/442319
+    logging.warning('Failed to kill adb server')
+  adb_wrapper.AdbWrapper.StartServer()
+  if not timeout_retry.WaitFor(adb_started, wait_period=1, max_tries=5):
+    raise device_errors.CommandFailedError('Failed to start adb server')
+
+
+def _GetTimeStamp():
+  """Return a basic ISO 8601 time stamp with the current local time."""
+  return time.strftime('%Y%m%dT%H%M%S', time.localtime())
+
+
+def _JoinLines(lines):
+  # makes sure that the last line is also terminated, and is more memory
+  # efficient than first appending an end-line to each line and then joining
+  # all of them together.
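+  # For example, _JoinLines(['a', 'b']) == 'a\nb\n'.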
+  return ''.join(s for line in lines for s in (line, '\n'))
+
+
+class DeviceUtils(object):
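+  """Provides a variety of interactions with a single device, based on adb.
+
+  Illustrative construction (the serial below is hypothetical):
+    device = device_utils.DeviceUtils('0123456789abcdef')
+    device.WaitUntilFullyBooted()
+  """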
+
+  _MAX_ADB_COMMAND_LENGTH = 512
+  _MAX_ADB_OUTPUT_LENGTH = 32768
+  _LAUNCHER_FOCUSED_RE = re.compile(
+      r'\s*mCurrentFocus.*(Launcher|launcher).*')
+  _VALID_SHELL_VARIABLE = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*$')
+
+  # Property in /data/local.prop that controls Java assertions.
+  JAVA_ASSERT_PROPERTY = 'dalvik.vm.enableassertions'
+
+  def __init__(self, device, default_timeout=_DEFAULT_TIMEOUT,
+               default_retries=_DEFAULT_RETRIES):
+    """DeviceUtils constructor.
+
+    Args:
+      device: Either a device serial, an existing AdbWrapper instance, or an
+              existing AndroidCommands instance.
+      default_timeout: An integer containing the default number of seconds to
+                       wait for an operation to complete if no explicit value
+                       is provided.
+      default_retries: An integer containing the default number of times an
+                       operation should be retried on failure if no explicit
+                       value is provided.
+    """
+    self.adb = None
+    self.old_interface = None
+    if isinstance(device, basestring):
+      self.adb = adb_wrapper.AdbWrapper(device)
+      self.old_interface = pylib.android_commands.AndroidCommands(device)
+    elif isinstance(device, adb_wrapper.AdbWrapper):
+      self.adb = device
+      self.old_interface = pylib.android_commands.AndroidCommands(str(device))
+    elif isinstance(device, pylib.android_commands.AndroidCommands):
+      self.adb = adb_wrapper.AdbWrapper(device.GetDevice())
+      self.old_interface = device
+    else:
+      raise ValueError('Unsupported device value: %r' % device)
+    self._commands_installed = None
+    self._default_timeout = default_timeout
+    self._default_retries = default_retries
+    self._cache = {}
+    self._client_caches = {}
+    assert hasattr(self, decorators.DEFAULT_TIMEOUT_ATTR)
+    assert hasattr(self, decorators.DEFAULT_RETRIES_ATTR)
+
+  def __eq__(self, other):
+    """Checks whether |other| refers to the same device as |self|.
+
+    Args:
+      other: The object to compare to. This can be a basestring, an instance
+        of adb_wrapper.AdbWrapper, or an instance of DeviceUtils.
+    Returns:
+      Whether |other| refers to the same device as |self|.
+    """
+    return self.adb.GetDeviceSerial() == str(other)
+
+  def __lt__(self, other):
+    """Compares two instances of DeviceUtils.
+
+    This merely compares their serial numbers.
+
+    Args:
+      other: The instance of DeviceUtils to compare to.
+    Returns:
+      Whether |self| is less than |other|.
+    """
+    return self.adb.GetDeviceSerial() < other.adb.GetDeviceSerial()
+
+  def __str__(self):
+    """Returns the device serial."""
+    return self.adb.GetDeviceSerial()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def IsOnline(self, timeout=None, retries=None):
+    """Checks whether the device is online.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device is online, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    try:
+      return self.adb.GetState() == 'device'
+    except base_error.BaseError as exc:
+      logging.info('Failed to get state: %s', exc)
+      return False
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def HasRoot(self, timeout=None, retries=None):
+    """Checks whether or not adbd has root privileges.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if adbd has root privileges, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    try:
+      self.RunShellCommand('ls /root', check_return=True)
+      return True
+    except device_errors.AdbCommandFailedError:
+      return False
+
+  def NeedsSU(self, timeout=DEFAULT, retries=DEFAULT):
+    """Checks whether 'su' is needed to access protected resources.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if 'su' is available on the device and is needed to access
+        protected resources; False otherwise if either 'su' is not available
+        (e.g. because the device has a user build), or not needed (because adbd
+        already has root privileges).
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if 'needs_su' not in self._cache:
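+      # 'su -c ls /root' succeeds only when su is available, while
+      # '! ls /root' succeeds only when the plain (non-root) shell cannot read
+      # /root; the combined command therefore succeeds exactly when su exists
+      # and is actually needed.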
+      try:
+        self.RunShellCommand(
+            'su -c ls /root && ! ls /root', check_return=True,
+            timeout=self._default_timeout if timeout is DEFAULT else timeout,
+            retries=self._default_retries if retries is DEFAULT else retries)
+        self._cache['needs_su'] = True
+      except device_errors.AdbCommandFailedError:
+        self._cache['needs_su'] = False
+    return self._cache['needs_su']
+
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def EnableRoot(self, timeout=None, retries=None):
+    """Restarts adbd with root privileges.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if root could not be enabled.
+      CommandTimeoutError on timeout.
+    """
+    if self.IsUserBuild():
+      raise device_errors.CommandFailedError(
+          'Cannot enable root in user builds.', str(self))
+    if 'needs_su' in self._cache:
+      del self._cache['needs_su']
+    self.adb.Root()
+    self.WaitUntilFullyBooted()
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def IsUserBuild(self, timeout=None, retries=None):
+    """Checks whether or not the device is running a user build.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device is running a user build, False otherwise (i.e. if
+        it's running a userdebug build).
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.build_type == 'user'
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetExternalStoragePath(self, timeout=None, retries=None):
+    """Get the device's path to its SD card.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The device's path to its SD card.
+
+    Raises:
+      CommandFailedError if the external storage path could not be determined.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if 'external_storage' in self._cache:
+      return self._cache['external_storage']
+
+    value = self.RunShellCommand('echo $EXTERNAL_STORAGE',
+                                 single_line=True,
+                                 check_return=True)
+    if not value:
+      raise device_errors.CommandFailedError('$EXTERNAL_STORAGE is not set',
+                                             str(self))
+    self._cache['external_storage'] = value
+    return value
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetApplicationPaths(self, package, timeout=None, retries=None):
+    """Get the paths of the installed apks on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      List of paths to the apks on the device for the given package.
+    """
+    # 'pm path' is liable to incorrectly exit with a nonzero number starting
+    # in Lollipop.
+    # TODO(jbudorick): Check if this is fixed as new Android versions are
+    # released to put an upper bound on this.
+    should_check_return = (self.build_version_sdk <
+                           constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP)
+    output = self.RunShellCommand(
+        ['pm', 'path', package], check_return=should_check_return)
+    apks = []
+    for line in output:
+      if not line.startswith('package:'):
+        raise device_errors.CommandFailedError(
+            'pm path returned: %r' % '\n'.join(output), str(self))
+      apks.append(line[len('package:'):])
+    return apks
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetApplicationDataDirectory(self, package, timeout=None, retries=None):
+    """Get the data directory on the device for the given package.
+
+    Args:
+      package: Name of the package.
+
+    Returns:
+      The package's data directory, or None if the package doesn't exist on the
+      device.
+    """
+    try:
+      output = self._RunPipedShellCommand(
+          'pm dump %s | grep dataDir=' % cmd_helper.SingleQuote(package))
+      for line in output:
+        _, _, dataDir = line.partition('dataDir=')
+        if dataDir:
+          return dataDir
+    except device_errors.CommandFailedError:
+      logging.exception('Could not find data directory for %s', package)
+    return None
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WaitUntilFullyBooted(self, wifi=False, timeout=None, retries=None):
+    """Wait for the device to fully boot.
+
+    This means waiting for the device to boot, the package manager to be
+    available, and the SD card to be ready. It can optionally mean waiting
+    for wifi to come up, too.
+
+    Args:
+      wifi: A boolean indicating if we should wait for wifi to come up or not.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError if one of the component waits times out.
+      DeviceUnreachableError if the device becomes unresponsive.
+    """
+    def sd_card_ready():
+      try:
+        self.RunShellCommand(['test', '-d', self.GetExternalStoragePath()],
+                             check_return=True)
+        return True
+      except device_errors.AdbCommandFailedError:
+        return False
+
+    def pm_ready():
+      try:
+        return self.GetApplicationPaths('android')
+      except device_errors.CommandFailedError:
+        return False
+
+    def boot_completed():
+      return self.GetProp('sys.boot_completed') == '1'
+
+    def wifi_enabled():
+      return 'Wi-Fi is enabled' in self.RunShellCommand(['dumpsys', 'wifi'],
+                                                        check_return=False)
+
+    self.adb.WaitForDevice()
+    timeout_retry.WaitFor(sd_card_ready)
+    timeout_retry.WaitFor(pm_ready)
+    timeout_retry.WaitFor(boot_completed)
+    if wifi:
+      timeout_retry.WaitFor(wifi_enabled)
+
+  REBOOT_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
+  REBOOT_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      REBOOT_DEFAULT_TIMEOUT,
+      REBOOT_DEFAULT_RETRIES)
+  def Reboot(self, block=True, wifi=False, timeout=None, retries=None):
+    """Reboot the device.
+
+    Args:
+      block: A boolean indicating if we should wait for the reboot to complete.
+      wifi: A boolean indicating if we should wait for wifi to be enabled after
+        the reboot. The option has no effect unless |block| is also True.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def device_offline():
+      return not self.IsOnline()
+
+    self.adb.Reboot()
+    self._ClearCache()
+    timeout_retry.WaitFor(device_offline, wait_period=1)
+    if block:
+      self.WaitUntilFullyBooted(wifi=wifi)
+
+  INSTALL_DEFAULT_TIMEOUT = 4 * _DEFAULT_TIMEOUT
+  INSTALL_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      INSTALL_DEFAULT_TIMEOUT,
+      INSTALL_DEFAULT_RETRIES)
+  def Install(self, apk_path, reinstall=False, timeout=None, retries=None):
+    """Install an APK.
+
+    Noop if an identical APK is already installed.
+
+    Args:
+      apk_path: A string containing the path to the APK to install.
+      reinstall: A boolean indicating if we should keep any existing app data.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the installation fails.
+      CommandTimeoutError if the installation times out.
+      DeviceUnreachableError on missing device.
+    """
+    package_name = apk_helper.GetPackageName(apk_path)
+    device_paths = self.GetApplicationPaths(package_name)
+    if device_paths:
+      if len(device_paths) > 1:
+        logging.warning(
+            'Installing single APK (%s) when split APKs (%s) are currently '
+            'installed.', apk_path, ' '.join(device_paths))
+      (files_to_push, _) = self._GetChangedAndStaleFiles(
+          apk_path, device_paths[0])
+      should_install = bool(files_to_push)
+      if should_install and not reinstall:
+        self.adb.Uninstall(package_name)
+    else:
+      should_install = True
+    if should_install:
+      self.adb.Install(apk_path, reinstall=reinstall)
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      INSTALL_DEFAULT_TIMEOUT,
+      INSTALL_DEFAULT_RETRIES)
+  def InstallSplitApk(self, base_apk, split_apks, reinstall=False,
+                      timeout=None, retries=None):
+    """Install a split APK.
+
+    Noop if all of the APK splits are already installed.
+
+    Args:
+      base_apk: A string of the path to the base APK.
+      split_apks: A list of strings of paths of all of the APK splits.
+      reinstall: A boolean indicating if we should keep any existing app data.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the installation fails.
+      CommandTimeoutError if the installation times out.
+      DeviceUnreachableError on missing device.
+      DeviceVersionError if device SDK is less than Android L.
+    """
+    self._CheckSdkLevel(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP)
+
+    all_apks = [base_apk] + split_select.SelectSplits(
+        self, base_apk, split_apks)
+    package_name = apk_helper.GetPackageName(base_apk)
+    device_apk_paths = self.GetApplicationPaths(package_name)
+
+    if device_apk_paths:
+      partial_install_package = package_name
+      device_checksums = md5sum.CalculateDeviceMd5Sums(device_apk_paths, self)
+      host_checksums = md5sum.CalculateHostMd5Sums(all_apks)
+      apks_to_install = [k for (k, v) in host_checksums.iteritems()
+                         if v not in device_checksums.values()]
+      if apks_to_install and not reinstall:
+        self.adb.Uninstall(package_name)
+        partial_install_package = None
+        apks_to_install = all_apks
+    else:
+      partial_install_package = None
+      apks_to_install = all_apks
+    if apks_to_install:
+      self.adb.InstallMultiple(
+          apks_to_install, partial=partial_install_package, reinstall=reinstall)
+
+  def _CheckSdkLevel(self, required_sdk_level):
+    """Raises an exception if the device does not have the required SDK level.
+    """
+    if self.build_version_sdk < required_sdk_level:
+      raise device_errors.DeviceVersionError(
+          ('Requires SDK level %s, device is SDK level %s' %
+           (required_sdk_level, self.build_version_sdk)),
+           device_serial=self.adb.GetDeviceSerial())
+
+
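+  # Illustrative call patterns (device paths are hypothetical):
+  #   device.RunShellCommand(['ls', '/sdcard'], check_return=True)
+  #     -> output as a list of lines, with argument quoting handled for the
+  #        caller.
+  #   device.RunShellCommand('ls /sdcard | wc -l', single_line=True)
+  #     -> the string is interpreted by the device shell; returns the single
+  #        output line.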
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def RunShellCommand(self, cmd, check_return=False, cwd=None, env=None,
+                      as_root=False, single_line=False, large_output=False,
+                      timeout=None, retries=None):
+    """Run an ADB shell command.
+
+    The command to run |cmd| should be a sequence of program arguments or else
+    a single string.
+
+    When |cmd| is a sequence, it is assumed to contain the name of the command
+    to run followed by its arguments. In this case, arguments are passed to the
+    command exactly as given, without any further processing by the shell. This
+    makes it easy to pass arguments containing spaces or special characters
+    without having to worry about getting quoting right. Whenever possible, it
+    is recommended to pass |cmd| as a sequence.
+
+    When |cmd| is given as a string, it will be interpreted and run by the
+    shell on the device.
+
+    This behaviour is consistent with that of command runners in cmd_helper as
+    well as Python's own subprocess.Popen.
+
+    TODO(perezju) Change the default of |check_return| to True when callers
+      have switched to the new behaviour.
+
+    Args:
+      cmd: A string with the full command to run on the device, or a sequence
+        containing the command and its arguments.
+      check_return: A boolean indicating whether or not the return code should
+        be checked.
+      cwd: The device directory in which the command should be run.
+      env: The environment variables with which the command should be run.
+      as_root: A boolean indicating whether the shell command should be run
+        with root privileges.
+      single_line: A boolean indicating if only a single line of output is
+        expected.
+      large_output: Uses a work-around for large shell command output. Without
+        this, large output will be truncated.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      If single_line is False, the output of the command as a list of lines,
+      otherwise, a string with the single line of output emitted by the command
+      (with the optional newline at the end stripped).
+
+    Raises:
+      AdbCommandFailedError if check_return is True and the exit code of
+        the command run on the device is non-zero.
+      CommandFailedError if single_line is True but the output contains two or
+        more lines.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def env_quote(key, value):
+      if not DeviceUtils._VALID_SHELL_VARIABLE.match(key):
+        raise KeyError('Invalid shell variable name %r' % key)
+      # using double quotes here to allow interpolation of shell variables
+      return '%s=%s' % (key, cmd_helper.DoubleQuote(value))
+
+    def run(cmd):
+      return self.adb.Shell(cmd)
+
+    def handle_check_return(cmd):
+      try:
+        return run(cmd)
+      except device_errors.AdbCommandFailedError as exc:
+        if check_return:
+          raise
+        else:
+          return exc.output
+
+    def handle_large_command(cmd):
+      if len(cmd) < self._MAX_ADB_COMMAND_LENGTH:
+        return handle_check_return(cmd)
+      else:
+        with device_temp_file.DeviceTempFile(self.adb, suffix='.sh') as script:
+          self._WriteFileWithPush(script.name, cmd)
+          logging.info('Large shell command will be run from file: %s ...',
+                       cmd[:100])
+          return handle_check_return('sh %s' % script.name_quoted)
+
+    def handle_large_output(cmd, large_output_mode):
+      if large_output_mode:
+        with device_temp_file.DeviceTempFile(self.adb) as large_output_file:
+          cmd = '%s > %s' % (cmd, large_output_file.name)
+          logging.debug('Large output mode enabled. Will write output to '
+                        'device and read results from file.')
+          handle_large_command(cmd)
+          return self.ReadFile(large_output_file.name, force_pull=True)
+      else:
+        try:
+          return handle_large_command(cmd)
+        except device_errors.AdbCommandFailedError as exc:
+          if exc.status is None:
+            logging.exception('No output found for %s', cmd)
+            logging.warning('Attempting to run in large_output mode.')
+            logging.warning('Use RunShellCommand(..., large_output=True) for '
+                            'shell commands that expect a lot of output.')
+            return handle_large_output(cmd, True)
+          else:
+            raise
+
+    if not isinstance(cmd, basestring):
+      cmd = ' '.join(cmd_helper.SingleQuote(s) for s in cmd)
+    if env:
+      env = ' '.join(env_quote(k, v) for k, v in env.iteritems())
+      cmd = '%s %s' % (env, cmd)
+    if cwd:
+      cmd = 'cd %s && %s' % (cmd_helper.SingleQuote(cwd), cmd)
+    if as_root and self.NeedsSU():
+      # "su -c sh -c" allows using shell features in |cmd|
+      cmd = 'su -c sh -c %s' % cmd_helper.SingleQuote(cmd)
+
+    output = handle_large_output(cmd, large_output).splitlines()
+
+    if single_line:
+      if not output:
+        return ''
+      elif len(output) == 1:
+        return output[0]
+      else:
+        msg = 'one line of output was expected, but got: %s'
+        raise device_errors.CommandFailedError(msg % output, str(self))
+    else:
+      return output
+
+  def _RunPipedShellCommand(self, script, **kwargs):
+    """Runs |script| in the device shell, checking every pipe exit status."""
+    PIPESTATUS_LEADER = 'PIPESTATUS: '
+
+    script += '; echo "%s${PIPESTATUS[@]}"' % PIPESTATUS_LEADER
+    kwargs['check_return'] = True
+    output = self.RunShellCommand(script, **kwargs)
+    pipestatus_line = output[-1]
+
+    if not pipestatus_line.startswith(PIPESTATUS_LEADER):
+      logging.error('Pipe exit statuses of shell script missing.')
+      raise device_errors.AdbShellCommandFailedError(
+          script, output, status=None,
+          device_serial=self.adb.GetDeviceSerial())
+
+    output = output[:-1]
+    statuses = [
+        int(s) for s in pipestatus_line[len(PIPESTATUS_LEADER):].split()]
+    if any(statuses):
+      raise device_errors.AdbShellCommandFailedError(
+          script, output, status=statuses,
+          device_serial=self.adb.GetDeviceSerial())
+    return output
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def KillAll(self, process_name, signum=device_signal.SIGKILL, as_root=False,
+              blocking=False, quiet=False, timeout=None, retries=None):
+    """Kill all processes with the given name on the device.
+
+    Args:
+      process_name: A string containing the name of the process to kill.
+      signum: An integer containing the signal number to send to kill. Defaults
+              to SIGKILL (9).
+      as_root: A boolean indicating whether the kill should be executed with
+               root privileges.
+      blocking: A boolean indicating whether we should wait until all processes
+                with the given |process_name| are dead.
+      quiet: A boolean indicating whether to ignore the fact that no processes
+             to kill were found.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The number of processes attempted to kill.
+
+    Raises:
+      CommandFailedError if no process was killed and |quiet| is False.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    pids = self.GetPids(process_name)
+    if not pids:
+      if quiet:
+        return 0
+      else:
+        raise device_errors.CommandFailedError(
+            'No process "%s"' % process_name, str(self))
+
+    cmd = ['kill', '-%d' % signum] + pids.values()
+    self.RunShellCommand(cmd, as_root=as_root, check_return=True)
+
+    if blocking:
+      # TODO(perezu): use timeout_retry.WaitFor
+      wait_period = 0.1
+      while self.GetPids(process_name):
+        time.sleep(wait_period)
+
+    return len(pids)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def StartActivity(self, intent_obj, blocking=False, trace_file_name=None,
+                    force_stop=False, timeout=None, retries=None):
+    """Start package's activity on the device.
+
+    Args:
+      intent_obj: An Intent object to send.
+      blocking: A boolean indicating whether we should wait for the activity to
+                finish launching.
+      trace_file_name: If present, a string that both indicates that we want to
+                       profile the activity and contains the path to which the
+                       trace should be saved.
+      force_stop: A boolean indicating whether we should stop the activity
+                  before starting it.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the activity could not be started.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    cmd = ['am', 'start']
+    if blocking:
+      cmd.append('-W')
+    if trace_file_name:
+      cmd.extend(['--start-profiler', trace_file_name])
+    if force_stop:
+      cmd.append('-S')
+    cmd.extend(intent_obj.am_args)
+    for line in self.RunShellCommand(cmd, check_return=True):
+      if line.startswith('Error:'):
+        raise device_errors.CommandFailedError(line, str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def StartInstrumentation(self, component, finish=True, raw=False,
+                           extras=None, timeout=None, retries=None):
+    if extras is None:
+      extras = {}
+
+    cmd = ['am', 'instrument']
+    if finish:
+      cmd.append('-w')
+    if raw:
+      cmd.append('-r')
+    for k, v in extras.iteritems():
+      cmd.extend(['-e', str(k), str(v)])
+    cmd.append(component)
+    return self.RunShellCommand(cmd, check_return=True, large_output=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def BroadcastIntent(self, intent_obj, timeout=None, retries=None):
+    """Send a broadcast intent.
+
+    Args:
+      intent_obj: An Intent to broadcast.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    cmd = ['am', 'broadcast'] + intent_obj.am_args
+    self.RunShellCommand(cmd, check_return=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GoHome(self, timeout=None, retries=None):
+    """Return to the home screen and obtain launcher focus.
+
+    This command launches the home screen and attempts to obtain
+    launcher focus until the timeout is reached.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def is_launcher_focused():
+      output = self.RunShellCommand(['dumpsys', 'window', 'windows'],
+                                    check_return=True, large_output=True)
+      return any(self._LAUNCHER_FOCUSED_RE.match(l) for l in output)
+
+    def dismiss_popups():
+      # There is a dialog present; attempt to get rid of it.
+      # Not all dialogs can be dismissed with back.
+      self.SendKeyEvent(keyevent.KEYCODE_ENTER)
+      self.SendKeyEvent(keyevent.KEYCODE_BACK)
+      return is_launcher_focused()
+
+    # If Home is already focused, return early to avoid unnecessary work.
+    if is_launcher_focused():
+      return
+
+    self.StartActivity(
+        intent.Intent(action='android.intent.action.MAIN',
+                      category='android.intent.category.HOME'),
+        blocking=True)
+
+    if not is_launcher_focused():
+      timeout_retry.WaitFor(dismiss_popups, wait_period=1)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ForceStop(self, package, timeout=None, retries=None):
+    """Close the application.
+
+    Args:
+      package: A string containing the name of the package to stop.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.RunShellCommand(['am', 'force-stop', package], check_return=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ClearApplicationState(self, package, timeout=None, retries=None):
+    """Clear all state for the given package.
+
+    Args:
+      package: A string containing the name of the package to stop.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    # Check that the package exists before clearing it for Android builds below
+    # JB MR2. Necessary because calling pm clear on a package that doesn't exist
+    # may never return.
+    if ((self.build_version_sdk >=
+         constants.ANDROID_SDK_VERSION_CODES.JELLY_BEAN_MR2)
+        or self.GetApplicationPaths(package)):
+      self.RunShellCommand(['pm', 'clear', package], check_return=True)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SendKeyEvent(self, keycode, timeout=None, retries=None):
+    """Sends a keycode to the device.
+
+    See the pylib.constants.keyevent module for suitable keycode values.
+
+    Args:
+      keycode: An integer keycode to send to the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    self.RunShellCommand(['input', 'keyevent', format(keycode, 'd')],
+                         check_return=True)
+
+  PUSH_CHANGED_FILES_DEFAULT_TIMEOUT = 10 * _DEFAULT_TIMEOUT
+  PUSH_CHANGED_FILES_DEFAULT_RETRIES = _DEFAULT_RETRIES
+
+  @decorators.WithTimeoutAndRetriesDefaults(
+      PUSH_CHANGED_FILES_DEFAULT_TIMEOUT,
+      PUSH_CHANGED_FILES_DEFAULT_RETRIES)
+  def PushChangedFiles(self, host_device_tuples, timeout=None,
+                       retries=None, delete_device_stale=False):
+    """Push files to the device, skipping files that don't need updating.
+
+    When a directory is pushed, it is traversed recursively on the host and
+    all files in it are pushed to the device as needed.
+    Additionally, if delete_device_stale option is True,
+    files that exist on the device but don't exist on the host are deleted.
+
+    Args:
+      host_device_tuples: A list of (host_path, device_path) tuples, where
+        |host_path| is an absolute path of a file or directory on the host
+        that should be minimally pushed to the device, and |device_path| is
+        an absolute path of the destination on the device.
+      timeout: timeout in seconds
+      retries: number of retries
+      delete_device_stale: option to delete stale files on device
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+
+    all_changed_files = []
+    all_stale_files = []
+    for h, d in host_device_tuples:
+      if os.path.isdir(h):
+        self.RunShellCommand(['mkdir', '-p', d], check_return=True)
+      changed_files, stale_files = (
+          self._GetChangedAndStaleFiles(h, d, delete_device_stale))
+      all_changed_files += changed_files
+      all_stale_files += stale_files
+
+    if delete_device_stale:
+      self.RunShellCommand(['rm', '-f'] + all_stale_files,
+                             check_return=True)
+
+    if not all_changed_files:
+      return
+
+    self._PushFilesImpl(host_device_tuples, all_changed_files)
+
+  def _GetChangedAndStaleFiles(self, host_path, device_path, track_stale=False):
+    """Get files to push and delete
+
+    Args:
+      host_path: an absolute path of a file or directory on the host
+      device_path: an absolute path of a file or directory on the device
+      track_stale: whether to bother looking for stale files (slower)
+
+    Returns:
+      a two-element tuple
+      1st element: a list of (host_files_path, device_files_path) tuples to push
+      2nd element: a list of stale files under device_path, or [] when
+        track_stale == False
+    """
+    real_host_path = os.path.realpath(host_path)
+    try:
+      real_device_path = self.RunShellCommand(
+          ['realpath', device_path], single_line=True, check_return=True)
+    except device_errors.CommandFailedError:
+      real_device_path = None
+    if not real_device_path:
+      return ([(host_path, device_path)], [])
+
+    try:
+      host_checksums = md5sum.CalculateHostMd5Sums([real_host_path])
+      interesting_device_paths = [real_device_path]
+      if not track_stale and os.path.isdir(real_host_path):
+        interesting_device_paths = [
+            posixpath.join(real_device_path, os.path.relpath(p, real_host_path))
+            for p in host_checksums.keys()]
+      device_checksums = md5sum.CalculateDeviceMd5Sums(
+          interesting_device_paths, self)
+    except EnvironmentError as e:
+      logging.warning('Error calculating md5: %s', e)
+      return ([(host_path, device_path)], [])
+
+    if os.path.isfile(host_path):
+      host_checksum = host_checksums.get(real_host_path)
+      device_checksum = device_checksums.get(real_device_path)
+      if host_checksum != device_checksum:
+        return ([(host_path, device_path)], [])
+      else:
+        return ([], [])
+    else:
+      to_push = []
+      for host_abs_path, host_checksum in host_checksums.iteritems():
+        device_abs_path = '%s/%s' % (
+            real_device_path, os.path.relpath(host_abs_path, real_host_path))
+        device_checksum = device_checksums.pop(device_abs_path, None)
+        if device_checksum != host_checksum:
+          to_push.append((host_abs_path, device_abs_path))
+      to_delete = device_checksums.keys()
+      return (to_push, to_delete)
+
+  def _PushFilesImpl(self, host_device_tuples, files):
+    size = sum(host_utils.GetRecursiveDiskUsage(h) for h, _ in files)
+    file_count = len(files)
+    dir_size = sum(host_utils.GetRecursiveDiskUsage(h)
+                   for h, _ in host_device_tuples)
+    dir_file_count = 0
+    for h, _ in host_device_tuples:
+      if os.path.isdir(h):
+        dir_file_count += sum(len(f) for _r, _d, f in os.walk(h))
+      else:
+        dir_file_count += 1
+
+    push_duration = self._ApproximateDuration(
+        file_count, file_count, size, False)
+    dir_push_duration = self._ApproximateDuration(
+        len(host_device_tuples), dir_file_count, dir_size, False)
+    zip_duration = self._ApproximateDuration(1, 1, size, True)
+
+    self._InstallCommands()
+
+    if dir_push_duration < push_duration and (
+        dir_push_duration < zip_duration or not self._commands_installed):
+      self._PushChangedFilesIndividually(host_device_tuples)
+    elif push_duration < zip_duration or not self._commands_installed:
+      self._PushChangedFilesIndividually(files)
+    else:
+      self._PushChangedFilesZipped(files)
+      self.RunShellCommand(
+          ['chmod', '-R', '777'] + [d for _, d in host_device_tuples],
+          as_root=True, check_return=True)
+
+  def _InstallCommands(self):
+    if self._commands_installed is None:
+      try:
+        if not install_commands.Installed(self):
+          install_commands.InstallCommands(self)
+        self._commands_installed = True
+      except Exception as e:
+        logging.warning('unzip not available: %s' % str(e))
+        self._commands_installed = False
+
+  @staticmethod
+  def _ApproximateDuration(adb_calls, file_count, byte_count, is_zipping):
+    # We approximate the time to push a set of files to a device as:
+    #   t = c1 * a + c2 * f + c3 + b / c4 + b / (c5 * c6), where
+    #     t: total time (sec)
+    #     c1: adb call time delay (sec)
+    #     a: number of times adb is called (unitless)
+    #     c2: push time delay (sec)
+    #     f: number of files pushed via adb (unitless)
+    #     c3: zip time delay (sec)
+    #     c4: zip rate (bytes/sec)
+    #     b: total number of bytes (bytes)
+    #     c5: transfer rate (bytes/sec)
+    #     c6: compression ratio (unitless)
+
+    # All of these are approximations.
+    ADB_CALL_PENALTY = 0.1 # seconds
+    ADB_PUSH_PENALTY = 0.01 # seconds
+    ZIP_PENALTY = 2.0 # seconds
+    ZIP_RATE = 10000000.0 # bytes / second
+    TRANSFER_RATE = 2000000.0 # bytes / second
+    COMPRESSION_RATIO = 2.0 # unitless
+
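+    # Worked example (illustrative numbers): pushing 10 files totalling 1 MB
+    # individually costs roughly 0.1*10 + 0.01*10 + 1e6/2e6 = 1.6 s, while
+    # zipping them first costs roughly
+    # 0.1 + 0.01 + (2.0 + 1e6/1e7) + 1e6/(2e6*2.0) = 2.46 s.
+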
+    adb_call_time = ADB_CALL_PENALTY * adb_calls
+    adb_push_setup_time = ADB_PUSH_PENALTY * file_count
+    if is_zipping:
+      zip_time = ZIP_PENALTY + byte_count / ZIP_RATE
+      transfer_time = byte_count / (TRANSFER_RATE * COMPRESSION_RATIO)
+    else:
+      zip_time = 0
+      transfer_time = byte_count / TRANSFER_RATE
+    return adb_call_time + adb_push_setup_time + zip_time + transfer_time
+
+  def _PushChangedFilesIndividually(self, files):
+    for h, d in files:
+      self.adb.Push(h, d)
+
+  def _PushChangedFilesZipped(self, files):
+    if not files:
+      return
+
+    with tempfile.NamedTemporaryFile(suffix='.zip') as zip_file:
+      zip_proc = multiprocessing.Process(
+          target=DeviceUtils._CreateDeviceZip,
+          args=(zip_file.name, files))
+      zip_proc.start()
+      zip_proc.join()
+
+      zip_on_device = '%s/tmp.zip' % self.GetExternalStoragePath()
+      try:
+        self.adb.Push(zip_file.name, zip_on_device)
+        self.RunShellCommand(
+            ['unzip', zip_on_device],
+            as_root=True,
+            env={'PATH': '%s:$PATH' % install_commands.BIN_DIR},
+            check_return=True)
+      finally:
+        if zip_proc.is_alive():
+          zip_proc.terminate()
+        if self.IsOnline():
+          self.RunShellCommand(['rm', zip_on_device], check_return=True)
+
+  @staticmethod
+  def _CreateDeviceZip(zip_path, host_device_tuples):
+    with zipfile.ZipFile(zip_path, 'w') as zip_file:
+      for host_path, device_path in host_device_tuples:
+        zip_utils.WriteToZipFile(zip_file, host_path, device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def FileExists(self, device_path, timeout=None, retries=None):
+    """Checks whether the given file exists on the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file on the
+                   device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the file exists on the device, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    try:
+      self.RunShellCommand(['test', '-e', device_path], check_return=True)
+      return True
+    except device_errors.AdbCommandFailedError:
+      return False
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def PullFile(self, device_path, host_path, timeout=None, retries=None):
+    """Pull a file from the device.
+
+    Args:
+      device_path: A string containing the absolute path of the file to pull
+                   from the device.
+      host_path: A string containing the absolute path of the destination on
+                 the host.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+    """
+    # Create the base dir if it doesn't exist already
+    dirname = os.path.dirname(host_path)
+    if dirname and not os.path.exists(dirname):
+      os.makedirs(dirname)
+    self.adb.Pull(device_path, host_path)
+
+  def _ReadFileWithPull(self, device_path):
+    # Create the temp dir before the try so |d| is defined in the finally block.
+    d = tempfile.mkdtemp()
+    try:
+      host_temp_path = os.path.join(d, 'tmp_ReadFileWithPull')
+      self.adb.Pull(device_path, host_temp_path)
+      with open(host_temp_path, 'r') as host_temp:
+        return host_temp.read()
+    finally:
+      if os.path.exists(d):
+        shutil.rmtree(d)
+
+  _LS_RE = re.compile(
+      r'(?P<perms>\S+) +(?P<owner>\S+) +(?P<group>\S+) +(?:(?P<size>\d+) +)?'
+      + r'(?P<date>\S+) +(?P<time>\S+) +(?P<name>.+)$')
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def ReadFile(self, device_path, as_root=False, force_pull=False,
+               timeout=None, retries=None):
+    """Reads the contents of a file from the device.
+
+    Args:
+      device_path: A string containing the absolute path of the file to read
+                   from the device.
+      as_root: A boolean indicating whether the read should be executed with
+               root privileges.
+      force_pull: A boolean indicating whether to force the operation to be
+          performed by pulling a file from the device. The default is, when the
+          contents are short, to retrieve the contents using cat instead.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The contents of |device_path| as a string. Contents are interpreted using
+      universal newlines, so the caller will see them encoded as '\n'. Also,
+      all lines will be terminated.
+
+    Raises:
+      AdbCommandFailedError if the file can't be read.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    def get_size(path):
+      # TODO(jbudorick): Implement a generic version of Stat() that handles
+      # as_root=True, then switch this implementation to use that.
+      ls_out = self.RunShellCommand(['ls', '-l', device_path], as_root=as_root,
+                                    check_return=True)
+      for line in ls_out:
+        m = self._LS_RE.match(line)
+        if m and m.group('name') == posixpath.basename(device_path):
+          return int(m.group('size'))
+      logging.warning('Could not determine size of %s.', device_path)
+      return None
+
+    if (not force_pull
+        and 0 < get_size(device_path) <= self._MAX_ADB_OUTPUT_LENGTH):
+      return _JoinLines(self.RunShellCommand(
+          ['cat', device_path], as_root=as_root, check_return=True))
+    elif as_root and self.NeedsSU():
+      with device_temp_file.DeviceTempFile(self.adb) as device_temp:
+        self.RunShellCommand(['cp', device_path, device_temp.name],
+                             as_root=True, check_return=True)
+        return self._ReadFileWithPull(device_temp.name)
+    else:
+      return self._ReadFileWithPull(device_path)
+
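+  # Example use of ReadFile (illustrative; the serial and paths below are
+  # placeholders): small files are fetched with cat, while large files, or
+  # root-only files on devices that need su, are pulled via a host temp file.
+  #
+  #   device = DeviceUtils('0123456789abcdef')
+  #   flags = device.ReadFile('/data/local/tmp/example-flags')
+  #   props = device.ReadFile('/data/local.prop', as_root=True)
+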
+  def _WriteFileWithPush(self, device_path, contents):
+    with tempfile.NamedTemporaryFile() as host_temp:
+      host_temp.write(contents)
+      host_temp.flush()
+      self.adb.Push(host_temp.name, device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def WriteFile(self, device_path, contents, as_root=False, force_push=False,
+                timeout=None, retries=None):
+    """Writes |contents| to a file on the device.
+
+    Args:
+      device_path: A string containing the absolute path to the file to write
+          on the device.
+      contents: A string containing the data to write to the device.
+      as_root: A boolean indicating whether the write should be executed with
+          root privileges (if available).
+      force_push: A boolean indicating whether to force the operation to be
+          performed by pushing a file to the device. The default is, when the
+          contents are short, to pass the contents using a shell script instead.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if the file could not be written on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if not force_push and len(contents) < self._MAX_ADB_COMMAND_LENGTH:
+      # If the contents are small, for efficiency we write the contents with
+      # a shell command rather than pushing a file.
+      cmd = 'echo -n %s > %s' % (cmd_helper.SingleQuote(contents),
+                                 cmd_helper.SingleQuote(device_path))
+      self.RunShellCommand(cmd, as_root=as_root, check_return=True)
+    elif as_root and self.NeedsSU():
+      # Adb does not allow to "push with su", so we first push to a temp file
+      # on a safe location, and then copy it to the desired location with su.
+      with device_temp_file.DeviceTempFile(self.adb) as device_temp:
+        self._WriteFileWithPush(device_temp.name, contents)
+        # Here we need 'cp' rather than 'mv' because the temp and
+        # destination files might be on different file systems (e.g.
+        # on internal storage and an external sd card).
+        self.RunShellCommand(['cp', device_temp.name, device_path],
+                             as_root=True, check_return=True)
+    else:
+      # If root is not needed, we can push directly to the desired location.
+      self._WriteFileWithPush(device_path, contents)
+
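+  # Example use of WriteFile (illustrative; paths and contents are
+  # placeholders): short contents are written with an echo -n shell command,
+  # while long contents, or root-only destinations on devices that need su,
+  # are pushed via a temp file.
+  #
+  #   device.WriteFile('/data/local/tmp/example-flags', '--verbose')
+  #   device.WriteFile('/data/local.prop', 'debug.example=1\n', as_root=True)
+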
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def Ls(self, device_path, timeout=None, retries=None):
+    """Lists the contents of a directory on the device.
+
+    Args:
+      device_path: A string containing the path of the directory on the device
+                   to list.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A list of pairs (filename, stat) for each file found in the directory,
+      where the stat object has the properties: st_mode, st_size, and st_time.
+
+    Raises:
+      AdbCommandFailedError if |device_path| does not specify a valid and
+          accessible directory on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    return self.adb.Ls(device_path)
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def Stat(self, device_path, timeout=None, retries=None):
+    """Get the stat attributes of a file or directory on the device.
+
+    Args:
+      device_path: A string containing the path on the device from which to
+                   get the attributes.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A stat object with the properties: st_mode, st_size, and st_time
+
+    Raises:
+      CommandFailedError if device_path cannot be found on the device.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    dirname, target = device_path.rsplit('/', 1)
+    for filename, stat in self.adb.Ls(dirname):
+      if filename == target:
+        return stat
+    raise device_errors.CommandFailedError(
+        'Cannot find file or directory: %r' % device_path, str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetJavaAsserts(self, enabled, timeout=None, retries=None):
+    """Enables or disables Java asserts.
+
+    Args:
+      enabled: A boolean indicating whether Java asserts should be enabled
+               or disabled.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      True if the device-side property changed and a restart is required as a
+      result, False otherwise.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    def find_property(lines, property_name):
+      for index, line in enumerate(lines):
+        if line.strip() == '':
+          continue
+        key, value = (s.strip() for s in line.split('=', 1))
+        if key == property_name:
+          return index, value
+      return None, ''
+
+    new_value = 'all' if enabled else ''
+
+    # First ensure the desired property is persisted.
+    try:
+      properties = self.ReadFile(
+          constants.DEVICE_LOCAL_PROPERTIES_PATH).splitlines()
+    except device_errors.CommandFailedError:
+      properties = []
+    index, value = find_property(properties, self.JAVA_ASSERT_PROPERTY)
+    if new_value != value:
+      if new_value:
+        new_line = '%s=%s' % (self.JAVA_ASSERT_PROPERTY, new_value)
+        if index is None:
+          properties.append(new_line)
+        else:
+          properties[index] = new_line
+      else:
+        assert index is not None # since new_value == '' and new_value != value
+        properties.pop(index)
+      self.WriteFile(constants.DEVICE_LOCAL_PROPERTIES_PATH,
+                     _JoinLines(properties))
+
+    # Next, check the current runtime value is what we need, and
+    # if not, set it and report that a reboot is required.
+    value = self.GetProp(self.JAVA_ASSERT_PROPERTY)
+    if new_value != value:
+      self.SetProp(self.JAVA_ASSERT_PROPERTY, new_value)
+      return True
+    else:
+      return False
+
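+  # Example use of SetJavaAsserts (illustrative): the persisted property only
+  # takes effect after the runtime restarts. One option, assuming a rooted
+  # device, is restarting the framework with stop/start:
+  #
+  #   if device.SetJavaAsserts(True):
+  #     device.RunShellCommand(['stop'], as_root=True, check_return=True)
+  #     device.RunShellCommand(['start'], as_root=True, check_return=True)
+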
+  @property
+  def language(self):
+    """Returns the language setting on the device."""
+    return self.GetProp('persist.sys.language', cache=False)
+
+  @property
+  def country(self):
+    """Returns the country setting on the device."""
+    return self.GetProp('persist.sys.country', cache=False)
+
+  @property
+  def screen_density(self):
+    """Returns the screen density of the device."""
+    DPI_TO_DENSITY = {
+      120: 'ldpi',
+      160: 'mdpi',
+      240: 'hdpi',
+      320: 'xhdpi',
+      480: 'xxhdpi',
+      640: 'xxxhdpi',
+    }
+    dpi = int(self.GetProp('ro.sf.lcd_density', cache=True))
+    return DPI_TO_DENSITY.get(dpi, 'tvdpi')
+
+  @property
+  def build_description(self):
+    """Returns the build description of the system.
+
+    For example:
+      nakasi-user 4.4.4 KTU84P 1227136 release-keys
+    """
+    return self.GetProp('ro.build.description', cache=True)
+
+  @property
+  def build_fingerprint(self):
+    """Returns the build fingerprint of the system.
+
+    For example:
+      google/nakasi/grouper:4.4.4/KTU84P/1227136:user/release-keys
+    """
+    return self.GetProp('ro.build.fingerprint', cache=True)
+
+  @property
+  def build_id(self):
+    """Returns the build ID of the system (e.g. 'KTU84P')."""
+    return self.GetProp('ro.build.id', cache=True)
+
+  @property
+  def build_product(self):
+    """Returns the build product of the system (e.g. 'grouper')."""
+    return self.GetProp('ro.build.product', cache=True)
+
+  @property
+  def build_type(self):
+    """Returns the build type of the system (e.g. 'user')."""
+    return self.GetProp('ro.build.type', cache=True)
+
+  @property
+  def build_version_sdk(self):
+    """Returns the build version sdk of the system as a number (e.g. 19).
+
+    For version code numbers see:
+    http://developer.android.com/reference/android/os/Build.VERSION_CODES.html
+
+    For named constants see:
+    pylib.constants.ANDROID_SDK_VERSION_CODES
+
+    Raises:
+      CommandFailedError if the build version sdk is not a number.
+    """
+    value = self.GetProp('ro.build.version.sdk', cache=True)
+    try:
+      return int(value)
+    except ValueError:
+      raise device_errors.CommandFailedError(
+          'Invalid build version sdk: %r' % value)
+
+  @property
+  def product_cpu_abi(self):
+    """Returns the product cpu abi of the device (e.g. 'armeabi-v7a')."""
+    return self.GetProp('ro.product.cpu.abi', cache=True)
+
+  @property
+  def product_model(self):
+    """Returns the name of the product model (e.g. 'Nexus 7')."""
+    return self.GetProp('ro.product.model', cache=True)
+
+  @property
+  def product_name(self):
+    """Returns the product name of the device (e.g. 'nakasi')."""
+    return self.GetProp('ro.product.name', cache=True)
+
+  def GetProp(self, property_name, cache=False, timeout=DEFAULT,
+              retries=DEFAULT):
+    """Gets a property from the device.
+
+    Args:
+      property_name: A string containing the name of the property to get from
+                     the device.
+      cache: A boolean indicating whether to cache the value of this property.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The value of the device's |property_name| property.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    assert isinstance(property_name, basestring), (
+        "property_name is not a string: %r" % property_name)
+
+    cache_key = '_prop:' + property_name
+    if cache and cache_key in self._cache:
+      return self._cache[cache_key]
+    else:
+      # timeout and retries are handled down in RunShellCommand because we
+      # don't want to apply them to the cache-reading branch above.
+      value = self.RunShellCommand(
+          ['getprop', property_name], single_line=True, check_return=True,
+          timeout=self._default_timeout if timeout is DEFAULT else timeout,
+          retries=self._default_retries if retries is DEFAULT else retries)
+      if cache or cache_key in self._cache:
+        self._cache[cache_key] = value
+      return value
+
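+  # Example use of GetProp (illustrative): read-only ro.* properties are safe
+  # to cache, while volatile properties should bypass the cache. Note that a
+  # fresh read also refreshes a value that is already cached.
+  #
+  #   sdk_level = int(device.GetProp('ro.build.version.sdk', cache=True))
+  #   booted = device.GetProp('sys.boot_completed') == '1'
+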
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def SetProp(self, property_name, value, check=False, timeout=None,
+              retries=None):
+    """Sets a property on the device.
+
+    Args:
+      property_name: A string containing the name of the property to set on
+                     the device.
+      value: A string containing the value to set to the property on the
+             device.
+      check: A boolean indicating whether to check that the property was
+             successfully set on the device.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Raises:
+      CommandFailedError if check is true and the property was not correctly
+        set on the device (e.g. because it is not rooted).
+      CommandTimeoutError on timeout.
+    """
+    assert isinstance(property_name, basestring), (
+        "property_name is not a string: %r" % property_name)
+    assert isinstance(value, basestring), "value is not a string: %r" % value
+
+    self.RunShellCommand(['setprop', property_name, value], check_return=True)
+    if property_name in self._cache:
+      del self._cache[property_name]
+    # TODO(perezju): Remove the option and make the check mandatory, but use a
+    # single shell script to run both setprop and getprop.
+    if check and value != self.GetProp(property_name):
+      raise device_errors.CommandFailedError(
+          'Unable to set property %r on the device to %r'
+          % (property_name, value), str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetABI(self, timeout=None, retries=None):
+    """Gets the device main ABI.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The device's main ABI name.
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    return self.GetProp('ro.product.cpu.abi')
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetPids(self, process_name, timeout=None, retries=None):
+    """Returns the PIDs of processes with the given name.
+
+    Note that the |process_name| is often the package name.
+
+    Args:
+      process_name: A string containing the process name to get the PIDs for.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A dict mapping process name to PID for each process that contained the
+      provided |process_name|.
+
+    Raises:
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    procs_pids = {}
+    try:
+      ps_output = self._RunPipedShellCommand(
+          'ps | grep -F %s' % cmd_helper.SingleQuote(process_name))
+    except device_errors.AdbShellCommandFailedError as e:
+      if e.status and isinstance(e.status, list) and not e.status[0]:
+        # If ps succeeded but grep failed, there were no processes with the
+        # given name.
+        return procs_pids
+      else:
+        raise
+
+    for line in ps_output:
+      try:
+        ps_data = line.split()
+        if process_name in ps_data[-1]:
+          procs_pids[ps_data[-1]] = ps_data[1]
+      except IndexError:
+        pass
+    return procs_pids
+
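+  # Example use of GetPids (illustrative; the package name is a placeholder):
+  # the result maps each matching process name to its PID, so a package with
+  # several processes yields several entries.
+  #
+  #   pids = device.GetPids('com.example.app')
+  #   # e.g. {'com.example.app': '1234', 'com.example.app:remote': '1301'}
+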
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def TakeScreenshot(self, host_path=None, timeout=None, retries=None):
+    """Takes a screenshot of the device.
+
+    Args:
+      host_path: A string containing the path on the host to save the
+                 screenshot to. If None, a file name in the current
+                 directory will be generated.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      The name of the file on the host to which the screenshot was saved.
+
+    Raises:
+      CommandFailedError on failure.
+      CommandTimeoutError on timeout.
+      DeviceUnreachableError on missing device.
+    """
+    if not host_path:
+      host_path = os.path.abspath('screenshot-%s.png' % _GetTimeStamp())
+    with device_temp_file.DeviceTempFile(self.adb, suffix='.png') as device_tmp:
+      self.RunShellCommand(['/system/bin/screencap', '-p', device_tmp.name],
+                           check_return=True)
+      self.PullFile(device_tmp.name, host_path)
+    return host_path
+
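+  # Example use of TakeScreenshot (illustrative): with no argument, the PNG is
+  # saved to a timestamped file in the current directory and its path is
+  # returned.
+  #
+  #   saved_path = device.TakeScreenshot()
+  #   device.TakeScreenshot('/tmp/before-test.png')
+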
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetMemoryUsageForPid(self, pid, timeout=None, retries=None):
+    """Gets the memory usage for the given PID.
+
+    Args:
+      pid: PID of the process.
+      timeout: timeout in seconds
+      retries: number of retries
+
+    Returns:
+      A dict containing memory usage statistics for the PID. May include:
+        Size, Rss, Pss, Shared_Clean, Shared_Dirty, Private_Clean,
+        Private_Dirty, VmHWM
+
+    Raises:
+      CommandTimeoutError on timeout.
+    """
+    result = collections.defaultdict(int)
+
+    try:
+      result.update(self._GetMemoryUsageForPidFromSmaps(pid))
+    except device_errors.CommandFailedError:
+      logging.exception('Error getting memory usage from smaps')
+
+    try:
+      result.update(self._GetMemoryUsageForPidFromStatus(pid))
+    except device_errors.CommandFailedError:
+      logging.exception('Error getting memory usage from status')
+
+    return result
+
+  def _GetMemoryUsageForPidFromSmaps(self, pid):
+    SMAPS_COLUMNS = (
+        'Size', 'Rss', 'Pss', 'Shared_Clean', 'Shared_Dirty', 'Private_Clean',
+        'Private_Dirty')
+
+    showmap_out = self._RunPipedShellCommand(
+        'showmap %d | grep TOTAL' % int(pid), as_root=True)
+
+    split_totals = showmap_out[-1].split()
+    if (not split_totals
+        or len(split_totals) != 9
+        or split_totals[-1] != 'TOTAL'):
+      raise device_errors.CommandFailedError(
+          'Invalid output from showmap: %s' % '\n'.join(showmap_out))
+
+    return dict(itertools.izip(SMAPS_COLUMNS, (int(n) for n in split_totals)))
+
+  def _GetMemoryUsageForPidFromStatus(self, pid):
+    for line in self.ReadFile(
+        '/proc/%s/status' % str(pid), as_root=True).splitlines():
+      if line.startswith('VmHWM:'):
+        return {'VmHWM': int(line.split()[1])}
+    else:
+      raise device_errors.CommandFailedError(
+          'Could not find memory peak value for pid %s' % str(pid), str(self))
+
+  @decorators.WithTimeoutAndRetriesFromInstance()
+  def GetLogcatMonitor(self, timeout=None, retries=None, *args, **kwargs):
+    """Returns a new LogcatMonitor associated with this device.
+
+    Parameters passed to this function are passed directly to
+    |logcat_monitor.LogcatMonitor| and are documented there.
+
+    Args:
+      timeout: timeout in seconds
+      retries: number of retries
+    """
+    return logcat_monitor.LogcatMonitor(self.adb, *args, **kwargs)
+
+  def GetClientCache(self, client_name):
+    """Returns client cache."""
+    if client_name not in self._client_caches:
+      self._client_caches[client_name] = {}
+    return self._client_caches[client_name]
+
+  def _ClearCache(self):
+    """Clears all caches."""
+    for client in self._client_caches:
+      self._client_caches[client].clear()
+    self._cache.clear()
+
+  @classmethod
+  def parallel(cls, devices=None, async=False):
+    """Creates a Parallelizer to operate over the provided list of devices.
+
+    If |devices| is either |None| or an empty list, the Parallelizer will
+    operate over all attached devices that have not been blacklisted.
+
+    Args:
+      devices: A list of either DeviceUtils instances or objects from
+               which DeviceUtils instances can be constructed. If None,
+               all attached devices will be used.
+      async: If true, returns a Parallelizer that runs operations
+             asynchronously.
+
+    Returns:
+      A Parallelizer operating over |devices|.
+    """
+    if not devices:
+      devices = cls.HealthyDevices()
+      if not devices:
+        raise device_errors.NoDevicesError()
+
+    devices = [d if isinstance(d, cls) else cls(d) for d in devices]
+    if async:
+      return parallelizer.Parallelizer(devices)
+    else:
+      return parallelizer.SyncParallelizer(devices)
+
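+  # Example use of parallel (illustrative; the package name is a placeholder,
+  # and this assumes the returned Parallelizer forwards method calls to every
+  # wrapped DeviceUtils):
+  #
+  #   DeviceUtils.parallel().RunShellCommand(
+  #       ['am', 'force-stop', 'com.example.app'], check_return=True)
+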
+  @classmethod
+  def HealthyDevices(cls):
+    blacklist = device_blacklist.ReadBlacklist()
+    def blacklisted(adb):
+      if adb.GetDeviceSerial() in blacklist:
+        logging.warning('Device %s is blacklisted.', adb.GetDeviceSerial())
+        return True
+      return False
+
+    return [cls(adb) for adb in adb_wrapper.AdbWrapper.Devices()
+            if not blacklisted(adb)]
diff --git a/build/android/pylib/device/device_utils_device_test.py b/build/android/pylib/device/device_utils_device_test.py
new file mode 100755
index 0000000..daae2b6
--- /dev/null
+++ b/build/android/pylib/device/device_utils_device_test.py
@@ -0,0 +1,211 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_utils.py (mostly DeviceUtils).
+These tests run against a real, attached device.
+"""
+
+import os
+import tempfile
+import unittest
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import device_utils
+from pylib.utils import md5sum
+
+_OLD_CONTENTS = "foo"
+_NEW_CONTENTS = "bar"
+_DEVICE_DIR = "/data/local/tmp/device_utils_test"
+_SUB_DIR = "sub"
+_SUB_DIR1 = "sub1"
+_SUB_DIR2 = "sub2"
+
+class DeviceUtilsPushDeleteFilesTest(unittest.TestCase):
+
+  def setUp(self):
+    devices = adb_wrapper.AdbWrapper.Devices()
+    assert devices, 'A device must be attached'
+    self.adb = devices[0]
+    self.adb.WaitForDevice()
+    self.device = device_utils.DeviceUtils(
+        self.adb, default_timeout=10, default_retries=0)
+    default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+    constants.SetBuildType(default_build_type)
+
+  @staticmethod
+  def _MakeTempFile(contents):
+    """Make a temporary file with the given contents.
+
+    Args:
+      contents: string to write to the temporary file.
+
+    Returns:
+      A tuple of (absolute path to the file, file name).
+    """
+    fi, path = tempfile.mkstemp(text=True)
+    with os.fdopen(fi, 'w') as f:
+      f.write(contents)
+    file_name = os.path.basename(path)
+    return (path, file_name)
+
+  @staticmethod
+  def _MakeTempFileGivenDir(directory, contents):
+    """Make a temporary file under the given directory
+    with the given contents
+
+    Args:
+      directory: the temp directory to create the file
+      contents: string to write to the temp file
+
+    Returns:
+      the list contains the absolute path to the file and the file name
+    """
+    fi, path = tempfile.mkstemp(dir=directory, text=True)
+    with os.fdopen(fi, 'w') as f:
+      f.write(contents)
+    file_name = os.path.basename(path)
+    return (path, file_name)
+
+  @staticmethod
+  def _ChangeTempFile(path, contents):
+    with open(path, 'w') as f:
+      f.write(contents)
+
+  @staticmethod
+  def _DeleteTempFile(path):
+    os.remove(path)
+
+  def testPushChangedFiles_noFileChange(self):
+    (host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
+    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
+    self.adb.Push(host_file_path, device_file_path)
+    self.device.PushChangedFiles([(host_file_path, device_file_path)])
+    result = self.device.RunShellCommand(['cat', device_file_path],
+                                         single_line=True)
+    self.assertEqual(_OLD_CONTENTS, result)
+
+    cmd_helper.RunCmd(['rm', host_file_path])
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+
+  def testPushChangedFiles_singleFileChange(self):
+    (host_file_path, file_name) = self._MakeTempFile(_OLD_CONTENTS)
+    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
+    self.adb.Push(host_file_path, device_file_path)
+
+    with open(host_file_path, 'w') as f:
+      f.write(_NEW_CONTENTS)
+    self.device.PushChangedFiles([(host_file_path, device_file_path)])
+    result = self.device.RunShellCommand(['cat', device_file_path],
+                                         single_line=True)
+    self.assertEqual(_NEW_CONTENTS, result)
+
+    cmd_helper.RunCmd(['rm', host_file_path])
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+
+  def testDeleteFiles(self):
+    host_tmp_dir = tempfile.mkdtemp()
+    (host_file_path, file_name) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+
+    device_file_path = "%s/%s" % (_DEVICE_DIR, file_name)
+    self.adb.Push(host_file_path, device_file_path)
+
+    cmd_helper.RunCmd(['rm', host_file_path])
+    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
+                                 delete_device_stale=True)
+    result = self.device.RunShellCommand(['ls', _DEVICE_DIR], single_line=True)
+    self.assertEqual('', result)
+
+    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+
+  def testPushAndDeleteFiles_noSubDir(self):
+    host_tmp_dir = tempfile.mkdtemp()
+    (host_file_path1, file_name1) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+    (host_file_path2, file_name2) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+
+    device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
+    device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
+    self.adb.Push(host_file_path1, device_file_path1)
+    self.adb.Push(host_file_path2, device_file_path2)
+
+    with open(host_file_path1, 'w') as f:
+      f.write(_NEW_CONTENTS)
+    cmd_helper.RunCmd(['rm', host_file_path2])
+
+    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
+                                 delete_device_stale=True)
+    result = self.device.RunShellCommand(['cat', device_file_path1],
+                                         single_line=True)
+    self.assertEqual(_NEW_CONTENTS, result)
+    result = self.device.RunShellCommand(['ls', _DEVICE_DIR], single_line=True)
+    self.assertEqual(file_name1, result)
+
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
+
+  def testPushAndDeleteFiles_SubDir(self):
+    host_tmp_dir = tempfile.mkdtemp()
+    host_sub_dir1 = "%s/%s" % (host_tmp_dir, _SUB_DIR1)
+    host_sub_dir2 = "%s/%s/%s" % (host_tmp_dir, _SUB_DIR, _SUB_DIR2)
+    cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir1])
+    cmd_helper.RunCmd(['mkdir', '-p', host_sub_dir2])
+
+    (host_file_path1, file_name1) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+    (host_file_path2, file_name2) = self._MakeTempFileGivenDir(
+        host_tmp_dir, _OLD_CONTENTS)
+    (host_file_path3, file_name3) = self._MakeTempFileGivenDir(
+        host_sub_dir1, _OLD_CONTENTS)
+    (host_file_path4, file_name4) = self._MakeTempFileGivenDir(
+        host_sub_dir2, _OLD_CONTENTS)
+
+    device_file_path1 = "%s/%s" % (_DEVICE_DIR, file_name1)
+    device_file_path2 = "%s/%s" % (_DEVICE_DIR, file_name2)
+    device_file_path3 = "%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR1, file_name3)
+    device_file_path4 = "%s/%s/%s/%s" % (_DEVICE_DIR, _SUB_DIR,
+                                         _SUB_DIR2, file_name4)
+
+    self.adb.Push(host_file_path1, device_file_path1)
+    self.adb.Push(host_file_path2, device_file_path2)
+    self.adb.Push(host_file_path3, device_file_path3)
+    self.adb.Push(host_file_path4, device_file_path4)
+
+    with open(host_file_path1, 'w') as f:
+      f.write(_NEW_CONTENTS)
+    cmd_helper.RunCmd(['rm', host_file_path2])
+    cmd_helper.RunCmd(['rm', host_file_path4])
+
+    self.device.PushChangedFiles([(host_tmp_dir, _DEVICE_DIR)],
+                                 delete_device_stale=True)
+    result = self.device.RunShellCommand(['cat', device_file_path1],
+                                         single_line=True)
+    self.assertEqual(_NEW_CONTENTS, result)
+
+    result = self.device.RunShellCommand(['ls', _DEVICE_DIR])
+    self.assertIn(file_name1, result)
+    self.assertIn(_SUB_DIR1, result)
+    self.assertIn(_SUB_DIR, result)
+    self.assertEqual(3, len(result))
+
+    result = self.device.RunShellCommand(['cat', device_file_path3],
+                                         single_line=True)
+    self.assertEqual(_OLD_CONTENTS, result)
+
+    result = self.device.RunShellCommand(["ls", "%s/%s/%s"
+                                          % (_DEVICE_DIR, _SUB_DIR, _SUB_DIR2)],
+                                         single_line=True)
+    self.assertEqual('', result)
+
+    self.device.RunShellCommand(['rm', '-rf', _DEVICE_DIR])
+    cmd_helper.RunCmd(['rm', '-rf', host_tmp_dir])
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/device/device_utils_test.py b/build/android/pylib/device/device_utils_test.py
new file mode 100755
index 0000000..6699673
--- /dev/null
+++ b/build/android/pylib/device/device_utils_test.py
@@ -0,0 +1,1845 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_utils.py (mostly DeviceUtils).
+"""
+
+# pylint: disable=C0321
+# pylint: disable=W0212
+# pylint: disable=W0613
+
+import collections
+import datetime
+import logging
+import os
+import re
+import sys
+import unittest
+
+from pylib import android_commands
+from pylib import cmd_helper
+from pylib import constants
+from pylib import device_signal
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.device import intent
+from pylib.sdk import split_select
+from pylib.utils import mock_calls
+
+# RunCommand from third_party/android_testrunner/run_command.py is mocked
+# below, so its path needs to be in sys.path.
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'android_testrunner'))
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class DeviceUtilsInitTest(unittest.TestCase):
+
+  def testInitWithStr(self):
+    serial_as_str = str('0123456789abcdef')
+    d = device_utils.DeviceUtils('0123456789abcdef')
+    self.assertEqual(serial_as_str, d.adb.GetDeviceSerial())
+
+  def testInitWithUnicode(self):
+    serial_as_unicode = unicode('fedcba9876543210')
+    d = device_utils.DeviceUtils(serial_as_unicode)
+    self.assertEqual(serial_as_unicode, d.adb.GetDeviceSerial())
+
+  def testInitWithAdbWrapper(self):
+    serial = '123456789abcdef0'
+    a = adb_wrapper.AdbWrapper(serial)
+    d = device_utils.DeviceUtils(a)
+    self.assertEqual(serial, d.adb.GetDeviceSerial())
+
+  def testInitWithAndroidCommands(self):
+    serial = '0fedcba987654321'
+    a = android_commands.AndroidCommands(device=serial)
+    d = device_utils.DeviceUtils(a)
+    self.assertEqual(serial, d.adb.GetDeviceSerial())
+
+  def testInitWithMissing_fails(self):
+    with self.assertRaises(ValueError):
+      device_utils.DeviceUtils(None)
+    with self.assertRaises(ValueError):
+      device_utils.DeviceUtils('')
+
+
+class DeviceUtilsGetAVDsTest(mock_calls.TestCase):
+
+  def testGetAVDs(self):
+    with self.assertCall(
+        mock.call.pylib.cmd_helper.GetCmdOutput([mock.ANY, 'list', 'avd']),
+        'Available Android Virtual Devices:\n'
+        '    Name: my_android5.0\n'
+        '    Path: /some/path/to/.android/avd/my_android5.0.avd\n'
+        '  Target: Android 5.0 (API level 21)\n'
+        ' Tag/ABI: default/x86\n'
+        '    Skin: WVGA800\n'):
+      self.assertEquals(['my_android5.0'],
+                        device_utils.GetAVDs())
+
+
+class DeviceUtilsRestartServerTest(mock_calls.TestCase):
+
+  @mock.patch('time.sleep', mock.Mock())
+  def testRestartServer_succeeds(self):
+    with self.assertCalls(
+        mock.call.pylib.device.adb_wrapper.AdbWrapper.KillServer(),
+        (mock.call.pylib.cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb']),
+         (1, '')),
+        mock.call.pylib.device.adb_wrapper.AdbWrapper.StartServer(),
+        (mock.call.pylib.cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb']),
+         (1, '')),
+        (mock.call.pylib.cmd_helper.GetCmdStatusAndOutput(['pgrep', 'adb']),
+         (0, '123\n'))):
+      device_utils.RestartServer()
+
+
+class MockTempFile(object):
+
+  def __init__(self, name='/tmp/some/file'):
+    self.file = mock.MagicMock(spec=file)
+    self.file.name = name
+    self.file.name_quoted = cmd_helper.SingleQuote(name)
+
+  def __enter__(self):
+    return self.file
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    pass
+
+  @property
+  def name(self):
+    return self.file.name
+
+
+class _PatchedFunction(object):
+  def __init__(self, patched=None, mocked=None):
+    self.patched = patched
+    self.mocked = mocked
+
+
+def _AdbWrapperMock(test_serial):
+  adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+  adb.__str__ = mock.Mock(return_value=test_serial)
+  adb.GetDeviceSerial.return_value = test_serial
+  return adb
+
+
+class DeviceUtilsTest(mock_calls.TestCase):
+
+  def setUp(self):
+    self.adb = _AdbWrapperMock('0123456789abcdef')
+    self.device = device_utils.DeviceUtils(
+        self.adb, default_timeout=10, default_retries=0)
+    self.watchMethodCalls(self.call.adb, ignore=['GetDeviceSerial'])
+
+  def AdbCommandError(self, args=None, output=None, status=None, msg=None):
+    if args is None:
+      args = ['[unspecified]']
+    return mock.Mock(side_effect=device_errors.AdbCommandFailedError(
+        args, output, status, msg, str(self.device)))
+
+  def CommandError(self, msg=None):
+    if msg is None:
+      msg = 'Command failed'
+    return mock.Mock(side_effect=device_errors.CommandFailedError(
+        msg, str(self.device)))
+
+  def ShellError(self, output=None, status=1):
+    def action(cmd, *args, **kwargs):
+      raise device_errors.AdbShellCommandFailedError(
+          cmd, output, status, str(self.device))
+    if output is None:
+      output = 'Permission denied\n'
+    return action
+
+  def TimeoutError(self, msg=None):
+    if msg is None:
+      msg = 'Operation timed out'
+    return mock.Mock(side_effect=device_errors.CommandTimeoutError(
+        msg, str(self.device)))
+
+
+class DeviceUtilsEqTest(DeviceUtilsTest):
+
+  def testEq_equal_deviceUtils(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0123456789abcdef'))
+    self.assertTrue(self.device == other)
+    self.assertTrue(other == self.device)
+
+  def testEq_equal_adbWrapper(self):
+    other = adb_wrapper.AdbWrapper('0123456789abcdef')
+    self.assertTrue(self.device == other)
+    self.assertTrue(other == self.device)
+
+  def testEq_equal_string(self):
+    other = '0123456789abcdef'
+    self.assertTrue(self.device == other)
+    self.assertTrue(other == self.device)
+
+  def testEq_devicesNotEqual(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0123456789abcdee'))
+    self.assertFalse(self.device == other)
+    self.assertFalse(other == self.device)
+
+  def testEq_identity(self):
+    self.assertTrue(self.device == self.device)
+
+  def testEq_serialInList(self):
+    devices = [self.device]
+    self.assertTrue('0123456789abcdef' in devices)
+
+
+class DeviceUtilsLtTest(DeviceUtilsTest):
+
+  def testLt_lessThan(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('ffffffffffffffff'))
+    self.assertTrue(self.device < other)
+    self.assertTrue(other > self.device)
+
+  def testLt_greaterThan_lhs(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0000000000000000'))
+    self.assertFalse(self.device < other)
+    self.assertFalse(other > self.device)
+
+  def testLt_equal(self):
+    other = device_utils.DeviceUtils(_AdbWrapperMock('0123456789abcdef'))
+    self.assertFalse(self.device < other)
+    self.assertFalse(other > self.device)
+
+  def testLt_sorted(self):
+    devices = [
+        device_utils.DeviceUtils(_AdbWrapperMock('ffffffffffffffff')),
+        device_utils.DeviceUtils(_AdbWrapperMock('0000000000000000')),
+    ]
+    sorted_devices = sorted(devices)
+    self.assertEquals('0000000000000000',
+                      sorted_devices[0].adb.GetDeviceSerial())
+    self.assertEquals('ffffffffffffffff',
+                      sorted_devices[1].adb.GetDeviceSerial())
+
+
+class DeviceUtilsStrTest(DeviceUtilsTest):
+
+  def testStr_returnsSerial(self):
+    with self.assertCalls(
+        (self.call.adb.GetDeviceSerial(), '0123456789abcdef')):
+      self.assertEqual('0123456789abcdef', str(self.device))
+
+
+class DeviceUtilsIsOnlineTest(DeviceUtilsTest):
+
+  def testIsOnline_true(self):
+    with self.assertCall(self.call.adb.GetState(), 'device'):
+      self.assertTrue(self.device.IsOnline())
+
+  def testIsOnline_false(self):
+    with self.assertCall(self.call.adb.GetState(), 'offline'):
+      self.assertFalse(self.device.IsOnline())
+
+  def testIsOnline_error(self):
+    with self.assertCall(self.call.adb.GetState(), self.CommandError()):
+      self.assertFalse(self.device.IsOnline())
+
+
+class DeviceUtilsHasRootTest(DeviceUtilsTest):
+
+  def testHasRoot_true(self):
+    with self.assertCall(self.call.adb.Shell('ls /root'), 'foo\n'):
+      self.assertTrue(self.device.HasRoot())
+
+  def testHasRoot_false(self):
+    with self.assertCall(self.call.adb.Shell('ls /root'), self.ShellError()):
+      self.assertFalse(self.device.HasRoot())
+
+
+class DeviceUtilsEnableRootTest(DeviceUtilsTest):
+
+  def testEnableRoot_succeeds(self):
+    with self.assertCalls(
+        (self.call.device.IsUserBuild(), False),
+        self.call.adb.Root(),
+        self.call.device.WaitUntilFullyBooted()):
+      self.device.EnableRoot()
+
+  def testEnableRoot_userBuild(self):
+    with self.assertCalls(
+        (self.call.device.IsUserBuild(), True)):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.EnableRoot()
+
+  def testEnableRoot_rootFails(self):
+    with self.assertCalls(
+        (self.call.device.IsUserBuild(), False),
+        (self.call.adb.Root(), self.CommandError())):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.EnableRoot()
+
+
+class DeviceUtilsIsUserBuildTest(DeviceUtilsTest):
+
+  def testIsUserBuild_yes(self):
+    with self.assertCall(
+        self.call.device.GetProp('ro.build.type', cache=True), 'user'):
+      self.assertTrue(self.device.IsUserBuild())
+
+  def testIsUserBuild_no(self):
+    with self.assertCall(
+        self.call.device.GetProp('ro.build.type', cache=True), 'userdebug'):
+      self.assertFalse(self.device.IsUserBuild())
+
+
+class DeviceUtilsGetExternalStoragePathTest(DeviceUtilsTest):
+
+  def testGetExternalStoragePath_succeeds(self):
+    with self.assertCall(
+        self.call.adb.Shell('echo $EXTERNAL_STORAGE'), '/fake/storage/path\n'):
+      self.assertEquals('/fake/storage/path',
+                        self.device.GetExternalStoragePath())
+
+  def testGetExternalStoragePath_fails(self):
+    with self.assertCall(self.call.adb.Shell('echo $EXTERNAL_STORAGE'), '\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.GetExternalStoragePath()
+
+
+class DeviceUtilsGetApplicationPathsTest(DeviceUtilsTest):
+
+  def testGetApplicationPaths_exists(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '19\n'),
+        (self.call.adb.Shell('pm path android'),
+         'package:/path/to/android.apk\n')):
+      self.assertEquals(['/path/to/android.apk'],
+                        self.device.GetApplicationPaths('android'))
+
+  def testGetApplicationPaths_notExists(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '19\n'),
+        (self.call.adb.Shell('pm path not.installed.app'), '')):
+      self.assertEquals([],
+                        self.device.GetApplicationPaths('not.installed.app'))
+
+  def testGetApplicationPaths_fails(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '19\n'),
+        (self.call.adb.Shell('pm path android'),
+         self.CommandError('ERROR. Is package manager running?\n'))):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.GetApplicationPaths('android')
+
+
+class DeviceUtilsGetApplicationDataDirectoryTest(DeviceUtilsTest):
+
+  def testGetApplicationDataDirectory_exists(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand(
+            'pm dump foo.bar.baz | grep dataDir='),
+        ['dataDir=/data/data/foo.bar.baz']):
+      self.assertEquals(
+          '/data/data/foo.bar.baz',
+          self.device.GetApplicationDataDirectory('foo.bar.baz'))
+
+  def testGetApplicationDataDirectory_notExists(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand(
+            'pm dump foo.bar.baz | grep dataDir='),
+        self.ShellError()):
+      self.assertIsNone(self.device.GetApplicationDataDirectory('foo.bar.baz'))
+
+
+@mock.patch('time.sleep', mock.Mock())
+class DeviceUtilsWaitUntilFullyBootedTest(DeviceUtilsTest):
+
+  def testWaitUntilFullyBooted_succeedsNoWifi(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1')):
+      self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_succeedsWithWifi(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'),
+         'stuff\nWi-Fi is enabled\nmore stuff\n')):
+      self.device.WaitUntilFullyBooted(wifi=True)
+
+  def testWaitUntilFullyBooted_deviceNotInitiallyAvailable(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.AdbCommandError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1')):
+      self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_sdCardReadyFails_noPath(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), self.CommandError())):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_sdCardReadyFails_notExists(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), self.ShellError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), self.ShellError()),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'),
+         self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_devicePmFails(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'), self.CommandError()),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'), self.CommandError()),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'), self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_bootFails(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '0'),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '0'),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=False)
+
+  def testWaitUntilFullyBooted_wifiFails(self):
+    with self.assertCalls(
+        self.call.adb.WaitForDevice(),
+        # sd_card_ready
+        (self.call.device.GetExternalStoragePath(), '/fake/storage/path'),
+        (self.call.adb.Shell('test -d /fake/storage/path'), ''),
+        # pm_ready
+        (self.call.device.GetApplicationPaths('android'),
+         ['package:/some/fake/path']),
+        # boot_completed
+        (self.call.device.GetProp('sys.boot_completed'), '1'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'), 'stuff\nmore stuff\n'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'), 'stuff\nmore stuff\n'),
+        # wifi_enabled
+        (self.call.adb.Shell('dumpsys wifi'), self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.WaitUntilFullyBooted(wifi=True)
+
+
+@mock.patch('time.sleep', mock.Mock())
+class DeviceUtilsRebootTest(DeviceUtilsTest):
+
+  def testReboot_nonBlocking(self):
+    with self.assertCalls(
+        self.call.adb.Reboot(),
+        (self.call.device.IsOnline(), True),
+        (self.call.device.IsOnline(), False)):
+      self.device.Reboot(block=False)
+
+  def testReboot_blocking(self):
+    with self.assertCalls(
+        self.call.adb.Reboot(),
+        (self.call.device.IsOnline(), True),
+        (self.call.device.IsOnline(), False),
+        self.call.device.WaitUntilFullyBooted(wifi=False)):
+      self.device.Reboot(block=True)
+
+  def testReboot_blockUntilWifi(self):
+    with self.assertCalls(
+        self.call.adb.Reboot(),
+        (self.call.device.IsOnline(), True),
+        (self.call.device.IsOnline(), False),
+        self.call.device.WaitUntilFullyBooted(wifi=True)):
+      self.device.Reboot(block=True, wifi=True)
+
+
+class DeviceUtilsInstallTest(DeviceUtilsTest):
+
+  def testInstall_noPriorInstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'), []),
+        self.call.adb.Install('/fake/test/app.apk', reinstall=False)):
+      self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_differentPriorInstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'),
+         ['/fake/data/app/this.is.a.test.package.apk']),
+        (self.call.device._GetChangedAndStaleFiles(
+            '/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk'),
+         ([('/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk')],
+          [])),
+        self.call.adb.Uninstall('this.is.a.test.package'),
+        self.call.adb.Install('/fake/test/app.apk', reinstall=False)):
+      self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_differentPriorInstall_reinstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'),
+         ['/fake/data/app/this.is.a.test.package.apk']),
+        (self.call.device._GetChangedAndStaleFiles(
+            '/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk'),
+         ([('/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk')],
+          [])),
+        self.call.adb.Install('/fake/test/app.apk', reinstall=True)):
+      self.device.Install('/fake/test/app.apk', reinstall=True, retries=0)
+
+  def testInstall_identicalPriorInstall(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'),
+         ['/fake/data/app/this.is.a.test.package.apk']),
+        (self.call.device._GetChangedAndStaleFiles(
+            '/fake/test/app.apk', '/fake/data/app/this.is.a.test.package.apk'),
+         ([], []))):
+      self.device.Install('/fake/test/app.apk', retries=0)
+
+  def testInstall_fails(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.apk_helper.GetPackageName('/fake/test/app.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'), []),
+        (self.call.adb.Install('/fake/test/app.apk', reinstall=False),
+         self.CommandError('Failure\r\n'))):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.Install('/fake/test/app.apk', retries=0)
+
+class DeviceUtilsInstallSplitApkTest(DeviceUtilsTest):
+
+  def testInstallSplitApk_noPriorInstall(self):
+    with self.assertCalls(
+        (self.call.device._CheckSdkLevel(21)),
+        (mock.call.pylib.sdk.split_select.SelectSplits(
+            self.device, 'base.apk',
+            ['split1.apk', 'split2.apk', 'split3.apk']),
+         ['split2.apk']),
+        (mock.call.pylib.utils.apk_helper.GetPackageName('base.apk'),
+         'this.is.a.test.package'),
+        (self.call.device.GetApplicationPaths('this.is.a.test.package'), []),
+        (self.call.adb.InstallMultiple(
+            ['base.apk', 'split2.apk'], partial=None, reinstall=False))):
+      self.device.InstallSplitApk('base.apk',
+          ['split1.apk', 'split2.apk', 'split3.apk'], retries=0)
+
+  def testInstallSplitApk_partialInstall(self):
+    with self.assertCalls(
+        (self.call.device._CheckSdkLevel(21)),
+        (mock.call.pylib.sdk.split_select.SelectSplits(
+            self.device, 'base.apk',
+            ['split1.apk', 'split2.apk', 'split3.apk']),
+         ['split2.apk']),
+        (mock.call.pylib.utils.apk_helper.GetPackageName('base.apk'),
+         'test.package'),
+        (self.call.device.GetApplicationPaths('test.package'),
+         ['base-on-device.apk', 'split2-on-device.apk']),
+        (mock.call.pylib.utils.md5sum.CalculateDeviceMd5Sums(
+            ['base-on-device.apk', 'split2-on-device.apk'], self.device),
+         {'base-on-device.apk': 'AAA', 'split2-on-device.apk': 'BBB'}),
+        (mock.call.pylib.utils.md5sum.CalculateHostMd5Sums(
+            ['base.apk', 'split2.apk']),
+         {'base.apk': 'AAA', 'split2.apk': 'CCC'}),
+        (self.call.adb.InstallMultiple(
+            ['split2.apk'], partial='test.package', reinstall=True))):
+      self.device.InstallSplitApk('base.apk',
+          ['split1.apk', 'split2.apk', 'split3.apk'], reinstall=True, retries=0)
+
+
+class DeviceUtilsRunShellCommandTest(DeviceUtilsTest):
+
+  def setUp(self):
+    super(DeviceUtilsRunShellCommandTest, self).setUp()
+    self.device.NeedsSU = mock.Mock(return_value=False)
+
+  def testRunShellCommand_commandAsList(self):
+    with self.assertCall(self.call.adb.Shell('pm list packages'), ''):
+      self.device.RunShellCommand(['pm', 'list', 'packages'])
+
+  def testRunShellCommand_commandAsListQuoted(self):
+    with self.assertCall(self.call.adb.Shell("echo 'hello world' '$10'"), ''):
+      self.device.RunShellCommand(['echo', 'hello world', '$10'])
+
+  def testRunShellCommand_commandAsString(self):
+    with self.assertCall(self.call.adb.Shell('echo "$VAR"'), ''):
+      self.device.RunShellCommand('echo "$VAR"')
+
+  def testNewRunShellImpl_withEnv(self):
+    with self.assertCall(
+        self.call.adb.Shell('VAR=some_string echo "$VAR"'), ''):
+      self.device.RunShellCommand('echo "$VAR"', env={'VAR': 'some_string'})
+
+  def testNewRunShellImpl_withEnvQuoted(self):
+    with self.assertCall(
+        self.call.adb.Shell('PATH="$PATH:/other/path" run_this'), ''):
+      self.device.RunShellCommand('run_this', env={'PATH': '$PATH:/other/path'})
+
+  def testNewRunShellImpl_withEnv_failure(self):
+    with self.assertRaises(KeyError):
+      self.device.RunShellCommand('some_cmd', env={'INVALID NAME': 'value'})
+
+  def testNewRunShellImpl_withCwd(self):
+    with self.assertCall(self.call.adb.Shell('cd /some/test/path && ls'), ''):
+      self.device.RunShellCommand('ls', cwd='/some/test/path')
+
+  def testNewRunShellImpl_withCwdQuoted(self):
+    with self.assertCall(
+        self.call.adb.Shell("cd '/some test/path with/spaces' && ls"), ''):
+      self.device.RunShellCommand('ls', cwd='/some test/path with/spaces')
+
+  def testRunShellCommand_withHugeCmd(self):
+    payload = 'hi! ' * 1024
+    expected_cmd = "echo '%s'" % payload
+    with self.assertCalls(
+      (mock.call.pylib.utils.device_temp_file.DeviceTempFile(
+          self.adb, suffix='.sh'), MockTempFile('/sdcard/temp-123.sh')),
+      self.call.device._WriteFileWithPush('/sdcard/temp-123.sh', expected_cmd),
+      (self.call.adb.Shell('sh /sdcard/temp-123.sh'), payload + '\n')):
+      self.assertEquals([payload],
+                        self.device.RunShellCommand(['echo', payload]))
+
+  def testRunShellCommand_withHugeCmdAndSU(self):
+    payload = 'hi! ' * 1024
+    expected_cmd = """su -c sh -c 'echo '"'"'%s'"'"''""" % payload
+    with self.assertCalls(
+      (self.call.device.NeedsSU(), True),
+      (mock.call.pylib.utils.device_temp_file.DeviceTempFile(
+          self.adb, suffix='.sh'), MockTempFile('/sdcard/temp-123.sh')),
+      self.call.device._WriteFileWithPush('/sdcard/temp-123.sh', expected_cmd),
+      (self.call.adb.Shell('sh /sdcard/temp-123.sh'), payload + '\n')):
+      self.assertEquals(
+          [payload],
+          self.device.RunShellCommand(['echo', payload], as_root=True))
+
+  def testRunShellCommand_withSu(self):
+    with self.assertCalls(
+        (self.call.device.NeedsSU(), True),
+        (self.call.adb.Shell("su -c sh -c 'setprop service.adb.root 0'"), '')):
+      self.device.RunShellCommand('setprop service.adb.root 0', as_root=True)
+
+  def testRunShellCommand_manyLines(self):
+    cmd = 'ls /some/path'
+    with self.assertCall(self.call.adb.Shell(cmd), 'file1\nfile2\nfile3\n'):
+      self.assertEquals(['file1', 'file2', 'file3'],
+                        self.device.RunShellCommand(cmd))
+
+  def testRunShellCommand_singleLine_success(self):
+    cmd = 'echo $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), 'some value\n'):
+      self.assertEquals('some value',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_successEmptyLine(self):
+    cmd = 'echo $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), '\n'):
+      self.assertEquals('',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_successWithoutEndLine(self):
+    cmd = 'echo -n $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), 'some value'):
+      self.assertEquals('some value',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_successNoOutput(self):
+    cmd = 'echo -n $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd), ''):
+      self.assertEquals('',
+                        self.device.RunShellCommand(cmd, single_line=True))
+
+  def testRunShellCommand_singleLine_failTooManyLines(self):
+    cmd = 'echo $VALUE'
+    with self.assertCall(self.call.adb.Shell(cmd),
+                         'some value\nanother value\n'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.RunShellCommand(cmd, single_line=True)
+
+  def testRunShellCommand_checkReturn_success(self):
+    cmd = 'echo $ANDROID_DATA'
+    output = '/data\n'
+    with self.assertCall(self.call.adb.Shell(cmd), output):
+      self.assertEquals([output.rstrip()],
+                        self.device.RunShellCommand(cmd, check_return=True))
+
+  def testRunShellCommand_checkReturn_failure(self):
+    cmd = 'ls /root'
+    output = 'opendir failed, Permission denied\n'
+    with self.assertCall(self.call.adb.Shell(cmd), self.ShellError(output)):
+      with self.assertRaises(device_errors.AdbCommandFailedError):
+        self.device.RunShellCommand(cmd, check_return=True)
+
+  def testRunShellCommand_checkReturn_disabled(self):
+    cmd = 'ls /root'
+    output = 'opendir failed, Permission denied\n'
+    with self.assertCall(self.call.adb.Shell(cmd), self.ShellError(output)):
+      self.assertEquals([output.rstrip()],
+                        self.device.RunShellCommand(cmd, check_return=False))
+
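+  # The large_output tests below illustrate the (assumed) behaviour of
+  # redirecting the command's output to a device temp file and reading it
+  # back with ReadFile(force_pull=True); with check_return=True the same
+  # fallback also triggers automatically when the plain shell call fails
+  # without a known exit status (the disabledTrigger case).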
+  def testRunShellCommand_largeOutput_enabled(self):
+    cmd = 'echo $VALUE'
+    temp_file = MockTempFile('/sdcard/temp-123')
+    cmd_redirect = '%s > %s' % (cmd, temp_file.name)
+    with self.assertCalls(
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+            temp_file),
+        (self.call.adb.Shell(cmd_redirect)),
+        (self.call.device.ReadFile(temp_file.name, force_pull=True),
+         'something')):
+      self.assertEquals(
+          ['something'],
+          self.device.RunShellCommand(
+              cmd, large_output=True, check_return=True))
+
+  def testRunShellCommand_largeOutput_disabledNoTrigger(self):
+    cmd = 'something'
+    with self.assertCall(self.call.adb.Shell(cmd), self.ShellError('')):
+      with self.assertRaises(device_errors.AdbCommandFailedError):
+        self.device.RunShellCommand(cmd, check_return=True)
+
+  def testRunShellCommand_largeOutput_disabledTrigger(self):
+    cmd = 'echo $VALUE'
+    temp_file = MockTempFile('/sdcard/temp-123')
+    cmd_redirect = '%s > %s' % (cmd, temp_file.name)
+    with self.assertCalls(
+        (self.call.adb.Shell(cmd), self.ShellError('', None)),
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+            temp_file),
+        (self.call.adb.Shell(cmd_redirect)),
+        (self.call.device.ReadFile(mock.ANY, force_pull=True),
+         'something')):
+      self.assertEquals(['something'],
+                        self.device.RunShellCommand(cmd, check_return=True))
+
+
+class DeviceUtilsRunPipedShellCommandTest(DeviceUtilsTest):
+
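+  # These tests assume _RunPipedShellCommand appends
+  # 'echo "PIPESTATUS: ${PIPESTATUS[@]}"' to the piped command and parses the
+  # trailing PIPESTATUS line to recover the exit code of each pipeline stage,
+  # e.g. 'PIPESTATUS: 1 0' means the first command in the pipe failed.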
+  def testRunPipedShellCommand_success(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['This line contains foo', 'PIPESTATUS: 0 0']):
+      self.assertEquals(['This line contains foo'],
+                        self.device._RunPipedShellCommand('ps | grep foo'))
+
+  def testRunPipedShellCommand_firstCommandFails(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['PIPESTATUS: 1 0']):
+      with self.assertRaises(device_errors.AdbShellCommandFailedError) as ec:
+        self.device._RunPipedShellCommand('ps | grep foo')
+      self.assertEquals([1, 0], ec.exception.status)
+
+  def testRunPipedShellCommand_secondCommandFails(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['PIPESTATUS: 0 1']):
+      with self.assertRaises(device_errors.AdbShellCommandFailedError) as ec:
+        self.device._RunPipedShellCommand('ps | grep foo')
+      self.assertEquals([0, 1], ec.exception.status)
+
+  def testRunPipedShellCommand_outputCutOff(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            'ps | grep foo; echo "PIPESTATUS: ${PIPESTATUS[@]}"',
+            check_return=True),
+        ['foo.bar'] * 256 + ['foo.ba']):
+      with self.assertRaises(device_errors.AdbShellCommandFailedError) as ec:
+        self.device._RunPipedShellCommand('ps | grep foo')
+      self.assertIs(None, ec.exception.status)
+
+
+@mock.patch('time.sleep', mock.Mock())
+class DeviceUtilsKillAllTest(DeviceUtilsTest):
+
+  def testKillAll_noMatchingProcessesFailure(self):
+    with self.assertCall(self.call.device.GetPids('test_process'), {}):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.KillAll('test_process')
+
+  def testKillAll_noMatchingProcessesQuiet(self):
+    with self.assertCall(self.call.device.GetPids('test_process'), {}):
+      self.assertEqual(0, self.device.KillAll('test_process', quiet=True))
+
+  def testKillAll_nonblocking(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.adb.Shell('kill -9 1234'), '')):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', blocking=False))
+
+  def testKillAll_blocking(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.adb.Shell('kill -9 1234'), ''),
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.device.GetPids('some.process'), [])):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', blocking=True))
+
+  def testKillAll_root(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.device.NeedsSU(), True),
+        (self.call.adb.Shell("su -c sh -c 'kill -9 1234'"), '')):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', as_root=True))
+
+  def testKillAll_sigterm(self):
+    with self.assertCalls(
+        (self.call.device.GetPids('some.process'), {'some.process': '1234'}),
+        (self.call.adb.Shell('kill -15 1234'), '')):
+      self.assertEquals(
+          1, self.device.KillAll('some.process', signum=device_signal.SIGTERM))
+
+
+class DeviceUtilsStartActivityTest(DeviceUtilsTest):
+
+  def testStartActivity_actionOnly(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_success(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_failure(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Error: Failed to start test activity'):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.StartActivity(test_intent)
+
+  def testStartActivity_blocking(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-W '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent, blocking=True)
+
+  def testStartActivity_withCategory(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                category='android.intent.category.HOME')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-c android.intent.category.HOME '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withMultipleCategories(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                category=['android.intent.category.HOME',
+                                          'android.intent.category.BROWSABLE'])
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-c android.intent.category.HOME '
+                            '-c android.intent.category.BROWSABLE '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withData(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                data='http://www.google.com/')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-d http://www.google.com/ '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withStringExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': 'test'})
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '--es foo test'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withBoolExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': True})
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '--ez foo True'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withIntExtra(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                extras={'foo': 123})
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '--ei foo 123'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+  def testStartActivity_withTraceFile(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '--start-profiler test_trace_file.out '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent,
+                                trace_file_name='test_trace_file.out')
+
+  def testStartActivity_withForceStop(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-S '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent, force_stop=True)
+
+  def testStartActivity_withFlags(self):
+    test_intent = intent.Intent(action='android.intent.action.VIEW',
+                                package='this.is.a.test.package',
+                                activity='.Main',
+                                flags='0x10000000')
+    with self.assertCall(
+        self.call.adb.Shell('am start '
+                            '-a android.intent.action.VIEW '
+                            '-n this.is.a.test.package/.Main '
+                            '-f 0x10000000'),
+        'Starting: Intent { act=android.intent.action.VIEW }'):
+      self.device.StartActivity(test_intent)
+
+
+class DeviceUtilsStartInstrumentationTest(DeviceUtilsTest):
+
+  def testStartInstrumentation_nothing(self):
+    with self.assertCalls(
+        self.call.device.RunShellCommand(
+            ['am', 'instrument', 'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True)):
+      self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=False, raw=False, extras=None)
+
+  def testStartInstrumentation_finish(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['am', 'instrument', '-w', 'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True),
+         ['OK (1 test)'])):
+      output = self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=True, raw=False, extras=None)
+      self.assertEquals(['OK (1 test)'], output)
+
+  def testStartInstrumentation_raw(self):
+    with self.assertCalls(
+        self.call.device.RunShellCommand(
+            ['am', 'instrument', '-r', 'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True)):
+      self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=False, raw=True, extras=None)
+
+  def testStartInstrumentation_extras(self):
+    with self.assertCalls(
+        self.call.device.RunShellCommand(
+            ['am', 'instrument', '-e', 'foo', 'Foo', '-e', 'bar', 'Bar',
+             'test.package/.TestInstrumentation'],
+            check_return=True, large_output=True)):
+      self.device.StartInstrumentation(
+          'test.package/.TestInstrumentation',
+          finish=False, raw=False, extras={'foo': 'Foo', 'bar': 'Bar'})
+
+
+class DeviceUtilsBroadcastIntentTest(DeviceUtilsTest):
+
+  def testBroadcastIntent_noExtras(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT')
+    with self.assertCall(
+        self.call.adb.Shell('am broadcast -a test.package.with.an.INTENT'),
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+  def testBroadcastIntent_withExtra(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT',
+                                extras={'foo': 'bar value'})
+    with self.assertCall(
+        self.call.adb.Shell(
+            "am broadcast -a test.package.with.an.INTENT --es foo 'bar value'"),
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+  def testBroadcastIntent_withExtra_noValue(self):
+    test_intent = intent.Intent(action='test.package.with.an.INTENT',
+                                extras={'foo': None})
+    with self.assertCall(
+        self.call.adb.Shell(
+            'am broadcast -a test.package.with.an.INTENT --esn foo'),
+        'Broadcasting: Intent { act=test.package.with.an.INTENT } '):
+      self.device.BroadcastIntent(test_intent)
+
+
+class DeviceUtilsGoHomeTest(DeviceUtilsTest):
+
+  def testGoHome_popupsExist(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['am', 'start', '-W', '-a', 'android.intent.action.MAIN',
+            '-c', 'android.intent.category.HOME'], check_return=True),
+         'Starting: Intent { act=android.intent.action.MAIN }\r\n'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '66'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '4'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+         ['mCurrentFocus Launcher'])):
+      self.device.GoHome()
+
+  def testGoHome_willRetry(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['am', 'start', '-W', '-a', 'android.intent.action.MAIN',
+            '-c', 'android.intent.category.HOME'], check_return=True),
+         'Starting: Intent { act=android.intent.action.MAIN }\r\n'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '66'], check_return=True,)),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '4'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '66'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['input', 'keyevent', '4'], check_return=True)),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+         self.TimeoutError())):
+      with self.assertRaises(device_errors.CommandTimeoutError):
+        self.device.GoHome()
+
+  def testGoHome_alreadyFocused(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+        ['mCurrentFocus Launcher']):
+      self.device.GoHome()
+
+  def testGoHome_alreadyFocusedAlternateCase(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+        [' mCurrentFocus .launcher/.']):
+      self.device.GoHome()
+
+  def testGoHome_obtainsFocusAfterGoingHome(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True), []),
+        (self.call.device.RunShellCommand(
+            ['am', 'start', '-W', '-a', 'android.intent.action.MAIN',
+            '-c', 'android.intent.category.HOME'], check_return=True),
+         'Starting: Intent { act=android.intent.action.MAIN }\r\n'),
+        (self.call.device.RunShellCommand(
+            ['dumpsys', 'window', 'windows'], check_return=True,
+            large_output=True),
+         ['mCurrentFocus Launcher'])):
+      self.device.GoHome()
+
+
+class DeviceUtilsForceStopTest(DeviceUtilsTest):
+
+  def testForceStop(self):
+    with self.assertCall(
+        self.call.adb.Shell('am force-stop this.is.a.test.package'),
+        ''):
+      self.device.ForceStop('this.is.a.test.package')
+
+
+class DeviceUtilsClearApplicationStateTest(DeviceUtilsTest):
+
+  def testClearApplicationState_packageDoesntExist(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '17\n'),
+        (self.call.device.GetApplicationPaths('this.package.does.not.exist'),
+         [])):
+      self.device.ClearApplicationState('this.package.does.not.exist')
+
+  def testClearApplicationState_packageDoesntExistOnAndroidJBMR2OrAbove(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '18\n'),
+        (self.call.adb.Shell('pm clear this.package.does.not.exist'),
+         'Failed\r\n')):
+      self.device.ClearApplicationState('this.package.does.not.exist')
+
+  def testClearApplicationState_packageExists(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '17\n'),
+        (self.call.device.GetApplicationPaths('this.package.exists'),
+         ['/data/app/this.package.exists.apk']),
+        (self.call.adb.Shell('pm clear this.package.exists'),
+         'Success\r\n')):
+      self.device.ClearApplicationState('this.package.exists')
+
+  def testClearApplicationState_packageExistsOnAndroidJBMR2OrAbove(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.version.sdk'), '18\n'),
+        (self.call.adb.Shell('pm clear this.package.exists'),
+         'Success\r\n')):
+      self.device.ClearApplicationState('this.package.exists')
+
+
+class DeviceUtilsSendKeyEventTest(DeviceUtilsTest):
+
+  def testSendKeyEvent(self):
+    with self.assertCall(self.call.adb.Shell('input keyevent 66'), ''):
+      self.device.SendKeyEvent(66)
+
+
+class DeviceUtilsPushChangedFilesIndividuallyTest(DeviceUtilsTest):
+
+  def testPushChangedFilesIndividually_empty(self):
+    test_files = []
+    with self.assertCalls():
+      self.device._PushChangedFilesIndividually(test_files)
+
+  def testPushChangedFilesIndividually_single(self):
+    test_files = [('/test/host/path', '/test/device/path')]
+    with self.assertCalls(self.call.adb.Push(*test_files[0])):
+      self.device._PushChangedFilesIndividually(test_files)
+
+  def testPushChangedFilesIndividually_multiple(self):
+    test_files = [
+        ('/test/host/path/file1', '/test/device/path/file1'),
+        ('/test/host/path/file2', '/test/device/path/file2')]
+    with self.assertCalls(
+        self.call.adb.Push(*test_files[0]),
+        self.call.adb.Push(*test_files[1])):
+      self.device._PushChangedFilesIndividually(test_files)
+
+
+class DeviceUtilsPushChangedFilesZippedTest(DeviceUtilsTest):
+
+  def testPushChangedFilesZipped_empty(self):
+    test_files = []
+    with self.assertCalls():
+      self.device._PushChangedFilesZipped(test_files)
+
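+  # The spec below assumes _PushChangedFilesZipped zips the host files in a
+  # separate process, pushes the zip to external storage, unzips it on the
+  # device as root (with PATH extended to include /data/local/tmp/bin), and
+  # removes the zip afterwards.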
+  def _testPushChangedFilesZipped_spec(self, test_files):
+    mock_zip_temp = mock.mock_open()
+    mock_zip_temp.return_value.name = '/test/temp/file/tmp.zip'
+    with self.assertCalls(
+        (mock.call.tempfile.NamedTemporaryFile(suffix='.zip'), mock_zip_temp),
+        (mock.call.multiprocessing.Process(
+            target=device_utils.DeviceUtils._CreateDeviceZip,
+            args=('/test/temp/file/tmp.zip', test_files)), mock.Mock()),
+        (self.call.device.GetExternalStoragePath(),
+         '/test/device/external_dir'),
+        self.call.adb.Push(
+            '/test/temp/file/tmp.zip', '/test/device/external_dir/tmp.zip'),
+        self.call.device.RunShellCommand(
+            ['unzip', '/test/device/external_dir/tmp.zip'],
+            as_root=True,
+            env={'PATH': '/data/local/tmp/bin:$PATH'},
+            check_return=True),
+        (self.call.device.IsOnline(), True),
+        self.call.device.RunShellCommand(
+            ['rm', '/test/device/external_dir/tmp.zip'], check_return=True)):
+      self.device._PushChangedFilesZipped(test_files)
+
+  def testPushChangedFilesZipped_single(self):
+    self._testPushChangedFilesZipped_spec(
+        [('/test/host/path/file1', '/test/device/path/file1')])
+
+  def testPushChangedFilesZipped_multiple(self):
+    self._testPushChangedFilesZipped_spec(
+        [('/test/host/path/file1', '/test/device/path/file1'),
+         ('/test/host/path/file2', '/test/device/path/file2')])
+
+
+class DeviceUtilsFileExistsTest(DeviceUtilsTest):
+
+  def testFileExists_usingTest_fileExists(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['test', '-e', '/path/file.exists'], check_return=True), ''):
+      self.assertTrue(self.device.FileExists('/path/file.exists'))
+
+  def testFileExists_usingTest_fileDoesntExist(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['test', '-e', '/does/not/exist'], check_return=True),
+        self.ShellError('', 1)):
+      self.assertFalse(self.device.FileExists('/does/not/exist'))
+
+
+class DeviceUtilsPullFileTest(DeviceUtilsTest):
+
+  def testPullFile_existsOnDevice(self):
+    with mock.patch('os.path.exists', return_value=True):
+      with self.assertCall(
+          self.call.adb.Pull('/data/app/test.file.exists',
+                             '/test/file/host/path')):
+        self.device.PullFile('/data/app/test.file.exists',
+                             '/test/file/host/path')
+
+  def testPullFile_doesntExistOnDevice(self):
+    with mock.patch('os.path.exists', return_value=True):
+      with self.assertCall(
+          self.call.adb.Pull('/data/app/test.file.does.not.exist',
+                             '/test/file/host/path'),
+          self.CommandError('remote object does not exist')):
+        with self.assertRaises(device_errors.CommandFailedError):
+          self.device.PullFile('/data/app/test.file.does.not.exist',
+                               '/test/file/host/path')
+
+
+class DeviceUtilsReadFileTest(DeviceUtilsTest):
+
+  def testReadFileWithPull_success(self):
+    tmp_host_dir = '/tmp/dir/on.host/'
+    tmp_host = MockTempFile('/tmp/dir/on.host/tmp_ReadFileWithPull')
+    tmp_host.file.read.return_value = 'some interesting contents'
+    with self.assertCalls(
+        (mock.call.tempfile.mkdtemp(), tmp_host_dir),
+        (self.call.adb.Pull('/path/to/device/file', mock.ANY)),
+        (mock.call.__builtin__.open(mock.ANY, 'r'), tmp_host),
+        (mock.call.os.path.exists(tmp_host_dir), True),
+        (mock.call.shutil.rmtree(tmp_host_dir), None)):
+      self.assertEquals('some interesting contents',
+                        self.device._ReadFileWithPull('/path/to/device/file'))
+    tmp_host.file.read.assert_called_once_with()
+
+  def testReadFileWithPull_rejected(self):
+    tmp_host_dir = '/tmp/dir/on.host/'
+    with self.assertCalls(
+        (mock.call.tempfile.mkdtemp(), tmp_host_dir),
+        (self.call.adb.Pull('/path/to/device/file', mock.ANY),
+         self.CommandError()),
+        (mock.call.os.path.exists(tmp_host_dir), True),
+        (mock.call.shutil.rmtree(tmp_host_dir), None)):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device._ReadFileWithPull('/path/to/device/file')
+
+  def testReadFile_exists(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/read/this/test/file'],
+            as_root=False, check_return=True),
+         ['-rw-rw---- root foo 256 1970-01-01 00:00 file']),
+        (self.call.device.RunShellCommand(
+            ['cat', '/read/this/test/file'],
+            as_root=False, check_return=True),
+         ['this is a test file'])):
+      self.assertEqual('this is a test file\n',
+                       self.device.ReadFile('/read/this/test/file'))
+
+  def testReadFile_doesNotExist(self):
+    with self.assertCall(
+        self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/file/does.not.exist'],
+            as_root=False, check_return=True),
+        self.CommandError('File does not exist')):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.ReadFile('/this/file/does.not.exist')
+
+  def testReadFile_zeroSize(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/file/has/zero/size'],
+            as_root=False, check_return=True),
+         ['-r--r--r-- root foo 0 1970-01-01 00:00 zero_size_file']),
+        (self.call.device._ReadFileWithPull('/this/file/has/zero/size'),
+         'but it has contents\n')):
+      self.assertEqual('but it has contents\n',
+                       self.device.ReadFile('/this/file/has/zero/size'))
+
+  def testReadFile_withSU(self):
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/file/can.be.read.with.su'],
+            as_root=True, check_return=True),
+         ['-rw------- root root 256 1970-01-01 00:00 can.be.read.with.su']),
+        (self.call.device.RunShellCommand(
+            ['cat', '/this/file/can.be.read.with.su'],
+            as_root=True, check_return=True),
+         ['this is a test file', 'read with su'])):
+      self.assertEqual(
+          'this is a test file\nread with su\n',
+          self.device.ReadFile('/this/file/can.be.read.with.su',
+                               as_root=True))
+
+  def testReadFile_withPull(self):
+    contents = 'a' * 123456
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/read/this/big/test/file'],
+            as_root=False, check_return=True),
+         ['-rw-rw---- root foo 123456 1970-01-01 00:00 file']),
+        (self.call.device._ReadFileWithPull('/read/this/big/test/file'),
+         contents)):
+      self.assertEqual(
+          contents, self.device.ReadFile('/read/this/big/test/file'))
+
+  def testReadFile_withPullAndSU(self):
+    contents = 'b' * 123456
+    with self.assertCalls(
+        (self.call.device.RunShellCommand(
+            ['ls', '-l', '/this/big/file/can.be.read.with.su'],
+            as_root=True, check_return=True),
+         ['-rw------- root root 123456 1970-01-01 00:00 can.be.read.with.su']),
+        (self.call.device.NeedsSU(), True),
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+         MockTempFile('/sdcard/tmp/on.device')),
+        self.call.device.RunShellCommand(
+            ['cp', '/this/big/file/can.be.read.with.su',
+             '/sdcard/tmp/on.device'],
+            as_root=True, check_return=True),
+        (self.call.device._ReadFileWithPull('/sdcard/tmp/on.device'),
+         contents)):
+      self.assertEqual(
+          contents,
+          self.device.ReadFile('/this/big/file/can.be.read.with.su',
+                               as_root=True))
+
+  def testReadFile_forcePull(self):
+    contents = 'a' * 123456
+    with self.assertCall(
+        self.call.device._ReadFileWithPull('/read/this/big/test/file'),
+        contents):
+      self.assertEqual(
+          contents,
+          self.device.ReadFile('/read/this/big/test/file', force_pull=True))
+
+
+class DeviceUtilsWriteFileTest(DeviceUtilsTest):
+
+  def testWriteFileWithPush_success(self):
+    tmp_host = MockTempFile('/tmp/file/on.host')
+    contents = 'some interesting contents'
+    with self.assertCalls(
+        (mock.call.tempfile.NamedTemporaryFile(), tmp_host),
+        self.call.adb.Push('/tmp/file/on.host', '/path/to/device/file')):
+      self.device._WriteFileWithPush('/path/to/device/file', contents)
+    tmp_host.file.write.assert_called_once_with(contents)
+
+  def testWriteFileWithPush_rejected(self):
+    tmp_host = MockTempFile('/tmp/file/on.host')
+    contents = 'some interesting contents'
+    with self.assertCalls(
+        (mock.call.tempfile.NamedTemporaryFile(), tmp_host),
+        (self.call.adb.Push('/tmp/file/on.host', '/path/to/device/file'),
+         self.CommandError())):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device._WriteFileWithPush('/path/to/device/file', contents)
+
+  def testWriteFile_withPush(self):
+    contents = 'some large contents ' * 26 # 20 * 26 = 520 chars
+    with self.assertCalls(
+        self.call.device._WriteFileWithPush('/path/to/device/file', contents)):
+      self.device.WriteFile('/path/to/device/file', contents)
+
+  def testWriteFile_withPushForced(self):
+    contents = 'tiny contents'
+    with self.assertCalls(
+        self.call.device._WriteFileWithPush('/path/to/device/file', contents)):
+      self.device.WriteFile('/path/to/device/file', contents, force_push=True)
+
+  def testWriteFile_withPushAndSU(self):
+    contents = 'some large contents ' * 26 # 20 * 26 = 520 chars
+    with self.assertCalls(
+        (self.call.device.NeedsSU(), True),
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(self.adb),
+         MockTempFile('/sdcard/tmp/on.device')),
+        self.call.device._WriteFileWithPush('/sdcard/tmp/on.device', contents),
+        self.call.device.RunShellCommand(
+            ['cp', '/sdcard/tmp/on.device', '/path/to/device/file'],
+            as_root=True, check_return=True)):
+      self.device.WriteFile('/path/to/device/file', contents, as_root=True)
+
+  def testWriteFile_withEcho(self):
+    with self.assertCall(self.call.adb.Shell(
+        "echo -n the.contents > /test/file/to.write"), ''):
+      self.device.WriteFile('/test/file/to.write', 'the.contents')
+
+  def testWriteFile_withEchoAndQuotes(self):
+    with self.assertCall(self.call.adb.Shell(
+        "echo -n 'the contents' > '/test/file/to write'"), ''):
+      self.device.WriteFile('/test/file/to write', 'the contents')
+
+  def testWriteFile_withEchoAndSU(self):
+    with self.assertCalls(
+        (self.call.device.NeedsSU(), True),
+        (self.call.adb.Shell("su -c sh -c 'echo -n contents > /test/file'"),
+         '')):
+      self.device.WriteFile('/test/file', 'contents', as_root=True)
+
+
+class DeviceUtilsLsTest(DeviceUtilsTest):
+
+  def testLs_directory(self):
+    result = [('.', adb_wrapper.DeviceStat(16889, 4096, 1417436123)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('testfile.txt', adb_wrapper.DeviceStat(33206, 3, 1417436122))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp'), result)):
+      self.assertEquals(result,
+                        self.device.Ls('/data/local/tmp'))
+
+  def testLs_nothing(self):
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp/testfile.txt'), [])):
+      self.assertEquals([],
+                        self.device.Ls('/data/local/tmp/testfile.txt'))
+
+
+class DeviceUtilsStatTest(DeviceUtilsTest):
+
+  def testStat_file(self):
+    result = [('.', adb_wrapper.DeviceStat(16889, 4096, 1417436123)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('testfile.txt', adb_wrapper.DeviceStat(33206, 3, 1417436122))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp'), result)):
+      self.assertEquals(adb_wrapper.DeviceStat(33206, 3, 1417436122),
+                        self.device.Stat('/data/local/tmp/testfile.txt'))
+
+  def testStat_directory(self):
+    result = [('.', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('tmp', adb_wrapper.DeviceStat(16889, 4096, 1417436123))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local'), result)):
+      self.assertEquals(adb_wrapper.DeviceStat(16889, 4096, 1417436123),
+                        self.device.Stat('/data/local/tmp'))
+
+  def testStat_doesNotExist(self):
+    result = [('.', adb_wrapper.DeviceStat(16889, 4096, 1417436123)),
+              ('..', adb_wrapper.DeviceStat(16873, 4096, 12382237)),
+              ('testfile.txt', adb_wrapper.DeviceStat(33206, 3, 1417436122))]
+    with self.assertCalls(
+        (self.call.adb.Ls('/data/local/tmp'), result)):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.Stat('/data/local/tmp/does.not.exist.txt')
+
+
+class DeviceUtilsSetJavaAssertsTest(DeviceUtilsTest):
+
+  def testSetJavaAsserts_enable(self):
+    with self.assertCalls(
+        (self.call.device.ReadFile(constants.DEVICE_LOCAL_PROPERTIES_PATH),
+         'some.example.prop=with an example value\n'
+         'some.other.prop=value_ok\n'),
+        self.call.device.WriteFile(
+            constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            'some.example.prop=with an example value\n'
+            'some.other.prop=value_ok\n'
+            'dalvik.vm.enableassertions=all\n'),
+        (self.call.device.GetProp('dalvik.vm.enableassertions'), ''),
+        self.call.device.SetProp('dalvik.vm.enableassertions', 'all')):
+      self.assertTrue(self.device.SetJavaAsserts(True))
+
+  def testSetJavaAsserts_disable(self):
+    with self.assertCalls(
+        (self.call.device.ReadFile(constants.DEVICE_LOCAL_PROPERTIES_PATH),
+         'some.example.prop=with an example value\n'
+         'dalvik.vm.enableassertions=all\n'
+         'some.other.prop=value_ok\n'),
+        self.call.device.WriteFile(
+            constants.DEVICE_LOCAL_PROPERTIES_PATH,
+            'some.example.prop=with an example value\n'
+            'some.other.prop=value_ok\n'),
+        (self.call.device.GetProp('dalvik.vm.enableassertions'), 'all'),
+        self.call.device.SetProp('dalvik.vm.enableassertions', '')):
+      self.assertTrue(self.device.SetJavaAsserts(False))
+
+  def testSetJavaAsserts_alreadyEnabled(self):
+    with self.assertCalls(
+        (self.call.device.ReadFile(constants.DEVICE_LOCAL_PROPERTIES_PATH),
+         'some.example.prop=with an example value\n'
+         'dalvik.vm.enableassertions=all\n'
+         'some.other.prop=value_ok\n'),
+        (self.call.device.GetProp('dalvik.vm.enableassertions'), 'all')):
+      self.assertFalse(self.device.SetJavaAsserts(True))
+
+
+class DeviceUtilsGetPropTest(DeviceUtilsTest):
+
+  def testGetProp_exists(self):
+    with self.assertCall(
+        self.call.adb.Shell('getprop test.property'), 'property_value\n'):
+      self.assertEqual('property_value',
+                       self.device.GetProp('test.property'))
+
+  def testGetProp_doesNotExist(self):
+    with self.assertCall(
+        self.call.adb.Shell('getprop property.does.not.exist'), '\n'):
+      self.assertEqual('', self.device.GetProp('property.does.not.exist'))
+
+  def testGetProp_cachedRoProp(self):
+    with self.assertCall(
+        self.call.adb.Shell('getprop ro.build.type'), 'userdebug\n'):
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type', cache=True))
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type', cache=True))
+
+  def testGetProp_retryAndCache(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('getprop ro.build.type'), self.ShellError()),
+        (self.call.adb.Shell('getprop ro.build.type'), self.ShellError()),
+        (self.call.adb.Shell('getprop ro.build.type'), 'userdebug\n')):
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type',
+                                           cache=True, retries=3))
+      self.assertEqual('userdebug',
+                       self.device.GetProp('ro.build.type',
+                                           cache=True, retries=3))
+
+
+class DeviceUtilsSetPropTest(DeviceUtilsTest):
+
+  def testSetProp(self):
+    with self.assertCall(
+        self.call.adb.Shell("setprop test.property 'test value'"), ''):
+      self.device.SetProp('test.property', 'test value')
+
+  def testSetProp_check_succeeds(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('setprop test.property new_value'), ''),
+        (self.call.adb.Shell('getprop test.property'), 'new_value')):
+      self.device.SetProp('test.property', 'new_value', check=True)
+
+  def testSetProp_check_fails(self):
+    with self.assertCalls(
+        (self.call.adb.Shell('setprop test.property new_value'), ''),
+        (self.call.adb.Shell('getprop test.property'), 'old_value')):
+      with self.assertRaises(device_errors.CommandFailedError):
+        self.device.SetProp('test.property', 'new_value', check=True)
+
+
+class DeviceUtilsGetPidsTest(DeviceUtilsTest):
+
+  def testGetPids_noMatches(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F does.not.match'),
+        []):
+      self.assertEqual({}, self.device.GetPids('does.not.match'))
+
+  def testGetPids_oneMatch(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F one.match'),
+        ['user  1001    100   1024 1024   ffffffff 00000000 one.match']):
+      self.assertEqual({'one.match': '1001'}, self.device.GetPids('one.match'))
+
+  def testGetPids_multipleMatches(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F match'),
+        ['user  1001    100   1024 1024   ffffffff 00000000 one.match',
+         'user  1002    100   1024 1024   ffffffff 00000000 two.match',
+         'user  1003    100   1024 1024   ffffffff 00000000 three.match']):
+      self.assertEqual(
+          {'one.match': '1001', 'two.match': '1002', 'three.match': '1003'},
+          self.device.GetPids('match'))
+
+  def testGetPids_exactMatch(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand('ps | grep -F exact.match'),
+        ['user  1000    100   1024 1024   ffffffff 00000000 not.exact.match',
+         'user  1234    100   1024 1024   ffffffff 00000000 exact.match']):
+      self.assertEqual(
+          {'not.exact.match': '1000', 'exact.match': '1234'},
+          self.device.GetPids('exact.match'))
+
+  def testGetPids_quotable(self):
+    with self.assertCall(
+        self.call.device._RunPipedShellCommand("ps | grep -F 'my$process'"),
+        ['user  1234    100   1024 1024   ffffffff 00000000 my$process']):
+      self.assertEqual(
+          {'my$process': '1234'}, self.device.GetPids('my$process'))
+
+
+class DeviceUtilsTakeScreenshotTest(DeviceUtilsTest):
+
+  def testTakeScreenshot_fileNameProvided(self):
+    with self.assertCalls(
+        (mock.call.pylib.utils.device_temp_file.DeviceTempFile(
+            self.adb, suffix='.png'),
+         MockTempFile('/tmp/path/temp-123.png')),
+        (self.call.adb.Shell('/system/bin/screencap -p /tmp/path/temp-123.png'),
+         ''),
+        self.call.device.PullFile('/tmp/path/temp-123.png',
+                                  '/test/host/screenshot.png')):
+      self.device.TakeScreenshot('/test/host/screenshot.png')
+
+
+class DeviceUtilsGetMemoryUsageForPidTest(DeviceUtilsTest):
+
+  def setUp(self):
+    super(DeviceUtilsGetMemoryUsageForPidTest, self).setUp()
+
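+  # The expected calls below assume GetMemoryUsageForPid parses the
+  # 'showmap <pid> | grep TOTAL' columns as Size, Rss, Pss, Shared_Clean,
+  # Shared_Dirty, Private_Clean and Private_Dirty (values in kB), and adds
+  # VmHWM from /proc/<pid>/status.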
+  def testGetMemoryUsageForPid_validPid(self):
+    with self.assertCalls(
+        (self.call.device._RunPipedShellCommand(
+            'showmap 1234 | grep TOTAL', as_root=True),
+         ['100 101 102 103 104 105 106 107 TOTAL']),
+        (self.call.device.ReadFile('/proc/1234/status', as_root=True),
+         'VmHWM: 1024 kB\n')):
+      self.assertEqual(
+          {
+            'Size': 100,
+            'Rss': 101,
+            'Pss': 102,
+            'Shared_Clean': 103,
+            'Shared_Dirty': 104,
+            'Private_Clean': 105,
+            'Private_Dirty': 106,
+            'VmHWM': 1024
+          },
+          self.device.GetMemoryUsageForPid(1234))
+
+  def testGetMemoryUsageForPid_noSmaps(self):
+    with self.assertCalls(
+        (self.call.device._RunPipedShellCommand(
+            'showmap 4321 | grep TOTAL', as_root=True),
+         ['cannot open /proc/4321/smaps: No such file or directory']),
+        (self.call.device.ReadFile('/proc/4321/status', as_root=True),
+         'VmHWM: 1024 kb\n')):
+      self.assertEquals({'VmHWM': 1024}, self.device.GetMemoryUsageForPid(4321))
+
+  def testGetMemoryUsageForPid_noStatus(self):
+    with self.assertCalls(
+        (self.call.device._RunPipedShellCommand(
+            'showmap 4321 | grep TOTAL', as_root=True),
+         ['100 101 102 103 104 105 106 107 TOTAL']),
+        (self.call.device.ReadFile('/proc/4321/status', as_root=True),
+         self.CommandError())):
+      self.assertEquals(
+          {
+            'Size': 100,
+            'Rss': 101,
+            'Pss': 102,
+            'Shared_Clean': 103,
+            'Shared_Dirty': 104,
+            'Private_Clean': 105,
+            'Private_Dirty': 106,
+          },
+          self.device.GetMemoryUsageForPid(4321))
+
+
+class DeviceUtilsClientCache(DeviceUtilsTest):
+
+  def testClientCache_twoCaches(self):
+    self.device._cache['test'] = 0
+    client_cache_one = self.device.GetClientCache('ClientOne')
+    client_cache_one['test'] = 1
+    client_cache_two = self.device.GetClientCache('ClientTwo')
+    client_cache_two['test'] = 2
+    self.assertEqual(self.device._cache, {'test': 0})
+    self.assertEqual(client_cache_one, {'test': 1})
+    self.assertEqual(client_cache_two, {'test': 2})
+    self.device._ClearCache()
+    self.assertEqual(self.device._cache, {})
+    self.assertEqual(client_cache_one, {})
+    self.assertEqual(client_cache_two, {})
+
+  def testClientCache_multipleInstances(self):
+    client_cache_one = self.device.GetClientCache('ClientOne')
+    client_cache_one['test'] = 1
+    client_cache_two = self.device.GetClientCache('ClientOne')
+    self.assertEqual(client_cache_one, {'test': 1})
+    self.assertEqual(client_cache_two, {'test': 1})
+    self.device._ClearCache()
+    self.assertEqual(client_cache_one, {})
+    self.assertEqual(client_cache_two, {})
+
+
+class DeviceUtilsParallelTest(mock_calls.TestCase):
+
+  def testParallel_default(self):
+    test_serials = ['0123456789abcdef', 'fedcba9876543210']
+    with self.assertCall(
+        mock.call.pylib.device.device_utils.DeviceUtils.HealthyDevices(),
+        [device_utils.DeviceUtils(s) for s in test_serials]):
+      parallel_devices = device_utils.DeviceUtils.parallel()
+    for serial, device in zip(test_serials, parallel_devices.pGet(None)):
+      self.assertTrue(isinstance(device, device_utils.DeviceUtils))
+      self.assertEquals(serial, device.adb.GetDeviceSerial())
+
+  def testParallel_noDevices(self):
+    with self.assertCall(
+        mock.call.pylib.device.device_utils.DeviceUtils.HealthyDevices(), []):
+      with self.assertRaises(device_errors.NoDevicesError):
+        device_utils.DeviceUtils.parallel()
+
+
+class DeviceUtilsHealthyDevicesTest(mock_calls.TestCase):
+
+  def _createAdbWrapperMock(self, serial, is_ready=True):
+    adb = _AdbWrapperMock(serial)
+    adb.is_ready = is_ready
+    return adb
+
+  def testHealthyDevices_default(self):
+    test_serials = ['0123456789abcdef', 'fedcba9876543210']
+    with self.assertCalls(
+        (mock.call.pylib.device.device_blacklist.ReadBlacklist(), []),
+        (mock.call.pylib.device.adb_wrapper.AdbWrapper.Devices(),
+         [self._createAdbWrapperMock(s) for s in test_serials])):
+      devices = device_utils.DeviceUtils.HealthyDevices()
+    for serial, device in zip(test_serials, devices):
+      self.assertTrue(isinstance(device, device_utils.DeviceUtils))
+      self.assertEquals(serial, device.adb.GetDeviceSerial())
+
+  def testHealthyDevices_blacklisted(self):
+    test_serials = ['0123456789abcdef', 'fedcba9876543210']
+    with self.assertCalls(
+        (mock.call.pylib.device.device_blacklist.ReadBlacklist(),
+         ['fedcba9876543210']),
+        (mock.call.pylib.device.adb_wrapper.AdbWrapper.Devices(),
+         [self._createAdbWrapperMock(s) for s in test_serials])):
+      devices = device_utils.DeviceUtils.HealthyDevices()
+    self.assertEquals(1, len(devices))
+    self.assertTrue(isinstance(devices[0], device_utils.DeviceUtils))
+    self.assertEquals('0123456789abcdef', devices[0].adb.GetDeviceSerial())
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/intent.py b/build/android/pylib/device/intent.py
new file mode 100644
index 0000000..333b9f1
--- /dev/null
+++ b/build/android/pylib/device/intent.py
@@ -0,0 +1,113 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Manages intents and associated information.
+
+This is generally intended to be used with functions that call Android's
+am command.
+"""
+
+class Intent(object):
+
+  def __init__(self, action='android.intent.action.VIEW', activity=None,
+               category=None, component=None, data=None, extras=None,
+               flags=None, package=None):
+    """Creates an Intent.
+
+    Args:
+      action: A string containing the action.
+      activity: A string that, with |package|, can be used to specify the
+                component.
+      category: A string or list containing any categories.
+      component: A string that specifies the component to send the intent to.
+      data: A string containing a data URI.
+      extras: A dict containing extra parameters to be passed along with the
+              intent.
+      flags: A string containing flags to pass.
+      package: A string that, with |activity|, can be used to specify the
+               component.
+    """
+    self._action = action
+    self._activity = activity
+    if isinstance(category, list) or category is None:
+      self._category = category
+    else:
+      self._category = [category]
+    self._component = component
+    self._data = data
+    self._extras = extras
+    self._flags = flags
+    self._package = package
+
+    if self._component and '/' in component:
+      self._package, self._activity = component.split('/', 1)
+    elif self._package and self._activity:
+      self._component = '%s/%s' % (package, activity)
+
+  @property
+  def action(self):
+    return self._action
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def category(self):
+    return self._category
+
+  @property
+  def component(self):
+    return self._component
+
+  @property
+  def data(self):
+    return self._data
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def am_args(self):
+    """Returns the intent as a list of arguments for the activity manager.
+
+    For details refer to the specification at:
+    - http://developer.android.com/tools/help/adb.html#IntentSpec
+    """
+    args = []
+    if self.action:
+      args.extend(['-a', self.action])
+    if self.data:
+      args.extend(['-d', self.data])
+    if self.category:
+      args.extend(arg for cat in self.category for arg in ('-c', cat))
+    if self.component:
+      args.extend(['-n', self.component])
+    if self.flags:
+      args.extend(['-f', self.flags])
+    if self.extras:
+      for key, value in self.extras.iteritems():
+        if value is None:
+          args.extend(['--esn', key])
+        elif isinstance(value, str):
+          args.extend(['--es', key, value])
+        elif isinstance(value, bool):
+          args.extend(['--ez', key, str(value)])
+        elif isinstance(value, int):
+          args.extend(['--ei', key, str(value)])
+        elif isinstance(value, float):
+          args.extend(['--ef', key, str(value)])
+        else:
+          raise NotImplementedError(
+              'Intent does not know how to pass %s extras' % type(value))
+    return args
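+
+
+# Illustrative usage (a sketch, not part of the class above; the package and
+# activity names are hypothetical):
+#
+#   intent.Intent(action='android.intent.action.VIEW',
+#                 package='com.example.app', activity='.Main',
+#                 extras={'foo': 'bar'}).am_args
+#   # => ['-a', 'android.intent.action.VIEW',
+#   #     '-n', 'com.example.app/.Main', '--es', 'foo', 'bar']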
diff --git a/build/android/pylib/device/logcat_monitor.py b/build/android/pylib/device/logcat_monitor.py
new file mode 100644
index 0000000..2eebc2d
--- /dev/null
+++ b/build/android/pylib/device/logcat_monitor.py
@@ -0,0 +1,139 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=unused-argument
+
+import collections
+import itertools
+import logging
+import subprocess
+import tempfile
+import time
+import re
+
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import device_errors
+
+
+class LogcatMonitor(object):
+
+  _THREADTIME_RE_FORMAT = (
+      r'(?P<date>\S*) +(?P<time>\S*) +(?P<proc_id>%s) +(?P<thread_id>%s) +'
+      r'(?P<log_level>%s) +(?P<component>%s) *: +(?P<message>%s)$')
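+  # For example (illustrative values only), a 'threadtime' formatted line
+  # such as
+  #   01-15 12:34:56.789  1234  5678 I chromium: Hello world
+  # yields date='01-15', time='12:34:56.789', proc_id='1234',
+  # thread_id='5678', log_level='I', component='chromium' and, given a
+  # message_regex that matches it, message='Hello world'.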
+
+  def __init__(self, adb, clear=True, filter_specs=None):
+    """Create a LogcatMonitor instance.
+
+    Args:
+      adb: An instance of adb_wrapper.AdbWrapper.
+      clear: If True, clear the logcat when monitoring starts.
+      filter_specs: An optional list of '<tag>[:priority]' strings.
+    """
+    if isinstance(adb, adb_wrapper.AdbWrapper):
+      self._adb = adb
+    else:
+      raise ValueError('Unsupported type passed for argument "adb"')
+    self._clear = clear
+    self._filter_specs = filter_specs
+    self._logcat_out = None
+    self._logcat_out_file = None
+    self._logcat_proc = None
+
+  @decorators.WithTimeoutAndRetriesDefaults(10, 0)
+  def WaitFor(self, success_regex, failure_regex=None, timeout=None,
+              retries=None):
+    """Wait for a matching logcat line or until a timeout occurs.
+
+    This will attempt to match lines in the logcat against both |success_regex|
+    and |failure_regex| (if provided). Note that this calls re.search on each
+    logcat line, not re.match, so the provided regular expressions don't have
+    to match an entire line.
+
+    Args:
+      success_regex: The regular expression to search for.
+      failure_regex: An optional regular expression that, if hit, causes this
+        to stop looking for a match. Can be None.
+      timeout: Timeout in seconds.
+      retries: Number of retries.
+
+    Returns:
+      A match object if |success_regex| matches a part of a logcat line, or
+      None if |failure_regex| matches a part of a logcat line.
+    Raises:
+      CommandFailedError on logcat failure (NOT on a |failure_regex| match).
+      CommandTimeoutError if no logcat line matching either |success_regex| or
+        |failure_regex| is found in |timeout| seconds.
+      DeviceUnreachableError if the device becomes unreachable.
+    """
+    if isinstance(success_regex, basestring):
+      success_regex = re.compile(success_regex)
+    if isinstance(failure_regex, basestring):
+      failure_regex = re.compile(failure_regex)
+
+    logging.debug('Waiting %d seconds for "%s"', timeout, success_regex.pattern)
+
+    # NOTE This will continue looping until:
+    #  - success_regex matches a line, in which case the match object is
+    #    returned.
+    #  - failure_regex matches a line, in which case None is returned
+    #  - the timeout is hit, in which case a CommandTimeoutError is raised.
+    for l in self._adb.Logcat(filter_specs=self._filter_specs):
+      m = success_regex.search(l)
+      if m:
+        return m
+      if failure_regex and failure_regex.search(l):
+        return None
+
+  def FindAll(self, message_regex, proc_id=None, thread_id=None, log_level=None,
+              component=None):
+    """Finds all lines in the logcat that match the provided constraints.
+
+    Args:
+      message_regex: The regular expression that the <message> section must
+        match.
+      proc_id: The process ID to match. If None, matches any process ID.
+      thread_id: The thread ID to match. If None, matches any thread ID.
+      log_level: The log level to match. If None, matches any log level.
+      component: The component to match. If None, matches any component.
+
+    Yields:
+      A match object for each matching line in the logcat. The match object
+      will always contain, in addition to groups defined in |message_regex|,
+      the following named groups: 'date', 'time', 'proc_id', 'thread_id',
+      'log_level', 'component', and 'message'.
+    """
+    if proc_id is None:
+      proc_id = r'\d+'
+    if thread_id is None:
+      thread_id = r'\d+'
+    if log_level is None:
+      log_level = r'[VDIWEF]'
+    if component is None:
+      component = r'[^\s:]+'
+    threadtime_re = re.compile(
+        type(self)._THREADTIME_RE_FORMAT % (
+            proc_id, thread_id, log_level, component, message_regex))
+
+    for line in self._adb.Logcat(dump=True, logcat_format='threadtime'):
+      m = re.match(threadtime_re, line)
+      if m:
+        yield m
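+
+  # Illustrative FindAll sketch (the message regex and component name are
+  # hypothetical):
+  #
+  #   for match in monitor.FindAll(r'Received (?P<count>\d+) results',
+  #                                component='MyComponent'):
+  #     print match.group('count')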
+
+  def Start(self):
+    """Starts the logcat monitor.
+
+    Clears the logcat if |clear| was set in |__init__|.
+    """
+    if self._clear:
+      self._adb.Logcat(clear=True)
+
+  def __enter__(self):
+    """Starts the logcat monitor."""
+    self.Start()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Stops the logcat monitor."""
+    pass
diff --git a/build/android/pylib/device/logcat_monitor_test.py b/build/android/pylib/device/logcat_monitor_test.py
new file mode 100755
index 0000000..db397e57
--- /dev/null
+++ b/build/android/pylib/device/logcat_monitor_test.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import itertools
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import decorators
+from pylib.device import logcat_monitor
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class LogcatMonitorTest(unittest.TestCase):
+
+  _TEST_THREADTIME_LOGCAT_DATA = [
+        '01-01 01:02:03.456  7890  0987 V LogcatMonitorTest: '
+            'verbose logcat monitor test message 1',
+        '01-01 01:02:03.457  8901  1098 D LogcatMonitorTest: '
+            'debug logcat monitor test message 2',
+        '01-01 01:02:03.458  9012  2109 I LogcatMonitorTest: '
+            'info logcat monitor test message 3',
+        '01-01 01:02:03.459  0123  3210 W LogcatMonitorTest: '
+            'warning logcat monitor test message 4',
+        '01-01 01:02:03.460  1234  4321 E LogcatMonitorTest: '
+            'error logcat monitor test message 5',
+        '01-01 01:02:03.461  2345  5432 F LogcatMonitorTest: '
+            'fatal logcat monitor test message 6',
+        '01-01 01:02:03.462  3456  6543 D LogcatMonitorTest: '
+            'ignore me',]
+
+  def _createTestLog(self, raw_logcat=None):
+    test_adb = adb_wrapper.AdbWrapper('0123456789abcdef')
+    test_adb.Logcat = mock.Mock(return_value=(l for l in raw_logcat))
+    test_log = logcat_monitor.LogcatMonitor(test_adb, clear=False)
+    return test_log
+
+  def assertIterEqual(self, expected_iter, actual_iter):
+    for expected, actual in itertools.izip_longest(expected_iter, actual_iter):
+      self.assertIsNotNone(
+          expected,
+          msg='actual has unexpected elements starting with %s' % str(actual))
+      self.assertIsNotNone(
+          actual,
+          msg='actual is missing elements starting with %s' % str(expected))
+      self.assertEqual(actual.group('proc_id'), expected[0])
+      self.assertEqual(actual.group('thread_id'), expected[1])
+      self.assertEqual(actual.group('log_level'), expected[2])
+      self.assertEqual(actual.group('component'), expected[3])
+      self.assertEqual(actual.group('message'), expected[4])
+
+    with self.assertRaises(StopIteration):
+      next(actual_iter)
+    with self.assertRaises(StopIteration):
+      next(expected_iter)
+
+  def testWaitFor_success(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_match = test_log.WaitFor(r'.*(fatal|error) logcat monitor.*', None)
+    self.assertTrue(actual_match)
+    self.assertEqual(
+        '01-01 01:02:03.460  1234  4321 E LogcatMonitorTest: '
+            'error logcat monitor test message 5',
+        actual_match.group(0))
+    self.assertEqual('error', actual_match.group(1))
+
+  def testWaitFor_failure(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_match = test_log.WaitFor(
+        r'.*My Success Regex.*', r'.*(fatal|error) logcat monitor.*')
+    self.assertIsNone(actual_match)
+
+  def testFindAll_defaults(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    expected_results = [
+        ('7890', '0987', 'V', 'LogcatMonitorTest',
+         'verbose logcat monitor test message 1'),
+        ('8901', '1098', 'D', 'LogcatMonitorTest',
+         'debug logcat monitor test message 2'),
+        ('9012', '2109', 'I', 'LogcatMonitorTest',
+         'info logcat monitor test message 3'),
+        ('0123', '3210', 'W', 'LogcatMonitorTest',
+         'warning logcat monitor test message 4'),
+        ('1234', '4321', 'E', 'LogcatMonitorTest',
+         'error logcat monitor test message 5'),
+        ('2345', '5432', 'F', 'LogcatMonitorTest',
+         'fatal logcat monitor test message 6')]
+    actual_results = test_log.FindAll(r'\S* logcat monitor test message \d')
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_defaults_miss(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    expected_results = []
+    actual_results = test_log.FindAll(r'\S* nothing should match this \d')
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterProcId(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(
+        r'\S* logcat monitor test message \d', proc_id=1234)
+    expected_results = [
+        ('1234', '4321', 'E', 'LogcatMonitorTest',
+         'error logcat monitor test message 5')]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterThreadId(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(
+        r'\S* logcat monitor test message \d', thread_id=2109)
+    expected_results = [
+        ('9012', '2109', 'I', 'LogcatMonitorTest',
+         'info logcat monitor test message 3')]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterLogLevel(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(
+        r'\S* logcat monitor test message \d', log_level=r'[DW]')
+    expected_results = [
+        ('8901', '1098', 'D', 'LogcatMonitorTest',
+         'debug logcat monitor test message 2'),
+        ('0123', '3210', 'W', 'LogcatMonitorTest',
+         'warning logcat monitor test message 4'),]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+  def testFindAll_filterComponent(self):
+    test_log = self._createTestLog(
+        raw_logcat=type(self)._TEST_THREADTIME_LOGCAT_DATA)
+    actual_results = test_log.FindAll(r'.*', component='LogcatMonitorTest')
+    expected_results = [
+        ('7890', '0987', 'V', 'LogcatMonitorTest',
+         'verbose logcat monitor test message 1'),
+        ('8901', '1098', 'D', 'LogcatMonitorTest',
+         'debug logcat monitor test message 2'),
+        ('9012', '2109', 'I', 'LogcatMonitorTest',
+         'info logcat monitor test message 3'),
+        ('0123', '3210', 'W', 'LogcatMonitorTest',
+         'warning logcat monitor test message 4'),
+        ('1234', '4321', 'E', 'LogcatMonitorTest',
+         'error logcat monitor test message 5'),
+        ('2345', '5432', 'F', 'LogcatMonitorTest',
+         'fatal logcat monitor test message 6'),
+        ('3456', '6543', 'D', 'LogcatMonitorTest',
+         'ignore me'),]
+    self.assertIterEqual(iter(expected_results), actual_results)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/device/shared_prefs.py b/build/android/pylib/device/shared_prefs.py
new file mode 100644
index 0000000..32cef4b
--- /dev/null
+++ b/build/android/pylib/device/shared_prefs.py
@@ -0,0 +1,391 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper object to read and modify Shared Preferences from Android apps.
+
+See e.g.:
+  http://developer.android.com/reference/android/content/SharedPreferences.html
+"""
+
+import collections
+import logging
+import posixpath
+
+from xml.etree import ElementTree
+
+
+_XML_DECLARATION = "<?xml version='1.0' encoding='utf-8' standalone='yes' ?>\n"
+
+
+class BasePref(object):
+  """Base class for getting/setting the value of a specific preference type.
+
+  Should not be instantiated directly. The SharedPrefs collection will
+  instantiate the appropriate subclasses, which directly manipulate the
+  underlying xml document, to parse and serialize values according to their
+  type.
+
+  Args:
+    elem: An xml ElementTree object holding the preference data.
+
+  Properties:
+    tag_name: A string with the tag that must be used for this preference type.
+  """
+  tag_name = None
+
+  def __init__(self, elem):
+    if elem.tag != type(self).tag_name:
+      raise TypeError('Property %r has type %r, but trying to access as %r' %
+                      (elem.get('name'), elem.tag, type(self).tag_name))
+    self._elem = elem
+
+  def __str__(self):
+    """Get the underlying xml element as a string."""
+    return ElementTree.tostring(self._elem)
+
+  def get(self):
+    """Get the value of this preference."""
+    return self._elem.get('value')
+
+  def set(self, value):
+    """Set from a value casted as a string."""
+    self._elem.set('value', str(value))
+
+  @property
+  def has_value(self):
+    """Check whether the element has a value."""
+    return self._elem.get('value') is not None
+
+
+class BooleanPref(BasePref):
+  """Class for getting/setting a preference with a boolean value.
+
+  The underlying xml element has the form, e.g.:
+      <boolean name="featureEnabled" value="false" />
+  """
+  tag_name = 'boolean'
+  VALUES = {'true': True, 'false': False}
+
+  def get(self):
+    """Get the value as a Python bool."""
+    return type(self).VALUES[super(BooleanPref, self).get()]
+
+  def set(self, value):
+    """Set from a value casted as a bool."""
+    super(BooleanPref, self).set('true' if value else 'false')
+
+
+class FloatPref(BasePref):
+  """Class for getting/setting a preference with a float value.
+
+  The underlying xml element has the form, e.g.:
+      <float name="someMetric" value="4.7" />
+  """
+  tag_name = 'float'
+
+  def get(self):
+    """Get the value as a Python float."""
+    return float(super(FloatPref, self).get())
+
+
+class IntPref(BasePref):
+  """Class for getting/setting a preference with an int value.
+
+  The underlying xml element has the form, e.g.:
+      <int name="aCounter" value="1234" />
+  """
+  tag_name = 'int'
+
+  def get(self):
+    """Get the value as a Python int."""
+    return int(super(IntPref, self).get())
+
+
+class LongPref(IntPref):
+  """Class for getting/setting a preference with a long value.
+
+  The underlying xml element has the form, e.g.:
+      <long name="aLongCounter" value="1234" />
+
+  We reuse the implementation of IntPref.
+  """
+  tag_name = 'long'
+
+
+class StringPref(BasePref):
+  """Class for getting/setting a preference with a string value.
+
+  The underlying xml element has the form, e.g.:
+      <string name="someHashValue">249b3e5af13d4db2</string>
+  """
+  tag_name = 'string'
+
+  def get(self):
+    """Get the value as a Python string."""
+    return self._elem.text
+
+  def set(self, value):
+    """Set from a value casted as a string."""
+    self._elem.text = str(value)
+
+
+class StringSetPref(StringPref):
+  """Class for getting/setting a preference with a set of string values.
+
+  The underlying xml element has the form, e.g.:
+      <set name="managed_apps">
+          <string>com.mine.app1</string>
+          <string>com.mine.app2</string>
+          <string>com.mine.app3</string>
+      </set>
+  """
+  tag_name = 'set'
+
+  def get(self):
+    """Get a list with the string values contained."""
+    value = []
+    for child in self._elem:
+      assert child.tag == 'string'
+      value.append(child.text)
+    return value
+
+  def set(self, value):
+    """Set from a sequence of values, each casted as a string."""
+    for child in list(self._elem):
+      self._elem.remove(child)
+    for item in value:
+      ElementTree.SubElement(self._elem, 'string').text = str(item)
+
+
+_PREF_TYPES = {c.tag_name: c for c in [BooleanPref, FloatPref, IntPref,
+                                       LongPref, StringPref, StringSetPref]}
+
+
+class SharedPrefs(object):
+  def __init__(self, device, package, filename):
+    """Helper object to read and update "Shared Prefs" of Android apps.
+
+    Such files typically look like, e.g.:
+
+        <?xml version='1.0' encoding='utf-8' standalone='yes' ?>
+        <map>
+          <int name="databaseVersion" value="107" />
+          <boolean name="featureEnabled" value="false" />
+          <string name="someHashValue">249b3e5af13d4db2</string>
+        </map>
+
+    Example usage:
+
+        prefs = shared_prefs.SharedPrefs(device, 'com.my.app', 'my_prefs.xml')
+        prefs.Load()
+        prefs.GetString('someHashValue') # => '249b3e5af13d4db2'
+        prefs.SetInt('databaseVersion', 42)
+        prefs.Remove('featureEnabled')
+        prefs.Commit()
+
+    The object may also be used as a context manager to automatically load and
+    commit, respectively, upon entering and leaving the context.
+
+    Args:
+      device: A DeviceUtils object.
+      package: A string with the package name of the app that owns the shared
+        preferences file.
+      filename: A string with the name of the preferences file to read/write.
+    """
+    self._device = device
+    self._xml = None
+    self._package = package
+    self._filename = filename
+    self._path = '/data/data/%s/shared_prefs/%s' % (package, filename)
+    self._changed = False
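+
+  # Context-manager sketch of the usage described above (package, filename and
+  # key are hypothetical); Load() runs on enter and Commit() on a clean exit:
+  #
+  #   with shared_prefs.SharedPrefs(device, 'com.my.app', 'my_prefs.xml') as p:
+  #     p.SetBoolean('featureEnabled', True)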
+
+  def __repr__(self):
+    """Get a useful printable representation of the object."""
+    return '<{cls} file {filename} for {package} on {device}>'.format(
+      cls=type(self).__name__, filename=self.filename, package=self.package,
+      device=str(self._device))
+
+  def __str__(self):
+    """Get the underlying xml document as a string."""
+    return _XML_DECLARATION + ElementTree.tostring(self.xml)
+
+  @property
+  def package(self):
+    """Get the package name of the app that owns the shared preferences."""
+    return self._package
+
+  @property
+  def filename(self):
+    """Get the filename of the shared preferences file."""
+    return self._filename
+
+  @property
+  def path(self):
+    """Get the full path to the shared preferences file on the device."""
+    return self._path
+
+  @property
+  def changed(self):
+    """True if properties have changed and a commit would be needed."""
+    return self._changed
+
+  @property
+  def xml(self):
+    """Get the underlying xml document as an ElementTree object."""
+    if self._xml is None:
+      self._xml = ElementTree.Element('map')
+    return self._xml
+
+  def Load(self):
+    """Load the shared preferences file from the device.
+
+    An empty xml document, which may be modified and saved on |Commit|, is
+    created if the file does not already exist.
+    """
+    if self._device.FileExists(self.path):
+      self._xml = ElementTree.fromstring(
+          self._device.ReadFile(self.path, as_root=True))
+      assert self._xml.tag == 'map'
+    else:
+      self._xml = None
+    self._changed = False
+
+  def Clear(self):
+    """Clear all of the preferences contained in this object."""
+    if self._xml is not None and len(self): # only clear if not already empty
+      self._xml = None
+      self._changed = True
+
+  def Commit(self):
+    """Save the current set of preferences to the device.
+
+    Only actually saves if some preferences have been modified.
+    """
+    if not self.changed:
+      return
+    self._device.RunShellCommand(
+        ['mkdir', '-p', posixpath.dirname(self.path)],
+        as_root=True, check_return=True)
+    self._device.WriteFile(self.path, str(self), as_root=True)
+    self._device.KillAll(self.package, as_root=True, quiet=True)
+    self._changed = False
+
+  def __len__(self):
+    """Get the number of preferences in this collection."""
+    return len(self.xml)
+
+  def PropertyType(self, key):
+    """Get the type (i.e. tag name) of a property in the collection."""
+    return self._GetChild(key).tag
+
+  def HasProperty(self, key):
+    """Check whether the collection has a property with the given key."""
+    try:
+      self._GetChild(key)
+      return True
+    except KeyError:
+      return False
+
+  def GetBoolean(self, key):
+    """Get a boolean property."""
+    return BooleanPref(self._GetChild(key)).get()
+
+  def SetBoolean(self, key, value):
+    """Set a boolean property."""
+    self._SetPrefValue(key, value, BooleanPref)
+
+  def GetFloat(self, key):
+    """Get a float property."""
+    return FloatPref(self._GetChild(key)).get()
+
+  def SetFloat(self, key, value):
+    """Set a float property."""
+    self._SetPrefValue(key, value, FloatPref)
+
+  def GetInt(self, key):
+    """Get an int property."""
+    return IntPref(self._GetChild(key)).get()
+
+  def SetInt(self, key, value):
+    """Set an int property."""
+    self._SetPrefValue(key, value, IntPref)
+
+  def GetLong(self, key):
+    """Get a long property."""
+    return LongPref(self._GetChild(key)).get()
+
+  def SetLong(self, key, value):
+    """Set a long property."""
+    self._SetPrefValue(key, value, LongPref)
+
+  def GetString(self, key):
+    """Get a string property."""
+    return StringPref(self._GetChild(key)).get()
+
+  def SetString(self, key, value):
+    """Set a string property."""
+    self._SetPrefValue(key, value, StringPref)
+
+  def GetStringSet(self, key):
+    """Get a string set property."""
+    return StringSetPref(self._GetChild(key)).get()
+
+  def SetStringSet(self, key, value):
+    """Set a string set property."""
+    self._SetPrefValue(key, value, StringSetPref)
+
+  def Remove(self, key):
+    """Remove a preference from the collection."""
+    self.xml.remove(self._GetChild(key))
+
+  def AsDict(self):
+    """Return the properties and their values as a dictionary."""
+    d = {}
+    for child in self.xml:
+      pref = _PREF_TYPES[child.tag](child)
+      d[child.get('name')] = pref.get()
+    return d
+
+  def __enter__(self):
+    """Load preferences file from the device when entering a context."""
+    self.Load()
+    return self
+
+  def __exit__(self, exc_type, _exc_value, _traceback):
+    """Save preferences file to the device when leaving a context."""
+    if not exc_type:
+      self.Commit()
+
+  def _GetChild(self, key):
+    """Get the underlying xml node that holds the property of a given key.
+
+    Raises:
+      KeyError when the key is not found in the collection.
+    """
+    for child in self.xml:
+      if child.get('name') == key:
+        return child
+    raise KeyError(key)
+
+  def _SetPrefValue(self, key, value, pref_cls):
+    """Set the value of a property.
+
+    Args:
+      key: The key of the property to set.
+      value: The new value of the property.
+      pref_cls: A subclass of BasePref used to access the property.
+
+    Raises:
+      TypeError when the key already exists but with a different type.
+    """
+    try:
+      pref = pref_cls(self._GetChild(key))
+      old_value = pref.get()
+    except KeyError:
+      pref = pref_cls(ElementTree.SubElement(
+          self.xml, pref_cls.tag_name, {'name': key}))
+      old_value = None
+    if old_value != value:
+      pref.set(value)
+      self._changed = True
+      logging.info('Setting property: %s', pref)
diff --git a/build/android/pylib/device/shared_prefs_test.py b/build/android/pylib/device/shared_prefs_test.py
new file mode 100755
index 0000000..c5f0ec3
--- /dev/null
+++ b/build/android/pylib/device/shared_prefs_test.py
@@ -0,0 +1,169 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of shared_prefs.py (mostly SharedPrefs).
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import device_utils
+from pylib.device import shared_prefs
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock
+
+
+def MockDeviceWithFiles(files=None):
+  if files is None:
+    files = {}
+
+  def file_exists(path):
+    return path in files
+
+  def write_file(path, contents, **_kwargs):
+    files[path] = contents
+
+  def read_file(path, **_kwargs):
+    return files[path]
+
+  device = mock.MagicMock(spec=device_utils.DeviceUtils)
+  device.FileExists = mock.Mock(side_effect=file_exists)
+  device.WriteFile = mock.Mock(side_effect=write_file)
+  device.ReadFile = mock.Mock(side_effect=read_file)
+  return device
+
+
+class SharedPrefsTest(unittest.TestCase):
+
+  def setUp(self):
+    self.device = MockDeviceWithFiles({
+      '/data/data/com.some.package/shared_prefs/prefs.xml':
+          "<?xml version='1.0' encoding='utf-8' standalone='yes' ?>\n"
+          '<map>\n'
+          '  <int name="databaseVersion" value="107" />\n'
+          '  <boolean name="featureEnabled" value="false" />\n'
+          '  <string name="someHashValue">249b3e5af13d4db2</string>\n'
+          '</map>'})
+    self.expected_data = {'databaseVersion': 107,
+                          'featureEnabled': False,
+                          'someHashValue': '249b3e5af13d4db2'}
+
+  def testPropertyLifetime(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    self.assertEquals(len(prefs), 0) # collection is empty before loading
+    prefs.SetInt('myValue', 444)
+    self.assertEquals(len(prefs), 1)
+    self.assertEquals(prefs.GetInt('myValue'), 444)
+    self.assertTrue(prefs.HasProperty('myValue'))
+    prefs.Remove('myValue')
+    self.assertEquals(len(prefs), 0)
+    self.assertFalse(prefs.HasProperty('myValue'))
+    with self.assertRaises(KeyError):
+      prefs.GetInt('myValue')
+
+  def testPropertyType(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    prefs.SetInt('myValue', 444)
+    self.assertEquals(prefs.PropertyType('myValue'), 'int')
+    with self.assertRaises(TypeError):
+      prefs.GetString('myValue')
+    with self.assertRaises(TypeError):
+      prefs.SetString('myValue', 'hello')
+
+  def testLoad(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    self.assertEquals(len(prefs), 0) # collection is empty before loading
+    prefs.Load()
+    self.assertEquals(len(prefs), len(self.expected_data))
+    self.assertEquals(prefs.AsDict(), self.expected_data)
+    self.assertFalse(prefs.changed)
+
+  def testClear(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml')
+    prefs.Load()
+    self.assertEquals(prefs.AsDict(), self.expected_data)
+    self.assertFalse(prefs.changed)
+    prefs.Clear()
+    self.assertEquals(len(prefs), 0) # collection is empty now
+    self.assertTrue(prefs.changed)
+
+  def testCommit(self):
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'other_prefs.xml')
+    self.assertFalse(self.device.FileExists(prefs.path)) # file does not exist
+    prefs.Load()
+    self.assertEquals(len(prefs), 0) # file did not exist, collection is empty
+    prefs.SetInt('magicNumber', 42)
+    prefs.SetFloat('myMetric', 3.14)
+    prefs.SetLong('bigNumber', 6000000000)
+    prefs.SetStringSet('apps', ['gmail', 'chrome', 'music'])
+    self.assertFalse(self.device.FileExists(prefs.path)) # still does not exist
+    self.assertTrue(prefs.changed)
+    prefs.Commit()
+    self.assertTrue(self.device.FileExists(prefs.path)) # should exist now
+    self.device.KillAll.assert_called_once_with(prefs.package, as_root=True,
+                                                quiet=True)
+    self.assertFalse(prefs.changed)
+
+    prefs = shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'other_prefs.xml')
+    self.assertEquals(len(prefs), 0) # collection is empty before loading
+    prefs.Load()
+    self.assertEquals(prefs.AsDict(), {
+        'magicNumber': 42,
+        'myMetric': 3.14,
+        'bigNumber': 6000000000,
+        'apps': ['gmail', 'chrome', 'music']}) # data survived roundtrip
+
+  def testAsContextManager_onlyReads(self):
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      self.assertEquals(prefs.AsDict(), self.expected_data) # loaded and ready
+    self.assertEquals(self.device.WriteFile.call_args_list, []) # did not write
+
+  def testAsContextManager_readAndWrite(self):
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      prefs.SetBoolean('featureEnabled', True)
+      prefs.Remove('someHashValue')
+      prefs.SetString('newString', 'hello')
+
+    self.assertTrue(self.device.WriteFile.called) # did write
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      # changes persisted
+      self.assertTrue(prefs.GetBoolean('featureEnabled'))
+      self.assertFalse(prefs.HasProperty('someHashValue'))
+      self.assertEquals(prefs.GetString('newString'), 'hello')
+      self.assertTrue(prefs.HasProperty('databaseVersion')) # still there
+
+  def testAsContextManager_commitAborted(self):
+    with self.assertRaises(TypeError):
+      with shared_prefs.SharedPrefs(
+          self.device, 'com.some.package', 'prefs.xml') as prefs:
+        prefs.SetBoolean('featureEnabled', True)
+        prefs.Remove('someHashValue')
+        prefs.SetString('newString', 'hello')
+        prefs.SetInt('newString', 123) # oops!
+
+    self.assertEquals(self.device.WriteFile.call_args_list, []) # did not write
+    with shared_prefs.SharedPrefs(
+        self.device, 'com.some.package', 'prefs.xml') as prefs:
+      # contents were not modified
+      self.assertEquals(prefs.AsDict(), self.expected_data)
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/device_settings.py b/build/android/pylib/device_settings.py
new file mode 100644
index 0000000..beabcff
--- /dev/null
+++ b/build/android/pylib/device_settings.py
@@ -0,0 +1,198 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import constants
+from pylib import content_settings
+from pylib.device import device_errors
+
+_LOCK_SCREEN_SETTINGS_PATH = '/data/system/locksettings.db'
+_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH = (
+    '/data/data/com.android.providers.settings/databases/settings.db')
+PASSWORD_QUALITY_UNSPECIFIED = '0'
+
+
+def ConfigureContentSettings(device, desired_settings):
+  """Configures device content setings from a list.
+
+  Many settings are documented at:
+    http://developer.android.com/reference/android/provider/Settings.Global.html
+    http://developer.android.com/reference/android/provider/Settings.Secure.html
+    http://developer.android.com/reference/android/provider/Settings.System.html
+
+  Many others are undocumented.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+    desired_settings: A list of (table, [(key, value), ...]) tuples for all
+        settings to configure.
+  """
+  if device.build_type == 'userdebug':
+    for table, key_value in desired_settings:
+      settings = content_settings.ContentSettings(table, device)
+      for key, value in key_value:
+        settings[key] = value
+      logging.info('\n%s %s', table, (80 - len(table)) * '-')
+      for key, value in sorted(settings.iteritems()):
+        logging.info('\t%s: %s', key, value)
+
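+# Illustrative call (sketch; DETERMINISTIC_DEVICE_SETTINGS is defined below and
+# |device| is a DeviceUtils instance):
+#
+#   device_settings.ConfigureContentSettings(
+#       device, device_settings.DETERMINISTIC_DEVICE_SETTINGS)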
+
+def SetLockScreenSettings(device):
+  """Sets lock screen settings on the device.
+
+  On certain device/Android configurations we need to disable the lock screen in
+  a different database. Additionally, the password type must be set to
+  DevicePolicyManager.PASSWORD_QUALITY_UNSPECIFIED.
+  Lock screen settings are stored in sqlite on the device in:
+      /data/system/locksettings.db
+
+  IMPORTANT: The first column is used as a primary key so that all rows with the
+  same value for that column are removed from the table prior to inserting the
+  new values.
+
+  Args:
+    device: A DeviceUtils instance for the device to configure.
+
+  Raises:
+    Exception if the setting was not properly set.
+  """
+  if device.build_type != 'userdebug':
+    logging.warning('Unable to disable lockscreen on user builds.')
+    return
+
+  def get_lock_settings(table):
+    return [(table, 'lockscreen.disabled', '1'),
+            (table, 'lockscreen.password_type', PASSWORD_QUALITY_UNSPECIFIED),
+            (table, 'lockscreen.password_type_alternate',
+             PASSWORD_QUALITY_UNSPECIFIED)]
+
+  if device.FileExists(_LOCK_SCREEN_SETTINGS_PATH):
+    db = _LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('locksettings')
+    columns = ['name', 'user', 'value']
+    generate_values = lambda k, v: [k, '0', v]
+  elif device.FileExists(_ALTERNATE_LOCK_SCREEN_SETTINGS_PATH):
+    db = _ALTERNATE_LOCK_SCREEN_SETTINGS_PATH
+    locksettings = get_lock_settings('secure') + get_lock_settings('system')
+    columns = ['name', 'value']
+    generate_values = lambda k, v: [k, v]
+  else:
+    logging.warning('Unable to find database file to set lock screen settings.')
+    return
+
+  for table, key, value in locksettings:
+    # Set the lockscreen setting for default user '0'
+    values = generate_values(key, value)
+
+    cmd = """begin transaction;
+delete from '%(table)s' where %(primary_key)s='%(primary_value)s';
+insert into '%(table)s' (%(columns)s) values (%(values)s);
+commit transaction;""" % {
+      'table': table,
+      'primary_key': columns[0],
+      'primary_value': values[0],
+      'columns': ', '.join(columns),
+      'values': ', '.join(["'%s'" % value for value in values])
+    }
+    output_msg = device.RunShellCommand('sqlite3 %s "%s"' % (db, cmd),
+                                        as_root=True)
+    if output_msg:
+      logging.info(' '.join(output_msg))
+
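+# For the 'locksettings' table, the loop above feeds sqlite3 statements of the
+# form (values taken from get_lock_settings):
+#
+#   begin transaction;
+#   delete from 'locksettings' where name='lockscreen.disabled';
+#   insert into 'locksettings' (name, user, value)
+#       values ('lockscreen.disabled', '0', '1');
+#   commit transaction;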
+
+ENABLE_LOCATION_SETTINGS = [
+  # Note that setting these in this order is required in order for all of
+  # them to take and stick through a reboot.
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 1),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is enabled and allowed for tests.
+    ('location_providers_allowed', 'gps,network'),
+  ]),
+  ('com.google.settings/partner', [
+    ('network_location_opt_in', 1),
+  ])
+]
+
+DISABLE_LOCATION_SETTINGS = [
+  ('com.google.settings/partner', [
+    ('use_location_for_services', 0),
+  ]),
+  ('settings/secure', [
+    # Ensure Geolocation is disabled.
+    ('location_providers_allowed', ''),
+  ]),
+]
+
+ENABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 1),
+  ]),
+]
+
+DISABLE_MOCK_LOCATION_SETTINGS = [
+  ('settings/secure', [
+    ('mock_location', 0),
+  ]),
+]
+
+DETERMINISTIC_DEVICE_SETTINGS = [
+  ('settings/global', [
+    ('assisted_gps_enabled', 0),
+
+    # Disable "auto time" and "auto time zone" to avoid network-provided time
+    # to overwrite the device's datetime and timezone synchronized from host
+    # when running tests later. See b/6569849.
+    ('auto_time', 0),
+    ('auto_time_zone', 0),
+
+    ('development_settings_enabled', 1),
+
+    # Flag for allowing ActivityManagerService to send ACTION_APP_ERROR intents
+    # on application crashes and ANRs. If this is disabled, the crash/ANR dialog
+    # will never display the "Report" button.
+    # Type: int ( 0 = disallow, 1 = allow )
+    ('send_action_app_error', 0),
+
+    ('stay_on_while_plugged_in', 3),
+
+    ('verifier_verify_adb_installs', 0),
+  ]),
+  ('settings/secure', [
+    ('allowed_geolocation_origins',
+        'http://www.google.co.uk http://www.google.com'),
+
+    # Ensure that we never get random dialogs like "Unfortunately the process
+    # android.process.acore has stopped", which steal the focus, and make our
+    # automation fail (because the dialog steals the focus then mistakenly
+    # receives the injected user input events).
+    ('anr_show_background', 0),
+
+    ('lockscreen.disabled', 1),
+
+    ('screensaver_enabled', 0),
+  ]),
+  ('settings/system', [
+    # Don't want devices to accidentally rotate the screen as that could
+    # affect performance measurements.
+    ('accelerometer_rotation', 0),
+
+    ('lockscreen.disabled', 1),
+
+    # Turn down brightness and disable auto-adjust so that devices run cooler.
+    ('screen_brightness', 5),
+    ('screen_brightness_mode', 0),
+
+    ('user_rotation', 0),
+  ]),
+]
+
+NETWORK_DISABLED_SETTINGS = [
+  ('settings/global', [
+    ('airplane_mode_on', 1),
+    ('wifi_on', 0),
+  ]),
+]
diff --git a/build/android/pylib/device_signal.py b/build/android/pylib/device_signal.py
new file mode 100644
index 0000000..6a5b709
--- /dev/null
+++ b/build/android/pylib/device_signal.py
@@ -0,0 +1,41 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines constants for signals that should be supported on devices.
+
+Note: Obtained by running `kill -l` on a user device.
+"""
+
+
+SIGHUP = 1 # Hangup
+SIGINT = 2 # Interrupt
+SIGQUIT = 3 # Quit
+SIGILL = 4 # Illegal instruction
+SIGTRAP = 5 # Trap
+SIGABRT = 6 # Aborted
+SIGBUS = 7 # Bus error
+SIGFPE = 8 # Floating point exception
+SIGKILL = 9 # Killed
+SIGUSR1 = 10 # User signal 1
+SIGSEGV = 11 # Segmentation fault
+SIGUSR2 = 12 # User signal 2
+SIGPIPE = 13 # Broken pipe
+SIGALRM = 14 # Alarm clock
+SIGTERM = 15 # Terminated
+SIGSTKFLT = 16 # Stack fault
+SIGCHLD = 17 # Child exited
+SIGCONT = 18 # Continue
+SIGSTOP = 19 # Stopped (signal)
+SIGTSTP = 20 # Stopped
+SIGTTIN = 21 # Stopped (tty input)
+SIGTTOU = 22 # Stopped (tty output)
+SIGURG = 23 # Urgent I/O condition
+SIGXCPU = 24 # CPU time limit exceeded
+SIGXFSZ = 25 # File size limit exceeded
+SIGVTALRM = 26 # Virtual timer expired
+SIGPROF = 27 # Profiling timer expired
+SIGWINCH = 28 # Window size changed
+SIGIO = 29 # I/O possible
+SIGPWR = 30 # Power failure
+SIGSYS = 31 # Bad system call
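+
+# Illustrative use with DeviceUtils.RunShellCommand (sketch; |device| and |pid|
+# are hypothetical):
+#
+#   device.RunShellCommand(['kill', '-%d' % device_signal.SIGTERM, str(pid)])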
diff --git a/build/android/pylib/efficient_android_directory_copy.sh b/build/android/pylib/efficient_android_directory_copy.sh
new file mode 100755
index 0000000..7021109
--- /dev/null
+++ b/build/android/pylib/efficient_android_directory_copy.sh
@@ -0,0 +1,78 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to make the destination directory identical to the
+# source directory, without doing unnecessary copies. This assumes that the
+# destination directory was originally a copy of the source directory and has
+# since been modified.
+
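+# Illustrative invocation on the device (sketch; paths are hypothetical):
+#
+#   sh efficient_android_directory_copy.sh /data/local/tmp/src /data/local/tmp/dst
+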
+source=$1
+dest=$2
+echo copying $source to $dest
+
+delete_extra() {
+  # Don't delete symbolic links, since doing so deletes the vital lib link.
+  if [ ! -L "$1" ]
+  then
+    if [ ! -e "$source/$1" ]
+    then
+      echo rm -rf "$dest/$1"
+      rm -rf "$dest/$1"
+    elif [ -d "$1" ]
+    then
+      for f in "$1"/*
+      do
+       delete_extra "$f"
+      done
+    fi
+  fi
+}
+
+copy_if_older() {
+  if [ -d "$1" ] && [ -e "$dest/$1" ]
+  then
+    if [ ! -e "$dest/$1" ]
+    then
+      echo cp -a "$1" "$dest/$1"
+      cp -a "$1" "$dest/$1"
+    else
+      for f in "$1"/*
+      do
+        copy_if_older "$f"
+      done
+    fi
+  elif [ ! -e "$dest/$1" ] || [ "$1" -ot "$dest/$1" ] || [ "$1" -nt "$dest/$1" ]
+  then
+    # Dates are different, so either the destination or the source has changed.
+    echo cp -a "$1" "$dest/$1"
+    cp -a "$1" "$dest/$1"
+  fi
+}
+
+if [ -e "$dest" ]
+then
+  echo cd "$dest"
+  cd "$dest"
+  for f in ./*
+  do
+    if [ -e "$f" ]
+    then
+      delete_extra "$f"
+    fi
+  done
+else
+  echo mkdir "$dest"
+  mkdir "$dest"
+fi
+echo cd "$source"
+cd "$source"
+for f in ./*
+do
+  if [ -e "$f" ]
+  then
+    copy_if_older "$f"
+  fi
+done
diff --git a/build/android/pylib/flag_changer.py b/build/android/pylib/flag_changer.py
new file mode 100644
index 0000000..718bc39
--- /dev/null
+++ b/build/android/pylib/flag_changer.py
@@ -0,0 +1,166 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+import pylib.android_commands
+import pylib.device.device_utils
+
+from pylib.device import device_errors
+
+
+class FlagChanger(object):
+  """Changes the flags Chrome runs with.
+
+  There are two different use cases for this file:
+  * Flags are permanently set by calling Set().
+  * Flags can be temporarily set for a particular set of unit tests.  These
+    tests should call Restore() to revert the flags to their original state
+    once the tests have completed.
+  """
+
+  def __init__(self, device, cmdline_file):
+    """Initializes the FlagChanger and records the original arguments.
+
+    Args:
+      device: A DeviceUtils instance.
+      cmdline_file: Path to the command line file on the device.
+    """
+    # TODO(jbudorick) Remove once telemetry switches over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    self._device = device
+    self._cmdline_file = cmdline_file
+
+    # Save the original flags.
+    try:
+      self._orig_line = self._device.ReadFile(self._cmdline_file).strip()
+    except device_errors.CommandFailedError:
+      self._orig_line = ''
+
+    # Parse out the flags into a list to facilitate adding and removing flags.
+    self._current_flags = self._TokenizeFlags(self._orig_line)
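+
+  # Illustrative usage sketch (the command line file path and flag are
+  # hypothetical):
+  #
+  #   changer = flag_changer.FlagChanger(
+  #       device, '/data/local/tmp/chrome-command-line')
+  #   changer.AddFlags(['--disable-fre'])
+  #   ...  # run tests
+  #   changer.Restore()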
+
+  def Get(self):
+    """Returns list of current flags."""
+    return self._current_flags
+
+  def Set(self, flags):
+    """Replaces all flags on the current command line with the flags given.
+
+    Args:
+      flags: A list of flags to set, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    self._current_flags = flags
+    self._UpdateCommandLineFile()
+
+  def AddFlags(self, flags):
+    """Appends flags to the command line if they aren't already there.
+
+    Args:
+      flags: A list of flags to add on, eg. ['--single-process'].
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    # Avoid appending flags that are already present.
+    for flag in flags:
+      if flag not in self._current_flags:
+        self._current_flags.append(flag)
+    self._UpdateCommandLineFile()
+
+  def RemoveFlags(self, flags):
+    """Removes flags from the command line, if they exist.
+
+    Args:
+      flags: A list of flags to remove, eg. ['--single-process'].  Note that we
+             expect a complete match when removing flags; if you want to remove
+             a switch with a value, you must use the exact string used to add
+             it in the first place.
+    """
+    if flags:
+      assert flags[0] != 'chrome'
+
+    for flag in flags:
+      if flag in self._current_flags:
+        self._current_flags.remove(flag)
+    self._UpdateCommandLineFile()
+
+  def Restore(self):
+    """Restores the flags to their original state."""
+    self._current_flags = self._TokenizeFlags(self._orig_line)
+    self._UpdateCommandLineFile()
+
+  def _UpdateCommandLineFile(self):
+    """Writes out the command line to the file, or removes it if empty."""
+    logging.info('Current flags: %s', self._current_flags)
+    # Root is not required to write to /data/local/tmp/.
+    use_root = '/data/local/tmp/' not in self._cmdline_file
+    if self._current_flags:
+      # The first command line argument doesn't matter as we are not actually
+      # launching the chrome executable using this command line.
+      cmd_line = ' '.join(['_'] + self._current_flags)
+      self._device.WriteFile(
+          self._cmdline_file, cmd_line, as_root=use_root)
+      file_contents = self._device.ReadFile(
+          self._cmdline_file, as_root=use_root).rstrip()
+      assert file_contents == cmd_line, (
+          'Failed to set the command line file at %s' % self._cmdline_file)
+    else:
+      self._device.RunShellCommand('rm ' + self._cmdline_file,
+                                   as_root=use_root)
+      assert not self._device.FileExists(self._cmdline_file), (
+          'Failed to remove the command line file at %s' % self._cmdline_file)
+
+  @staticmethod
+  def _TokenizeFlags(line):
+    """Changes the string containing the command line into a list of flags.
+
+    Follows similar logic to CommandLine.java::tokenizeQuotedArguments:
+    * Flags are split using whitespace, unless the whitespace is within a
+      pair of quotation marks.
+    * Unlike the Java version, we keep the quotation marks around switch
+      values since we need them to re-create the file when new flags are
+      appended.
+
+    Args:
+      line: A string containing the entire command line.  The first token is
+            assumed to be the program name.
+    """
+    if not line:
+      return []
+
+    tokenized_flags = []
+    current_flag = ""
+    within_quotations = False
+
+    # Move through the string character by character and build up each flag
+    # along the way.
+    for c in line.strip():
+      if c is '"':
+        if len(current_flag) > 0 and current_flag[-1] == '\\':
+          # Last char was a backslash; pop it, and treat this " as a literal.
+          current_flag = current_flag[0:-1] + '"'
+        else:
+          within_quotations = not within_quotations
+          current_flag += c
+      elif not within_quotations and (c == ' ' or c == '\t'):
+        if current_flag != "":
+          tokenized_flags.append(current_flag)
+          current_flag = ""
+      else:
+        current_flag += c
+
+    # Tack on the last flag.
+    if not current_flag:
+      if within_quotations:
+        logging.warn('Unterminated quoted argument: ' + line)
+    else:
+      tokenized_flags.append(current_flag)
+
+    # Return everything but the program name.
+    return tokenized_flags[1:]
diff --git a/build/android/pylib/forwarder.py b/build/android/pylib/forwarder.py
new file mode 100644
index 0000000..c8c47d6
--- /dev/null
+++ b/build/android/pylib/forwarder.py
@@ -0,0 +1,331 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0212
+
+import fcntl
+import logging
+import os
+import psutil
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import valgrind_tools
+
+# TODO(jbudorick) Remove once telemetry gets switched over.
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+def _GetProcessStartTime(pid):
+  return psutil.Process(pid).create_time
+
+
+class _FileLock(object):
+  """With statement-aware implementation of a file lock.
+
+  File locks are needed for cross-process synchronization when the
+  multiprocessing Python module is used.
+  """
+  def __init__(self, path):
+    self._fd = -1
+    self._path = path
+
+  def __enter__(self):
+    self._fd = os.open(self._path, os.O_RDONLY | os.O_CREAT)
+    if self._fd < 0:
+      raise Exception('Could not open file %s for reading' % self._path)
+    fcntl.flock(self._fd, fcntl.LOCK_EX)
+
+  def __exit__(self, _exception_type, _exception_value, traceback):
+    fcntl.flock(self._fd, fcntl.LOCK_UN)
+    os.close(self._fd)
+
+
+class Forwarder(object):
+  """Thread-safe class to manage port forwards from the device to the host."""
+
+  _DEVICE_FORWARDER_FOLDER = (constants.TEST_EXECUTABLE_DIR +
+                              '/forwarder/')
+  _DEVICE_FORWARDER_PATH = (constants.TEST_EXECUTABLE_DIR +
+                            '/forwarder/device_forwarder')
+  _LOCK_PATH = '/tmp/chrome.forwarder.lock'
+  # Defined in host_forwarder_main.cc
+  _HOST_FORWARDER_LOG = '/tmp/host_forwarder_log'
+
+  _instance = None
+
+  @staticmethod
+  def Map(port_pairs, device, tool=None):
+    """Runs the forwarder.
+
+    Args:
+      port_pairs: A list of tuples (device_port, host_port) to forward. Note
+                 that you can specify 0 as a device_port, in which case a
+                 port will by dynamically assigned on the device. You can
+                 get the number of the assigned port using the
+                 DevicePortForHostPort method.
+      device: A DeviceUtils instance.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+
+    Raises:
+      Exception on failure to forward the port.
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    if not tool:
+      tool = valgrind_tools.CreateTool(None, device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      instance = Forwarder._GetInstanceLocked(tool)
+      instance._InitDeviceLocked(device, tool)
+
+      device_serial = str(device)
+      redirection_commands = [
+          ['--adb=' + constants.GetAdbPath(),
+           '--serial-id=' + device_serial,
+           '--map', str(device_port), str(host_port)]
+          for device_port, host_port in port_pairs]
+      logging.info('Forwarding using commands: %s', redirection_commands)
+
+      for redirection_command in redirection_commands:
+        try:
+          (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+              [instance._host_forwarder_path] + redirection_command)
+        except OSError as e:
+          if e.errno == 2:
+            raise Exception('Unable to start host forwarder. Make sure you have'
+                            ' built host_forwarder.')
+          else:
+            raise
+        if exit_code != 0:
+          Forwarder._KillDeviceLocked(device, tool)
+          raise Exception('%s exited with %d:\n%s' % (
+              instance._host_forwarder_path, exit_code, '\n'.join(output)))
+        tokens = output.split(':')
+        if len(tokens) != 2:
+          raise Exception('Unexpected host forwarder output "%s", '
+                          'expected "device_port:host_port"' % output)
+        device_port = int(tokens[0])
+        host_port = int(tokens[1])
+        serial_with_port = (device_serial, device_port)
+        instance._device_to_host_port_map[serial_with_port] = host_port
+        instance._host_to_device_port_map[host_port] = serial_with_port
+        logging.info('Forwarding device port: %d to host port: %d.',
+                     device_port, host_port)
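+
+  # Illustrative Map sketch (assumes |device| is a DeviceUtils instance; the
+  # host port is hypothetical, and a device_port of 0 requests a dynamically
+  # assigned port):
+  #
+  #   forwarder.Forwarder.Map([(0, 8000)], device)
+  #   device_port = forwarder.Forwarder.DevicePortForHostPort(8000)
+  #   ...
+  #   forwarder.Forwarder.UnmapDevicePort(device_port, device)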
+
+  @staticmethod
+  def UnmapDevicePort(device_port, device):
+    """Unmaps a previously forwarded device port.
+
+    Args:
+      device: A DeviceUtils instance.
+      device_port: A previously forwarded port (through Map()).
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      Forwarder._UnmapDevicePortLocked(device_port, device)
+
+  @staticmethod
+  def UnmapAllDevicePorts(device):
+    """Unmaps all the previously forwarded ports for the provided device.
+
+    Args:
+      device: A DeviceUtils instance.
+    """
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    with _FileLock(Forwarder._LOCK_PATH):
+      if not Forwarder._instance:
+        return
+      adb_serial = str(device)
+      if adb_serial not in Forwarder._instance._initialized_devices:
+        return
+      port_map = Forwarder._GetInstanceLocked(
+          None)._device_to_host_port_map
+      for (device_serial, device_port) in port_map.keys():
+        if adb_serial == device_serial:
+          Forwarder._UnmapDevicePortLocked(device_port, device)
+      # There are no more ports mapped, kill the device_forwarder.
+      tool = valgrind_tools.CreateTool(None, device)
+      Forwarder._KillDeviceLocked(device, tool)
+
+  @staticmethod
+  def DevicePortForHostPort(host_port):
+    """Returns the device port that corresponds to a given host port."""
+    with _FileLock(Forwarder._LOCK_PATH):
+      (_device_serial, device_port) = Forwarder._GetInstanceLocked(
+          None)._host_to_device_port_map.get(host_port)
+      return device_port
+
+  @staticmethod
+  def RemoveHostLog():
+    if os.path.exists(Forwarder._HOST_FORWARDER_LOG):
+      os.unlink(Forwarder._HOST_FORWARDER_LOG)
+
+  @staticmethod
+  def GetHostLog():
+    if not os.path.exists(Forwarder._HOST_FORWARDER_LOG):
+      return ''
+    with file(Forwarder._HOST_FORWARDER_LOG, 'r') as f:
+      return f.read()
+
+  @staticmethod
+  def _GetInstanceLocked(tool):
+    """Returns the singleton instance.
+
+    Note that the global lock must be acquired before calling this method.
+
+    Args:
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    if not Forwarder._instance:
+      Forwarder._instance = Forwarder(tool)
+    return Forwarder._instance
+
+  def __init__(self, tool):
+    """Constructs a new instance of Forwarder.
+
+    Note that Forwarder is a singleton therefore this constructor should be
+    called only once.
+
+    Args:
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    assert not Forwarder._instance
+    self._tool = tool
+    self._initialized_devices = set()
+    self._device_to_host_port_map = dict()
+    self._host_to_device_port_map = dict()
+    self._host_forwarder_path = os.path.join(
+        constants.GetOutDirectory(), 'host_forwarder')
+    assert os.path.exists(self._host_forwarder_path), 'Please build forwarder2'
+    self._device_forwarder_path_on_host = os.path.join(
+        constants.GetOutDirectory(), 'forwarder_dist')
+    self._InitHostLocked()
+
+  @staticmethod
+  def _UnmapDevicePortLocked(device_port, device):
+    """Internal method used by UnmapDevicePort().
+
+    Note that the global lock must be acquired before calling this method.
+    """
+    instance = Forwarder._GetInstanceLocked(None)
+    serial = str(device)
+    serial_with_port = (serial, device_port)
+    if not serial_with_port in instance._device_to_host_port_map:
+      logging.error('Trying to unmap non-forwarded port %d' % device_port)
+      return
+    redirection_command = ['--adb=' + constants.GetAdbPath(),
+                           '--serial-id=' + serial,
+                           '--unmap', str(device_port)]
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [instance._host_forwarder_path] + redirection_command)
+    if exit_code != 0:
+      logging.error('%s exited with %d:\n%s' % (
+          instance._host_forwarder_path, exit_code, '\n'.join(output)))
+    host_port = instance._device_to_host_port_map[serial_with_port]
+    del instance._device_to_host_port_map[serial_with_port]
+    del instance._host_to_device_port_map[host_port]
+
+  @staticmethod
+  def _GetPidForLock():
+    """Returns the PID used for host_forwarder initialization.
+
+    The PID of the "sharder" is used to handle multiprocessing. The "sharder"
+    is the initial parent process from which the other processes are forked.
+    """
+    return os.getpgrp()
+
+  def _InitHostLocked(self):
+    """Initializes the host forwarder daemon.
+
+    Note that the global lock must be acquired before calling this method. This
+    method kills any existing host_forwarder process that could be stale.
+    """
+    # See if the host_forwarder daemon was already initialized by a concurrent
+    # process or thread (in case multi-process sharding is not used).
+    pid_for_lock = Forwarder._GetPidForLock()
+    fd = os.open(Forwarder._LOCK_PATH, os.O_RDWR | os.O_CREAT)
+    with os.fdopen(fd, 'r+') as pid_file:
+      pid_with_start_time = pid_file.readline()
+      if pid_with_start_time:
+        (pid, process_start_time) = pid_with_start_time.split(':')
+        if pid == str(pid_for_lock):
+          if process_start_time == str(_GetProcessStartTime(pid_for_lock)):
+            return
+      self._KillHostLocked()
+      pid_file.seek(0)
+      pid_file.write(
+          '%s:%s' % (pid_for_lock, str(_GetProcessStartTime(pid_for_lock))))
+      pid_file.truncate()
+
+  def _InitDeviceLocked(self, device, tool):
+    """Initializes the device_forwarder daemon for a specific device (once).
+
+    Note that the global lock must be acquired before calling this method. This
+    method kills any existing device_forwarder daemon on the device that could
+    be stale, pushes the latest version of the daemon (to the device) and starts
+    it.
+
+    Args:
+      device: A DeviceUtils instance.
+      tool: Tool class to use to get wrapper, if necessary, for executing the
+            forwarder (see valgrind_tools.py).
+    """
+    device_serial = str(device)
+    if device_serial in self._initialized_devices:
+      return
+    Forwarder._KillDeviceLocked(device, tool)
+    device.PushChangedFiles([(
+        self._device_forwarder_path_on_host,
+        Forwarder._DEVICE_FORWARDER_FOLDER)])
+    cmd = '%s %s' % (tool.GetUtilWrapper(), Forwarder._DEVICE_FORWARDER_PATH)
+    device.RunShellCommand(
+        cmd, env={'LD_LIBRARY_PATH': Forwarder._DEVICE_FORWARDER_FOLDER},
+        check_return=True)
+    self._initialized_devices.add(device_serial)
+
+  def _KillHostLocked(self):
+    """Kills the forwarder process running on the host.
+
+    Note that the global lock must be acquired before calling this method.
+    """
+    logging.info('Killing host_forwarder.')
+    (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+        [self._host_forwarder_path, '--kill-server'])
+    if exit_code != 0:
+      (exit_code, output) = cmd_helper.GetCmdStatusAndOutput(
+          ['pkill', '-9', 'host_forwarder'])
+      if exit_code != 0:
+        raise Exception('%s exited with %d:\n%s' % (
+              self._host_forwarder_path, exit_code, '\n'.join(output)))
+
+  @staticmethod
+  def _KillDeviceLocked(device, tool):
+    """Kills the forwarder process running on the device.
+
+    Note that the global lock must be acquired before calling this method.
+
+    Args:
+      device: Instance of DeviceUtils for talking to the device.
+      tool: Wrapper tool (e.g. valgrind) that can be used to execute the device
+            forwarder (see valgrind_tools.py).
+    """
+    logging.info('Killing device_forwarder.')
+    Forwarder._instance._initialized_devices.discard(str(device))
+    if not device.FileExists(Forwarder._DEVICE_FORWARDER_PATH):
+      return
+
+    cmd = '%s %s --kill-server' % (tool.GetUtilWrapper(),
+                                   Forwarder._DEVICE_FORWARDER_PATH)
+    device.RunShellCommand(
+        cmd, env={'LD_LIBRARY_PATH': Forwarder._DEVICE_FORWARDER_FOLDER},
+        check_return=True)
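+
+
+# A minimal illustrative sketch (hypothetical helper, not used above): the lock
+# file written by _InitHostLocked() stores "<pgid>:<process start time>", so a
+# reader can check whether the recorded entry still refers to a live "sharder".
+# This assumes the module-level _GetProcessStartTime() helper used above.
+def _IsLockEntryCurrent(entry):
+  pid, _, start_time = entry.partition(':')
+  try:
+    return start_time == str(_GetProcessStartTime(int(pid)))
+  except ValueError:
+    return False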
diff --git a/build/android/pylib/gtest/__init__.py b/build/android/pylib/gtest/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/gtest/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/gtest/filter/OWNERS b/build/android/pylib/gtest/filter/OWNERS
new file mode 100644
index 0000000..72e8ffc
--- /dev/null
+++ b/build/android/pylib/gtest/filter/OWNERS
@@ -0,0 +1 @@
+*
diff --git a/build/android/pylib/gtest/filter/base_unittests_disabled b/build/android/pylib/gtest/filter/base_unittests_disabled
new file mode 100644
index 0000000..bf2311d
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_disabled
@@ -0,0 +1,28 @@
+# List of suppressions
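+# Every non-comment line below is a gtest name pattern; the harness joins them
+# into a negative --gtest_filter such as
+# '*-StackTrace.*:StringPrintfTest.StringPrintfMisc' (see pylib/gtest/setup.py).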
+
+# Android will not support StackTrace.
+StackTrace.*
+#
+# Sometimes this is automatically generated by run_tests.py
+VerifyPathControlledByUserTest.Symlinks
+
+# http://crbug.com/138845
+MessagePumpLibeventTest.TestWatchingFromBadThread
+
+StringPrintfTest.StringPrintfMisc
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringPrintfBounds
+ProcessUtilTest.GetAppOutputRestrictedSIGPIPE
+# TODO(jrg): Fails on bots.  Works locally.  Figure out why.  2/6/12
+FieldTrialTest.*
+# Flaky?
+ScopedJavaRefTest.RefCounts
+# Death tests are not supported with apks.
+*DeathTest*
+FileTest.MemoryCorruption
+MessagePumpLibeventTest.QuitOutsideOfRun
+ScopedFD.ScopedFDCrashesOnCloseFailure
+
+# http://crbug.com/245043
+StackContainer.BufferAlignment
diff --git a/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
new file mode 100644
index 0000000..85e8fd6
--- /dev/null
+++ b/build/android/pylib/gtest/filter/base_unittests_emulator_additional_disabled
@@ -0,0 +1,10 @@
+# Additional list of suppressions from emulator
+#
+# Automatically generated by run_tests.py
+PathServiceTest.Get
+SharedMemoryTest.OpenClose
+StringPrintfTest.StringAppendfInt
+StringPrintfTest.StringAppendfString
+StringPrintfTest.StringPrintfBounds
+StringPrintfTest.StringPrintfMisc
+VerifyPathControlledByUserTest.Symlinks
diff --git a/build/android/pylib/gtest/filter/blink_heap_unittests_disabled b/build/android/pylib/gtest/filter/blink_heap_unittests_disabled
new file mode 100644
index 0000000..7a43fb1
--- /dev/null
+++ b/build/android/pylib/gtest/filter/blink_heap_unittests_disabled
@@ -0,0 +1,2 @@
+# List of suppressions
+
diff --git a/build/android/pylib/gtest/filter/breakpad_unittests_disabled b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
new file mode 100644
index 0000000..cefc64f
--- /dev/null
+++ b/build/android/pylib/gtest/filter/breakpad_unittests_disabled
@@ -0,0 +1,9 @@
+FileIDStripTest.StripSelf
+# crbug.com/303960
+ExceptionHandlerTest.InstructionPointerMemoryNullPointer
+# crbug.com/171419
+MinidumpWriterTest.MappingInfoContained
+# crbug.com/310088
+MinidumpWriterTest.MinidumpSizeLimit
+# crbug.com/375838
+ElfCoreDumpTest.ValidCoreFile
diff --git a/build/android/pylib/gtest/filter/cc_unittests_disabled b/build/android/pylib/gtest/filter/cc_unittests_disabled
new file mode 100644
index 0000000..b49d2c6
--- /dev/null
+++ b/build/android/pylib/gtest/filter/cc_unittests_disabled
@@ -0,0 +1,5 @@
+# Death tests are not supported with apks.
+BeginFrameObserverBaseTest.OnBeginFrameImplementation
+BeginFrameSourceBaseTest.ObserverManipulation
+BeginFrameSourceMultiplexerTest.SourcesManipulation
+BeginFrameSourceMultiplexerTest.MinimumIntervalNegativeFails
diff --git a/build/android/pylib/gtest/filter/content_browsertests_disabled b/build/android/pylib/gtest/filter/content_browsertests_disabled
new file mode 100644
index 0000000..dcad240
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_browsertests_disabled
@@ -0,0 +1,62 @@
+# List of suppressions
+# Timeouts
+Http/MediaTest.*
+File/MediaTest.*
+MediaTest.*
+DatabaseTest.*
+
+# Crashes
+RenderFrameHostManagerTest.IgnoreRendererDebugURLsWhenCrashed
+
+# Plugins are not supported.
+BrowserPluginThreadedCompositorPixelTest.*
+BrowserPluginHostTest.*
+BrowserPluginTest.*
+PluginTest.*
+
+# http://crbug.com/463740
+CrossPlatformAccessibilityBrowserTest.SelectedEditableTextAccessibility
+
+# http://crbug.com/297230
+DumpAccessibilityTreeTest.AccessibilityAriaLevel
+DumpAccessibilityTreeTest.AccessibilityAriaProgressbar
+DumpAccessibilityTreeTest.AccessibilityListMarkers
+DumpAccessibilityTreeTest.AccessibilityUl
+DumpAccessibilityTreeTest.AccessibilityCanvas
+RendererAccessibilityTest.DetachAccessibilityObject
+DumpAccessibilityTreeTest.AccessibilityDialog
+DumpAccessibilityTreeTest.AccessibilityModalDialogClosed
+DumpAccessibilityTreeTest.AccessibilityModalDialogInIframeOpened
+RendererAccessibilityTest.EventOnObjectNotInTree
+
+# http://crbug.com/187500
+RenderViewImplTest.*
+RendererAccessibilityTest.SendFullAccessibilityTreeOnReload
+RendererAccessibilityTest.HideAccessibilityObject
+RendererAccessibilityTest.ShowAccessibilityObject
+
+# http://crbug.com/215894
+DownloadContentTest.CancelInterruptedDownload
+DownloadContentTest.CancelResumingDownload
+DownloadContentTest.RemoveDownload
+DownloadContentTest.RemoveResumingDownload
+DownloadContentTest.ResumeInterruptedDownload
+DownloadContentTest.ResumeInterruptedDownloadNoRange
+DownloadContentTest.ResumeInterruptedDownloadNoVerifiers
+DownloadContentTest.ResumeInterruptedDownloadBadPrecondition
+DownloadContentTest.ResumeWithDeletedFile
+
+# http://crbug.com/386227
+IndexedDBBrowserTest.VersionChangeCrashResilience
+
+# http://crbug.com/233118
+IndexedDBBrowserTest.NullKeyPathPersistence
+
+# http://crbug.com/342525
+IndexedDBBrowserTestSingleProcess.RenderThreadShutdownTest
+
+# http://crbug.com/338421
+GinBrowserTest.GinAndGarbageCollection
+
+# http://crbug.com/343604
+MSE_ClearKey/EncryptedMediaTest.ConfigChangeVideo/0
diff --git a/build/android/pylib/gtest/filter/content_unittests_disabled b/build/android/pylib/gtest/filter/content_unittests_disabled
new file mode 100644
index 0000000..925a7d1
--- /dev/null
+++ b/build/android/pylib/gtest/filter/content_unittests_disabled
@@ -0,0 +1,13 @@
+# List of suppressions
+
+# crbug.com/139095
+RenderWidgetTest.OnMsgPaintAtSize
+# crbug.com/147549
+GamepadProviderTest.PollingAccess
+PepperGamepadHostTest.WaitForReply
+# crbug.com/159234
+WebContentsVideoCaptureDeviceTest.*
+# crbug.com/167045
+ContentViewPopupZoomerTest.testPopupZoomerShowsUp
+# crbug.com/254034
+PageStateSerializationTest.BackwardsCompat_v11
diff --git a/build/android/pylib/gtest/filter/gfx_unittests_disabled b/build/android/pylib/gtest/filter/gfx_unittests_disabled
new file mode 100644
index 0000000..b9aec9e
--- /dev/null
+++ b/build/android/pylib/gtest/filter/gfx_unittests_disabled
@@ -0,0 +1,10 @@
+CanvasTest.StringSizeEmptyString
+CanvasTest.StringWidth
+FontTest.Ascent
+FontTest.AvgWidths
+FontTest.CapHeight
+FontTest.GetActualFontNameForTesting
+FontTest.Height
+FontTest.LoadArial
+FontTest.LoadArialBold
+TextUtilsTest.GetStringWidth
diff --git a/build/android/pylib/gtest/filter/ipc_tests_disabled b/build/android/pylib/gtest/filter/ipc_tests_disabled
new file mode 100644
index 0000000..e8d0691
--- /dev/null
+++ b/build/android/pylib/gtest/filter/ipc_tests_disabled
@@ -0,0 +1,18 @@
+# Times out
+IPCSyncChannelTest.ChattyServer
+
+# MultiProcessTest related failures. These tests fail if DCHECK is enabled.
+IPCChannelPosixTest.AdvancedConnected
+IPCChannelPosixTest.ResetState
+IPCChannelPosixTest.MultiConnection
+IPCFuzzingTest.SanityTest
+IPCFuzzingTest.MsgBadPayloadArgs
+IPCFuzzingTest.MsgBadPayloadShort
+IPCSendFdsTest.DescriptorTest
+IPCChannelProxyTest.MessageClassFilters
+IPCChannelProxyTest.GlobalAndMessageClassFilters
+IPCChannelProxyTest.FilterRemoval
+IPCChannelTest.ChannelTest
+IPCChannelTest.ChannelProxyTest
+IPCChannelTest.SendMessageInChannelConnected
+SyncSocketTest.SanityTest
diff --git a/build/android/pylib/gtest/filter/media_unittests_disabled b/build/android/pylib/gtest/filter/media_unittests_disabled
new file mode 100644
index 0000000..ed3b9aa
--- /dev/null
+++ b/build/android/pylib/gtest/filter/media_unittests_disabled
@@ -0,0 +1,8 @@
+# List of suppressions
+
+# Death tests are not supported on APK
+# http://crbug.com/138855
+CompositeFilterDeathTest.*
+
+# http://crbug.com/138833
+AesDecryptorTest.*
diff --git a/build/android/pylib/gtest/filter/net_unittests_disabled b/build/android/pylib/gtest/filter/net_unittests_disabled
new file mode 100644
index 0000000..75a1c86
--- /dev/null
+++ b/build/android/pylib/gtest/filter/net_unittests_disabled
@@ -0,0 +1,41 @@
+# List of suppressions.
+
+PythonUtils.PythonRunTime
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteEndEntity/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIncompleteIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/0
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/1
+VerifyIntermediate/CertVerifyProcWeakDigestTest.Verify/2
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/0
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/1
+VerifyMixed/CertVerifyProcWeakDigestTest.Verify/2
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/0
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/1
+VerifyRoot/CertVerifyProcWeakDigestTest.Verify/2
+
+# Can't spin up more than one SpawnedTestServer on Android.
+URLRequestTestReferrerPolicy.HTTPToCrossOriginHTTP
+URLRequestTestReferrerPolicy.HTTPSToCrossOriginHTTPS
+URLRequestTestReferrerPolicy.HTTPToHTTPS
+URLRequestTestReferrerPolicy.HTTPSToHTTP
+
+# Fail only on bots.
+HttpCache.RangeGET_Cancel
+HttpCache.RangeGET_Cancel2
+HttpCache.RangeGET_OK
+HttpCache.RangeGET_Previous200
+HttpCache.RangeGET_Revalidate2
+HttpCache.RangeGET_SyncOK
+HttpCache.TypicalGET_ConditionalRequest
+# Death tests are not supported with apks.
+*DeathTest*
+# These are death tests and thus also disabled.
+PrioritizedDispatcherTest.CancelNull
+PrioritizedDispatcherTest.CancelMissing
diff --git a/build/android/pylib/gtest/filter/sync_unit_tests_disabled b/build/android/pylib/gtest/filter/sync_unit_tests_disabled
new file mode 100644
index 0000000..cc4b72d
--- /dev/null
+++ b/build/android/pylib/gtest/filter/sync_unit_tests_disabled
@@ -0,0 +1,4 @@
+SyncHttpBridgeTest.*
+
+# crbug.com/144422
+OnDiskSyncableDirectory.FailInitialWrite
diff --git a/build/android/pylib/gtest/filter/unit_tests_disabled b/build/android/pylib/gtest/filter/unit_tests_disabled
new file mode 100644
index 0000000..c7851fd
--- /dev/null
+++ b/build/android/pylib/gtest/filter/unit_tests_disabled
@@ -0,0 +1,119 @@
+# List of suppressions
+
+# The UDP related tests currently do not work on Android because
+# we lack a UDP forwarder tool.
+NetworkStatsTestUDP.*
+
+# Missing test resource of 16MB.
+HistoryProfileTest.TypicalProfileVersion
+
+# crbug.com/139408
+SQLitePersistentCookieStoreTest.TestDontLoadOldSessionCookies
+SQLitePersistentCookieStoreTest.PersistIsPersistent
+
+# crbug.com/139433
+AutofillTableTest.AutofillProfile*
+AutofillTableTest.UpdateAutofillProfile
+
+# crbug.com/139400
+AutofillProfileTest.*
+CreditCardTest.SetInfoExpirationMonth
+
+# crbug.com/139398
+DownloadItemModelTest.InterruptTooltip
+
+# Tests crashing in the APK
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+DownloadItemModelTest.InterruptStatus
+# l10n_util.cc(655)] Check failed: std::string::npos != pos
+WebsiteSettingsTest.OnSiteDataAccessed
+
+# crbug.com/139423
+ValueStoreFrontendTest.GetExistingData
+
+# crbug.com/139421
+ChromeSelectFilePolicyTest.ExpectAsynchronousListenerCall
+
+# http://crbug.com/139033
+ChromeDownloadManagerDelegateTest.StartDownload_PromptAlways
+
+# Extension support is limited on Android.
+# Some of these can be enabled if we register extension related prefs in
+# browser_prefs.cc
+ExtensionTest.*
+ExtensionAPI.*
+ExtensionFileUtilTest.*
+ExtensionPermissionsTest.*
+ExtensionUnpackerTest.*
+ActiveTabTest.*
+ExtensionAppsPromo.*
+ComponentLoaderTest.*
+ExtensionFromUserScript.*
+ExtensionFromWebApp.*
+ExtensionIconManagerTest.*
+ExtensionServiceTest.*
+ExtensionServiceTestSimple.*
+ExtensionSourcePriorityTest.*
+ExtensionSpecialStoragePolicyTest.*
+ExternalPolicyProviderTest.*
+ExternalProviderImplTest.*
+MenuManagerTest.*
+PageActionControllerTest.*
+PermissionsUpdaterTest.*
+ImageLoaderTest.*
+ImageLoadingTrackerTest.*
+ExtensionSettingsFrontendTest.*
+ExtensionSettingsSyncTest.*
+ExtensionUpdaterTest.*
+UserScriptListenerTest.*
+WebApplicationTest.GetShortcutInfoForTab
+ExtensionActionIconFactoryTest.*
+
+# crbug.com/139411
+AutocompleteProviderTest.*
+HistoryContentsProviderBodyOnlyTest.*
+HistoryContentsProviderTest.*
+HQPOrderingTest.*
+SearchProviderTest.*
+
+ProtocolHandlerRegistryTest.TestOSRegistrationFailure
+
+# crbug.com/139418
+SQLiteServerBoundCertStoreTest.TestUpgradeV1
+SQLiteServerBoundCertStoreTest.TestUpgradeV2
+
+ProfileSyncComponentsFactoryImplTest.*
+PermissionsTest.GetWarningMessages_Plugins
+ImageOperations.ResizeShouldAverageColors
+
+# crbug.com/138275
+PrerenderTest.*
+RenderWidgetTest.OnMsgPaintAtSize
+
+# crbug.com/139643
+VariationsUtilTest.DisableAfterInitialization
+VariationsUtilTest.AssociateGoogleVariationID
+VariationsUtilTest.NoAssociation
+
+# crbug.com/141473
+AutofillManagerTest.UpdatePasswordSyncState
+AutofillManagerTest.UpdatePasswordGenerationState
+
+# crbug.com/144227
+ExtensionIconImageTest.*
+
+# crbug.com/145843
+EntropyProviderTest.UseOneTimeRandomizationSHA1
+EntropyProviderTest.UseOneTimeRandomizationPermuted
+
+# crbug.com/147500
+ManifestTest.RestrictedKeys
+
+# crbug.com/152599
+SyncSearchEngineDataTypeControllerTest.*
+
+# crbug.com/256259
+DiagnosticsModelTest.RunAll
+
+# Death tests are not supported with apks.
+*DeathTest*
diff --git a/build/android/pylib/gtest/filter/webkit_unit_tests_disabled b/build/android/pylib/gtest/filter/webkit_unit_tests_disabled
new file mode 100644
index 0000000..1ffa325
--- /dev/null
+++ b/build/android/pylib/gtest/filter/webkit_unit_tests_disabled
@@ -0,0 +1,25 @@
+# List of suppressions
+
+# crbug.com/159935
+WebCompositorInputHandlerImplTest.gestureFlingAnimates
+WebCompositorInputHandlerImplTest.gestureFlingTransferResets
+WebPageSerializerTest.HTMLNodes
+
+# crbug.com/241730
+ScrollAnimatorNoneTest.CurveMathQuartic
+ScrollAnimatorNoneTest.ScrollDownToBumper
+ScrollAnimatorNoneTest.ScrollQuadraticSmoothed
+ScrollAnimatorNoneTest.ScrollTwiceCubic
+ScrollAnimatorNoneTest.VaryingInputsEquivalencyCoastSteep
+WebViewTest.VisitedLinkCrash
+
+# Disabled until blink roll r151682
+DeferredImageDecoderTest.drawScaledIntoSkPicture
+
+# Disabled until blink roll r173540
+DeferredImageDecoderTest.decodeOnOtherThread
+DeferredImageDecoderTest.drawIntoSkPicture
+DeferredImageDecoderTest.drawIntoSkPictureProgressive
+
+# crbug.com/320005
+CoreAnimationCompositorAnimationsTest.ConvertTimingForCompositorIterationCount
diff --git a/build/android/pylib/gtest/gtest_config.py b/build/android/pylib/gtest/gtest_config.py
new file mode 100644
index 0000000..76e0f50
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_config.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Configuration file for android gtest suites."""
+
+# Add new suites here before upgrading them to the stable list below.
+EXPERIMENTAL_TEST_SUITES = [
+    'components_browsertests',
+    'content_gl_tests',
+    'heap_profiler_unittests',
+    'devtools_bridge_tests',
+]
+
+TELEMETRY_EXPERIMENTAL_TEST_SUITES = [
+    'telemetry_unittests',
+]
+
+# Do not modify this list without approval of an android owner.
+# This list determines which suites are run by default, both for local
+# testing and on android trybots running on commit-queue.
+STABLE_TEST_SUITES = [
+    'android_webview_unittests',
+    'base_unittests',
+    'breakpad_unittests',
+    'cc_unittests',
+    'components_unittests',
+    'content_browsertests',
+    'content_unittests',
+    'events_unittests',
+    'gl_tests',
+    'gl_unittests',
+    'gpu_unittests',
+    'ipc_tests',
+    'media_unittests',
+    'midi_unittests',
+    'net_unittests',
+    'sandbox_linux_unittests',
+    'skia_unittests',
+    'sql_unittests',
+    'sync_unit_tests',
+    'ui_android_unittests',
+    'ui_base_unittests',
+    'ui_touch_selection_unittests',
+    'unit_tests',
+    'webkit_unit_tests',
+]
+
+# Tests fail in component=shared_library build, which is required for ASan.
+# http://crbug.com/344868
+ASAN_EXCLUDED_TEST_SUITES = [
+    'breakpad_unittests',
+    'sandbox_linux_unittests'
+]
diff --git a/build/android/pylib/gtest/gtest_test_instance.py b/build/android/pylib/gtest/gtest_test_instance.py
new file mode 100644
index 0000000..3285e0b
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_test_instance.py
@@ -0,0 +1,329 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import shutil
+import sys
+import tempfile
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.utils import apk_helper
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
+import unittest_util
+
+
+BROWSER_TEST_SUITES = [
+  'components_browsertests',
+  'content_browsertests',
+]
+
+
+_DEFAULT_ISOLATE_FILE_PATHS = {
+    'base_unittests': 'base/base_unittests.isolate',
+    'blink_heap_unittests':
+      'third_party/WebKit/Source/platform/heap/BlinkHeapUnitTests.isolate',
+    'breakpad_unittests': 'breakpad/breakpad_unittests.isolate',
+    'cc_perftests': 'cc/cc_perftests.isolate',
+    'components_browsertests': 'components/components_browsertests.isolate',
+    'components_unittests': 'components/components_unittests.isolate',
+    'content_browsertests': 'content/content_browsertests.isolate',
+    'content_unittests': 'content/content_unittests.isolate',
+    'media_perftests': 'media/media_perftests.isolate',
+    'media_unittests': 'media/media_unittests.isolate',
+    'midi_unittests': 'media/midi/midi_unittests.isolate',
+    'net_unittests': 'net/net_unittests.isolate',
+    'sql_unittests': 'sql/sql_unittests.isolate',
+    'sync_unit_tests': 'sync/sync_unit_tests.isolate',
+    'ui_base_unittests': 'ui/base/ui_base_tests.isolate',
+    'unit_tests': 'chrome/unit_tests.isolate',
+    'webkit_unit_tests':
+      'third_party/WebKit/Source/web/WebKitUnitTests.isolate',
+}
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+_DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+
+_EXTRA_NATIVE_TEST_ACTIVITY = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'NativeTestActivity')
+_EXTRA_SHARD_SIZE_LIMIT = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner.'
+        'ShardSizeLimit')
+
+# TODO(jbudorick): Remove these once we're no longer parsing stdout to generate
+# results.
+_RE_TEST_STATUS = re.compile(
+    r'\[ +((?:RUN)|(?:FAILED)|(?:OK)) +\] ?([^ ]+)(?: \((\d+) ms\))?$')
+_RE_TEST_RUN_STATUS = re.compile(
+    r'\[ +(PASSED|RUNNER_FAILED|CRASHED) \] ?[^ ]+')
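+# For illustration, lines matched by _RE_TEST_STATUS produce groups like:
+#   '[ RUN      ] FooTest.Bar'          -> ('RUN', 'FooTest.Bar', None)
+#   '[       OK ] FooTest.Bar (12 ms)'  -> ('OK', 'FooTest.Bar', '12')
+#   '[  FAILED  ] FooTest.Baz (3 ms)'   -> ('FAILED', 'FooTest.Baz', '3')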
+
+
+# TODO(jbudorick): Make this a class method of GtestTestInstance once
+# test_package_apk and test_package_exe are gone.
+def ParseGTestListTests(raw_list):
+  """Parses a raw test list as provided by --gtest_list_tests.
+
+  Args:
+    raw_list: The raw test listing with the following format:
+
+    IPCChannelTest.
+      SendMessageInChannelConnected
+    IPCSyncChannelTest.
+      Simple
+      DISABLED_SendWithTimeoutMixedOKAndTimeout
+
+  Returns:
+    A list of all tests. For the above raw listing:
+
+    [IPCChannelTest.SendMessageInChannelConnected, IPCSyncChannelTest.Simple,
+     IPCSyncChannelTest.DISABLED_SendWithTimeoutMixedOKAndTimeout]
+  """
+  ret = []
+  current = ''
+  for test in raw_list:
+    if not test:
+      continue
+    if test[0] != ' ':
+      test_case = test.split()[0]
+      if test_case.endswith('.'):
+        current = test_case
+    elif 'YOU HAVE' not in test:
+      test_name = test.split()[0]
+      ret += [current + test_name]
+  return ret
+
+
+class GtestTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, isolate_delegate, error_func):
+    super(GtestTestInstance, self).__init__()
+    # TODO(jbudorick): Support multiple test suites.
+    if len(args.suite_name) > 1:
+      raise ValueError('Platform mode currently supports only 1 gtest suite')
+    self._suite = args.suite_name[0]
+
+    self._apk_path = os.path.join(
+        constants.GetOutDirectory(), '%s_apk' % self._suite,
+        '%s-debug.apk' % self._suite)
+    self._exe_path = os.path.join(constants.GetOutDirectory(),
+                                  self._suite)
+    if not os.path.exists(self._apk_path):
+      self._apk_path = None
+      self._activity = None
+      self._package = None
+      self._runner = None
+    else:
+      helper = apk_helper.ApkHelper(self._apk_path)
+      self._activity = helper.GetActivityName()
+      self._package = helper.GetPackageName()
+      self._runner = helper.GetInstrumentationName()
+      self._extras = {
+        _EXTRA_NATIVE_TEST_ACTIVITY: self._activity,
+      }
+      if self._suite in BROWSER_TEST_SUITES:
+        self._extras[_EXTRA_SHARD_SIZE_LIMIT] = 1
+
+    if not os.path.exists(self._exe_path):
+      self._exe_path = None
+    if not self._apk_path and not self._exe_path:
+      error_func('Could not find apk or executable for %s' % self._suite)
+
+    self._data_deps = []
+    if args.test_filter:
+      self._gtest_filter = args.test_filter
+    elif args.test_filter_file:
+      with open(args.test_filter_file, 'r') as f:
+        self._gtest_filter = ':'.join(l.strip() for l in f)
+    else:
+      self._gtest_filter = None
+
+    if not args.isolate_file_path:
+      default_isolate_file_path = _DEFAULT_ISOLATE_FILE_PATHS.get(self._suite)
+      if default_isolate_file_path:
+        args.isolate_file_path = os.path.join(
+            constants.DIR_SOURCE_ROOT, default_isolate_file_path)
+
+    if args.isolate_file_path:
+      self._isolate_abs_path = os.path.abspath(args.isolate_file_path)
+      self._isolate_delegate = isolate_delegate
+      self._isolated_abs_path = os.path.join(
+          constants.GetOutDirectory(), '%s.isolated' % self._suite)
+    else:
+      logging.warning('No isolate file provided. No data deps will be pushed.')
+      self._isolate_delegate = None
+
+    if args.app_data_files:
+      self._app_data_files = args.app_data_files
+      if args.app_data_file_dir:
+        self._app_data_file_dir = args.app_data_file_dir
+      else:
+        self._app_data_file_dir = tempfile.mkdtemp()
+        logging.critical('Saving app files to %s', self._app_data_file_dir)
+    else:
+      self._app_data_files = None
+      self._app_data_file_dir = None
+
+  #override
+  def TestType(self):
+    return 'gtest'
+
+  #override
+  def SetUp(self):
+    """Map data dependencies via isolate."""
+    if self._isolate_delegate:
+      self._isolate_delegate.Remap(
+          self._isolate_abs_path, self._isolated_abs_path)
+      self._isolate_delegate.PurgeExcluded(_DEPS_EXCLUSION_LIST)
+      self._isolate_delegate.MoveOutputDeps()
+      dest_dir = None
+      if self._suite == 'breakpad_unittests':
+        dest_dir = '/data/local/tmp/'
+      self._data_deps.extend([(constants.ISOLATE_DEPS_DIR, dest_dir)])
+
+
+  def GetDataDependencies(self):
+    """Returns the test suite's data dependencies.
+
+    Returns:
+      A list of (host_path, device_path) tuples to push. If device_path is
+      None, the client is responsible for determining where to push the file.
+    """
+    return self._data_deps
+
+  def FilterTests(self, test_list, disabled_prefixes=None):
+    """Filters |test_list| based on prefixes and, if present, a filter string.
+
+    Args:
+      test_list: The list of tests to filter.
+      disabled_prefixes: A list of test prefixes to filter. Defaults to
+        DISABLED_, FLAKY_, FAILS_, PRE_, and MANUAL_
+    Returns:
+      A filtered list of tests to run.
+    """
+    gtest_filter_strings = [
+        self._GenerateDisabledFilterString(disabled_prefixes)]
+    if self._gtest_filter:
+      gtest_filter_strings.append(self._gtest_filter)
+
+    filtered_test_list = test_list
+    for gtest_filter_string in gtest_filter_strings:
+      logging.debug('Filtering tests using: %s', gtest_filter_string)
+      filtered_test_list = unittest_util.FilterTestNames(
+          filtered_test_list, gtest_filter_string)
+    return filtered_test_list
+
+  def _GenerateDisabledFilterString(self, disabled_prefixes):
+    disabled_filter_items = []
+
+    if disabled_prefixes is None:
+      disabled_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_', 'PRE_', 'MANUAL_']
+    disabled_filter_items += ['%s*' % dp for dp in disabled_prefixes]
+    disabled_filter_items += ['*.%s*' % dp for dp in disabled_prefixes]
+
+    disabled_tests_file_path = os.path.join(
+        constants.DIR_SOURCE_ROOT, 'build', 'android', 'pylib', 'gtest',
+        'filter', '%s_disabled' % self._suite)
+    if disabled_tests_file_path and os.path.exists(disabled_tests_file_path):
+      with open(disabled_tests_file_path) as disabled_tests_file:
+        disabled_filter_items += [
+            '%s' % l for l in (line.strip() for line in disabled_tests_file)
+            if l and not l.startswith('#')]
+
+    return '*-%s' % ':'.join(disabled_filter_items)
+
+  def ParseGTestOutput(self, output):
+    """Parses raw gtest output and returns a list of results.
+
+    Args:
+      output: A list of output lines.
+    Returns:
+      A list of base_test_result.BaseTestResults.
+    """
+    results = []
+    for l in output:
+      matcher = _RE_TEST_STATUS.match(l)
+      if matcher:
+        result_type = None
+        if matcher.group(1) == 'OK':
+          result_type = base_test_result.ResultType.PASS
+        elif matcher.group(1) == 'FAILED':
+          result_type = base_test_result.ResultType.FAIL
+
+        if result_type:
+          test_name = matcher.group(2)
+          duration = matcher.group(3) if matcher.group(3) else 0
+          results.append(base_test_result.BaseTestResult(
+              test_name, result_type, duration))
+      logging.info(l)
+    return results
+
+  #override
+  def TearDown(self):
+    """Clear the mappings created by SetUp."""
+    if self._isolate_delegate:
+      self._isolate_delegate.Clear()
+
+  @property
+  def activity(self):
+    return self._activity
+
+  @property
+  def apk(self):
+    return self._apk_path
+
+  @property
+  def app_file_dir(self):
+    return self._app_data_file_dir
+
+  @property
+  def app_files(self):
+    return self._app_data_files
+
+  @property
+  def exe(self):
+    return self._exe_path
+
+  @property
+  def extras(self):
+    return self._extras
+
+  @property
+  def package(self):
+    return self._package
+
+  @property
+  def runner(self):
+    return self._runner
+
+  @property
+  def suite(self):
+    return self._suite
+
diff --git a/build/android/pylib/gtest/gtest_test_instance_test.py b/build/android/pylib/gtest/gtest_test_instance_test.py
new file mode 100755
index 0000000..c52b235
--- /dev/null
+++ b/build/android/pylib/gtest/gtest_test_instance_test.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.gtest import gtest_test_instance
+
+
+class GtestTestInstanceTests(unittest.TestCase):
+
+  def testParseGTestListTests_simple(self):
+    raw_output = [
+      'TestCaseOne.',
+      '  testOne',
+      '  testTwo',
+      'TestCaseTwo.',
+      '  testThree',
+      '  testFour',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TestCaseOne.testOne',
+      'TestCaseOne.testTwo',
+      'TestCaseTwo.testThree',
+      'TestCaseTwo.testFour',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_old(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_typeParameterized_new(self):
+    raw_output = [
+      'TPTestCase/WithTypeParam/0.  # TypeParam = TypeParam0',
+      '  testOne',
+      '  testTwo',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'TPTestCase/WithTypeParam/0.testOne',
+      'TPTestCase/WithTypeParam/0.testTwo',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_old(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0',
+      '  testWithValueParam/1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+  def testParseGTestListTests_valueParameterized_new(self):
+    raw_output = [
+      'VPTestCase.',
+      '  testWithValueParam/0  # GetParam() = 0',
+      '  testWithValueParam/1  # GetParam() = 1',
+    ]
+    actual = gtest_test_instance.ParseGTestListTests(raw_output)
+    expected = [
+      'VPTestCase.testWithValueParam/0',
+      'VPTestCase.testWithValueParam/1',
+    ]
+    self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/gtest/local_device_gtest_run.py b/build/android/pylib/gtest/local_device_gtest_run.py
new file mode 100644
index 0000000..f1cea4e
--- /dev/null
+++ b/build/android/pylib/gtest/local_device_gtest_run.py
@@ -0,0 +1,241 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import itertools
+import logging
+import os
+import posixpath
+
+from pylib import constants
+from pylib import ports
+from pylib.base import test_run
+from pylib.device import device_errors
+from pylib.gtest import gtest_test_instance
+
+from pylib.local import local_test_server_spawner
+from pylib.local.device import local_device_environment
+from pylib.local.device import local_device_test_run
+from pylib.utils import device_temp_file
+
+_COMMAND_LINE_FLAGS_SUPPORTED = True
+
+_EXTRA_COMMAND_LINE_FILE = (
+    'org.chromium.native_test.NativeTestActivity.CommandLineFile')
+_EXTRA_COMMAND_LINE_FLAGS = (
+    'org.chromium.native_test.NativeTestActivity.CommandLineFlags')
+_EXTRA_TEST_LIST = (
+    'org.chromium.native_test.NativeTestInstrumentationTestRunner'
+        '.TestList')
+
+_MAX_SHARD_SIZE = 256
+
+# TODO(jbudorick): Move this up to the test instance if the net test server is
+# handled outside of the APK for the remote_device environment.
+_SUITE_REQUIRES_TEST_SERVER_SPAWNER = [
+  'components_browsertests', 'content_unittests', 'content_browsertests',
+  'net_unittests', 'unit_tests'
+]
+
+# TODO(jbudorick): Move this inside _ApkDelegate once TestPackageApk is gone.
+def PullAppFilesImpl(device, package, files, directory):
+  device_dir = device.GetApplicationDataDirectory(package)
+  host_dir = os.path.join(directory, str(device))
+  for f in files:
+    device_file = posixpath.join(device_dir, f)
+    host_file = os.path.join(host_dir, *f.split(posixpath.sep))
+    host_file_base, ext = os.path.splitext(host_file)
+    for i in itertools.count():
+      host_file = '%s_%d%s' % (host_file_base, i, ext)
+      if not os.path.exists(host_file):
+        break
+    device.PullFile(device_file, host_file)
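+# For illustration: pulling 'profile/log.txt' twice from device 'ABC123'
+# yields '<directory>/ABC123/profile/log_0.txt' and then '.../log_1.txt',
+# since the numeric suffix skips names that already exist on the host.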
+
+class _ApkDelegate(object):
+  def __init__(self, test_instance):
+    self._activity = test_instance.activity
+    self._apk = test_instance.apk
+    self._package = test_instance.package
+    self._runner = test_instance.runner
+
+    self._component = '%s/%s' % (self._package, self._runner)
+    self._extras = test_instance.extras
+
+  def Install(self, device):
+    device.Install(self._apk)
+
+  def Run(self, test, device, flags=None, **kwargs):
+    extras = dict(self._extras)
+
+    with device_temp_file.DeviceTempFile(device.adb) as command_line_file:
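+      # The leading '_' is a placeholder for argv[0] in the command-line file;
+      # the actual flags (if any) follow it.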
+      device.WriteFile(command_line_file.name, '_ %s' % flags if flags else '_')
+      extras[_EXTRA_COMMAND_LINE_FILE] = command_line_file.name
+
+      with device_temp_file.DeviceTempFile(device.adb) as test_list_file:
+        if test:
+          device.WriteFile(test_list_file.name, '\n'.join(test))
+          extras[_EXTRA_TEST_LIST] = test_list_file.name
+
+        return device.StartInstrumentation(
+            self._component, extras=extras, raw=False, **kwargs)
+
+  def PullAppFiles(self, device, files, directory):
+    PullAppFilesImpl(device, self._package, files, directory)
+
+  def Clear(self, device):
+    device.ClearApplicationState(self._package)
+
+
+class _ExeDelegate(object):
+  def __init__(self, tr, exe):
+    self._exe_host_path = exe
+    self._exe_file_name = os.path.split(exe)[-1]
+    self._exe_device_path = '%s/%s' % (
+        constants.TEST_EXECUTABLE_DIR, self._exe_file_name)
+    deps_host_path = self._exe_host_path + '_deps'
+    if os.path.exists(deps_host_path):
+      self._deps_host_path = deps_host_path
+      self._deps_device_path = self._exe_device_path + '_deps'
+    else:
+      self._deps_host_path = None
+    self._test_run = tr
+
+  def Install(self, device):
+    # TODO(jbudorick): Look into merging this with normal data deps pushing if
+    # executables become supported on nonlocal environments.
+    host_device_tuples = [(self._exe_host_path, self._exe_device_path)]
+    if self._deps_host_path:
+      host_device_tuples.append((self._deps_host_path, self._deps_device_path))
+    device.PushChangedFiles(host_device_tuples)
+
+  def Run(self, test, device, flags=None, **kwargs):
+    cmd = [
+        self._test_run.GetTool(device).GetTestWrapper(),
+        self._exe_device_path,
+    ]
+    if test:
+      cmd.append('--gtest_filter=%s' % ':'.join(test))
+    if flags:
+      cmd.append(flags)
+    cwd = constants.TEST_EXECUTABLE_DIR
+
+    env = {
+      'LD_LIBRARY_PATH':
+          '%s/%s_deps' % (constants.TEST_EXECUTABLE_DIR, self._exe_file_name),
+    }
+    try:
+      gcov_strip_depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      external = device.GetExternalStoragePath()
+      env['GCOV_PREFIX'] = '%s/gcov' % external
+      env['GCOV_PREFIX_STRIP'] = gcov_strip_depth
+    except (device_errors.CommandFailedError, KeyError):
+      pass
+
+    # TODO(jbudorick): Switch to just RunShellCommand once perezju@'s CL
+    # for long shell commands lands.
+    with device_temp_file.DeviceTempFile(device.adb) as script_file:
+      script_contents = ' '.join(cmd)
+      logging.info('script contents: %r' % script_contents)
+      device.WriteFile(script_file.name, script_contents)
+      output = device.RunShellCommand(['sh', script_file.name], cwd=cwd,
+                                      env=env, **kwargs)
+    return output
+
+  def PullAppFiles(self, device, files, directory):
+    pass
+
+  def Clear(self, device):
+    device.KillAll(self._exe_file_name, blocking=True, timeout=30, quiet=True)
+
+
+class LocalDeviceGtestRun(local_device_test_run.LocalDeviceTestRun):
+
+  def __init__(self, env, test_instance):
+    assert isinstance(env, local_device_environment.LocalDeviceEnvironment)
+    assert isinstance(test_instance, gtest_test_instance.GtestTestInstance)
+    super(LocalDeviceGtestRun, self).__init__(env, test_instance)
+
+    if self._test_instance.apk:
+      self._delegate = _ApkDelegate(self._test_instance)
+    elif self._test_instance.exe:
+      self._delegate = _ExeDelegate(self, self._test_instance.exe)
+
+    self._servers = {}
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def SetUp(self):
+
+    def individual_device_set_up(dev, host_device_tuples):
+      # Install test APK.
+      self._delegate.Install(dev)
+
+      # Push data dependencies.
+      external_storage = dev.GetExternalStoragePath()
+      host_device_tuples = [
+          (h, d if d is not None else external_storage)
+          for h, d in host_device_tuples]
+      dev.PushChangedFiles(host_device_tuples)
+
+      self._servers[str(dev)] = []
+      if self.TestPackage() in _SUITE_REQUIRES_TEST_SERVER_SPAWNER:
+        self._servers[str(dev)].append(
+            local_test_server_spawner.LocalTestServerSpawner(
+                ports.AllocateTestServerPort(), dev, self.GetTool(dev)))
+
+      for s in self._servers[str(dev)]:
+        s.SetUp()
+
+    self._env.parallel_devices.pMap(individual_device_set_up,
+                                    self._test_instance.GetDataDependencies())
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  #override
+  def _CreateShards(self, tests):
+    device_count = len(self._env.devices)
+    shards = []
+    for i in xrange(0, device_count):
+      unbounded_shard = tests[i::device_count]
+      shards += [unbounded_shard[j:j+_MAX_SHARD_SIZE]
+                 for j in xrange(0, len(unbounded_shard), _MAX_SHARD_SIZE)]
+    return shards
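+    # For illustration: with 2 devices, 600 tests and _MAX_SHARD_SIZE of 256,
+    # each device's ~300 tests are split into chunks of 256 and 44, giving
+    # four shards in total.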
+
+  #override
+  def _GetTests(self):
+    tests = self._delegate.Run(
+        None, self._env.devices[0], flags='--gtest_list_tests')
+    tests = gtest_test_instance.ParseGTestListTests(tests)
+    tests = self._test_instance.FilterTests(tests)
+    return tests
+
+  #override
+  def _RunTest(self, device, test):
+    # Run the test.
+    output = self._delegate.Run(
+        test, device, timeout=900, retries=0)
+    for s in self._servers[str(device)]:
+      s.Reset()
+    if self._test_instance.app_files:
+      self._delegate.PullAppFiles(device, self._test_instance.app_files,
+                                  self._test_instance.app_file_dir)
+    self._delegate.Clear(device)
+
+    # Parse the output.
+    # TODO(jbudorick): Transition test scripts away from parsing stdout.
+    results = self._test_instance.ParseGTestOutput(output)
+    return results
+
+  #override
+  def TearDown(self):
+    def individual_device_tear_down(dev):
+      for s in self._servers[str(dev)]:
+        s.TearDown()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
diff --git a/build/android/pylib/gtest/setup.py b/build/android/pylib/gtest/setup.py
new file mode 100644
index 0000000..f563ccf
--- /dev/null
+++ b/build/android/pylib/gtest/setup.py
@@ -0,0 +1,230 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for GTests."""
+# pylint: disable=W0212
+
+import logging
+import os
+import sys
+
+from pylib import constants
+
+from pylib.base import base_setup
+from pylib.base import base_test_result
+from pylib.base import test_dispatcher
+from pylib.device import device_utils
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import test_package_apk
+from pylib.gtest import test_package_exe
+from pylib.gtest import test_runner
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import unittest_util # pylint: disable=F0401
+
+
+ISOLATE_FILE_PATHS = gtest_test_instance._DEFAULT_ISOLATE_FILE_PATHS
+
+
+# Used for filtering large data deps at a finer grain than what's allowed in
+# isolate files since pushing deps to devices is expensive.
+# Wildcards are allowed.
+DEPS_EXCLUSION_LIST = [
+    'chrome/test/data/extensions/api_test',
+    'chrome/test/data/extensions/secure_shell',
+    'chrome/test/data/firefox*',
+    'chrome/test/data/gpu',
+    'chrome/test/data/image_decoding',
+    'chrome/test/data/import',
+    'chrome/test/data/page_cycler',
+    'chrome/test/data/perf',
+    'chrome/test/data/pyauto_private',
+    'chrome/test/data/safari_import',
+    'chrome/test/data/scroll',
+    'chrome/test/data/third_party',
+    'third_party/hunspell_dictionaries/*.dic',
+    # crbug.com/258690
+    'webkit/data/bmp_decoder',
+    'webkit/data/ico_decoder',
+]
+
+
+def _GetDisabledTestsFilterFromFile(suite_name):
+  """Returns a gtest filter based on the *_disabled file.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+
+  Returns:
+    A gtest filter which excludes disabled tests.
+    Example: '*-StackTrace.*:StringPrintfTest.StringPrintfMisc'
+  """
+  filter_file_path = os.path.join(
+      os.path.abspath(os.path.dirname(__file__)),
+      'filter', '%s_disabled' % suite_name)
+
+  if not filter_file_path or not os.path.exists(filter_file_path):
+    logging.info('No filter file found at %s', filter_file_path)
+    return '*'
+
+  filters = [x for x in [x.strip() for x in file(filter_file_path).readlines()]
+             if x and x[0] != '#']
+  disabled_filter = '*-%s' % ':'.join(filters)
+  logging.info('Applying filter "%s" obtained from %s',
+               disabled_filter, filter_file_path)
+  return disabled_filter
+
+
+def _GetTests(test_options, test_package, devices):
+  """Get a list of tests.
+
+  Args:
+    test_options: A GTestOptions object.
+    test_package: A TestPackageApk object.
+    devices: A list of attached devices.
+
+  Returns:
+    A list of all the tests in the test suite.
+  """
+  class TestListResult(base_test_result.BaseTestResult):
+    def __init__(self):
+      super(TestListResult, self).__init__(
+          'gtest_list_tests', base_test_result.ResultType.PASS)
+      self.test_list = []
+
+  def TestListerRunnerFactory(device, _shard_index):
+    class TestListerRunner(test_runner.TestRunner):
+      def RunTest(self, _test):
+        result = TestListResult()
+        self.test_package.Install(self.device)
+        result.test_list = self.test_package.GetAllTests(self.device)
+        results = base_test_result.TestRunResults()
+        results.AddResult(result)
+        return results, None
+    return TestListerRunner(test_options, device, test_package)
+
+  results, _no_retry = test_dispatcher.RunTests(
+      ['gtest_list_tests'], TestListerRunnerFactory, devices)
+  tests = []
+  for r in results.GetAll():
+    tests.extend(r.test_list)
+  return tests
+
+
+def _FilterTestsUsingPrefixes(all_tests, pre=False, manual=False):
+  """Removes tests with disabled prefixes.
+
+  Args:
+    all_tests: List of tests to filter.
+    pre: If True, include tests with PRE_ prefix.
+    manual: If True, include tests with MANUAL_ prefix.
+
+  Returns:
+    List of tests remaining.
+  """
+  filtered_tests = []
+  filter_prefixes = ['DISABLED_', 'FLAKY_', 'FAILS_']
+
+  if not pre:
+    filter_prefixes.append('PRE_')
+
+  if not manual:
+    filter_prefixes.append('MANUAL_')
+
+  for t in all_tests:
+    test_case, test = t.split('.', 1)
+    if not any([test_case.startswith(prefix) or test.startswith(prefix) for
+                prefix in filter_prefixes]):
+      filtered_tests.append(t)
+  return filtered_tests
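+  # For illustration, with pre=False and manual=False the list
+  # ['FooTest.Bar', 'FooTest.DISABLED_Baz', 'PRE_FooTest.Qux'] filters down
+  # to ['FooTest.Bar'].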
+
+
+def _FilterDisabledTests(tests, suite_name, has_gtest_filter):
+  """Removes disabled tests from |tests|.
+
+  Applies the following filters in order:
+    1. Remove tests with disabled prefixes.
+    2. Remove tests specified in the *_disabled files in the 'filter' dir
+
+  Args:
+    tests: List of tests.
+    suite_name: Name of the test suite (e.g. base_unittests).
+    has_gtest_filter: Whether a gtest_filter is provided.
+
+  Returns:
+    List of tests remaining.
+  """
+  tests = _FilterTestsUsingPrefixes(
+      tests, has_gtest_filter, has_gtest_filter)
+  tests = unittest_util.FilterTestNames(
+      tests, _GetDisabledTestsFilterFromFile(suite_name))
+
+  return tests
+
+
+def Setup(test_options, devices):
+  """Create the test runner factory and tests.
+
+  Args:
+    test_options: A GTestOptions object.
+    devices: A list of attached devices.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_package = test_package_apk.TestPackageApk(test_options.suite_name)
+  if not os.path.exists(test_package.suite_path):
+    exe_test_package = test_package_exe.TestPackageExecutable(
+        test_options.suite_name)
+    if not os.path.exists(exe_test_package.suite_path):
+      raise Exception(
+          'Did not find %s target. Ensure it has been built.\n'
+          '(not found at %s or %s)'
+          % (test_options.suite_name,
+             test_package.suite_path,
+             exe_test_package.suite_path))
+    test_package = exe_test_package
+  logging.warning('Found target %s', test_package.suite_path)
+
+  i = base_setup.GenerateDepsDirUsingIsolate(
+      test_options.suite_name, test_options.isolate_file_path,
+      ISOLATE_FILE_PATHS, DEPS_EXCLUSION_LIST)
+  def push_data_deps_to_device_dir(device):
+    device_dir = (constants.TEST_EXECUTABLE_DIR
+        if test_package.suite_name == 'breakpad_unittests'
+        else device.GetExternalStoragePath())
+    base_setup.PushDataDeps(device, device_dir, test_options)
+  device_utils.DeviceUtils.parallel(devices).pMap(push_data_deps_to_device_dir)
+  if i:
+    i.Clear()
+
+  tests = _GetTests(test_options, test_package, devices)
+
+  # Constructs a new TestRunner with the current options.
+  def TestRunnerFactory(device, _shard_index):
+    return test_runner.TestRunner(
+        test_options,
+        device,
+        test_package)
+
+  if test_options.run_disabled:
+    test_options = test_options._replace(
+        test_arguments=('%s --gtest_also_run_disabled_tests' %
+                        test_options.test_arguments))
+  else:
+    tests = _FilterDisabledTests(tests, test_options.suite_name,
+                                 bool(test_options.gtest_filter))
+  if test_options.gtest_filter:
+    tests = unittest_util.FilterTestNames(tests, test_options.gtest_filter)
+
+  # Coalesce unit tests into a single test per device
+  if test_options.suite_name not in gtest_test_instance.BROWSER_TEST_SUITES:
+    num_devices = len(devices)
+    tests = [':'.join(tests[i::num_devices]) for i in xrange(num_devices)]
+    tests = [t for t in tests if t]
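+    # For illustration, tests ['A.a', 'A.b', 'A.c', 'A.d'] on two devices
+    # become ['A.a:A.c', 'A.b:A.d']: one colon-joined chunk per device.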
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/gtest/test_options.py b/build/android/pylib/gtest/test_options.py
new file mode 100644
index 0000000..8bc6996
--- /dev/null
+++ b/build/android/pylib/gtest/test_options.py
@@ -0,0 +1,19 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the GTestOptions named tuple."""
+
+import collections
+
+GTestOptions = collections.namedtuple('GTestOptions', [
+    'tool',
+    'gtest_filter',
+    'run_disabled',
+    'test_arguments',
+    'timeout',
+    'isolate_file_path',
+    'suite_name',
+    'app_data_files',
+    'app_data_file_dir',
+    'delete_stale_data'])
diff --git a/build/android/pylib/gtest/test_package.py b/build/android/pylib/gtest/test_package.py
new file mode 100644
index 0000000..4042a98
--- /dev/null
+++ b/build/android/pylib/gtest/test_package.py
@@ -0,0 +1,76 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class representing GTest test packages."""
+# pylint: disable=R0201
+
+
+class TestPackage(object):
+
+  """A helper base class for both APK and stand-alone executables.
+
+  Args:
+    suite_name: Name of the test suite (e.g. base_unittests).
+  """
+  def __init__(self, suite_name):
+    self.suite_name = suite_name
+
+  def ClearApplicationState(self, device):
+    """Clears the application state.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    """Creates a test runner script and pushes to the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+      test_filter: A test_filter flag.
+      test_arguments: Additional arguments to pass to the test binary.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def GetAllTests(self, device):
+    """Returns a list of all tests available in the test suite.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def GetGTestReturnCode(self, _device):
+    return None
+
+  def SpawnTestProcess(self, device):
+    """Spawn the test process.
+
+    Args:
+      device: Instance of DeviceUtils.
+
+    Returns:
+      An instance of pexpect spawn class.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def Install(self, device):
+    """Install the test package to the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+    """
+    raise NotImplementedError('Method must be overridden.')
+
+  def PullAppFiles(self, device, files, directory):
+    """Pull application data from the device.
+
+    Args:
+      device: Instance of DeviceUtils.
+      files: A list of paths relative to the application data directory to
+        retrieve from the device.
+      directory: The host directory to which files should be pulled.
+    """
+    raise NotImplementedError('Method must be overridden.')
diff --git a/build/android/pylib/gtest/test_package_apk.py b/build/android/pylib/gtest/test_package_apk.py
new file mode 100644
index 0000000..a679b03
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_apk.py
@@ -0,0 +1,157 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines TestPackageApk to help run APK-based native tests."""
+# pylint: disable=W0212
+
+import itertools
+import logging
+import os
+import posixpath
+import shlex
+import sys
+import tempfile
+import time
+
+from pylib import android_commands
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.device import intent
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import local_device_gtest_run
+from pylib.gtest.test_package import TestPackage
+
+
+class TestPackageApk(TestPackage):
+  """A helper class for running APK-based native tests."""
+
+  def __init__(self, suite_name):
+    """
+    Args:
+      suite_name: Name of the test suite (e.g. base_unittests).
+    """
+    TestPackage.__init__(self, suite_name)
+    self.suite_path = os.path.join(
+        constants.GetOutDirectory(), '%s_apk' % suite_name,
+        '%s-debug.apk' % suite_name)
+    if suite_name == 'content_browsertests':
+      self._package_info = constants.PACKAGE_INFO['content_browsertests']
+    elif suite_name == 'components_browsertests':
+      self._package_info = constants.PACKAGE_INFO['components_browsertests']
+    else:
+      self._package_info = constants.PACKAGE_INFO['gtest']
+
+    if suite_name == 'net_unittests':
+      self._extras = {'RunInSubThread': ''}
+    else:
+      self._extras = {}
+
+  def _CreateCommandLineFileOnDevice(self, device, options):
+    device.WriteFile(self._package_info.cmdline_file,
+                     self.suite_name + ' ' + options)
+
+  def _GetFifo(self):
+    # The test.fifo path is determined by:
+    # testing/android/native_test/java/src/org/chromium/native_test/
+    #     NativeTestActivity.java and
+    # testing/android/native_test_launcher.cc
+    return '/data/data/' + self._package_info.package + '/files/test.fifo'
+
+  def _ClearFifo(self, device):
+    device.RunShellCommand('rm -f ' + self._GetFifo())
+
+  def _WatchFifo(self, device, timeout, logfile=None):
+    for i in range(100):
+      if device.FileExists(self._GetFifo()):
+        logging.info('Fifo created. Slept for %f secs' % (i * 0.5))
+        break
+      time.sleep(0.5)
+    else:
+      raise device_errors.DeviceUnreachableError(
+          'Unable to find fifo on device %s ' % self._GetFifo())
+    args = shlex.split(device.old_interface.Adb()._target_arg)
+    args += ['shell', 'cat', self._GetFifo()]
+    return pexpect.spawn('adb', args, timeout=timeout, logfile=logfile)
+
+  def _StartActivity(self, device, force_stop=True):
+    device.StartActivity(
+        intent.Intent(package=self._package_info.package,
+                      activity=self._package_info.activity,
+                      action='android.intent.action.MAIN',
+                      extras=self._extras),
+        # No wait since the runner waits for FIFO creation anyway.
+        blocking=False,
+        force_stop=force_stop)
+
+  #override
+  def ClearApplicationState(self, device):
+    device.ClearApplicationState(self._package_info.package)
+    # Content shell creates a profile on the sdcard which accumulates cache
+    # files over time.
+    if self.suite_name == 'content_browsertests':
+      try:
+        device.RunShellCommand(
+            'rm -r %s/content_shell' % device.GetExternalStoragePath(),
+            timeout=60 * 2)
+      except device_errors.CommandFailedError:
+        # TODO(jbudorick) Handle this exception appropriately once the
+        #                 conversions are done.
+        pass
+    elif self.suite_name == 'components_browsertests':
+      try:
+        device.RunShellCommand(
+            'rm -r %s/components_shell' % device.GetExternalStoragePath(),
+            timeout=60 * 2)
+      except device_errors.CommandFailedError:
+        # TODO(jbudorick) Handle this exception appropriately once the
+        #                 conversions are done.
+        pass
+
+  #override
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    self._CreateCommandLineFileOnDevice(
+        device, '--gtest_filter=%s %s' % (test_filter, test_arguments))
+
+  #override
+  def GetAllTests(self, device):
+    self._CreateCommandLineFileOnDevice(device, '--gtest_list_tests')
+    try:
+      self.tool.SetupEnvironment()
+      # Clear the fifo and start the activity; the fifo is watched for output.
+      self._ClearFifo(device)
+      self._StartActivity(device)
+      # Wait for native test to complete.
+      p = self._WatchFifo(device, timeout=30 * self.tool.GetTimeoutScale())
+      p.expect('<<ScopedMainEntryLogger')
+      p.close()
+    finally:
+      self.tool.CleanUpEnvironment()
+    # We need to strip the trailing newline.
+    content = [line.rstrip() for line in p.before.splitlines()]
+    return gtest_test_instance.ParseGTestListTests(content)
+
+  #override
+  def SpawnTestProcess(self, device):
+    try:
+      self.tool.SetupEnvironment()
+      self._ClearFifo(device)
+      # There is no need to force-stop the activity: ClearApplicationState()
+      # is always called before this, so the activity is already stopped at
+      # this point.
+      self._StartActivity(device, force_stop=False)
+    finally:
+      self.tool.CleanUpEnvironment()
+    logfile = android_commands.NewLineNormalizer(sys.stdout)
+    return self._WatchFifo(device, timeout=10, logfile=logfile)
+
+  #override
+  def Install(self, device):
+    self.tool.CopyFiles(device)
+    device.Install(self.suite_path)
+
+  #override
+  def PullAppFiles(self, device, files, directory):
+    local_device_gtest_run.PullAppFilesImpl(
+        device, self._package_info.package, files, directory)
diff --git a/build/android/pylib/gtest/test_package_exe.py b/build/android/pylib/gtest/test_package_exe.py
new file mode 100644
index 0000000..87071b5
--- /dev/null
+++ b/build/android/pylib/gtest/test_package_exe.py
@@ -0,0 +1,163 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines TestPackageExecutable to help run stand-alone executables."""
+
+import logging
+import os
+import posixpath
+import sys
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.gtest import gtest_test_instance
+from pylib.gtest.test_package import TestPackage
+
+
+class TestPackageExecutable(TestPackage):
+  """A helper class for running stand-alone executables."""
+
+  _TEST_RUNNER_RET_VAL_FILE = 'gtest_retval'
+
+  def __init__(self, suite_name):
+    """
+    Args:
+      suite_name: Name of the test suite (e.g. base_unittests).
+    """
+    TestPackage.__init__(self, suite_name)
+    self.suite_path = os.path.join(constants.GetOutDirectory(), suite_name)
+    self._symbols_dir = os.path.join(constants.GetOutDirectory(),
+                                     'lib.target')
+
+  #override
+  def GetGTestReturnCode(self, device):
+    ret = None
+    ret_code = 1  # Assume failure if we can't find it
+    ret_code_file = tempfile.NamedTemporaryFile()
+    try:
+      if not device.PullFile(
+          constants.TEST_EXECUTABLE_DIR + '/' +
+          TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE,
+          ret_code_file.name):
+        logging.critical('Unable to pull gtest ret val file %s',
+                         ret_code_file.name)
+        raise ValueError
+      ret_code = file(ret_code_file.name).read()
+      ret = int(ret_code)
+    except ValueError:
+      logging.critical('Error reading gtest ret val file %s [%s]',
+                       ret_code_file.name, ret_code)
+      ret = 1
+    return ret
+
+  @staticmethod
+  def _AddNativeCoverageExports(device):
+    # export GCOV_PREFIX sets the path for native coverage results
+    # export GCOV_PREFIX_STRIP indicates how many initial directory
+    #                          names to strip off the hardwired absolute paths.
+    #                          This value is calculated in buildbot.sh and
+    #                          depends on where the tree is built.
+    # Ex: /usr/local/google/code/chrome will become
+    #     /code/chrome if GCOV_PREFIX_STRIP=3
+    try:
+      depth = os.environ['NATIVE_COVERAGE_DEPTH_STRIP']
+      export_string = ('export GCOV_PREFIX="%s/gcov"\n' %
+                       device.GetExternalStoragePath())
+      export_string += 'export GCOV_PREFIX_STRIP=%s\n' % depth
+      return export_string
+    except KeyError:
+      logging.info('NATIVE_COVERAGE_DEPTH_STRIP is not defined: '
+                   'No native coverage.')
+      return ''
+    except device_errors.CommandFailedError:
+      logging.info('No external storage found: No native coverage.')
+      return ''
+
+  #override
+  def ClearApplicationState(self, device):
+    device.KillAll(self.suite_name, blocking=True, timeout=30, quiet=True)
+
+  #override
+  def CreateCommandLineFileOnDevice(self, device, test_filter, test_arguments):
+    tool_wrapper = self.tool.GetTestWrapper()
+    sh_script_file = tempfile.NamedTemporaryFile()
+    # We need to capture the exit status from the script since adb shell won't
+    # propagate it to us.
+    sh_script_file.write(
+        'cd %s\n'
+        '%s'
+        '%s LD_LIBRARY_PATH=%s/%s_deps %s/%s --gtest_filter=%s %s\n'
+        'echo $? > %s' %
+        (constants.TEST_EXECUTABLE_DIR,
+         self._AddNativeCoverageExports(device),
+         tool_wrapper,
+         constants.TEST_EXECUTABLE_DIR,
+         self.suite_name,
+         constants.TEST_EXECUTABLE_DIR,
+         self.suite_name,
+         test_filter, test_arguments,
+         TestPackageExecutable._TEST_RUNNER_RET_VAL_FILE))
+    sh_script_file.flush()
+    cmd_helper.RunCmd(['chmod', '+x', sh_script_file.name])
+    device.PushChangedFiles([(
+        sh_script_file.name,
+        constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh')])
+    logging.info('Contents of the test runner script:')
+    for line in open(sh_script_file.name).readlines():
+      logging.info('  ' + line.rstrip())
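+    # For reference, the generated script has roughly this shape (angle-bracket
+    # values are placeholders, not literal content):
+    #   cd <TEST_EXECUTABLE_DIR>
+    #   <optional GCOV_PREFIX exports>
+    #   <tool wrapper> LD_LIBRARY_PATH=<dir>/<suite>_deps <dir>/<suite> \
+    #       --gtest_filter=<filter> <test arguments>
+    #   echo $? > gtest_retval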
+
+  #override
+  def GetAllTests(self, device):
+    lib_path = posixpath.join(
+        constants.TEST_EXECUTABLE_DIR, '%s_deps' % self.suite_name)
+
+    cmd = []
+    if self.tool.GetTestWrapper():
+      cmd.append(self.tool.GetTestWrapper())
+    cmd.extend([
+        posixpath.join(constants.TEST_EXECUTABLE_DIR, self.suite_name),
+        '--gtest_list_tests'])
+
+    output = device.RunShellCommand(
+        cmd, check_return=True, env={'LD_LIBRARY_PATH': lib_path})
+    return gtest_test_instance.ParseGTestListTests(output)
+
+  #override
+  def SpawnTestProcess(self, device):
+    args = ['adb', '-s', str(device), 'shell', 'sh',
+            constants.TEST_EXECUTABLE_DIR + '/chrome_test_runner.sh']
+    logging.info(args)
+    return pexpect.spawn(args[0], args[1:], logfile=sys.stdout)
+
+  #override
+  def Install(self, device):
+    if self.tool.NeedsDebugInfo():
+      target_name = self.suite_path
+    else:
+      target_name = self.suite_path + '_stripped'
+      if not os.path.isfile(target_name):
+        raise Exception('Did not find %s, build target %s' %
+                        (target_name, self.suite_name + '_stripped'))
+
+      target_mtime = os.stat(target_name).st_mtime
+      source_mtime = os.stat(self.suite_path).st_mtime
+      if target_mtime < source_mtime:
+        raise Exception(
+            'stripped binary (%s, timestamp %d) older than '
+            'source binary (%s, timestamp %d), build target %s' %
+            (target_name, target_mtime, self.suite_path, source_mtime,
+             self.suite_name + '_stripped'))
+
+    test_binary_path = constants.TEST_EXECUTABLE_DIR + '/' + self.suite_name
+    device.PushChangedFiles([(target_name, test_binary_path)])
+    deps_path = self.suite_path + '_deps'
+    if os.path.isdir(deps_path):
+      device.PushChangedFiles([(deps_path, test_binary_path + '_deps')])
+
+  #override
+  def PullAppFiles(self, device, files, directory):
+    pass
diff --git a/build/android/pylib/gtest/test_runner.py b/build/android/pylib/gtest/test_runner.py
new file mode 100644
index 0000000..a48f18a
--- /dev/null
+++ b/build/android/pylib/gtest/test_runner.py
@@ -0,0 +1,217 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import re
+import tempfile
+
+from pylib import pexpect
+from pylib import ports
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.gtest import gtest_test_instance
+from pylib.local import local_test_server_spawner
+from pylib.perf import perf_control
+
+# Test case statuses.
+RE_RUN = re.compile('\\[ RUN      \\] ?(.*)\r\n')
+RE_FAIL = re.compile('\\[  FAILED  \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n')
+RE_OK = re.compile('\\[       OK \\] ?(.*?)( \\((\\d+) ms\\))?\r\r\n')
+
+# Test run statuses.
+RE_PASSED = re.compile('\\[  PASSED  \\] ?(.*)\r\n')
+RE_RUNNER_FAIL = re.compile('\\[ RUNNER_FAILED \\] ?(.*)\r\n')
+# Signal handlers are installed before starting tests
+# to output the CRASHED marker when a crash happens.
+RE_CRASH = re.compile('\\[ CRASHED      \\](.*)\r\n')
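+
+# For reference, these patterns target gtest output lines such as the following
+# (illustrative examples only):
+#   [ RUN      ] FooTest.Bar
+#   [       OK ] FooTest.Bar (12 ms)
+#   [  FAILED  ] FooTest.Bar (34 ms)
+#   [  PASSED  ] 1 test.
+# The doubled carriage returns in the OK/FAILED patterns match line endings as
+# they typically appear in output captured over adb.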
+
+# Bots that don't output anything for 20 minutes get timed out, so that's our
+# hard cap.
+_INFRA_STDOUT_TIMEOUT = 20 * 60
+
+
+def _TestSuiteRequiresMockTestServer(suite_name):
+  """Returns True if the test suite requires mock test server."""
+  tests_require_net_test_server = ['unit_tests', 'net_unittests',
+                                   'components_browsertests',
+                                   'content_unittests',
+                                   'content_browsertests']
+  return suite_name in tests_require_net_test_server
+
+def _TestSuiteRequiresHighPerfMode(suite_name):
+  """Returns True if the test suite requires high performance mode."""
+  return 'perftests' in suite_name
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  def __init__(self, test_options, device, test_package):
+    """Single test suite attached to a single device.
+
+    Args:
+      test_options: A GTestOptions object.
+      device: Device to run the tests.
+      test_package: An instance of TestPackage class.
+    """
+
+    super(TestRunner, self).__init__(device, test_options.tool)
+
+    self.test_package = test_package
+    self.test_package.tool = self.tool
+    self._test_arguments = test_options.test_arguments
+
+    timeout = test_options.timeout
+    if timeout == 0:
+      timeout = 60
+    # On a VM (e.g. chromium buildbots), this timeout is way too small.
+    if os.environ.get('BUILDBOT_SLAVENAME'):
+      timeout = timeout * 2
+
+    self._timeout = min(timeout * self.tool.GetTimeoutScale(),
+                        _INFRA_STDOUT_TIMEOUT)
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller = perf_control.PerfControl(self.device)
+
+    if _TestSuiteRequiresMockTestServer(self.test_package.suite_name):
+      self._servers = [
+          local_test_server_spawner.LocalTestServerSpawner(
+              ports.AllocateTestServerPort(), self.device, self.tool)]
+    else:
+      self._servers = []
+
+    if test_options.app_data_files:
+      self._app_data_files = test_options.app_data_files
+      if test_options.app_data_file_dir:
+        self._app_data_file_dir = test_options.app_data_file_dir
+      else:
+        self._app_data_file_dir = tempfile.mkdtemp()
+        logging.critical('Saving app files to %s', self._app_data_file_dir)
+    else:
+      self._app_data_files = None
+      self._app_data_file_dir = None
+
+  #override
+  def InstallTestPackage(self):
+    self.test_package.Install(self.device)
+
+  def _ParseTestOutput(self, p):
+    """Process the test output.
+
+    Args:
+      p: An instance of pexpect spawn class.
+
+    Returns:
+      A TestRunResults object.
+    """
+    results = base_test_result.TestRunResults()
+
+    log = ''
+    try:
+      while True:
+        full_test_name = None
+
+        found = p.expect([RE_RUN, RE_PASSED, RE_RUNNER_FAIL],
+                         timeout=self._timeout)
+        if found == 1:  # RE_PASSED
+          break
+        elif found == 2:  # RE_RUNNER_FAIL
+          break
+        else:  # RE_RUN
+          full_test_name = p.match.group(1).replace('\r', '')
+          found = p.expect([RE_OK, RE_FAIL, RE_CRASH], timeout=self._timeout)
+          log = p.before.replace('\r', '')
+          if found == 0:  # RE_OK
+            if full_test_name == p.match.group(1).replace('\r', ''):
+              duration_ms = int(p.match.group(3)) if p.match.group(3) else 0
+              results.AddResult(base_test_result.BaseTestResult(
+                  full_test_name, base_test_result.ResultType.PASS,
+                  duration=duration_ms, log=log))
+          elif found == 2:  # RE_CRASH
+            results.AddResult(base_test_result.BaseTestResult(
+                full_test_name, base_test_result.ResultType.CRASH,
+                log=log))
+            break
+          else:  # RE_FAIL
+            duration_ms = int(p.match.group(3)) if p.match.group(3) else 0
+            results.AddResult(base_test_result.BaseTestResult(
+                full_test_name, base_test_result.ResultType.FAIL,
+                duration=duration_ms, log=log))
+    except pexpect.EOF:
+      logging.error('Test terminated - EOF')
+      # We're here because either the device went offline, or the test harness
+      # crashed without outputting the CRASHED marker (crbug.com/175538).
+      if not self.device.IsOnline():
+        raise device_errors.DeviceUnreachableError(
+            'Device %s went offline.' % str(self.device))
+      if full_test_name:
+        results.AddResult(base_test_result.BaseTestResult(
+            full_test_name, base_test_result.ResultType.CRASH,
+            log=p.before.replace('\r', '')))
+    except pexpect.TIMEOUT:
+      logging.error('Test terminated after %d second timeout.',
+                    self._timeout)
+      if full_test_name:
+        results.AddResult(base_test_result.BaseTestResult(
+            full_test_name, base_test_result.ResultType.TIMEOUT,
+            log=p.before.replace('\r', '')))
+    finally:
+      p.close()
+
+    ret_code = self.test_package.GetGTestReturnCode(self.device)
+    if ret_code:
+      logging.critical(
+          'gtest exit code: %d\npexpect.before: %s\npexpect.after: %s',
+          ret_code, p.before, p.after)
+
+    return results
+
+  #override
+  def RunTest(self, test):
+    test_results = base_test_result.TestRunResults()
+    if not test:
+      return test_results, None
+
+    try:
+      self.test_package.ClearApplicationState(self.device)
+      self.test_package.CreateCommandLineFileOnDevice(
+          self.device, test, self._test_arguments)
+      test_results = self._ParseTestOutput(
+          self.test_package.SpawnTestProcess(self.device))
+      if self._app_data_files:
+        self.test_package.PullAppFiles(self.device, self._app_data_files,
+                                       self._app_data_file_dir)
+    finally:
+      for s in self._servers:
+        s.Reset()
+    # Calculate unknown test results.
+    all_tests = set(test.split(':'))
+    all_tests_ran = set([t.GetName() for t in test_results.GetAll()])
+    unknown_tests = all_tests - all_tests_ran
+    test_results.AddResults(
+        [base_test_result.BaseTestResult(t, base_test_result.ResultType.UNKNOWN)
+         for t in unknown_tests])
+    retry = ':'.join([t.GetName() for t in test_results.GetNotPass()])
+    return test_results, retry
+
+  #override
+  def SetUp(self):
+    """Sets up necessary test enviroment for the test suite."""
+    super(TestRunner, self).SetUp()
+    for s in self._servers:
+      s.SetUp()
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller.SetHighPerfMode()
+    self.tool.SetupEnvironment()
+
+  #override
+  def TearDown(self):
+    """Cleans up the test enviroment for the test suite."""
+    for s in self._servers:
+      s.TearDown()
+    if _TestSuiteRequiresHighPerfMode(self.test_package.suite_name):
+      self._perf_controller.SetDefaultPerfMode()
+    self.test_package.ClearApplicationState(self.device)
+    self.tool.CleanUpEnvironment()
+    super(TestRunner, self).TearDown()
diff --git a/build/android/pylib/host_driven/__init__.py b/build/android/pylib/host_driven/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/host_driven/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/host_driven/setup.py b/build/android/pylib/host_driven/setup.py
new file mode 100644
index 0000000..b2ed348
--- /dev/null
+++ b/build/android/pylib/host_driven/setup.py
@@ -0,0 +1,200 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Setup for instrumentation host-driven tests."""
+
+import logging
+import os
+import sys
+import types
+
+from pylib.host_driven import test_case
+from pylib.host_driven import test_info_collection
+from pylib.host_driven import test_runner
+
+
+def _GetPythonFiles(root, files):
+  """Returns all files from |files| that end in 'Test.py'.
+
+  Args:
+    root: A directory name with python files.
+    files: A list of file names.
+
+  Returns:
+    A list with all python files that match the testing naming scheme.
+  """
+  return [os.path.join(root, f) for f in files if f.endswith('Test.py')]
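+  # For example (illustrative values):
+  #   _GetPythonFiles('/tests', ['FooTest.py', 'util.py']) -> ['/tests/FooTest.py']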
+
+
+def _InferImportNameFromFile(python_file):
+  """Given a file, infer the import name for that file.
+
+  Example: /usr/foo/bar/baz.py -> baz.
+
+  Args:
+    python_file: Path to the Python file, ostensibly to import later.
+
+  Returns:
+    The module name for the given file.
+  """
+  return os.path.splitext(os.path.basename(python_file))[0]
+
+
+def _GetTestModules(host_driven_test_root, is_official_build):
+  """Retrieve a list of python modules that match the testing naming scheme.
+
+  Walks the location of host-driven tests, imports them, and provides the list
+  of imported modules to the caller.
+
+  Args:
+    host_driven_test_root: The path to walk, looking for the
+        pythonDrivenTests or host_driven_tests directory
+    is_official_build: Whether to run only those tests marked 'official'
+
+  Returns:
+    A list of python modules under |host_driven_test_root| which match the
+    testing naming scheme. Each module should define one or more classes that
+    derive from HostDrivenTestCase.
+  """
+  # By default run all host-driven tests under pythonDrivenTests or
+  # host_driven_tests.
+  host_driven_test_file_list = []
+  for root, _, files in os.walk(host_driven_test_root):
+    if (root.endswith('host_driven_tests') or
+        root.endswith('pythonDrivenTests') or
+        (is_official_build and (root.endswith('pythonDrivenTests/official') or
+                                root.endswith('host_driven_tests/official')))):
+      host_driven_test_file_list += _GetPythonFiles(root, files)
+  host_driven_test_file_list.sort()
+
+  test_module_list = [_GetModuleFromFile(test_file)
+                      for test_file in host_driven_test_file_list]
+  return test_module_list
+
+
+def _GetModuleFromFile(python_file):
+  """Gets the python module associated with a file by importing it.
+
+  Args:
+    python_file: File to import.
+
+  Returns:
+    The module object.
+  """
+  sys.path.append(os.path.dirname(python_file))
+  import_name = _InferImportNameFromFile(python_file)
+  return __import__(import_name)
+
+
+def _GetTestsFromClass(test_case_class, **kwargs):
+  """Returns one test object for each test method in |test_case_class|.
+
+  Test methods are methods on the class which begin with 'test'.
+
+  Args:
+    test_case_class: Class derived from HostDrivenTestCase which contains zero
+        or more test methods.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    A list of test case objects, each initialized for a particular test method.
+  """
+  test_names = [m for m in dir(test_case_class)
+                if _IsTestMethod(m, test_case_class)]
+  return [test_case_class(name, **kwargs) for name in test_names]
+
+
+def _GetTestsFromModule(test_module, **kwargs):
+  """Gets a list of test objects from |test_module|.
+
+  Args:
+    test_module: Module from which to get the set of test methods.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    A list of test case objects each initialized for a particular test method
+    defined in |test_module|.
+  """
+
+  tests = []
+  for name in dir(test_module):
+    attr = getattr(test_module, name)
+    if _IsTestCaseClass(attr):
+      tests.extend(_GetTestsFromClass(attr, **kwargs))
+  return tests
+
+
+def _IsTestCaseClass(test_class):
+  return (type(test_class) is types.TypeType and
+          issubclass(test_class, test_case.HostDrivenTestCase) and
+          test_class is not test_case.HostDrivenTestCase)
+
+
+def _IsTestMethod(attrname, test_case_class):
+  """Checks whether this is a valid test method.
+
+  Args:
+    attrname: The method name.
+    test_case_class: The test case class.
+
+  Returns:
+    True if test_case_class.'attrname' is callable and it starts with 'test';
+    False otherwise.
+  """
+  attr = getattr(test_case_class, attrname)
+  return callable(attr) and attrname.startswith('test')
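+  # For example, a method named 'testPageLoad' is treated as a test, while
+  # 'setUp' is not (illustrative names only).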
+
+
+def _GetAllTests(test_root, is_official_build, **kwargs):
+  """Retrieve a list of host-driven tests defined under |test_root|.
+
+  Args:
+    test_root: Path which contains host-driven test files.
+    is_official_build: Whether this is an official build.
+    kwargs: Keyword args to pass into the constructor of test cases.
+
+  Returns:
+    List of test case objects, one for each available test method.
+  """
+  if not test_root:
+    return []
+  all_tests = []
+  test_module_list = _GetTestModules(test_root, is_official_build)
+  for module in test_module_list:
+    all_tests.extend(_GetTestsFromModule(module, **kwargs))
+  return all_tests
+
+
+def InstrumentationSetup(host_driven_test_root, official_build,
+                         instrumentation_options):
+  """Creates a list of host-driven instrumentation tests and a runner factory.
+
+  Args:
+    host_driven_test_root: Directory where the host-driven tests are.
+    official_build: True if this is an official build.
+    instrumentation_options: An InstrumentationOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+
+  test_collection = test_info_collection.TestInfoCollection()
+  all_tests = _GetAllTests(
+      host_driven_test_root, official_build,
+      instrumentation_options=instrumentation_options)
+  test_collection.AddTests(all_tests)
+
+  available_tests = test_collection.GetAvailableTests(
+      instrumentation_options.annotations,
+      instrumentation_options.exclude_annotations,
+      instrumentation_options.test_filter)
+  logging.debug('All available tests: ' + str(
+      [t.tagged_name for t in available_tests]))
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.HostDrivenTestRunner(
+        device, shard_index,
+        instrumentation_options.tool)
+
+  return (TestRunnerFactory, available_tests)
diff --git a/build/android/pylib/host_driven/test_case.py b/build/android/pylib/host_driven/test_case.py
new file mode 100644
index 0000000..6ff4c5f
--- /dev/null
+++ b/build/android/pylib/host_driven/test_case.py
@@ -0,0 +1,189 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for host-driven test cases.
+
+This test case is intended to serve as the base class for any host-driven
+test cases. It is similar to the Python unittest module in that test cases
+inherit from this class and add methods which will be run as tests.
+
+When a HostDrivenTestCase object is instantiated, its purpose is to run only one
+test method in the derived class. The test runner gives it the name of the test
+method the instance will run. The test runner calls SetUp with the device ID
+which the test method will run against. The test runner runs the test method
+itself, collecting the result, and calls TearDown.
+
+Tests can perform arbitrary Python commands and asserts in test methods. Tests
+that run instrumentation tests can make use of the _RunJavaTestFilters helper
+function to trigger Java tests and convert results into a single host-driven
+test result.
+"""
+
+import logging
+import os
+import time
+
+from pylib import constants
+from pylib import forwarder
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.device import device_utils
+from pylib.instrumentation import test_package
+from pylib.instrumentation import test_result
+from pylib.instrumentation import test_runner
+
+# aka the parent of com.google.android
+BASE_ROOT = 'src' + os.sep
+
+
+class HostDrivenTestCase(object):
+  """Base class for host-driven test cases."""
+
+  _HOST_DRIVEN_TAG = 'HostDriven'
+
+  def __init__(self, test_name, instrumentation_options=None):
+    """Create a test case initialized to run |test_name|.
+
+    Args:
+      test_name: The name of the method to run as the test.
+      instrumentation_options: An InstrumentationOptions object.
+    """
+    class_name = self.__class__.__name__
+    self.device = None
+    self.device_id = ''
+    self.has_forwarded_ports = False
+    self.instrumentation_options = instrumentation_options
+    self.ports_to_forward = []
+    self.shard_index = 0
+
+    # Use tagged_name when creating results, so that we can identify host-driven
+    # tests in the overall results.
+    self.test_name = test_name
+    self.qualified_name = '%s.%s' % (class_name, self.test_name)
+    self.tagged_name = '%s_%s' % (self._HOST_DRIVEN_TAG, self.qualified_name)
+
+  # TODO(bulach): make ports_to_forward not optional and move the Forwarder
+  # mapping here.
+  def SetUp(self, device, shard_index, ports_to_forward=None):
+    if not ports_to_forward:
+      ports_to_forward = []
+    self.device = device
+    self.shard_index = shard_index
+    self.device_id = str(self.device)
+    if ports_to_forward:
+      self.ports_to_forward = ports_to_forward
+
+  def TearDown(self):
+    pass
+
+  # TODO(craigdh): Remove GetOutDir once references have been removed
+  # downstream.
+  @staticmethod
+  def GetOutDir():
+    return constants.GetOutDirectory()
+
+  def Run(self):
+    logging.info('Running host-driven test: %s', self.tagged_name)
+    # Get the test method on the derived class and execute it
+    return getattr(self, self.test_name)()
+
+  @staticmethod
+  def __GetHostForwarderLog():
+    return ('-- Begin Full HostForwarder log\n'
+            '%s\n'
+            '--End Full HostForwarder log\n' % forwarder.Forwarder.GetHostLog())
+
+  def __StartForwarder(self):
+    logging.warning('Forwarding %s %s', self.ports_to_forward,
+                    self.has_forwarded_ports)
+    if self.ports_to_forward and not self.has_forwarded_ports:
+      self.has_forwarded_ports = True
+      tool = valgrind_tools.CreateTool(None, self.device)
+      forwarder.Forwarder.Map([(port, port) for port in self.ports_to_forward],
+                              self.device, tool)
+
+  def __RunJavaTest(self, test, test_pkg, additional_flags=None):
+    """Runs a single Java test in a Java TestRunner.
+
+    Args:
+      test: Fully qualified test name (ex. foo.bar.TestClass#testMethod)
+      test_pkg: TestPackage object.
+      additional_flags: A list of additional flags to add to the command line.
+
+    Returns:
+      TestRunResults object with a single test result.
+    """
+    # TODO(bulach): move this to SetUp() stage.
+    self.__StartForwarder()
+
+    java_test_runner = test_runner.TestRunner(
+        self.instrumentation_options, self.device, self.shard_index,
+        test_pkg, additional_flags=additional_flags)
+    try:
+      java_test_runner.SetUp()
+      return java_test_runner.RunTest(test)[0]
+    finally:
+      java_test_runner.TearDown()
+
+  def _RunJavaTestFilters(self, test_filters, additional_flags=None):
+    """Calls a list of tests and stops at the first test failure.
+
+    This method iterates until either it encounters a non-passing test or it
+    exhausts the list of tests. Then it returns the appropriate overall result.
+
+    Test cases may make use of this method internally to assist in running
+    instrumentation tests. This function relies on instrumentation_options
+    being defined.
+
+    Args:
+      test_filters: A list of Java test filters.
+      additional_flags: A list of additional flags to add to the command line.
+
+    Returns:
+      A TestRunResults object containing an overall result for this set of Java
+      tests. If any Java tests do not pass, this is a fail overall.
+    """
+    test_type = base_test_result.ResultType.PASS
+    log = ''
+
+    test_pkg = test_package.TestPackage(
+        self.instrumentation_options.test_apk_path,
+        self.instrumentation_options.test_apk_jar_path,
+        self.instrumentation_options.test_support_apk_path)
+
+    start_ms = int(time.time()) * 1000
+    done = False
+    for test_filter in test_filters:
+      tests = test_pkg.GetAllMatchingTests(None, None, test_filter)
+      # Filters should always result in >= 1 test.
+      if len(tests) == 0:
+        raise Exception('Java test filter "%s" returned no tests.'
+                        % test_filter)
+      for test in tests:
+        # We're only running one test at a time, so this TestRunResults object
+        # will hold only one result.
+        java_result = self.__RunJavaTest(test, test_pkg, additional_flags)
+        assert len(java_result.GetAll()) == 1
+        if not java_result.DidRunPass():
+          result = java_result.GetNotPass().pop()
+          log = result.GetLog()
+          log += self.__GetHostForwarderLog()
+          test_type = result.GetType()
+          done = True
+          break
+      if done:
+        break
+    duration_ms = int(time.time()) * 1000 - start_ms
+
+    overall_result = base_test_result.TestRunResults()
+    overall_result.AddResult(
+        test_result.InstrumentationTestResult(
+            self.tagged_name, test_type, start_ms, duration_ms, log=log))
+    return overall_result
+
+  def __str__(self):
+    return self.tagged_name
+
+  def __repr__(self):
+    return self.tagged_name
diff --git a/build/android/pylib/host_driven/test_info_collection.py b/build/android/pylib/host_driven/test_info_collection.py
new file mode 100644
index 0000000..c65d417
--- /dev/null
+++ b/build/android/pylib/host_driven/test_info_collection.py
@@ -0,0 +1,144 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing information about the host-driven tests."""
+
+import logging
+import os
+import sys
+
+from pylib.host_driven import tests_annotations
+
+from pylib import constants
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                             'build', 'util', 'lib', 'common'))
+
+import unittest_util # pylint: disable=F0401
+
+class TestInfo(object):
+  """An object containing and representing a test function, plus metadata."""
+
+  def __init__(self, runnable, set_up=None, tear_down=None):
+    # The actual test function/method.
+    self.runnable = runnable
+    # Qualified name of test function/method (e.g. FooModule.testBar).
+    self.qualified_name = self._GetQualifiedName(runnable)
+    # setUp and teardown functions, if any.
+    self.set_up = set_up
+    self.tear_down = tear_down
+
+  @staticmethod
+  def _GetQualifiedName(runnable):
+    """Helper method to infer a runnable's name and module name.
+
+    Many filters and lists presuppose a format of module_name.testMethodName.
+    To make this easy on everyone, we use some reflection magic to infer this
+    name automatically.
+
+    Args:
+      runnable: the test method to get the qualified name for
+
+    Returns:
+      qualified name for this runnable, incl. module name and method name.
+    """
+    runnable_name = runnable.__name__
+    # See also tests_annotations.
+    module_name = os.path.splitext(
+        os.path.basename(runnable.__globals__['__file__']))[0]
+    return '.'.join([module_name, runnable_name])
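+    # For example (illustrative), a function testFoo defined in BarTest.py
+    # yields the qualified name 'BarTest.testFoo'.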
+
+  def __str__(self):
+    return self.qualified_name
+
+
+class TestInfoCollection(object):
+  """A collection of TestInfo objects which facilitates filtering."""
+
+  def __init__(self):
+    """Initialize a new TestInfoCollection."""
+    # Master list of all valid tests.
+    self.all_tests = []
+
+  def AddTests(self, test_infos):
+    """Adds a set of tests to this collection.
+
+    The user may then retrieve them, optionally according to criteria, via
+    GetAvailableTests().
+
+    Args:
+      test_infos: a list of TestInfos representing test functions/methods.
+    """
+    self.all_tests = test_infos
+
+  def GetAvailableTests(self, annotations, exclude_annotations, name_filter):
+    """Get a collection of TestInfos which match the supplied criteria.
+
+    Args:
+      annotations: List of annotations. Each test in the returned list is
+        annotated with at least one of these annotations.
+      exclude_annotations: List of annotations. The tests in the returned
+        list are not annotated with any of these annotations.
+      name_filter: name filter which tests must match, if any
+
+    Returns:
+      List of available tests.
+    """
+    available_tests = self.all_tests
+
+    # Keep only the tests that match at least one of the requested annotations
+    # (all tests when no annotations were requested); the name filter, if any,
+    # is applied further below.
+    available_tests = [t for t in available_tests if
+                       self._AnnotationIncludesTest(t, annotations)]
+    if annotations and len(annotations) == 1 and annotations[0] == 'SmallTest':
+      tests_without_annotation = [
+          t for t in self.all_tests if
+          not tests_annotations.AnnotatedFunctions.GetTestAnnotations(
+              t.qualified_name)]
+      test_names = [t.qualified_name for t in tests_without_annotation]
+      logging.warning('The following tests do not contain any annotation. '
+                      'Assuming "SmallTest":\n%s',
+                      '\n'.join(test_names))
+      available_tests += tests_without_annotation
+    if exclude_annotations:
+      excluded_tests = [t for t in available_tests if
+                        self._AnnotationIncludesTest(t, exclude_annotations)]
+      available_tests = list(set(available_tests) - set(excluded_tests))
+
+    if name_filter:
+      available_test_names = unittest_util.FilterTestNames(
+          [t.qualified_name for t in available_tests], name_filter)
+      available_tests = [
+          t for t in available_tests if
+          t.qualified_name in available_test_names]
+    return available_tests
+
+  @staticmethod
+  def _AnnotationIncludesTest(test_info, annotation_filter_list):
+    """Checks whether a given test represented by test_info matches annotation.
+
+    Args:
+      test_info: TestInfo object representing the test
+      annotation_filter_list: list of annotation filters to match (e.g. Smoke)
+
+    Returns:
+      True if no annotation was supplied or the test matches; False otherwise.
+    """
+    if not annotation_filter_list:
+      return True
+    for annotation_filter in annotation_filter_list:
+      filters = annotation_filter.split('=')
+      if len(filters) == 2:
+        key = filters[0]
+        value_list = filters[1].split(',')
+        for value in value_list:
+          if tests_annotations.AnnotatedFunctions.IsAnnotated(
+              key + ':' + value, test_info.qualified_name):
+            return True
+      elif tests_annotations.AnnotatedFunctions.IsAnnotated(
+          annotation_filter, test_info.qualified_name):
+        return True
+    return False
+
diff --git a/build/android/pylib/host_driven/test_runner.py b/build/android/pylib/host_driven/test_runner.py
new file mode 100644
index 0000000..8620aa1
--- /dev/null
+++ b/build/android/pylib/host_driven/test_runner.py
@@ -0,0 +1,133 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs host-driven tests on a particular device."""
+
+import logging
+import sys
+import time
+import traceback
+
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.host_driven import test_case
+from pylib.instrumentation import test_result
+
+
+class HostDrivenExceptionTestResult(test_result.InstrumentationTestResult):
+  """Test result corresponding to a python exception in a host-driven test."""
+
+  def __init__(self, test_name, start_date_ms, exc_info):
+    """Constructs a HostDrivenExceptionTestResult object.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      start_date_ms: the starting time for the test.
+      exc_info: exception info, ostensibly from sys.exc_info().
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+    duration_ms = (int(time.time()) * 1000) - start_date_ms
+
+    super(HostDrivenExceptionTestResult, self).__init__(
+        test_name,
+        base_test_result.ResultType.FAIL,
+        start_date_ms,
+        duration_ms,
+        log=str(exc_type) + ' ' + log_msg)
+
+
+class HostDrivenTestRunner(base_test_runner.BaseTestRunner):
+  """Orchestrates running a set of host-driven tests.
+
+  Any Python exceptions in the tests are caught and translated into a failed
+  result, rather than being re-raised on the main thread.
+  """
+
+  # TODO(jbudorick): Remove cleanup_test_files once it's no longer used.
+  # pylint: disable=unused-argument
+  #override
+  def __init__(self, device, shard_index, tool, cleanup_test_files=None):
+    """Creates a new HostDrivenTestRunner.
+
+    Args:
+      device: Attached android device.
+      shard_index: Shard index.
+      tool: Name of the Valgrind tool.
+      cleanup_test_files: Deprecated.
+    """
+
+    super(HostDrivenTestRunner, self).__init__(device, tool)
+
+    # The shard index affords the ability to create unique port numbers (e.g.
+    # DEFAULT_PORT + shard_index) if the test so wishes.
+    self.shard_index = shard_index
+
+  # pylint: enable=unused-argument
+
+  #override
+  def RunTest(self, test):
+    """Sets up and runs a test case.
+
+    Args:
+      test: An object which is ostensibly a subclass of HostDrivenTestCase.
+
+    Returns:
+      A TestRunResults object which contains the result produced by the test
+      and, in the case of a failure, the test that should be retried.
+    """
+
+    assert isinstance(test, test_case.HostDrivenTestCase)
+
+    start_date_ms = int(time.time()) * 1000
+    exception_raised = False
+
+    try:
+      test.SetUp(self.device, self.shard_index)
+    except Exception:
+      logging.exception(
+          'Caught exception while trying to run SetUp() for test: ' +
+          test.tagged_name)
+      # Tests whose SetUp() method has failed are likely to fail, or at least
+      # yield invalid results.
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(HostDrivenExceptionTestResult(
+          test.tagged_name, start_date_ms, exc_info))
+      return results, test
+
+    try:
+      results = test.Run()
+    except Exception:
+      # Setting this lets TearDown() avoid stomping on our stack trace from
+      # Run() should TearDown() also raise an exception.
+      exception_raised = True
+      logging.exception('Caught exception while trying to run test: ' +
+                        test.tagged_name)
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(HostDrivenExceptionTestResult(
+          test.tagged_name, start_date_ms, exc_info))
+
+    try:
+      test.TearDown()
+    except Exception:
+      logging.exception(
+          'Caught exception while trying to run TearDown() for test: ' +
+          test.tagged_name)
+      if not exception_raised:
+        # Don't stomp the error during the test if TearDown blows up. This is a
+        # trade-off: if the test fails, this will mask any problem with TearDown
+        # until the test is fixed.
+        exc_info = sys.exc_info()
+        results = base_test_result.TestRunResults()
+        results.AddResult(HostDrivenExceptionTestResult(
+            test.tagged_name, start_date_ms, exc_info))
+
+    if not results.DidRunPass():
+      return results, test
+    else:
+      return results, None
diff --git a/build/android/pylib/host_driven/test_server.py b/build/android/pylib/host_driven/test_server.py
new file mode 100644
index 0000000..0783500
--- /dev/null
+++ b/build/android/pylib/host_driven/test_server.py
@@ -0,0 +1,130 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Host driven test server controller.
+
+This class controls the startup and shutdown of a python driven test server that
+runs in a separate process.
+
+The server starts up automatically when the object is created.
+
+After it starts up, the hostname it is running on can be retrieved through the
+member field |host| and the port through |port|.
+
+For shutting down the server, call TearDown().
+"""
+
+import logging
+import subprocess
+import os
+import os.path
+import time
+import urllib2
+
+from pylib import constants
+
+# NOTE: when adding or modifying these lines, omit any leading slashes!
+# Otherwise os.path.join() will (correctly) treat them as absolute paths
+# instead of relative paths and silently discard the preceding components.
+_PYTHONPATH_DIRS = [
+    'net/tools/testserver/',
+    'third_party/',
+    'third_party/pyftpdlib/src/',
+    'third_party/pywebsocket/src',
+    'third_party/tlslite/',
+]
+
+# Python files in these directories are generated as part of the build.
+# These dirs are located in out/(Debug|Release) directory.
+# The correct path is determined based on the build type. E.g. out/Debug for
+# debug builds and out/Release for release builds.
+_GENERATED_PYTHONPATH_DIRS = [
+    'pyproto/policy/proto/',
+    'pyproto/sync/protocol/',
+    'pyproto/'
+]
+
+_TEST_SERVER_HOST = '127.0.0.1'
+# Paths for supported test server executables.
+TEST_NET_SERVER_PATH = 'net/tools/testserver/testserver.py'
+TEST_SYNC_SERVER_PATH = 'sync/tools/testserver/sync_testserver.py'
+TEST_POLICY_SERVER_PATH = 'chrome/browser/policy/test/policy_testserver.py'
+# Parameters to check that the server is up and running.
+TEST_SERVER_CHECK_PARAMS = {
+  TEST_NET_SERVER_PATH: {
+      'url_path': '/',
+      'response': 'Default response given for path'
+  },
+  TEST_SYNC_SERVER_PATH: {
+      'url_path': 'chromiumsync/time',
+      'response': '0123456789'
+  },
+  TEST_POLICY_SERVER_PATH: {
+      'url_path': 'test/ping',
+      'response': 'Policy server is up.'
+  },
+}
+
+class TestServer(object):
+  """Sets up a host driven test server on the host machine.
+
+  For shutting down the server, call TearDown().
+  """
+
+  def __init__(self, shard_index, test_server_port, test_server_path,
+               test_server_flags=None):
+    """Sets up a Python driven test server on the host machine.
+
+    Args:
+      shard_index: Index of the current shard.
+      test_server_port: Port to run the test server on. This is multiplexed with
+                        the shard index. To retrieve the real port access the
+                        member variable |port|.
+      test_server_path: The path (relative to the root src dir) of the server.
+      test_server_flags: Optional list of additional flags to the test server.
+    """
+    self.host = _TEST_SERVER_HOST
+    self.port = test_server_port + shard_index
+
+    src_dir = constants.DIR_SOURCE_ROOT
+    # Make dirs into a list of absolute paths.
+    abs_dirs = [os.path.join(src_dir, d) for d in _PYTHONPATH_DIRS]
+    # Add the generated python files to the path
+    abs_dirs.extend([os.path.join(src_dir, constants.GetOutDirectory(), d)
+                     for d in _GENERATED_PYTHONPATH_DIRS])
+    current_python_path = os.environ.get('PYTHONPATH')
+    extra_python_path = ':'.join(abs_dirs)
+    if current_python_path:
+      python_path = current_python_path + ':' + extra_python_path
+    else:
+      python_path = extra_python_path
+
+    # NOTE: A separate python process is used to simplify getting the right
+    # system path for finding includes.
+    test_server_flags = test_server_flags or []
+    cmd = ['python', os.path.join(src_dir, test_server_path),
+           '--log-to-console',
+           ('--host=%s' % self.host),
+           ('--port=%d' % self.port),
+           '--on-remote-server'] + test_server_flags
+    self._test_server_process = subprocess.Popen(
+          cmd, env={'PYTHONPATH': python_path})
+    test_url = 'http://%s:%d/%s' % (self.host, self.port,
+        TEST_SERVER_CHECK_PARAMS[test_server_path]['url_path'])
+    expected_response = TEST_SERVER_CHECK_PARAMS[test_server_path]['response']
+    retries = 0
+    while retries < 5:
+      try:
+        d = urllib2.urlopen(test_url).read()
+        logging.info('URL %s GOT: %s' % (test_url, d))
+        if d.startswith(expected_response):
+          break
+      except Exception as e:
+        logging.info('URL %s GOT: %s' % (test_url, e))
+      time.sleep(retries * 0.1)
+      retries += 1
+
+  def TearDown(self):
+    self._test_server_process.kill()
+    self._test_server_process.wait()
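+
+
+# Typical usage (an illustrative sketch; the port and server choice below are
+# hypothetical):
+#   server = TestServer(shard_index=0, test_server_port=8000,
+#                       test_server_path=TEST_NET_SERVER_PATH)
+#   ...point the test at server.host / server.port...
+#   server.TearDown()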
diff --git a/build/android/pylib/host_driven/tests_annotations.py b/build/android/pylib/host_driven/tests_annotations.py
new file mode 100644
index 0000000..5331140
--- /dev/null
+++ b/build/android/pylib/host_driven/tests_annotations.py
@@ -0,0 +1,94 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Annotations for host-driven tests."""
+# pylint: disable=W0212
+
+import os
+
+
+class AnnotatedFunctions(object):
+  """A container for annotated methods."""
+  _ANNOTATED = {}
+
+  @staticmethod
+  def _AddFunction(annotation, function):
+    """Adds an annotated function to our container.
+
+    Args:
+      annotation: the annotation string.
+      function: the function.
+    Returns:
+      The function passed in.
+    """
+    module_name = os.path.splitext(os.path.basename(
+        function.__globals__['__file__']))[0]
+    qualified_function_name = '.'.join([module_name, function.func_name])
+    function_list = AnnotatedFunctions._ANNOTATED.get(annotation, [])
+    function_list.append(qualified_function_name)
+    AnnotatedFunctions._ANNOTATED[annotation] = function_list
+    return function
+
+  @staticmethod
+  def IsAnnotated(annotation, qualified_function_name):
+    """True if function name (module.function) contains the annotation.
+
+    Args:
+      annotation: the annotation string.
+      qualified_function_name: the qualified function name.
+    Returns:
+      True if module.function contains the annotation.
+    """
+    return qualified_function_name in AnnotatedFunctions._ANNOTATED.get(
+        annotation, [])
+
+  @staticmethod
+  def GetTestAnnotations(qualified_function_name):
+    """Returns a list containing all annotations for the given function.
+
+    Args:
+      qualified_function_name: the qualified function name.
+    Returns:
+      List of all annotations for this function.
+    """
+    return [annotation
+            for annotation, tests in AnnotatedFunctions._ANNOTATED.iteritems()
+            if qualified_function_name in tests]
+
+
+# The following functions are annotations used for the host-driven tests.
+def Smoke(function):
+  return AnnotatedFunctions._AddFunction('Smoke', function)
+
+
+def SmallTest(function):
+  return AnnotatedFunctions._AddFunction('SmallTest', function)
+
+
+def MediumTest(function):
+  return AnnotatedFunctions._AddFunction('MediumTest', function)
+
+
+def LargeTest(function):
+  return AnnotatedFunctions._AddFunction('LargeTest', function)
+
+
+def EnormousTest(function):
+  return AnnotatedFunctions._AddFunction('EnormousTest', function)
+
+
+def FlakyTest(function):
+  return AnnotatedFunctions._AddFunction('FlakyTest', function)
+
+
+def DisabledTest(function):
+  return AnnotatedFunctions._AddFunction('DisabledTest', function)
+
+
+def Feature(feature_list):
+  def _AddFeatures(function):
+    for feature in feature_list:
+      AnnotatedFunctions._AddFunction('Feature:%s' % feature, function)
+    return AnnotatedFunctions._AddFunction('Feature', function)
+  return _AddFeatures
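+
+
+# Typical usage in a host-driven test module (an illustrative sketch; the class,
+# method, and filter names are hypothetical):
+#
+#   from pylib.host_driven import test_case, tests_annotations
+#
+#   class FooTest(test_case.HostDrivenTestCase):
+#     @tests_annotations.Smoke
+#     def testBar(self):
+#       return self._RunJavaTestFilters(['FooJavaTest#testBar'])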
diff --git a/build/android/pylib/instrumentation/__init__.py b/build/android/pylib/instrumentation/__init__.py
new file mode 100644
index 0000000..727e987
--- /dev/null
+++ b/build/android/pylib/instrumentation/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/instrumentation/instrumentation_parser.py b/build/android/pylib/instrumentation/instrumentation_parser.py
new file mode 100644
index 0000000..1859f14
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_parser.py
@@ -0,0 +1,96 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+
+# http://developer.android.com/reference/android/test/InstrumentationTestRunner.html
+STATUS_CODE_START = 1
+STATUS_CODE_OK = 0
+STATUS_CODE_ERROR = -1
+STATUS_CODE_FAILURE = -2
+
+# http://developer.android.com/reference/android/app/Activity.html
+RESULT_CODE_OK = -1
+RESULT_CODE_CANCELED = 0
+
+_INSTR_LINE_RE = re.compile(r'^\s*INSTRUMENTATION_([A-Z_]+): (.*)$')
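+# Example lines this pattern is intended to match (as seen in the raw output of
+# |am instrument -r|):
+#   INSTRUMENTATION_STATUS: test=testMethod
+#   INSTRUMENTATION_STATUS_CODE: 0
+#   INSTRUMENTATION_CODE: -1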
+
+
+class InstrumentationParser(object):
+
+  def __init__(self, stream):
+    """An incremental parser for the output of Android instrumentation tests.
+
+    Example:
+
+      stream = adb.IterShell('am instrument -r ...')
+      parser = InstrumentationParser(stream)
+
+      for code, bundle in parser.IterStatus():
+        # do something with each instrumentation status
+        print 'status:', code, bundle
+
+      # do something with the final instrumentation result
+      code, bundle = parser.GetResult()
+      print 'result:', code, bundle
+
+    Args:
+      stream: a sequence of lines as produced by the raw output of an
+        instrumentation test (e.g. by |am instrument -r| or |uiautomator|).
+    """
+    self._stream = stream
+    self._code = None
+    self._bundle = None
+
+  def IterStatus(self):
+    """Iterate over statuses as they are produced by the instrumentation test.
+
+    Yields:
+      A tuple (code, bundle) for each instrumentation status found in the
+      output.
+    """
+    def join_bundle_values(bundle):
+      for key in bundle:
+        bundle[key] = '\n'.join(bundle[key])
+      return bundle
+
+    bundle = {'STATUS': {}, 'RESULT': {}}
+    header = None
+    key = None
+    for line in self._stream:
+      m = _INSTR_LINE_RE.match(line)
+      if m:
+        header, value = m.groups()
+        key = None
+        if header in ['STATUS', 'RESULT'] and '=' in value:
+          key, value = value.split('=', 1)
+          bundle[header][key] = [value]
+        elif header == 'STATUS_CODE':
+          yield int(value), join_bundle_values(bundle['STATUS'])
+          bundle['STATUS'] = {}
+        elif header == 'CODE':
+          self._code = int(value)
+        else:
+          logging.warning('Unknown INSTRUMENTATION_%s line: %s', header, value)
+      elif key is not None:
+        bundle[header][key].append(line)
+
+    self._bundle = join_bundle_values(bundle['RESULT'])
+
+  def GetResult(self):
+    """Return the final instrumentation result.
+
+    Returns:
+      A pair (code, bundle) with the final instrumentation result. The |code|
+      may be None if no instrumentation result was found in the output.
+
+    Raises:
+      AssertionError if attempting to get the instrumentation result before
+      exhausting |IterStatus|.
+    """
+    assert self._bundle is not None, (
+        'The IterStatus generator must be exhausted before reading the final'
+        ' instrumentation result.')
+    return self._code, self._bundle
diff --git a/build/android/pylib/instrumentation/instrumentation_parser_test.py b/build/android/pylib/instrumentation/instrumentation_parser_test.py
new file mode 100755
index 0000000..092d10f
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_parser_test.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.InstrumentationParser."""
+
+import unittest
+
+from pylib.instrumentation import instrumentation_parser
+
+
+class InstrumentationParserTest(unittest.TestCase):
+
+  def testInstrumentationParser_nothing(self):
+    parser = instrumentation_parser.InstrumentationParser([''])
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_noMatchingStarts(self):
+    raw_output = [
+      '',
+      'this.is.a.test.package.TestClass:.',
+      'Test result for =.',
+      'Time: 1.234',
+      '',
+      'OK (1 test)',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(None, code)
+    self.assertEqual({}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_resultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_RESULT: shortMsg=foo bar',
+      'INSTRUMENTATION_RESULT: longMsg=a foo',
+      'walked into',
+      'a bar',
+      'INSTRUMENTATION_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+    self.assertEqual(-1, code)
+    self.assertEqual(
+        {'shortMsg': 'foo bar', 'longMsg': 'a foo\nwalked into\na bar'}, bundle)
+    self.assertEqual([], statuses)
+
+  def testInstrumentationParser_oneStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: foo=1',
+      'INSTRUMENTATION_STATUS: bar=hello',
+      'INSTRUMENTATION_STATUS: world=false',
+      'INSTRUMENTATION_STATUS: class=this.is.a.test.package.TestClass',
+      'INSTRUMENTATION_STATUS: test=testMethod',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (0, {
+        'foo': '1',
+        'bar': 'hello',
+        'world': 'false',
+        'class': 'this.is.a.test.package.TestClass',
+        'test': 'testMethod',
+      })
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_multiStatus(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_STATUS: test_skipped=true',
+      'INSTRUMENTATION_STATUS_CODE: 0',
+      'INSTRUMENTATION_STATUS: class=hello',
+      'INSTRUMENTATION_STATUS: test=world',
+      'INSTRUMENTATION_STATUS: stack=',
+      'foo/bar.py (27)',
+      'hello/world.py (42)',
+      'test/file.py (1)',
+      'INSTRUMENTATION_STATUS_CODE: -1',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+
+    expected = [
+      (1, {'class': 'foo', 'test': 'bar',}),
+      (0, {'test_skipped': 'true'}),
+      (-1, {
+        'class': 'hello',
+        'test': 'world',
+        'stack': '\nfoo/bar.py (27)\nhello/world.py (42)\ntest/file.py (1)',
+      }),
+    ]
+    self.assertEqual(expected, statuses)
+
+  def testInstrumentationParser_statusResultAndCode(self):
+    raw_output = [
+      'INSTRUMENTATION_STATUS: class=foo',
+      'INSTRUMENTATION_STATUS: test=bar',
+      'INSTRUMENTATION_STATUS_CODE: 1',
+      'INSTRUMENTATION_RESULT: result=hello',
+      'world',
+      '',
+      '',
+      'INSTRUMENTATION_CODE: 0',
+    ]
+
+    parser = instrumentation_parser.InstrumentationParser(raw_output)
+    statuses = list(parser.IterStatus())
+    code, bundle = parser.GetResult()
+
+    self.assertEqual(0, code)
+    self.assertEqual({'result': 'hello\nworld\n\n'}, bundle)
+    self.assertEqual([(1, {'class': 'foo', 'test': 'bar'})], statuses)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance.py b/build/android/pylib/instrumentation/instrumentation_test_instance.py
new file mode 100644
index 0000000..f9957b0
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_test_instance.py
@@ -0,0 +1,525 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import pickle
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import flag_changer
+from pylib.base import base_test_result
+from pylib.base import test_instance
+from pylib.instrumentation import test_result
+from pylib.instrumentation import instrumentation_parser
+from pylib.utils import apk_helper
+from pylib.utils import md5sum
+from pylib.utils import proguard
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib', 'common'))
+import unittest_util
+
+# Ref: http://developer.android.com/reference/android/app/Activity.html
+_ACTIVITY_RESULT_CANCELED = 0
+_ACTIVITY_RESULT_OK = -1
+
+_DEFAULT_ANNOTATIONS = [
+    'Smoke', 'SmallTest', 'MediumTest', 'LargeTest',
+    'EnormousTest', 'IntegrationTest']
+_EXTRA_ENABLE_HTTP_SERVER = (
+    'org.chromium.chrome.test.ChromeInstrumentationTestRunner.'
+    'EnableTestHttpServer')
+_EXTRA_DRIVER_TEST_LIST = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestList')
+_EXTRA_DRIVER_TEST_LIST_FILE = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TestListFile')
+_EXTRA_DRIVER_TARGET_PACKAGE = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetPackage')
+_EXTRA_DRIVER_TARGET_CLASS = (
+    'org.chromium.test.driver.OnDeviceInstrumentationDriver.TargetClass')
+_NATIVE_CRASH_RE = re.compile('native crash', re.IGNORECASE)
+_PICKLE_FORMAT_VERSION = 10
+
+
+# TODO(jbudorick): Make these private class methods of
+# InstrumentationTestInstance once the instrumentation test_runner is
+# deprecated.
+def ParseAmInstrumentRawOutput(raw_output):
+  """Parses the output of an |am instrument -r| call.
+
+  Args:
+    raw_output: the output of an |am instrument -r| call as a list of lines
+  Returns:
+    A 3-tuple containing:
+      - the instrumentation code as an integer
+      - the instrumentation result as a list of lines
+      - the instrumentation statuses received as a list of 2-tuples
+        containing:
+        - the status code as an integer
+        - the bundle dump as a dict mapping string keys to a list of
+          strings, one for each line.
+  """
+  parser = instrumentation_parser.InstrumentationParser(raw_output)
+  statuses = list(parser.IterStatus())
+  code, bundle = parser.GetResult()
+  return (code, bundle, statuses)
+
+
+def GenerateTestResults(
+    result_code, result_bundle, statuses, start_ms, duration_ms):
+  """Generate test results from |statuses|.
+
+  Args:
+    result_code: The overall status code as an integer.
+    result_bundle: The summary bundle dump as a dict.
+    statuses: A list of 2-tuples containing:
+      - the status code as an integer
+      - the bundle dump as a dict mapping string keys to string values
+      Note that this is the same as the third item in the 3-tuple returned by
+      |ParseAmInstrumentRawOutput|.
+    start_ms: The start time of the test in milliseconds.
+    duration_ms: The duration of the test in milliseconds.
+
+  Returns:
+    A list containing an instance of InstrumentationTestResult for each test
+    parsed.
+  """
+
+  results = []
+
+  current_result = None
+
+  for status_code, bundle in statuses:
+    test_class = bundle.get('class', '')
+    test_method = bundle.get('test', '')
+    if test_class and test_method:
+      test_name = '%s#%s' % (test_class, test_method)
+    else:
+      continue
+
+    if status_code == instrumentation_parser.STATUS_CODE_START:
+      if current_result:
+        results.append(current_result)
+      current_result = test_result.InstrumentationTestResult(
+          test_name, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
+    else:
+      if status_code == instrumentation_parser.STATUS_CODE_OK:
+        if bundle.get('test_skipped', '').lower() in ('true', '1', 'yes'):
+          current_result.SetType(base_test_result.ResultType.SKIP)
+        elif current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+          current_result.SetType(base_test_result.ResultType.PASS)
+      else:
+        if status_code not in (instrumentation_parser.STATUS_CODE_ERROR,
+                               instrumentation_parser.STATUS_CODE_FAILURE):
+          logging.error('Unrecognized status code %d. Handling as an error.',
+                        status_code)
+        current_result.SetType(base_test_result.ResultType.FAIL)
+        if 'stack' in bundle:
+          current_result.SetLog(bundle['stack'])
+
+  if current_result:
+    if current_result.GetType() == base_test_result.ResultType.UNKNOWN:
+      crashed = (result_code == _ACTIVITY_RESULT_CANCELED
+                 and any(_NATIVE_CRASH_RE.search(l)
+                         for l in result_bundle.itervalues()))
+      if crashed:
+        current_result.SetType(base_test_result.ResultType.CRASH)
+
+    results.append(current_result)
+
+  return results
+
+
+class InstrumentationTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, isolate_delegate, error_func):
+    super(InstrumentationTestInstance, self).__init__()
+
+    self._apk_under_test = None
+    self._package_info = None
+    self._suite = None
+    self._test_apk = None
+    self._test_jar = None
+    self._test_package = None
+    self._test_runner = None
+    self._test_support_apk = None
+    self._initializeApkAttributes(args, error_func)
+
+    self._data_deps = None
+    self._isolate_abs_path = None
+    self._isolate_delegate = None
+    self._isolated_abs_path = None
+    self._test_data = None
+    self._initializeDataDependencyAttributes(args, isolate_delegate)
+
+    self._annotations = None
+    self._excluded_annotations = None
+    self._test_filter = None
+    self._initializeTestFilterAttributes(args)
+
+    self._flags = None
+    self._initializeFlagAttributes(args)
+
+    self._driver_apk = None
+    self._driver_package = None
+    self._driver_name = None
+    self._initializeDriverAttributes()
+
+  def _initializeApkAttributes(self, args, error_func):
+    if args.apk_under_test.endswith('.apk'):
+      self._apk_under_test = args.apk_under_test
+    else:
+      self._apk_under_test = os.path.join(
+          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+          '%s.apk' % args.apk_under_test)
+
+    if not os.path.exists(self._apk_under_test):
+      error_func('Unable to find APK under test: %s' % self._apk_under_test)
+
+    if args.test_apk.endswith('.apk'):
+      self._suite = os.path.splitext(os.path.basename(args.test_apk))[0]
+      self._test_apk = args.test_apk
+    else:
+      self._suite = args.test_apk
+      self._test_apk = os.path.join(
+          constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+          '%s.apk' % args.test_apk)
+
+    self._test_jar = os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+        '%s.jar' % self._suite)
+    self._test_support_apk = os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_TEST_JAVALIB_DIR,
+        '%sSupport.apk' % self._suite)
+
+    if not os.path.exists(self._test_apk):
+      error_func('Unable to find test APK: %s' % self._test_apk)
+    if not os.path.exists(self._test_jar):
+      error_func('Unable to find test JAR: %s' % self._test_jar)
+
+    apk = apk_helper.ApkHelper(self.test_apk)
+    self._test_package = apk.GetPackageName()
+    self._test_runner = apk.GetInstrumentationName()
+
+    self._package_info = None
+    for package_info in constants.PACKAGE_INFO.itervalues():
+      if self._test_package == package_info.test_package:
+        self._package_info = package_info
+    if not self._package_info:
+      logging.warning('Unable to find package info for %s', self._test_package)
+
+  def _initializeDataDependencyAttributes(self, args, isolate_delegate):
+    self._data_deps = []
+    if args.isolate_file_path:
+      self._isolate_abs_path = os.path.abspath(args.isolate_file_path)
+      self._isolate_delegate = isolate_delegate
+      self._isolated_abs_path = os.path.join(
+          constants.GetOutDirectory(), '%s.isolated' % self._test_package)
+    else:
+      self._isolate_delegate = None
+
+    # TODO(jbudorick): Deprecate and remove --test-data once data dependencies
+    # are fully converted to isolate.
+    if args.test_data:
+      logging.info('Data dependencies specified via --test-data')
+      self._test_data = args.test_data
+    else:
+      self._test_data = None
+
+    if not self._isolate_delegate and not self._test_data:
+      logging.warning('No data dependencies will be pushed.')
+
+  def _initializeTestFilterAttributes(self, args):
+    self._test_filter = args.test_filter
+
+    def annotation_dict_element(a):
+      a = a.split('=')
+      return (a[0], a[1] if len(a) == 2 else None)
+
+    if args.annotation_str:
+      self._annotations = dict(
+          annotation_dict_element(a)
+          for a in args.annotation_str.split(','))
+    elif not self._test_filter:
+      self._annotations = dict(
+          annotation_dict_element(a)
+          for a in _DEFAULT_ANNOTATIONS)
+    else:
+      self._annotations = {}
+
+    if args.exclude_annotation_str:
+      self._excluded_annotations = dict(
+          annotation_dict_element(a)
+          for a in args.exclude_annotation_str.split(','))
+    else:
+      self._excluded_annotations = {}
+
+  def _initializeFlagAttributes(self, args):
+    self._flags = ['--disable-fre', '--enable-test-intents']
+    # TODO(jbudorick): Transition "--device-flags" to "--device-flags-file"
+    if hasattr(args, 'device_flags') and args.device_flags:
+      with open(args.device_flags) as device_flags_file:
+        stripped_lines = (l.strip() for l in device_flags_file)
+        self._flags.extend([flag for flag in stripped_lines if flag])
+    if hasattr(args, 'device_flags_file') and args.device_flags_file:
+      with open(args.device_flags_file) as device_flags_file:
+        stripped_lines = (l.strip() for l in device_flags_file)
+        self._flags.extend([flag for flag in stripped_lines if flag])
+
+  def _initializeDriverAttributes(self):
+    self._driver_apk = os.path.join(
+        constants.GetOutDirectory(), constants.SDK_BUILD_APKS_DIR,
+        'OnDeviceInstrumentationDriver.apk')
+    if os.path.exists(self._driver_apk):
+      driver_apk = apk_helper.ApkHelper(self._driver_apk)
+      self._driver_package = driver_apk.GetPackageName()
+      self._driver_name = driver_apk.GetInstrumentationName()
+    else:
+      self._driver_apk = None
+
+  @property
+  def apk_under_test(self):
+    return self._apk_under_test
+
+  @property
+  def flags(self):
+    return self._flags
+
+  @property
+  def driver_apk(self):
+    return self._driver_apk
+
+  @property
+  def driver_package(self):
+    return self._driver_package
+
+  @property
+  def driver_name(self):
+    return self._driver_name
+
+  @property
+  def package_info(self):
+    return self._package_info
+
+  @property
+  def suite(self):
+    return self._suite
+
+  @property
+  def test_apk(self):
+    return self._test_apk
+
+  @property
+  def test_jar(self):
+    return self._test_jar
+
+  @property
+  def test_support_apk(self):
+    return self._test_support_apk
+
+  @property
+  def test_package(self):
+    return self._test_package
+
+  @property
+  def test_runner(self):
+    return self._test_runner
+
+  #override
+  def TestType(self):
+    return 'instrumentation'
+
+  #override
+  def SetUp(self):
+    if self._isolate_delegate:
+      self._isolate_delegate.Remap(
+          self._isolate_abs_path, self._isolated_abs_path)
+      self._isolate_delegate.MoveOutputDeps()
+      self._data_deps.extend([(constants.ISOLATE_DEPS_DIR, None)])
+
+    # TODO(jbudorick): Convert existing tests that depend on the --test-data
+    # mechanism to isolate, then remove this.
+    if self._test_data:
+      for t in self._test_data:
+        device_rel_path, host_rel_path = t.split(':')
+        host_abs_path = os.path.join(constants.DIR_SOURCE_ROOT, host_rel_path)
+        self._data_deps.extend(
+            [(host_abs_path,
+              [None, 'chrome', 'test', 'data', device_rel_path])])
+
+  def GetDataDependencies(self):
+    return self._data_deps
+
+  def GetTests(self):
+    pickle_path = '%s-proguard.pickle' % self.test_jar
+    try:
+      tests = self._GetTestsFromPickle(pickle_path, self.test_jar)
+    except self.ProguardPickleException as e:
+      logging.info('Getting tests from JAR via proguard. (%s)', str(e))
+      tests = self._GetTestsFromProguard(self.test_jar)
+      self._SaveTestsToPickle(pickle_path, self.test_jar, tests)
+    return self._InflateTests(self._FilterTests(tests))
+
+  class ProguardPickleException(Exception):
+    pass
+
+  def _GetTestsFromPickle(self, pickle_path, jar_path):
+    if not os.path.exists(pickle_path):
+      raise self.ProguardPickleException('%s does not exist.' % pickle_path)
+    if os.path.getmtime(pickle_path) <= os.path.getmtime(jar_path):
+      raise self.ProguardPickleException(
+          '%s newer than %s.' % (jar_path, pickle_path))
+
+    with open(pickle_path, 'r') as pickle_file:
+      pickle_data = pickle.loads(pickle_file.read())
+    jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path]
+
+    try:
+      if pickle_data['VERSION'] != _PICKLE_FORMAT_VERSION:
+        raise self.ProguardPickleException('PICKLE_FORMAT_VERSION has changed.')
+      if pickle_data['JAR_MD5SUM'] != jar_md5:
+        raise self.ProguardPickleException('JAR file MD5 sum differs.')
+      return pickle_data['TEST_METHODS']
+    except TypeError as e:
+      logging.error(pickle_data)
+      raise self.ProguardPickleException(str(e))
+
+  def _GetTestsFromProguard(self, jar_path):
+    p = proguard.Dump(jar_path)
+
+    def is_test_class(c):
+      return c['class'].endswith('Test')
+
+    def is_test_method(m):
+      return m['method'].startswith('test')
+
+    class_lookup = dict((c['class'], c) for c in p['classes'])
+    def recursive_get_class_annotations(c):
+      s = c['superclass']
+      if s in class_lookup:
+        a = recursive_get_class_annotations(class_lookup[s])
+      else:
+        a = {}
+      a.update(c['annotations'])
+      return a
+
+    def stripped_test_class(c):
+      return {
+        'class': c['class'],
+        'annotations': recursive_get_class_annotations(c),
+        'methods': [m for m in c['methods'] if is_test_method(m)],
+      }
+
+    return [stripped_test_class(c) for c in p['classes']
+            if is_test_class(c)]
+
+  def _SaveTestsToPickle(self, pickle_path, jar_path, tests):
+    jar_md5 = md5sum.CalculateHostMd5Sums(jar_path)[jar_path]
+    pickle_data = {
+      'VERSION': _PICKLE_FORMAT_VERSION,
+      'JAR_MD5SUM': jar_md5,
+      'TEST_METHODS': tests,
+    }
+    with open(pickle_path, 'w') as pickle_file:
+      pickle.dump(pickle_data, pickle_file)
+
+  def _FilterTests(self, tests):
+
+    def gtest_filter(c, m):
+      t = ['%s.%s' % (c['class'].split('.')[-1], m['method'])]
+      return (not self._test_filter
+              or unittest_util.FilterTestNames(t, self._test_filter))
+
+    def annotation_filter(all_annotations):
+      if not self._annotations:
+        return True
+      return any_annotation_matches(self._annotations, all_annotations)
+
+    def excluded_annotation_filter(all_annotations):
+      if not self._excluded_annotations:
+        return True
+      return not any_annotation_matches(self._excluded_annotations,
+                                        all_annotations)
+
+    def any_annotation_matches(annotations, all_annotations):
+      return any(
+          ak in all_annotations and (av is None or av == all_annotations[ak])
+          for ak, av in annotations.iteritems())
+
+    filtered_classes = []
+    for c in tests:
+      filtered_methods = []
+      for m in c['methods']:
+        # Gtest filtering
+        if not gtest_filter(c, m):
+          continue
+
+        all_annotations = dict(c['annotations'])
+        all_annotations.update(m['annotations'])
+        if (not annotation_filter(all_annotations)
+            or not excluded_annotation_filter(all_annotations)):
+          continue
+
+        filtered_methods.append(m)
+
+      if filtered_methods:
+        filtered_class = dict(c)
+        filtered_class['methods'] = filtered_methods
+        filtered_classes.append(filtered_class)
+
+    return filtered_classes
+
+  def _InflateTests(self, tests):
+    inflated_tests = []
+    for c in tests:
+      for m in c['methods']:
+        a = dict(c['annotations'])
+        a.update(m['annotations'])
+        inflated_tests.append({
+            'class': c['class'],
+            'method': m['method'],
+            'annotations': a,
+        })
+    return inflated_tests
+
+  @staticmethod
+  def GetHttpServerEnvironmentVars():
+    return {
+      _EXTRA_ENABLE_HTTP_SERVER: None,
+    }
+
+  def GetDriverEnvironmentVars(
+      self, test_list=None, test_list_file_path=None):
+    env = {
+      _EXTRA_DRIVER_TARGET_PACKAGE: self.test_package,
+      _EXTRA_DRIVER_TARGET_CLASS: self.test_runner,
+    }
+
+    if test_list:
+      env[_EXTRA_DRIVER_TEST_LIST] = ','.join(test_list)
+
+    if test_list_file_path:
+      env[_EXTRA_DRIVER_TEST_LIST_FILE] = (
+          os.path.basename(test_list_file_path))
+
+    return env
+
+  @staticmethod
+  def ParseAmInstrumentRawOutput(raw_output):
+    return ParseAmInstrumentRawOutput(raw_output)
+
+  @staticmethod
+  def GenerateTestResults(
+      result_code, result_bundle, statuses, start_ms, duration_ms):
+    return GenerateTestResults(result_code, result_bundle, statuses,
+                               start_ms, duration_ms)
+
+  #override
+  def TearDown(self):
+    if self._isolate_delegate:
+      self._isolate_delegate.Clear()
+
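
A brief usage sketch of the two module-level helpers defined above, assuming the build/android pylib tree is importable; the raw_output lines are hypothetical and would normally come from an |am instrument -r| invocation:

    from pylib.instrumentation import instrumentation_test_instance

    raw_output = [
        'INSTRUMENTATION_STATUS: class=org.chromium.ExampleTest',
        'INSTRUMENTATION_STATUS: test=testFoo',
        'INSTRUMENTATION_STATUS_CODE: 1',   # test started
        'INSTRUMENTATION_STATUS: class=org.chromium.ExampleTest',
        'INSTRUMENTATION_STATUS: test=testFoo',
        'INSTRUMENTATION_STATUS_CODE: 0',   # test finished OK
        'INSTRUMENTATION_CODE: -1',
    ]
    code, bundle, statuses = (
        instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
    results = instrumentation_test_instance.GenerateTestResults(
        code, bundle, statuses, start_ms=0, duration_ms=1000)
    for r in results:
      # Prints the qualified test name and its ResultType (a pass here).
      print('%s -> %s' % (r.GetName(), r.GetType()))
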
diff --git a/build/android/pylib/instrumentation/instrumentation_test_instance_test.py b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
new file mode 100755
index 0000000..752e4d3
--- /dev/null
+++ b/build/android/pylib/instrumentation/instrumentation_test_instance_test.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""Unit tests for instrumentation.TestRunner."""
+
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.instrumentation import instrumentation_test_instance
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock  # pylint: disable=F0401
+
+
+class InstrumentationTestInstanceTest(unittest.TestCase):
+
+  def setUp(self):
+    options = mock.Mock()
+    options.tool = ''
+
+  def testGenerateTestResults_noStatus(self):
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, [], 0, 1000)
+    self.assertEqual([], results)
+
+  def testGenerateTestResults_testPassed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_true(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'true',
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.SKIP, results[0].GetType())
+
+  def testGenerateTestResults_testSkipped_false(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (0, {
+        'test_skipped': 'false',
+      }),
+      (0, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.PASS, results[0].GetType())
+
+  def testGenerateTestResults_testFailed(self):
+    statuses = [
+      (1, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+      (-2, {
+        'class': 'test.package.TestClass',
+        'test': 'testMethod',
+      }),
+    ]
+    results = instrumentation_test_instance.GenerateTestResults(
+        None, None, statuses, 0, 1000)
+    self.assertEqual(1, len(results))
+    self.assertEqual(base_test_result.ResultType.FAIL, results[0].GetType())
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/instrumentation/json_perf_parser.py b/build/android/pylib/instrumentation/json_perf_parser.py
new file mode 100644
index 0000000..ffdfbe7
--- /dev/null
+++ b/build/android/pylib/instrumentation/json_perf_parser.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+"""A helper module for parsing JSON objects from perf tests results."""
+
+import json
+
+
+def GetAverageRunInfo(json_data, name):
+  """Summarizes TraceEvent JSON data for performance metrics.
+
+  Example JSON Inputs (More tags can be added but these are required):
+  Measuring Duration:
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "S",
+      "name": "TestTrace"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "F",
+      "name": "TestTrace"
+    },
+    ...
+  ]
+
+  Measuring Call Frequency (FPS):
+  [
+    { "cat": "Java",
+      "ts": 10000000000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    { "cat": "Java",
+      "ts": 10000004000,
+      "ph": "I",
+      "name": "TestTraceFPS"
+    },
+    ...
+  ]
+
+  Args:
+    json_data: A list of dictionaries, each representing a JSON object.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    A dictionary of result data with the following tags:
+      min: The minimum value tracked.
+      max: The maximum value tracked.
+      average: The average of all the values tracked.
+      count: The number of times the category/name pair was tracked.
+      type: The type of tracking ('Instant' for instant tags and 'Span' for
+            begin/end tags).
+      category: The passed in category filter.
+      name: The passed in name filter.
+      data_points: A list of all of the times used to generate this data.
+      units: The units for the values being reported.
+
+  Raises:
+    Exception: if entry contains invalid data.
+  """
+
+  def EntryFilter(entry):
+    return entry['cat'] == 'Java' and entry['name'] == name
+  filtered_entries = filter(EntryFilter, json_data)
+
+  result = {}
+
+  result['min'] = -1
+  result['max'] = -1
+  result['average'] = 0
+  result['count'] = 0
+  result['type'] = 'Unknown'
+  result['category'] = 'Java'
+  result['name'] = name
+  result['data_points'] = []
+  result['units'] = ''
+
+  total_sum = 0
+
+  last_val = 0
+  val_type = None
+  for entry in filtered_entries:
+    if not val_type:
+      if 'mem' in entry:
+        val_type = 'mem'
+
+        def GetVal(entry):
+          return entry['mem']
+
+        result['units'] = 'kb'
+      elif 'ts' in entry:
+        val_type = 'ts'
+
+        def GetVal(entry):
+          return float(entry['ts']) / 1000.0
+
+        result['units'] = 'ms'
+      else:
+        raise Exception('Entry did not contain valid value info: %s' % entry)
+
+    if val_type not in entry:
+      raise Exception('Entry did not contain expected value type "%s" '
+                      'information: %s' % (val_type, entry))
+    val = GetVal(entry)
+    if (entry['ph'] == 'S' and
+        (result['type'] == 'Unknown' or result['type'] == 'Span')):
+      result['type'] = 'Span'
+      last_val = val
+    elif ((entry['ph'] == 'F' and result['type'] == 'Span') or
+          (entry['ph'] == 'I' and (result['type'] == 'Unknown' or
+                                   result['type'] == 'Instant'))):
+      if last_val > 0:
+        delta = val - last_val
+        if result['min'] == -1 or result['min'] > delta:
+          result['min'] = delta
+        if result['max'] == -1 or result['max'] < delta:
+          result['max'] = delta
+        total_sum += delta
+        result['count'] += 1
+        result['data_points'].append(delta)
+      if entry['ph'] == 'I':
+        result['type'] = 'Instant'
+        last_val = val
+  if result['count'] > 0:
+    result['average'] = total_sum / result['count']
+
+  return result
+
+
+def GetAverageRunInfoFromJSONString(json_string, name):
+  """Returns the results from GetAverageRunInfo using a JSON string.
+
+  Args:
+    json_string: The string containing JSON.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  return GetAverageRunInfo(json.loads(json_string), name)
+
+
+def GetAverageRunInfoFromFile(json_file, name):
+  """Returns the results from GetAverageRunInfo using a JSON file.
+
+  Args:
+    json_file: The path to a JSON file.
+    name: The 'name' tag to filter on in the JSON file.
+
+  Returns:
+    See GetAverageRunInfo Returns section.
+  """
+  with open(json_file, 'r') as f:
+    data = f.read()
+    perf = json.loads(data)
+
+  return GetAverageRunInfo(perf, name)
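
A worked example of GetAverageRunInfo on the 'Span' form shown in the docstring; the trace below mirrors the docstring's sample values, and because the 'ts' values are in microseconds the single span works out to 4.0 ms:

    from pylib.instrumentation import json_perf_parser

    trace = [
        {'cat': 'Java', 'ts': 10000000000, 'ph': 'S', 'name': 'TestTrace'},
        {'cat': 'Java', 'ts': 10000004000, 'ph': 'F', 'name': 'TestTrace'},
    ]
    info = json_perf_parser.GetAverageRunInfo(trace, 'TestTrace')
    # info['type'] == 'Span', info['count'] == 1, info['units'] == 'ms'
    # info['average'] == (10000004000 - 10000000000) / 1000.0 == 4.0
    print('%s %s' % (info['average'], info['units']))
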
diff --git a/build/android/pylib/instrumentation/setup.py b/build/android/pylib/instrumentation/setup.py
new file mode 100644
index 0000000..7a0501e
--- /dev/null
+++ b/build/android/pylib/instrumentation/setup.py
@@ -0,0 +1,113 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for instrumentation tests."""
+
+import logging
+import os
+
+from pylib import constants
+from pylib import valgrind_tools
+
+from pylib.base import base_setup
+from pylib.device import device_utils
+from pylib.instrumentation import test_package
+from pylib.instrumentation import test_runner
+
+DEVICE_DATA_DIR = 'chrome/test/data'
+
+ISOLATE_FILE_PATHS = {
+    'AndroidWebViewTest': 'android_webview/android_webview_test_apk.isolate',
+    'ChromeShellTest': 'chrome/chrome_shell_test_apk.isolate',
+    'ContentShellTest': 'content/content_shell_test_apk.isolate',
+}
+
+DEPS_EXCLUSION_LIST = []
+
+# TODO(mikecase): Remove this function and the constant DEVICE_DATA_DIR
+# once all data deps are pushed to the same location on the device.
+def _PushExtraSuiteDataDeps(device, test_apk):
+  """Pushes some extra data files/dirs needed by some test suite.
+
+  Args:
+    test_apk: The test suite basename for which to return file paths.
+  """
+  if test_apk in ['ChromeTest', 'ContentShellTest']:
+    test_files = 'net/data/ssl/certificates'
+    host_device_file_tuple = [
+        (os.path.join(constants.DIR_SOURCE_ROOT, test_files),
+         os.path.join(device.GetExternalStoragePath(), test_files))]
+    device.PushChangedFiles(host_device_file_tuple)
+
+
+# TODO(mikecase): Remove this function once everything uses
+# base_setup.PushDataDeps to push data deps to the device.
+def _PushDataDeps(device, test_options):
+  valgrind_tools.PushFilesForTool(test_options.tool, device)
+
+  host_device_file_tuples = []
+  for dest_host_pair in test_options.test_data:
+    dst_src = dest_host_pair.split(':', 1)
+    dst_layer = dst_src[0]
+    host_src = dst_src[1]
+    host_test_files_path = os.path.join(constants.DIR_SOURCE_ROOT, host_src)
+    if os.path.exists(host_test_files_path):
+      host_device_file_tuples += [(
+          host_test_files_path,
+          '%s/%s/%s' % (
+              device.GetExternalStoragePath(),
+              DEVICE_DATA_DIR,
+              dst_layer))]
+  if host_device_file_tuples:
+    device.PushChangedFiles(host_device_file_tuples)
+
+
+def Setup(test_options, devices):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: An InstrumentationOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  if (test_options.coverage_dir and not
+      os.path.exists(test_options.coverage_dir)):
+    os.makedirs(test_options.coverage_dir)
+
+  test_pkg = test_package.TestPackage(test_options.test_apk_path,
+                                      test_options.test_apk_jar_path,
+                                      test_options.test_support_apk_path)
+  tests = test_pkg.GetAllMatchingTests(
+      test_options.annotations,
+      test_options.exclude_annotations,
+      test_options.test_filter)
+  if not tests:
+    logging.error('No instrumentation tests to run with current args.')
+
+  if test_options.test_data:
+    device_utils.DeviceUtils.parallel(devices).pMap(
+        _PushDataDeps, test_options)
+
+  if test_options.isolate_file_path:
+    i = base_setup.GenerateDepsDirUsingIsolate(
+        test_options.test_apk, test_options.isolate_file_path,
+        ISOLATE_FILE_PATHS, DEPS_EXCLUSION_LIST)
+    def push_data_deps_to_device_dir(device):
+      base_setup.PushDataDeps(device, device.GetExternalStoragePath(),
+                              test_options)
+    device_utils.DeviceUtils.parallel(devices).pMap(
+        push_data_deps_to_device_dir)
+    if i:
+      i.Clear()
+
+  device_utils.DeviceUtils.parallel(devices).pMap(
+      _PushExtraSuiteDataDeps, test_options.test_apk)
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(test_options, device, shard_index,
+                                  test_pkg)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/instrumentation/test_jar.py b/build/android/pylib/instrumentation/test_jar.py
new file mode 100644
index 0000000..7ad8997
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_jar.py
@@ -0,0 +1,230 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper class for instrumenation test jar."""
+# pylint: disable=W0702
+
+import logging
+import os
+import pickle
+import re
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.device import device_utils
+from pylib.utils import md5sum
+from pylib.utils import proguard
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                             'build', 'util', 'lib', 'common'))
+
+import unittest_util # pylint: disable=F0401
+
+# If you change the cached output of proguard, increment this number
+PICKLE_FORMAT_VERSION = 4
+
+
+class TestJar(object):
+  _ANNOTATIONS = frozenset(
+      ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest', 'EnormousTest',
+       'FlakyTest', 'DisabledTest', 'Manual', 'PerfTest', 'HostDrivenTest',
+       'IntegrationTest'])
+  _DEFAULT_ANNOTATION = 'SmallTest'
+  _PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+  _PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+  _PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+  _PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
+  _PROGUARD_ANNOTATION_CONST_RE = (
+      re.compile(r'\s*?- Constant element value.*$'))
+  _PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
+
+  def __init__(self, jar_path):
+    if not os.path.exists(jar_path):
+      raise Exception('%s not found, please build it' % jar_path)
+
+    self._PROGUARD_PATH = os.path.join(constants.ANDROID_SDK_ROOT,
+                                       'tools/proguard/lib/proguard.jar')
+    if not os.path.exists(self._PROGUARD_PATH):
+      self._PROGUARD_PATH = os.path.join(os.environ['ANDROID_BUILD_TOP'],
+                                         'external/proguard/lib/proguard.jar')
+    self._jar_path = jar_path
+    self._pickled_proguard_name = self._jar_path + '-proguard.pickle'
+    self._test_methods = {}
+    if not self._GetCachedProguardData():
+      self._GetProguardData()
+
+  def _GetCachedProguardData(self):
+    if (os.path.exists(self._pickled_proguard_name) and
+        (os.path.getmtime(self._pickled_proguard_name) >
+         os.path.getmtime(self._jar_path))):
+      logging.info('Loading cached proguard output from %s',
+                   self._pickled_proguard_name)
+      try:
+        with open(self._pickled_proguard_name, 'r') as r:
+          d = pickle.loads(r.read())
+        jar_md5 = md5sum.CalculateHostMd5Sums(
+          self._jar_path)[os.path.realpath(self._jar_path)]
+        if (d['JAR_MD5SUM'] == jar_md5 and
+            d['VERSION'] == PICKLE_FORMAT_VERSION):
+          self._test_methods = d['TEST_METHODS']
+          return True
+      except:
+        logging.warning('Failed to load cached proguard data, ignoring cache')
+    return False
+
+  def _GetProguardData(self):
+    logging.info('Retrieving test methods via proguard.')
+
+    p = proguard.Dump(self._jar_path)
+
+    class_lookup = dict((c['class'], c) for c in p['classes'])
+    def recursive_get_annotations(c):
+      s = c['superclass']
+      if s in class_lookup:
+        a = recursive_get_annotations(class_lookup[s])
+      else:
+        a = {}
+      a.update(c['annotations'])
+      return a
+
+    test_classes = (c for c in p['classes']
+                    if c['class'].endswith('Test'))
+    for c in test_classes:
+      class_annotations = recursive_get_annotations(c)
+      test_methods = (m for m in c['methods']
+                      if m['method'].startswith('test'))
+      for m in test_methods:
+        qualified_method = '%s#%s' % (c['class'], m['method'])
+        annotations = dict(class_annotations)
+        annotations.update(m['annotations'])
+        self._test_methods[qualified_method] = m
+        self._test_methods[qualified_method]['annotations'] = annotations
+
+    logging.info('Storing proguard output to %s', self._pickled_proguard_name)
+    d = {'VERSION': PICKLE_FORMAT_VERSION,
+         'TEST_METHODS': self._test_methods,
+         'JAR_MD5SUM':
+              md5sum.CalculateHostMd5Sums(
+                self._jar_path)[os.path.realpath(self._jar_path)]}
+    with open(self._pickled_proguard_name, 'w') as f:
+      f.write(pickle.dumps(d))
+
+  @staticmethod
+  def _IsTestMethod(test):
+    class_name, method = test.split('#')
+    return class_name.endswith('Test') and method.startswith('test')
+
+  def GetTestAnnotations(self, test):
+    """Returns a list of all annotations for the given |test|. May be empty."""
+    if not self._IsTestMethod(test) or test not in self._test_methods:
+      return []
+    return self._test_methods[test]['annotations']
+
+  @staticmethod
+  def _AnnotationsMatchFilters(annotation_filter_list, annotations):
+    """Checks if annotations match any of the filters."""
+    if not annotation_filter_list:
+      return True
+    for annotation_filter in annotation_filter_list:
+      filters = annotation_filter.split('=')
+      if len(filters) == 2:
+        key = filters[0]
+        value_list = filters[1].split(',')
+        for value in value_list:
+          if key in annotations and value == annotations[key]:
+            return True
+      elif annotation_filter in annotations:
+        return True
+    return False
+
+  def GetAnnotatedTests(self, annotation_filter_list):
+    """Returns a list of all tests that match the given annotation filters."""
+    return [test for test in self.GetTestMethods()
+            if self._IsTestMethod(test) and self._AnnotationsMatchFilters(
+                annotation_filter_list, self.GetTestAnnotations(test))]
+
+  def GetTestMethods(self):
+    """Returns a dict of all test methods and relevant attributes.
+
+    Test methods are retrieved as Class#testMethod.
+    """
+    return self._test_methods
+
+  def _GetTestsMissingAnnotation(self):
+    """Get a list of test methods with no known annotations."""
+    tests_missing_annotations = []
+    for test_method in self.GetTestMethods().iterkeys():
+      annotations_ = frozenset(self.GetTestAnnotations(test_method).iterkeys())
+      if (annotations_.isdisjoint(self._ANNOTATIONS) and
+          not self.IsHostDrivenTest(test_method)):
+        tests_missing_annotations.append(test_method)
+    return sorted(tests_missing_annotations)
+
+  def _IsTestValidForSdkRange(self, test_name, attached_min_sdk_level):
+    required_min_sdk_level = int(
+        self.GetTestAnnotations(test_name).get('MinAndroidSdkLevel', 0))
+    return (required_min_sdk_level is None or
+            attached_min_sdk_level >= required_min_sdk_level)
+
+  def GetAllMatchingTests(self, annotation_filter_list,
+                          exclude_annotation_list, test_filter):
+    """Get a list of tests matching any of the annotations and the filter.
+
+    Args:
+      annotation_filter_list: List of test annotations. A test must have at
+        least one of these annotations. A test without any annotations is
+        considered to be SmallTest.
+      exclude_annotation_list: List of test annotations. A test must not have
+        any of these annotations.
+      test_filter: Filter used for partial matching on the test method names.
+
+    Returns:
+      List of all matching tests.
+    """
+    if annotation_filter_list:
+      available_tests = self.GetAnnotatedTests(annotation_filter_list)
+      # Include un-annotated tests in SmallTest.
+      if annotation_filter_list.count(self._DEFAULT_ANNOTATION) > 0:
+        for test in self._GetTestsMissingAnnotation():
+          logging.warning(
+              '%s has no annotations. Assuming "%s".', test,
+              self._DEFAULT_ANNOTATION)
+          available_tests.append(test)
+    else:
+      available_tests = [m for m in self.GetTestMethods()
+                         if not self.IsHostDrivenTest(m)]
+
+    if exclude_annotation_list:
+      excluded_tests = self.GetAnnotatedTests(exclude_annotation_list)
+      available_tests = list(set(available_tests) - set(excluded_tests))
+
+    tests = []
+    if test_filter:
+      # |available_tests| are in adb instrument format: package.path.class#test.
+
+      # Maps a 'class.test' name to each 'package.path.class#test' name.
+      sanitized_test_names = dict([
+          (t.split('.')[-1].replace('#', '.'), t) for t in available_tests])
+      # Filters 'class.test' names and populates |tests| with the corresponding
+      # 'package.path.class#test' names.
+      tests = [
+          sanitized_test_names[t] for t in unittest_util.FilterTestNames(
+              sanitized_test_names.keys(), test_filter.replace('#', '.'))]
+    else:
+      tests = available_tests
+
+    # Filter out any tests with SDK level requirements that don't match the set
+    # of attached devices.
+    devices = device_utils.DeviceUtils.parallel()
+    min_sdk_version = min(devices.build_version_sdk.pGet(None))
+    tests = [t for t in tests
+             if self._IsTestValidForSdkRange(t, min_sdk_version)]
+
+    return tests
+
+  @staticmethod
+  def IsHostDrivenTest(test):
+    return 'pythonDrivenTests' in test
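
The annotation filter grammar accepted by GetAllMatchingTests() and _AnnotationsMatchFilters() is either a bare annotation name or 'Key=value1,value2'. A small illustration, assuming the pylib tree is importable; the annotations dict below is hypothetical, and the protected static method is called directly only for demonstration:

    from pylib.instrumentation import test_jar

    annotations = {'MediumTest': None, 'Feature': 'Sync'}  # hypothetical

    # A bare name matches any test carrying that annotation.
    print(test_jar.TestJar._AnnotationsMatchFilters(
        ['MediumTest'], annotations))              # True
    # 'Key=v1,v2' matches when the annotation's value is one of the values.
    print(test_jar.TestJar._AnnotationsMatchFilters(
        ['Feature=Sync,Omnibox'], annotations))    # True
    print(test_jar.TestJar._AnnotationsMatchFilters(
        ['SmallTest'], annotations))               # False
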
diff --git a/build/android/pylib/instrumentation/test_options.py b/build/android/pylib/instrumentation/test_options.py
new file mode 100644
index 0000000..e7b7a9f
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_options.py
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the InstrumentationOptions named tuple."""
+
+import collections
+
+InstrumentationOptions = collections.namedtuple('InstrumentationOptions', [
+    'tool',
+    'annotations',
+    'exclude_annotations',
+    'test_filter',
+    'test_data',
+    'save_perf_json',
+    'screenshot_failures',
+    'wait_for_debugger',
+    'coverage_dir',
+    'test_apk',
+    'test_apk_path',
+    'test_apk_jar_path',
+    'test_runner',
+    'test_support_apk_path',
+    'device_flags',
+    'isolate_file_path',
+    'set_asserts',
+    'delete_stale_data'])
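
For reference, a hypothetical instantiation of the tuple; every value below is a placeholder chosen for illustration (the suite name and isolate path echo the examples used elsewhere in these files), not a default defined by this module:

    from pylib.instrumentation import test_options

    options = test_options.InstrumentationOptions(
        tool='',
        annotations=['SmallTest'],
        exclude_annotations=['FlakyTest'],
        test_filter=None,
        test_data=[],
        save_perf_json=False,
        screenshot_failures=False,
        wait_for_debugger=False,
        coverage_dir=None,
        test_apk='ContentShellTest',
        test_apk_path='out/Debug/apks/ContentShellTest.apk',
        test_apk_jar_path='out/Debug/test.lib.java/ContentShellTest.jar',
        test_runner=None,
        test_support_apk_path=None,
        device_flags=None,
        isolate_file_path='content/content_shell_test_apk.isolate',
        set_asserts=True,
        delete_stale_data=False)
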
diff --git a/build/android/pylib/instrumentation/test_package.py b/build/android/pylib/instrumentation/test_package.py
new file mode 100644
index 0000000..5be061d
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_package.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class representing instrumentation test apk and jar."""
+
+import os
+
+from pylib.instrumentation import test_jar
+from pylib.utils import apk_helper
+
+
+class TestPackage(test_jar.TestJar):
+  def __init__(self, apk_path, jar_path, test_support_apk_path):
+    test_jar.TestJar.__init__(self, jar_path)
+
+    if not os.path.exists(apk_path):
+      raise Exception('%s not found, please build it' % apk_path)
+    if test_support_apk_path and not os.path.exists(test_support_apk_path):
+      raise Exception('%s not found, please build it' % test_support_apk_path)
+    self._apk_path = apk_path
+    self._apk_name = os.path.splitext(os.path.basename(apk_path))[0]
+    self._package_name = apk_helper.GetPackageName(self._apk_path)
+    self._test_support_apk_path = test_support_apk_path
+
+  def GetApkPath(self):
+    """Returns the absolute path to the APK."""
+    return self._apk_path
+
+  def GetApkName(self):
+    """Returns the name of the apk without the suffix."""
+    return self._apk_name
+
+  def GetPackageName(self):
+    """Returns the package name of this APK."""
+    return self._package_name
+
+  # Override.
+  def Install(self, device):
+    device.Install(self.GetApkPath())
+    if (self._test_support_apk_path and
+        os.path.exists(self._test_support_apk_path)):
+      device.Install(self._test_support_apk_path)
+
diff --git a/build/android/pylib/instrumentation/test_result.py b/build/android/pylib/instrumentation/test_result.py
new file mode 100644
index 0000000..24e80a8
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_result.py
@@ -0,0 +1,30 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import base_test_result
+
+
+class InstrumentationTestResult(base_test_result.BaseTestResult):
+  """Result information for a single instrumentation test."""
+
+  def __init__(self, full_name, test_type, start_date, dur, log=''):
+    """Construct an InstrumentationTestResult object.
+
+    Args:
+      full_name: Full name of the test.
+      test_type: Type of the test result as defined in ResultType.
+      start_date: Time in milliseconds when the test began running.
+      dur: Duration of the test run in milliseconds.
+      log: A string listing any errors.
+    """
+    super(InstrumentationTestResult, self).__init__(
+        full_name, test_type, dur, log)
+    name_pieces = full_name.rsplit('#')
+    if len(name_pieces) > 1:
+      self._test_name = name_pieces[1]
+      self._class_name = name_pieces[0]
+    else:
+      self._class_name = full_name
+      self._test_name = full_name
+    self._start_date = start_date
diff --git a/build/android/pylib/instrumentation/test_runner.py b/build/android/pylib/instrumentation/test_runner.py
new file mode 100644
index 0000000..0f2e53f
--- /dev/null
+++ b/build/android/pylib/instrumentation/test_runner.py
@@ -0,0 +1,374 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class for running instrumentation tests on a single device."""
+
+import logging
+import os
+import re
+import sys
+import time
+
+from pylib import constants
+from pylib import flag_changer
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.instrumentation import instrumentation_test_instance
+from pylib.instrumentation import json_perf_parser
+from pylib.instrumentation import test_result
+from pylib.local.device import local_device_instrumentation_test_run
+
+sys.path.append(os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import perf_tests_results_helper # pylint: disable=F0401
+
+
+_PERF_TEST_ANNOTATION = 'PerfTest'
+
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  _DEVICE_COVERAGE_DIR = 'chrome/test/coverage'
+  _HOSTMACHINE_PERF_OUTPUT_FILE = '/tmp/chrome-profile'
+  _DEVICE_PERF_OUTPUT_SEARCH_PREFIX = (constants.DEVICE_PERF_OUTPUT_DIR +
+                                       '/chrome-profile*')
+
+  def __init__(self, test_options, device, shard_index, test_pkg,
+               additional_flags=None):
+    """Create a new TestRunner.
+
+    Args:
+      test_options: An InstrumentationOptions object.
+      device: Attached android device.
+      shard_index: Shard index.
+      test_pkg: A TestPackage object.
+      additional_flags: A list of additional flags to add to the command line.
+    """
+    super(TestRunner, self).__init__(device, test_options.tool)
+    self._lighttp_port = constants.LIGHTTPD_RANDOM_PORT_FIRST + shard_index
+    self._logcat_monitor = None
+
+    self.coverage_device_file = None
+    self.coverage_dir = test_options.coverage_dir
+    self.coverage_host_file = None
+    self.options = test_options
+    self.test_pkg = test_pkg
+    # Use the correct command line file for the package under test.
+    cmdline_file = [a.cmdline_file for a in constants.PACKAGE_INFO.itervalues()
+                    if a.test_package == self.test_pkg.GetPackageName()]
+    assert len(cmdline_file) < 2, 'Multiple packages have the same test package'
+    if len(cmdline_file) and cmdline_file[0]:
+      self.flags = flag_changer.FlagChanger(self.device, cmdline_file[0])
+      if additional_flags:
+        self.flags.AddFlags(additional_flags)
+    else:
+      self.flags = None
+
+  #override
+  def InstallTestPackage(self):
+    self.test_pkg.Install(self.device)
+
+  def _GetInstrumentationArgs(self):
+    ret = {}
+    if self.options.wait_for_debugger:
+      ret['debug'] = 'true'
+    if self.coverage_dir:
+      ret['coverage'] = 'true'
+      ret['coverageFile'] = self.coverage_device_file
+
+    return ret
+
+  def _TakeScreenshot(self, test):
+    """Takes a screenshot from the device."""
+    screenshot_name = os.path.join(constants.SCREENSHOTS_DIR, '%s.png' % test)
+    logging.info('Taking screenshot named %s', screenshot_name)
+    self.device.TakeScreenshot(screenshot_name)
+
+  def SetUp(self):
+    """Sets up the test harness and device before all tests are run."""
+    super(TestRunner, self).SetUp()
+    if not self.device.HasRoot():
+      logging.warning('Unable to enable java asserts for %s, non rooted device',
+                      str(self.device))
+    else:
+      if self.device.SetJavaAsserts(self.options.set_asserts):
+        # TODO(jbudorick) How to best do shell restart after the
+        #                 android_commands refactor?
+        self.device.RunShellCommand('stop')
+        self.device.RunShellCommand('start')
+        self.device.WaitUntilFullyBooted()
+
+    # Use a different default port for the HTTP server on each shard because
+    # there may be a race condition when multiple processes try to launch
+    # lighttpd on the same port at the same time.
+    self.LaunchTestHttpServer(
+        os.path.join(constants.DIR_SOURCE_ROOT), self._lighttp_port)
+    if self.flags:
+      self.flags.AddFlags(['--disable-fre', '--enable-test-intents'])
+      if self.options.device_flags:
+        with open(self.options.device_flags) as device_flags_file:
+          stripped_flags = (l.strip() for l in device_flags_file)
+          self.flags.AddFlags([flag for flag in stripped_flags if flag])
+
+  def TearDown(self):
+    """Cleans up the test harness and saves outstanding data from test run."""
+    if self.flags:
+      self.flags.Restore()
+    super(TestRunner, self).TearDown()
+
+  def TestSetup(self, test):
+    """Sets up the test harness for running a particular test.
+
+    Args:
+      test: The name of the test that will be run.
+    """
+    self.SetupPerfMonitoringIfNeeded(test)
+    self._SetupIndividualTestTimeoutScale(test)
+    self.tool.SetupEnvironment()
+
+    if self.flags and self._IsFreTest(test):
+      self.flags.RemoveFlags(['--disable-fre'])
+
+    # Make sure the forwarder is still running.
+    self._RestartHttpServerForwarderIfNecessary()
+
+    if self.coverage_dir:
+      coverage_basename = '%s.ec' % test
+      self.coverage_device_file = '%s/%s/%s' % (
+          self.device.GetExternalStoragePath(),
+          TestRunner._DEVICE_COVERAGE_DIR, coverage_basename)
+      self.coverage_host_file = os.path.join(
+          self.coverage_dir, coverage_basename)
+
+  def _IsFreTest(self, test):
+    """Determines whether a test is a first run experience test.
+
+    Args:
+      test: The name of the test to be checked.
+
+    Returns:
+      Whether the feature being tested is FirstRunExperience.
+    """
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    return 'FirstRunExperience' == annotations.get('Feature', None)
+
+  def _IsPerfTest(self, test):
+    """Determines whether a test is a performance test.
+
+    Args:
+      test: The name of the test to be checked.
+
+    Returns:
+      Whether the test is annotated as a performance test.
+    """
+    return _PERF_TEST_ANNOTATION in self.test_pkg.GetTestAnnotations(test)
+
+  def SetupPerfMonitoringIfNeeded(self, test):
+    """Sets up performance monitoring if the specified test requires it.
+
+    Args:
+      test: The name of the test to be run.
+    """
+    if not self._IsPerfTest(test):
+      return
+    self.device.RunShellCommand(
+        ['rm', TestRunner._DEVICE_PERF_OUTPUT_SEARCH_PREFIX])
+    self._logcat_monitor = self.device.GetLogcatMonitor()
+    self._logcat_monitor.Start()
+
+  def TestTeardown(self, test, result):
+    """Cleans up the test harness after running a particular test.
+
+    Depending on the options of this TestRunner this might handle performance
+    tracking.  This method will only be called if the test passed.
+
+    Args:
+      test: The name of the test that was just run.
+      result: result for this test.
+    """
+
+    self.tool.CleanUpEnvironment()
+
+    # The logic below relies on the test passing.
+    if not result or not result.DidRunPass():
+      return
+
+    self.TearDownPerfMonitoring(test)
+
+    if self.flags and self._IsFreTest(test):
+      self.flags.AddFlags(['--disable-fre'])
+
+    if self.coverage_dir:
+      self.device.PullFile(
+          self.coverage_device_file, self.coverage_host_file)
+      self.device.RunShellCommand(
+          'rm -f %s' % self.coverage_device_file)
+
+  def TearDownPerfMonitoring(self, test):
+    """Cleans up performance monitoring if the specified test required it.
+
+    Args:
+      test: The name of the test that was just run.
+    Raises:
+      Exception: if there's anything wrong with the perf data.
+    """
+    if not self._IsPerfTest(test):
+      return
+    raw_test_name = test.split('#')[1]
+
+    # Wait and grab annotation data so we can figure out which traces to parse
+    regex = self._logcat_monitor.WaitFor(
+        re.compile(r'\*\*PERFANNOTATION\(' + raw_test_name + r'\)\:(.*)'))
+
+    # If the test is set to run only on a specific device type (i.e. only
+    # tablet or only phone) and it is being run on the wrong device type, the
+    # test just quits and does not do anything.  The Java test harness will
+    # still print the appropriate annotation for us, but will add --NORUN--
+    # so we know to ignore the results.
+    # The --NORUN-- tag is managed by ChromeTabbedActivityTestBase.java.
+    if regex.group(1) != '--NORUN--':
+
+      # Obtain the relevant perf data.  The data is dumped to a
+      # JSON formatted file.
+      json_string = self.device.ReadFile(
+          '/data/data/com.google.android.apps.chrome/files/PerfTestData.txt',
+          as_root=True)
+
+      if not json_string:
+        raise Exception('Perf file is empty')
+
+      if self.options.save_perf_json:
+        json_local_file = '/tmp/chromium-android-perf-json-' + raw_test_name
+        with open(json_local_file, 'w') as f:
+          f.write(json_string)
+        logging.info('Saving Perf UI JSON from test %s to %s',
+                     test, json_local_file)
+
+      raw_perf_data = regex.group(1).split(';')
+
+      for raw_perf_set in raw_perf_data:
+        if raw_perf_set:
+          perf_set = raw_perf_set.split(',')
+          if len(perf_set) != 3:
+            raise Exception('Unexpected number of tokens in perf annotation '
+                            'string: ' + raw_perf_set)
+
+          # Process the performance data
+          result = json_perf_parser.GetAverageRunInfoFromJSONString(json_string,
+                                                                    perf_set[0])
+          perf_tests_results_helper.PrintPerfResult(perf_set[1], perf_set[2],
+                                                    [result['average']],
+                                                    result['units'])
+
+  def _SetupIndividualTestTimeoutScale(self, test):
+    timeout_scale = self._GetIndividualTestTimeoutScale(test)
+    valgrind_tools.SetChromeTimeoutScale(self.device, timeout_scale)
+
+  def _GetIndividualTestTimeoutScale(self, test):
+    """Returns the timeout scale for the given |test|."""
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    timeout_scale = 1
+    if 'TimeoutScale' in annotations:
+      try:
+        timeout_scale = int(annotations['TimeoutScale'])
+      except ValueError:
+        logging.warning('Non-integer value of TimeoutScale ignored. (%s)'
+                        % annotations['TimeoutScale'])
+    if self.options.wait_for_debugger:
+      timeout_scale *= 100
+    return timeout_scale
+
+  def _GetIndividualTestTimeoutSecs(self, test):
+    """Returns the timeout in seconds for the given |test|."""
+    annotations = self.test_pkg.GetTestAnnotations(test)
+    if 'Manual' in annotations:
+      return 10 * 60 * 60
+    if 'IntegrationTest' in annotations:
+      return 30 * 60
+    if 'External' in annotations:
+      return 10 * 60
+    if 'EnormousTest' in annotations:
+      return 10 * 60
+    if 'LargeTest' in annotations or _PERF_TEST_ANNOTATION in annotations:
+      return 5 * 60
+    if 'MediumTest' in annotations:
+      return 3 * 60
+    if 'SmallTest' in annotations:
+      return 1 * 60
+
+    logging.warning("Test size not found in annotations for test '%s', using "
+                    "1 minute for timeout.", test)
+    return 1 * 60
+
+  def _RunTest(self, test, timeout):
+    """Runs a single instrumentation test.
+
+    Args:
+      test: Test class/method.
+      timeout: Timeout time in seconds.
+
+    Returns:
+      The raw output of am instrument as a list of lines.
+    """
+    extras = self._GetInstrumentationArgs()
+    extras['class'] = test
+    return self.device.StartInstrumentation(
+        '%s/%s' % (self.test_pkg.GetPackageName(), self.options.test_runner),
+        raw=True, extras=extras, timeout=timeout, retries=3)
+
+  def _GenerateTestResult(self, test, instr_result_code, instr_result_bundle,
+                          statuses, start_ms, duration_ms):
+    results = instrumentation_test_instance.GenerateTestResults(
+        instr_result_code, instr_result_bundle, statuses, start_ms, duration_ms)
+    for r in results:
+      if r.GetName() == test:
+        return r
+    logging.error('Could not find result for test: %s', test)
+    return test_result.InstrumentationTestResult(
+        test, base_test_result.ResultType.UNKNOWN, start_ms, duration_ms)
+
+  #override
+  def RunTest(self, test):
+    results = base_test_result.TestRunResults()
+    timeout = (self._GetIndividualTestTimeoutSecs(test) *
+               self._GetIndividualTestTimeoutScale(test) *
+               self.tool.GetTimeoutScale())
+
+    start_ms = 0
+    duration_ms = 0
+    try:
+      self.TestSetup(test)
+
+      try:
+        self.device.GoHome()
+      except device_errors.CommandTimeoutError:
+        logging.exception('Failed to focus the launcher.')
+
+      time_ms = lambda: int(time.time() * 1000)
+      start_ms = time_ms()
+      raw_output = self._RunTest(test, timeout)
+      duration_ms = time_ms() - start_ms
+
+      # Parse the test output
+      result_code, result_bundle, statuses = (
+          instrumentation_test_instance.ParseAmInstrumentRawOutput(raw_output))
+      result = self._GenerateTestResult(
+          test, result_code, result_bundle, statuses, start_ms, duration_ms)
+      if local_device_instrumentation_test_run.DidPackageCrashOnDevice(
+          self.test_pkg.GetPackageName(), self.device):
+        result.SetType(base_test_result.ResultType.CRASH)
+      results.AddResult(result)
+    except device_errors.CommandTimeoutError as e:
+      results.AddResult(test_result.InstrumentationTestResult(
+          test, base_test_result.ResultType.TIMEOUT, start_ms, duration_ms,
+          log=str(e) or 'No information'))
+    except device_errors.DeviceUnreachableError as e:
+      results.AddResult(test_result.InstrumentationTestResult(
+          test, base_test_result.ResultType.CRASH, start_ms, duration_ms,
+          log=str(e) or 'No information'))
+    self.TestTeardown(test, results)
+    return (results, None if results.DidRunPass() else test)
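For reference, a minimal standalone sketch (not part of the patch) of how the perf annotation payload handled above is decomposed: entries are ';'-separated, and each entry must carry exactly three ','-separated tokens, the first naming the run in the dumped JSON data and the other two labelling the printed result. The sample string is fabricated.

    raw_annotation = 'first_run,startup_time,ms;cold_start,startup_time,ms'

    for raw_perf_set in raw_annotation.split(';'):
      if not raw_perf_set:
        continue
      perf_set = raw_perf_set.split(',')
      if len(perf_set) != 3:
        raise Exception('Unexpected number of tokens: ' + raw_perf_set)
      print(tuple(perf_set))
    # ('first_run', 'startup_time', 'ms')
    # ('cold_start', 'startup_time', 'ms')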
diff --git a/build/android/pylib/junit/__init__.py b/build/android/pylib/junit/__init__.py
new file mode 100644
index 0000000..5cac026
--- /dev/null
+++ b/build/android/pylib/junit/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/junit/setup.py b/build/android/pylib/junit/setup.py
new file mode 100644
index 0000000..94d4277
--- /dev/null
+++ b/build/android/pylib/junit/setup.py
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.junit import test_runner
+
+def Setup(args):
+  """Creates a test runner factory for junit tests.
+
+  Args:
+    args: an argparse.Namespace object.
+  Return:
+    A (runner_factory, tests) tuple.
+  """
+
+  def TestRunnerFactory(_unused_device, _unused_shard_index):
+    return test_runner.JavaTestRunner(args)
+
+  return (TestRunnerFactory, ['JUnit tests'])
+
diff --git a/build/android/pylib/junit/test_dispatcher.py b/build/android/pylib/junit/test_dispatcher.py
new file mode 100644
index 0000000..6e0d865
--- /dev/null
+++ b/build/android/pylib/junit/test_dispatcher.py
@@ -0,0 +1,28 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import constants
+from pylib.base import base_test_result
+
+def RunTests(tests, runner_factory):
+  """Runs a set of java tests on the host.
+
+  Return:
+    A tuple containing the results & the exit code.
+  """
+  def run(t):
+    runner = runner_factory(None, None)
+    runner.SetUp()
+    results_list, return_code = runner.RunTest(t)
+    runner.TearDown()
+    return (results_list, return_code == 0)
+
+  test_run_results = base_test_result.TestRunResults()
+  exit_code = 0
+  for t in tests:
+    results_list, passed = run(t)
+    test_run_results.AddResults(results_list)
+    if not passed:
+      exit_code = constants.ERROR_EXIT_CODE
+  return (test_run_results, exit_code)
\ No newline at end of file
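A self-contained sketch of the dispatch loop above, with a hypothetical StubRunner standing in for JavaTestRunner: every suite still runs after a failure, and the exit code reflects the worst outcome.

    ERROR_EXIT_CODE = 1  # stand-in for constants.ERROR_EXIT_CODE

    class StubRunner(object):
      """Hypothetical runner: pretends 'bad_suite' fails."""
      def SetUp(self):
        pass
      def TearDown(self):
        pass
      def RunTest(self, test):
        return ([test], 0 if test != 'bad_suite' else 1)

    def RunStubTests(tests, runner_factory):
      all_results, exit_code = [], 0
      for t in tests:
        runner = runner_factory(None, None)
        runner.SetUp()
        results_list, return_code = runner.RunTest(t)
        runner.TearDown()
        all_results.extend(results_list)
        if return_code != 0:
          exit_code = ERROR_EXIT_CODE
      return (all_results, exit_code)

    print(RunStubTests(['good_suite', 'bad_suite'], lambda d, s: StubRunner()))
    # (['good_suite', 'bad_suite'], 1)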
diff --git a/build/android/pylib/junit/test_runner.py b/build/android/pylib/junit/test_runner.py
new file mode 100644
index 0000000..a6d3bf9
--- /dev/null
+++ b/build/android/pylib/junit/test_runner.py
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import tempfile
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+class JavaTestRunner(object):
+  """Runs java tests on the host."""
+
+  def __init__(self, args):
+    self._package_filter = args.package_filter
+    self._runner_filter = args.runner_filter
+    self._sdk_version = args.sdk_version
+    self._test_filter = args.test_filter
+    self._test_suite = args.test_suite
+
+  def SetUp(self):
+    pass
+
+  def RunTest(self, _test):
+    """Runs junit tests from |self._test_suite|."""
+    with tempfile.NamedTemporaryFile() as json_file:
+      java_script = os.path.join(
+          constants.GetOutDirectory(), 'bin', self._test_suite)
+      command = [java_script,
+                 '-test-jars', self._test_suite + '.jar',
+                 '-json-results-file', json_file.name]
+      if self._test_filter:
+        command.extend(['-gtest-filter', self._test_filter])
+      if self._package_filter:
+        command.extend(['-package-filter', self._package_filter])
+      if self._runner_filter:
+        command.extend(['-runner-filter', self._runner_filter])
+      if self._sdk_version:
+        command.extend(['-sdk-version', self._sdk_version])
+      return_code = cmd_helper.RunCmd(command)
+      results_list = json_results.ParseResultsFromJson(
+          json.loads(json_file.read()))
+      return (results_list, return_code)
+
+  def TearDown(self):
+    pass
+
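For illustration, a standalone sketch of the command line RunTest assembles; the suite name, output directory, and filter are hypothetical, and the real code derives the script path from constants.GetOutDirectory().

    import os

    out_dir = 'out/Debug'               # illustrative output directory
    test_suite = 'content_junit_tests'  # hypothetical suite name
    test_filter = 'org.chromium.*'

    command = [os.path.join(out_dir, 'bin', test_suite),
               '-test-jars', test_suite + '.jar',
               '-json-results-file', '/tmp/results.json']
    if test_filter:
      command.extend(['-gtest-filter', test_filter])
    print(' '.join(command))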
diff --git a/build/android/pylib/linker/__init__.py b/build/android/pylib/linker/__init__.py
new file mode 100644
index 0000000..af99437
--- /dev/null
+++ b/build/android/pylib/linker/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/linker/setup.py b/build/android/pylib/linker/setup.py
new file mode 100644
index 0000000..5776f5a
--- /dev/null
+++ b/build/android/pylib/linker/setup.py
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Setup for linker tests."""
+
+import os
+import sys
+
+from pylib import constants
+from pylib.linker import test_case
+from pylib.linker import test_runner
+
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT, 'build', 'util', 'lib',
+                             'common'))
+import unittest_util # pylint: disable=F0401
+
+def Setup(args, _devices):
+  """Creates a list of test cases and a runner factory.
+
+  Args:
+    args: an argparse.Namespace object.
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_cases = [
+      test_case.LinkerLibraryAddressTest,
+      test_case.LinkerSharedRelroTest,
+      test_case.LinkerRandomizationTest]
+
+  low_memory_modes = [False, True]
+  all_tests = [t(is_low_memory=m) for t in test_cases for m in low_memory_modes]
+
+  if args.test_filter:
+    all_test_names = [test.qualified_name for test in all_tests]
+    filtered_test_names = unittest_util.FilterTestNames(all_test_names,
+                                                        args.test_filter)
+    all_tests = [t for t in all_tests \
+                 if t.qualified_name in filtered_test_names]
+
+  def TestRunnerFactory(device, _shard_index):
+    return test_runner.LinkerTestRunner(device, args.tool)
+
+  return (TestRunnerFactory, all_tests)
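A rough, self-contained approximation of the filtering step above; unittest_util.FilterTestNames matches names against a filter expression, which this sketch simplifies to a single positive glob via fnmatch.

    import fnmatch

    all_test_names = ['LinkerSharedRelroTest.ForLowMemoryDevice',
                      'LinkerSharedRelroTest.ForRegularDevice',
                      'LinkerRandomizationTest.ForRegularDevice']
    test_filter = 'LinkerSharedRelroTest.*'
    filtered = [n for n in all_test_names if fnmatch.fnmatch(n, test_filter)]
    print(filtered)  # both LinkerSharedRelroTest variants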
diff --git a/build/android/pylib/linker/test_case.py b/build/android/pylib/linker/test_case.py
new file mode 100644
index 0000000..c7b0f50
--- /dev/null
+++ b/build/android/pylib/linker/test_case.py
@@ -0,0 +1,496 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Base class for linker-specific test cases.
+
+   The custom dynamic linker can only be tested through a custom test case
+   for various technical reasons:
+
+     - It's an 'invisible feature', i.e. it doesn't expose a new API or
+       behaviour, all it does is save RAM when loading native libraries.
+
+     - Checking that it works correctly requires several things that do not
+       fit the existing GTest-based and instrumentation-based tests:
+
+         - Native test code needs to be run in both the browser and renderer
+           process at the same time just after loading native libraries, in
+           a completely asynchronous way.
+
+         - Each test case requires restarting a whole new application process
+           with a different command-line.
+
+         - Enabling test support in the Linker code requires building a special
+           APK with a flag to activate special test-only support code in the
+           Linker code itself.
+
+       Host-driven tests have also been tried, but since they're really
+       sub-classes of instrumentation tests, they didn't work well either.
+
+   To build and run the linker tests, do the following:
+
+     ninja -C out/Debug chromium_linker_test_apk
+     build/android/test_runner.py linker
+
+"""
+# pylint: disable=R0201
+
+import logging
+import os
+import re
+import time
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.device import device_errors
+from pylib.device import intent
+
+
+ResultType = base_test_result.ResultType
+
+_PACKAGE_NAME = 'org.chromium.chromium_linker_test_apk'
+_ACTIVITY_NAME = '.ChromiumLinkerTestActivity'
+_COMMAND_LINE_FILE = '/data/local/tmp/chromium-linker-test-command-line'
+
+# Path to the Linker.java source file.
+_LINKER_JAVA_SOURCE_PATH = (
+    'base/android/java/src/org/chromium/base/library_loader/Linker.java')
+
+# A regular expression used to extract the browser shared RELRO configuration
+# from the Java source file above.
+_RE_LINKER_BROWSER_CONFIG = re.compile(
+    r'.*BROWSER_SHARED_RELRO_CONFIG\s+=\s+' +
+        r'BROWSER_SHARED_RELRO_CONFIG_(\S+)\s*;.*',
+    re.MULTILINE | re.DOTALL)
+
+# Logcat filters used during each test. Only the 'chromium' one is really
+# needed, but the logs are added to the TestResult in case of error, and
+# it is handy to have the 'chromium_android_linker' ones as well when
+# troubleshooting.
+_LOGCAT_FILTERS = ['*:s', 'chromium:v', 'chromium_android_linker:v']
+#_LOGCAT_FILTERS = ['*:v']  ## DEBUG
+
+# Regular expression used to match status lines in logcat.
+_RE_BROWSER_STATUS_LINE = re.compile(r' BROWSER_LINKER_TEST: (FAIL|SUCCESS)$')
+_RE_RENDERER_STATUS_LINE = re.compile(r' RENDERER_LINKER_TEST: (FAIL|SUCCESS)$')
+
+# Regular expression used to match library load addresses in logcat.
+_RE_LIBRARY_ADDRESS = re.compile(
+    r'(BROWSER|RENDERER)_LIBRARY_ADDRESS: (\S+) ([0-9A-Fa-f]+)')
+
+
+def _GetBrowserSharedRelroConfig():
+  """Returns a string corresponding to the Linker's configuration of shared
+     RELRO sections in the browser process. This parses the Java linker source
+     file to get the appropriate information.
+  Return:
+      None in case of error (e.g. could not locate the source file).
+     'NEVER' if the browser process shall never use shared RELROs.
+     'LOW_RAM_ONLY' if it uses them only on low-end devices.
+     'ALWAYS' if it always uses a shared RELRO.
+  """
+  source_path = \
+      os.path.join(constants.DIR_SOURCE_ROOT, _LINKER_JAVA_SOURCE_PATH)
+  if not os.path.exists(source_path):
+    logging.error('Could not find linker source file: ' + source_path)
+    return None
+
+  with open(source_path) as f:
+    configs = _RE_LINKER_BROWSER_CONFIG.findall(f.read())
+    if not configs:
+      logging.error(
+          'Can\'t find browser shared RELRO configuration value in ' + \
+          source_path)
+      return None
+
+    if configs[0] not in ['NEVER', 'LOW_RAM_ONLY', 'ALWAYS']:
+      logging.error('Unexpected browser config value: ' + configs[0])
+      return None
+
+    logging.info('Found linker browser shared RELRO config: ' + configs[0])
+    return configs[0]
+
+
+def _StartActivityAndWaitForLinkerTestStatus(device, timeout):
+  """Force-start an activity and wait up to |timeout| seconds until the full
+     linker test status lines appear in the logcat, recorded through |device|.
+  Args:
+    device: A DeviceUtils instance.
+    timeout: Timeout in seconds
+  Returns:
+    A (status, logs) tuple, where status is a ResultType constant, and logs
+    is the final logcat output as a string.
+  """
+
+  # 1. Start recording logcat with appropriate filters.
+  with device.GetLogcatMonitor(filter_specs=_LOGCAT_FILTERS) as logmon:
+
+    # 2. Force-start activity.
+    device.StartActivity(
+        intent.Intent(package=_PACKAGE_NAME, activity=_ACTIVITY_NAME),
+        force_stop=True)
+
+    # 3. Wait up to |timeout| seconds until the test status is in the logcat.
+    result = ResultType.PASS
+    try:
+      browser_match = logmon.WaitFor(_RE_BROWSER_STATUS_LINE, timeout=timeout)
+      logging.debug('Found browser match: %s', browser_match.group(0))
+      renderer_match = logmon.WaitFor(_RE_RENDERER_STATUS_LINE,
+                                      timeout=timeout)
+      logging.debug('Found renderer match: %s', renderer_match.group(0))
+      if (browser_match.group(1) != 'SUCCESS'
+          or renderer_match.group(1) != 'SUCCESS'):
+        result = ResultType.FAIL
+    except device_errors.CommandTimeoutError:
+      result = ResultType.TIMEOUT
+
+    return result, '\n'.join(device.adb.Logcat(dump=True))
+
+
+class LibraryLoadMap(dict):
+  """A helper class to pretty-print a map of library names to load addresses."""
+  def __str__(self):
+    items = ['\'%s\': 0x%x' % (name, address) for \
+        (name, address) in self.iteritems()]
+    return '{%s}' % (', '.join(items))
+
+  def __repr__(self):
+    return 'LibraryLoadMap(%s)' % self.__str__()
+
+
+class AddressList(list):
+  """A helper class to pretty-print a list of load addresses."""
+  def __str__(self):
+    items = ['0x%x' % address for address in self]
+    return '[%s]' % (', '.join(items))
+
+  def __repr__(self):
+    return 'AddressList(%s)' % self.__str__()
+
+
+def _ExtractLibraryLoadAddressesFromLogcat(logs):
+  """Extract the names and addresses of shared libraries loaded in the
+     browser and renderer processes.
+  Args:
+    logs: A string containing logcat output.
+  Returns:
+    A tuple (browser_libs, renderer_libs), where each item is a map of
+    library names (strings) to library load addresses (ints), for the
+    browser and renderer processes, respectively.
+  """
+  browser_libs = LibraryLoadMap()
+  renderer_libs = LibraryLoadMap()
+  for m in _RE_LIBRARY_ADDRESS.finditer(logs):
+    process_type, lib_name, lib_address = m.groups()
+    lib_address = int(lib_address, 16)
+    if process_type == 'BROWSER':
+      browser_libs[lib_name] = lib_address
+    elif process_type == 'RENDERER':
+      renderer_libs[lib_name] = lib_address
+    else:
+      assert False, 'Invalid process type'
+
+  return browser_libs, renderer_libs
+
+
+def _CheckLoadAddressRandomization(lib_map_list, process_type):
+  """Check that a map of library load addresses is random enough.
+  Args:
+    lib_map_list: a list of dictionaries that map library names (string)
+      to load addresses (int). Each item in the list corresponds to a
+      different run / process start.
+    process_type: a string describing the process type.
+  Returns:
+    (status, logs) tuple, where <status> is True iff the load addresses are
+    randomized, False otherwise, and <logs> is a string containing an error
+    message detailing the libraries that are not randomized properly.
+  """
+  # Collect, for each library, its list of load addresses.
+  lib_addr_map = {}
+  for lib_map in lib_map_list:
+    for lib_name, lib_address in lib_map.iteritems():
+      if lib_name not in lib_addr_map:
+        lib_addr_map[lib_name] = AddressList()
+      lib_addr_map[lib_name].append(lib_address)
+
+  logging.info('%s library load map: %s', process_type, lib_addr_map)
+
+  # For each library, check the randomness of its load addresses.
+  bad_libs = {}
+  for lib_name, lib_address_list in lib_addr_map.iteritems():
+    # If all addresses are different, skip to next item.
+    lib_address_set = set(lib_address_list)
+    # Consider that if there is more than one pair of identical addresses in
+    # the list, then randomization is broken.
+    if len(lib_address_set) < len(lib_address_list) - 1:
+      bad_libs[lib_name] = lib_address_list
+
+
+  if bad_libs:
+    return False, '%s libraries failed randomization: %s' % \
+        (process_type, bad_libs)
+
+  return True, '%s libraries properly randomized: %s' % \
+      (process_type, lib_addr_map)
+
+
+class LinkerTestCaseBase(object):
+  """Base class for linker test cases."""
+
+  def __init__(self, is_low_memory=False):
+    """Create a test case.
+    Args:
+      is_low_memory: True to simulate a low-memory device, False otherwise.
+    """
+    self.is_low_memory = is_low_memory
+    if is_low_memory:
+      test_suffix = 'ForLowMemoryDevice'
+    else:
+      test_suffix = 'ForRegularDevice'
+    class_name = self.__class__.__name__
+    self.qualified_name = '%s.%s' % (class_name, test_suffix)
+    self.tagged_name = self.qualified_name
+
+  def _RunTest(self, _device):
+    """Run the test, must be overriden.
+    Args:
+      _device: A DeviceUtils interface.
+    Returns:
+      A (status, log) tuple, where <status> is a ResultType constant, and <log>
+      is the logcat output captured during the test in case of error, or None
+      in case of success.
+    """
+    return ResultType.FAIL, 'Unimplemented _RunTest() method!'
+
+  def Run(self, device):
+    """Run the test on a given device.
+    Args:
+      device: Name of target device where to run the test.
+    Returns:
+      A base_test_result.TestRunResult() instance.
+    """
+    margin = 8
+    print '[ %-*s ] %s' % (margin, 'RUN', self.tagged_name)
+    logging.info('Running linker test: %s', self.tagged_name)
+
+    # Create command-line file on device.
+    command_line_flags = ''
+    if self.is_low_memory:
+      command_line_flags = '--low-memory-device'
+    device.WriteFile(_COMMAND_LINE_FILE, command_line_flags)
+
+    # Run the test.
+    status, logs = self._RunTest(device)
+
+    result_text = 'OK'
+    if status == ResultType.FAIL:
+      result_text = 'FAILED'
+    elif status == ResultType.TIMEOUT:
+      result_text = 'TIMEOUT'
+    print '[ %*s ] %s' % (margin, result_text, self.tagged_name)
+
+    results = base_test_result.TestRunResults()
+    results.AddResult(
+        base_test_result.BaseTestResult(
+            self.tagged_name,
+            status,
+            log=logs))
+
+    return results
+
+  def __str__(self):
+    return self.tagged_name
+
+  def __repr__(self):
+    return self.tagged_name
+
+
+class LinkerSharedRelroTest(LinkerTestCaseBase):
+  """A linker test case to check the status of shared RELRO sections.
+
+    The core of the checks performed here are pretty simple:
+
+      - Clear the logcat and start recording with an appropriate set of filters.
+      - Create the command-line appropriate for the test-case.
+      - Start the activity (always forcing a cold start).
+      - Every second, look at the current content of the filtered logcat lines
+        and look for instances of the following:
+
+            BROWSER_LINKER_TEST: <status>
+            RENDERER_LINKER_TEST: <status>
+
+        where <status> can be either FAIL or SUCCESS. These lines can appear
+        in any order in the logcat. Once both browser and renderer status are
+        found, stop the loop. Otherwise timeout after 30 seconds.
+
+        Note that there can be other lines beginning with BROWSER_LINKER_TEST:
+        and RENDERER_LINKER_TEST: that are not followed by a <status> code.
+
+      - The test case passes if the <status> for both the browser and renderer
+        process are SUCCESS. Otherwise it's a failure.
+  """
+  def _RunTest(self, device):
+    # Wait up to 30 seconds until the linker test status is in the logcat.
+    return _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
+
+
+class LinkerLibraryAddressTest(LinkerTestCaseBase):
+  """A test case that verifies library load addresses.
+
+     The point of this check is to ensure that the libraries are loaded
+     according to the following rules:
+
+     - For low-memory devices, they should always be loaded at the same address
+       in both browser and renderer processes, both below 0x4000_0000.
+
+     - For regular devices, the browser process should load libraries above
+       0x4000_0000, and renderer ones below it.
+  """
+  def _RunTest(self, device):
+    result, logs = _StartActivityAndWaitForLinkerTestStatus(device, timeout=30)
+
+    # Return immediately in case of timeout.
+    if result == ResultType.TIMEOUT:
+      return result, logs
+
+    # Collect the library load addresses in the browser and renderer processes.
+    browser_libs, renderer_libs = _ExtractLibraryLoadAddressesFromLogcat(logs)
+
+    logging.info('Browser libraries: %s', browser_libs)
+    logging.info('Renderer libraries: %s', renderer_libs)
+
+    # Check that the same libraries are loaded into both processes:
+    browser_set = set(browser_libs.keys())
+    renderer_set = set(renderer_libs.keys())
+    if browser_set != renderer_set:
+      logging.error('Library set mismatch: browser=%s renderer=%s',
+          browser_libs.keys(), renderer_libs.keys())
+      return ResultType.FAIL, logs
+
+    # And that they are not empty.
+    if not browser_set:
+      logging.error('No libraries loaded in any process!')
+      return ResultType.FAIL, logs
+
+    # Check that the renderer libraries are loaded at 'low' addresses, i.e.
+    # below 0x4000_0000, for every kind of device.
+    memory_boundary = 0x40000000
+    bad_libs = []
+    for lib_name, lib_address in renderer_libs.iteritems():
+      if lib_address >= memory_boundary:
+        bad_libs.append((lib_name, lib_address))
+
+    if bad_libs:
+      logging.error('Renderer libraries loaded at high addresses: %s', bad_libs)
+      return ResultType.FAIL, logs
+
+    browser_config = _GetBrowserSharedRelroConfig()
+    if not browser_config:
+      return ResultType.FAIL, 'Bad linker source configuration'
+
+    if browser_config == 'ALWAYS' or \
+        (browser_config == 'LOW_RAM_ONLY' and self.is_low_memory):
+      # The libraries must all be loaded at the same addresses. This also
+      # implicitly checks that the browser libraries are at low addresses.
+      addr_mismatches = []
+      for lib_name, lib_address in browser_libs.iteritems():
+        lib_address2 = renderer_libs[lib_name]
+        if lib_address != lib_address2:
+          addr_mismatches.append((lib_name, lib_address, lib_address2))
+
+      if addr_mismatches:
+        logging.error('Library load address mismatches: %s',
+            addr_mismatches)
+        return ResultType.FAIL, logs
+
+    # Otherwise, check that libraries are loaded at 'high-addresses'.
+    # Note that for low-memory devices, the previous checks ensure that they
+    # were loaded at low-addresses.
+    else:
+      bad_libs = []
+      for lib_name, lib_address in browser_libs.iteritems():
+        if lib_address < memory_boundary:
+          bad_libs.append((lib_name, lib_address))
+
+      if bad_libs:
+        logging.error('Browser libraries loaded at low addresses: %s', bad_libs)
+        return ResultType.FAIL, logs
+
+    # Everything's ok.
+    return ResultType.PASS, logs
+
+
+class LinkerRandomizationTest(LinkerTestCaseBase):
+  """A linker test case to check that library load address randomization works
+     properly between successive starts of the test program/activity.
+
+     This starts the activity several time (each time forcing a new process
+     creation) and compares the load addresses of the libraries in them to
+     detect that they have changed.
+
+     In theory, two successive runs could (very rarely) use the same load
+     address, so loop 5 times and compare the values there. It is assumed
+     that if there are more than one pair of identical addresses, then the
+     load addresses are not random enough for this test.
+  """
+  def _RunTest(self, device):
+    max_loops = 5
+    browser_lib_map_list = []
+    renderer_lib_map_list = []
+    logs_list = []
+    for _ in range(max_loops):
+      # Start the activity.
+      result, logs = _StartActivityAndWaitForLinkerTestStatus(
+          device, timeout=30)
+      if result == ResultType.TIMEOUT:
+        # Something bad happened. Return immediately.
+        return result, logs
+
+      # Collect library addresses.
+      browser_libs, renderer_libs = _ExtractLibraryLoadAddressesFromLogcat(logs)
+      browser_lib_map_list.append(browser_libs)
+      renderer_lib_map_list.append(renderer_libs)
+      logs_list.append(logs)
+
+    # Check randomization in the browser libraries.
+    logs = '\n'.join(logs_list)
+
+    browser_status, browser_logs = _CheckLoadAddressRandomization(
+        browser_lib_map_list, 'Browser')
+
+    renderer_status, renderer_logs = _CheckLoadAddressRandomization(
+        renderer_lib_map_list, 'Renderer')
+
+    browser_config = _GetBrowserSharedRelroConfig()
+    if not browser_config:
+      return ResultType.FAIL, 'Bad linker source configuration'
+
+    if not browser_status:
+      if browser_config == 'ALWAYS' or \
+          (browser_config == 'LOW_RAM_ONLY' and self.is_low_memory):
+        return ResultType.FAIL, browser_logs
+
+      # IMPORTANT NOTE: The system's ASLR implementation seems to be very poor
+      # when starting an activity process in a loop with "adb shell am start".
+      #
+      # When simulating a regular device, loading libraries in the browser
+      # process uses a simple mmap(NULL, ...) to let the kernel decide where to
+      # load the file (this is similar to what System.loadLibrary() does).
+      #
+      # Unfortunately, at least in the context of this test, doing so while
+      # restarting the activity with the activity manager very, very, often
+      # results in the system using the same load address for all 5 runs, or
+      # sometimes only 4 out of 5.
+      #
+      # This has been tested experimentally on both Android 4.1.2 and 4.3.
+      #
+      # Note that this behaviour doesn't seem to happen when starting an
+      # application 'normally', i.e. when using the application launcher to
+      # start the activity.
+      logging.info('Ignoring system\'s low randomization of browser libraries' +
+                   ' for regular devices')
+
+    if not renderer_status:
+      return ResultType.FAIL, renderer_logs
+
+    return ResultType.PASS, logs
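A standalone sketch of how _RE_LIBRARY_ADDRESS above pulls library load addresses out of logcat text; the logcat lines are fabricated for illustration.

    import re

    _RE_LIBRARY_ADDRESS = re.compile(
        r'(BROWSER|RENDERER)_LIBRARY_ADDRESS: (\S+) ([0-9A-Fa-f]+)')

    sample_logs = '\n'.join([
        'I/chromium: BROWSER_LIBRARY_ADDRESS: libchrome.so 75e00000',
        'I/chromium: RENDERER_LIBRARY_ADDRESS: libchrome.so 30d00000',
    ])

    for m in _RE_LIBRARY_ADDRESS.finditer(sample_logs):
      process_type, lib_name, lib_address = m.groups()
      print((process_type, lib_name, int(lib_address, 16)))  # address as an int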
diff --git a/build/android/pylib/linker/test_runner.py b/build/android/pylib/linker/test_runner.py
new file mode 100644
index 0000000..b6803e4
--- /dev/null
+++ b/build/android/pylib/linker/test_runner.py
@@ -0,0 +1,98 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs linker tests on a particular device."""
+
+import logging
+import os.path
+import sys
+import traceback
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.linker import test_case
+from pylib.utils import apk_helper
+
+
+# Name of the Android package to install for this to work.
+_PACKAGE_NAME = 'ChromiumLinkerTest'
+
+
+class LinkerExceptionTestResult(base_test_result.BaseTestResult):
+  """Test result corresponding to a python exception in a host-custom test."""
+
+  def __init__(self, test_name, exc_info):
+    """Constructs a LinkerExceptionTestResult object.
+
+    Args:
+      test_name: name of the test which raised an exception.
+      exc_info: exception info, ostensibly from sys.exc_info().
+    """
+    exc_type, exc_value, exc_traceback = exc_info
+    trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
+                                                    exc_traceback))
+    log_msg = 'Exception:\n' + trace_info
+
+    super(LinkerExceptionTestResult, self).__init__(
+        test_name,
+        base_test_result.ResultType.FAIL,
+        log="%s %s" % (exc_type, log_msg))
+
+
+class LinkerTestRunner(base_test_runner.BaseTestRunner):
+  """Orchestrates running a set of linker tests.
+
+  Any Python exceptions in the tests are caught and translated into a failed
+  result, rather than being re-raised on the main thread.
+  """
+
+  #override
+  def __init__(self, device, tool):
+    """Creates a new LinkerTestRunner.
+
+    Args:
+      device: Attached android device.
+      tool: Name of the Valgrind tool.
+    """
+    super(LinkerTestRunner, self).__init__(device, tool)
+
+  #override
+  def InstallTestPackage(self):
+    apk_path = os.path.join(
+        constants.GetOutDirectory(), 'apks', '%s.apk' % _PACKAGE_NAME)
+
+    if not os.path.exists(apk_path):
+      raise Exception('%s not found, please build it' % apk_path)
+
+    self.device.Install(apk_path)
+
+  #override
+  def RunTest(self, test):
+    """Sets up and runs a test case.
+
+    Args:
+      test: An object which is ostensibly a subclass of LinkerTestCaseBase.
+
+    Returns:
+      A TestRunResults object which contains the result produced by the test
+      and, in the case of a failure, the test that should be retried.
+    """
+
+    assert isinstance(test, test_case.LinkerTestCaseBase)
+
+    try:
+      results = test.Run(self.device)
+    except Exception:
+      logging.exception('Caught exception while trying to run test: ' +
+                        test.tagged_name)
+      exc_info = sys.exc_info()
+      results = base_test_result.TestRunResults()
+      results.AddResult(LinkerExceptionTestResult(
+          test.tagged_name, exc_info))
+
+    if not results.DidRunPass():
+      return results, test
+    else:
+      return results, None
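A small standalone sketch of the exception-to-log translation that LinkerExceptionTestResult performs, using a simulated failure:

    import sys
    import traceback

    try:
      raise ValueError('simulated linker test failure')
    except ValueError:
      exc_type, exc_value, exc_traceback = sys.exc_info()
      trace_info = ''.join(traceback.format_exception(exc_type, exc_value,
                                                      exc_traceback))
      log_msg = 'Exception:\n' + trace_info
      print('%s %s' % (exc_type, log_msg))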
diff --git a/build/android/pylib/local/__init__.py b/build/android/pylib/local/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/local/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/local/device/__init__.py b/build/android/pylib/local/device/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/local/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/local/device/local_device_environment.py b/build/android/pylib/local/device/local_device_environment.py
new file mode 100644
index 0000000..04f9ab7
--- /dev/null
+++ b/build/android/pylib/local/device/local_device_environment.py
@@ -0,0 +1,54 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib.base import environment
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import parallelizer
+
+
+class LocalDeviceEnvironment(environment.Environment):
+
+  def __init__(self, args, _error_func):
+    super(LocalDeviceEnvironment, self).__init__()
+    self._device_serial = args.test_device
+    self._devices = []
+    self._max_tries = 1 + args.num_retries
+    self._tool_name = args.tool
+
+  #override
+  def SetUp(self):
+    available_devices = device_utils.DeviceUtils.HealthyDevices()
+    if not available_devices:
+      raise device_errors.NoDevicesError
+    if self._device_serial:
+      self._devices = [d for d in available_devices
+                       if d.adb.GetDeviceSerial() == self._device_serial]
+      if not self._devices:
+        raise device_errors.DeviceUnreachableError(
+            'Could not find device %r' % self._device_serial)
+    else:
+      self._devices = available_devices
+
+  @property
+  def devices(self):
+    return self._devices
+
+  @property
+  def parallel_devices(self):
+    return parallelizer.SyncParallelizer(self._devices)
+
+  @property
+  def max_tries(self):
+    return self._max_tries
+
+  @property
+  def tool(self):
+    return self._tool_name
+
+  #override
+  def TearDown(self):
+    pass
+
diff --git a/build/android/pylib/local/device/local_device_instrumentation_test_run.py b/build/android/pylib/local/device/local_device_instrumentation_test_run.py
new file mode 100644
index 0000000..e388fce
--- /dev/null
+++ b/build/android/pylib/local/device/local_device_instrumentation_test_run.py
@@ -0,0 +1,207 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import re
+import time
+
+from pylib import flag_changer
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.constants import keyevent
+from pylib.device import device_errors
+from pylib.local.device import local_device_test_run
+
+
+TIMEOUT_ANNOTATIONS = [
+  ('Manual', 10 * 60 * 60),
+  ('IntegrationTest', 30 * 60),
+  ('External', 10 * 60),
+  ('EnormousTest', 10 * 60),
+  ('LargeTest', 5 * 60),
+  ('MediumTest', 3 * 60),
+  ('SmallTest', 1 * 60),
+]
+
+
+# TODO(jbudorick): Make this private once the instrumentation test_runner is
+# deprecated.
+def DidPackageCrashOnDevice(package_name, device):
+  # Dismiss any error dialogs. Limit the number in case we have an error
+  # loop or we are failing to dismiss.
+  try:
+    for _ in xrange(10):
+      package = _DismissCrashDialog(device)
+      if not package:
+        return False
+      # Assume test package convention of ".test" suffix
+      if package in package_name:
+        return True
+  except device_errors.CommandFailedError:
+    logging.exception('Error while attempting to dismiss crash dialog.')
+  return False
+
+
+_CURRENT_FOCUS_CRASH_RE = re.compile(
+    r'\s*mCurrentFocus.*Application (Error|Not Responding): (\S+)}')
+
+
+def _DismissCrashDialog(device):
+  # TODO(jbudorick): Try to grep the output on the device instead of using
+  # large_output if/when DeviceUtils exposes a public interface for piped
+  # shell command handling.
+  for l in device.RunShellCommand(
+      ['dumpsys', 'window', 'windows'], check_return=True, large_output=True):
+    m = re.match(_CURRENT_FOCUS_CRASH_RE, l)
+    if m:
+      device.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
+      device.SendKeyEvent(keyevent.KEYCODE_DPAD_RIGHT)
+      device.SendKeyEvent(keyevent.KEYCODE_ENTER)
+      return m.group(2)
+
+  return None
+
+
+class LocalDeviceInstrumentationTestRun(
+    local_device_test_run.LocalDeviceTestRun):
+  def __init__(self, env, test_instance):
+    super(LocalDeviceInstrumentationTestRun, self).__init__(env, test_instance)
+    self._flag_changers = {}
+
+  def TestPackage(self):
+    return None
+
+  def SetUp(self):
+    def substitute_external_storage(d, external_storage):
+      if not d:
+        return external_storage
+      elif isinstance(d, list):
+        return '/'.join(p if p else external_storage for p in d)
+      else:
+        return d
+
+    def individual_device_set_up(dev, host_device_tuples):
+      dev.Install(self._test_instance.apk_under_test)
+      dev.Install(self._test_instance.test_apk)
+
+      external_storage = dev.GetExternalStoragePath()
+      host_device_tuples = [
+          (h, substitute_external_storage(d, external_storage))
+          for h, d in host_device_tuples]
+      logging.info('instrumentation data deps:')
+      for h, d in host_device_tuples:
+        logging.info('%r -> %r', h, d)
+      dev.PushChangedFiles(host_device_tuples)
+      if self._test_instance.flags:
+        if not self._test_instance.package_info:
+          logging.error("Couldn't set flags: no package info")
+        elif not self._test_instance.package_info.cmdline_file:
+          logging.error("Couldn't set flags: no cmdline_file")
+        else:
+          self._flag_changers[str(dev)] = flag_changer.FlagChanger(
+              dev, self._test_instance.package_info.cmdline_file)
+          logging.debug('Attempting to set flags: %r',
+                        self._test_instance.flags)
+          self._flag_changers[str(dev)].AddFlags(self._test_instance.flags)
+
+    self._env.parallel_devices.pMap(
+        individual_device_set_up,
+        self._test_instance.GetDataDependencies())
+
+  def TearDown(self):
+    def individual_device_tear_down(dev):
+      if str(dev) in self._flag_changers:
+        self._flag_changers[str(dev)].Restore()
+
+    self._env.parallel_devices.pMap(individual_device_tear_down)
+
+  #override
+  def _CreateShards(self, tests):
+    return tests
+
+  #override
+  def _GetTests(self):
+    return self._test_instance.GetTests()
+
+  #override
+  def _GetTestName(self, test):
+    return '%s#%s' % (test['class'], test['method'])
+
+  #override
+  def _RunTest(self, device, test):
+    extras = self._test_instance.GetHttpServerEnvironmentVars()
+
+    if isinstance(test, list):
+      if not self._test_instance.driver_apk:
+        raise Exception('driver_apk does not exist. '
+                        'Please build it and try again.')
+
+      def name_and_timeout(t):
+        n = self._GetTestName(t)
+        i = self._GetTimeoutFromAnnotations(t['annotations'], n)
+        return (n, i)
+
+      test_names, timeouts = zip(*(name_and_timeout(t) for t in test))
+
+      test_name = ','.join(test_names)
+      target = '%s/%s' % (
+          self._test_instance.driver_package,
+          self._test_instance.driver_name)
+      extras.update(
+          self._test_instance.GetDriverEnvironmentVars(
+              test_list=test_names))
+      timeout = sum(timeouts)
+    else:
+      test_name = self._GetTestName(test)
+      target = '%s/%s' % (
+          self._test_instance.test_package, self._test_instance.test_runner)
+      extras['class'] = test_name
+      timeout = self._GetTimeoutFromAnnotations(test['annotations'], test_name)
+
+    logging.info('preparing to run %s: %s' % (test_name, test))
+
+    time_ms = lambda: int(time.time() * 1e3)
+    start_ms = time_ms()
+    output = device.StartInstrumentation(
+        target, raw=True, extras=extras, timeout=timeout, retries=0)
+    duration_ms = time_ms() - start_ms
+
+    # TODO(jbudorick): Make instrumentation tests output a JSON so this
+    # doesn't have to parse the output.
+    logging.debug('output from %s:', test_name)
+    for l in output:
+      logging.debug('  %s', l)
+
+    result_code, result_bundle, statuses = (
+        self._test_instance.ParseAmInstrumentRawOutput(output))
+    results = self._test_instance.GenerateTestResults(
+        result_code, result_bundle, statuses, start_ms, duration_ms)
+    if DidPackageCrashOnDevice(self._test_instance.test_package, device):
+      for r in results:
+        if r.GetType() == base_test_result.ResultType.UNKNOWN:
+          r.SetType(base_test_result.ResultType.CRASH)
+    return results
+
+  #override
+  def _ShouldShard(self):
+    return True
+
+  @staticmethod
+  def _GetTimeoutFromAnnotations(annotations, test_name):
+    for k, v in TIMEOUT_ANNOTATIONS:
+      if k in annotations:
+        timeout = v
+        break
+    else:
+      logging.warning('Using default 1 minute timeout for %s' % test_name)
+      timeout = 60
+
+    try:
+      scale = int(annotations.get('TimeoutScale', 1))
+    except ValueError as e:
+      logging.warning("Non-integer value of TimeoutScale ignored. (%s)", str(e))
+      scale = 1
+    timeout *= scale
+
+    return timeout
+
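A minimal standalone illustration of the for/else lookup in _GetTimeoutFromAnnotations (with a fabricated annotations dict): the else branch only runs when no size annotation matched, which is why the break above matters.

    TIMEOUT_ANNOTATIONS = [('LargeTest', 5 * 60), ('MediumTest', 3 * 60),
                           ('SmallTest', 1 * 60)]

    def TimeoutFor(annotations):
      for k, v in TIMEOUT_ANNOTATIONS:
        if k in annotations:
          timeout = v
          break
      else:
        timeout = 60  # no size annotation: fall back to one minute
      return timeout * int(annotations.get('TimeoutScale', 1))

    print(TimeoutFor({'LargeTest': None}))                       # 300
    print(TimeoutFor({'SmallTest': None, 'TimeoutScale': '3'}))  # 180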
diff --git a/build/android/pylib/local/device/local_device_test_run.py b/build/android/pylib/local/device/local_device_test_run.py
new file mode 100644
index 0000000..fa24eb1
--- /dev/null
+++ b/build/android/pylib/local/device/local_device_test_run.py
@@ -0,0 +1,99 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+
+from pylib import valgrind_tools
+from pylib.base import base_test_result
+from pylib.base import test_run
+from pylib.base import test_collection
+
+
+class LocalDeviceTestRun(test_run.TestRun):
+
+  def __init__(self, env, test_instance):
+    super(LocalDeviceTestRun, self).__init__(env, test_instance)
+    self._tools = {}
+
+  #override
+  def RunTests(self):
+    tests = self._GetTests()
+
+    def run_tests_on_device(dev, tests):
+      r = base_test_result.TestRunResults()
+      for test in tests:
+        result = self._RunTest(dev, test)
+        if isinstance(result, base_test_result.BaseTestResult):
+          r.AddResult(result)
+        elif isinstance(result, list):
+          r.AddResults(result)
+        else:
+          raise Exception('Unexpected result type: %s' % type(result).__name__)
+        if isinstance(tests, test_collection.TestCollection):
+          tests.test_completed()
+      return r
+
+    tries = 0
+    results = base_test_result.TestRunResults()
+    all_fail_results = {}
+    while tries < self._env.max_tries and tests:
+      logging.debug('try %d, will run %d tests:', tries, len(tests))
+      for t in tests:
+        logging.debug('  %s', t)
+
+      if self._ShouldShard():
+        tc = test_collection.TestCollection(self._CreateShards(tests))
+        try_results = self._env.parallel_devices.pMap(
+            run_tests_on_device, tc).pGet(None)
+      else:
+        try_results = self._env.parallel_devices.pMap(
+            run_tests_on_device, tests).pGet(None)
+      for try_result in try_results:
+        for result in try_result.GetAll():
+          if result.GetType() in (base_test_result.ResultType.PASS,
+                                  base_test_result.ResultType.SKIP):
+            results.AddResult(result)
+          else:
+            all_fail_results[result.GetName()] = result
+
+      results_names = set(r.GetName() for r in results.GetAll())
+      tests = [t for t in tests if self._GetTestName(t) not in results_names]
+      tries += 1
+
+    all_unknown_test_names = set(self._GetTestName(t) for t in tests)
+    all_failed_test_names = set(all_fail_results.iterkeys())
+
+    unknown_tests = all_unknown_test_names.difference(all_failed_test_names)
+    failed_tests = all_failed_test_names.intersection(all_unknown_test_names)
+
+    if unknown_tests:
+      results.AddResults(
+          base_test_result.BaseTestResult(
+              u, base_test_result.ResultType.UNKNOWN)
+          for u in unknown_tests)
+    if failed_tests:
+      results.AddResults(all_fail_results[f] for f in failed_tests)
+
+    return results
+
+  def GetTool(self, device):
+    if not str(device) in self._tools:
+      self._tools[str(device)] = valgrind_tools.CreateTool(
+          self._env.tool, device)
+    return self._tools[str(device)]
+
+  def _CreateShards(self, tests):
+    raise NotImplementedError
+
+  def _GetTestName(self, test):
+    return test
+
+  def _GetTests(self):
+    raise NotImplementedError
+
+  def _RunTest(self, device, test):
+    raise NotImplementedError
+
+  def _ShouldShard(self):
+    raise NotImplementedError
diff --git a/build/android/pylib/local/local_test_server_spawner.py b/build/android/pylib/local/local_test_server_spawner.py
new file mode 100644
index 0000000..77f552e
--- /dev/null
+++ b/build/android/pylib/local/local_test_server_spawner.py
@@ -0,0 +1,45 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import chrome_test_server_spawner
+from pylib import forwarder
+from pylib.base import test_server
+
+
+class LocalTestServerSpawner(test_server.TestServer):
+
+  def __init__(self, port, device, tool):
+    super(LocalTestServerSpawner, self).__init__()
+    self._device = device
+    self._spawning_server = chrome_test_server_spawner.SpawningServer(
+        port, device, tool)
+    self._tool = tool
+
+  @property
+  def server_address(self):
+    return self._spawning_server.server.server_address
+
+  @property
+  def port(self):
+    return self.server_address[1]
+
+  #override
+  def SetUp(self):
+    self._device.WriteFile(
+        '%s/net-test-server-ports' % self._device.GetExternalStoragePath(),
+        '%s:0' % str(self.port))
+    forwarder.Forwarder.Map(
+        [(self.port, self.port)], self._device, self._tool)
+    self._spawning_server.Start()
+
+  #override
+  def Reset(self):
+    self._spawning_server.CleanupState()
+
+  #override
+  def TearDown(self):
+    self.Reset()
+    self._spawning_server.Stop()
+    forwarder.Forwarder.UnmapDevicePort(self.port, self._device)
+
diff --git a/build/android/pylib/monkey/__init__.py b/build/android/pylib/monkey/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/monkey/__init__.py
diff --git a/build/android/pylib/monkey/setup.py b/build/android/pylib/monkey/setup.py
new file mode 100644
index 0000000..fe690a5
--- /dev/null
+++ b/build/android/pylib/monkey/setup.py
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for monkey tests."""
+
+from pylib.monkey import test_runner
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: A MonkeyOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  # Token to replicate across devices as the "test". The TestRunner does all of
+  # the work to run the test.
+  tests = ['MonkeyTest']
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/monkey/test_options.py b/build/android/pylib/monkey/test_options.py
new file mode 100644
index 0000000..54d3d08
--- /dev/null
+++ b/build/android/pylib/monkey/test_options.py
@@ -0,0 +1,16 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the MonkeyOptions named tuple."""
+
+import collections
+
+MonkeyOptions = collections.namedtuple('MonkeyOptions', [
+    'verbose_count',
+    'package',
+    'event_count',
+    'category',
+    'throttle',
+    'seed',
+    'extra_args'])
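A construction sketch with illustrative values, showing what each field feeds in the runner below (the 'package' field is a key into constants.PACKAGE_INFO, not an Android package name):

    import collections

    MonkeyOptions = collections.namedtuple('MonkeyOptions', [
        'verbose_count', 'package', 'event_count', 'category',
        'throttle', 'seed', 'extra_args'])

    options = MonkeyOptions(
        verbose_count=1,
        package='chrome',                # key into constants.PACKAGE_INFO
        event_count=10000,               # number of monkey events to inject
        category=['android.intent.category.LAUNCHER'],
        throttle=100,                    # ms delay between events
        seed=None,                       # None -> runner picks a random seed
        extra_args='')
    print(options.package)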
diff --git a/build/android/pylib/monkey/test_runner.py b/build/android/pylib/monkey/test_runner.py
new file mode 100644
index 0000000..3fd1797
--- /dev/null
+++ b/build/android/pylib/monkey/test_runner.py
@@ -0,0 +1,106 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs a monkey test on a single device."""
+
+import logging
+import random
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import device_errors
+from pylib.device import intent
+
+_CHROME_PACKAGE = constants.PACKAGE_INFO['chrome'].package
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  """A TestRunner instance runs a monkey test on a single device."""
+
+  def __init__(self, test_options, device, _):
+    super(TestRunner, self).__init__(device, None)
+    self._options = test_options
+    self._package = constants.PACKAGE_INFO[self._options.package].package
+    self._activity = constants.PACKAGE_INFO[self._options.package].activity
+
+  def _LaunchMonkeyTest(self):
+    """Runs monkey test for a given package.
+
+    Returns:
+      Output from the monkey command on the device.
+    """
+
+    timeout_ms = self._options.event_count * self._options.throttle * 1.5
+
+    cmd = ['monkey',
+           '-p %s' % self._package,
+           ' '.join(['-c %s' % c for c in self._options.category]),
+           '--throttle %d' % self._options.throttle,
+           '-s %d' % (self._options.seed or random.randint(1, 100)),
+           '-v ' * self._options.verbose_count,
+           '--monitor-native-crashes',
+           '--kill-process-after-error',
+           self._options.extra_args,
+           '%d' % self._options.event_count]
+    return self.device.RunShellCommand(' '.join(cmd), timeout=timeout_ms)
+
+  def RunTest(self, test_name):
+    """Run a Monkey test on the device.
+
+    Args:
+      test_name: String to use for logging the test result.
+
+    Returns:
+      A tuple of (TestRunResults, retry).
+    """
+    self.device.StartActivity(
+        intent.Intent(package=self._package, activity=self._activity,
+                      action='android.intent.action.MAIN'),
+        blocking=True, force_stop=True)
+
+    # Chrome crashes are not always caught by Monkey test runner.
+    # Verify Chrome has the same PID before and after the test.
+    before_pids = self.device.GetPids(self._package)
+
+    # Run the test.
+    output = ''
+    if before_pids:
+      output = '\n'.join(self._LaunchMonkeyTest())
+      after_pids = self.device.GetPids(self._package)
+
+    crashed = True
+    if not self._package in before_pids:
+      logging.error('Failed to start the process.')
+    elif not self._package in after_pids:
+      logging.error('Process %s has died.', before_pids[self._package])
+    elif before_pids[self._package] != after_pids[self._package]:
+      logging.error('Detected process restart %s -> %s',
+                    before_pids[self._package], after_pids[self._package])
+    else:
+      crashed = False
+
+    results = base_test_result.TestRunResults()
+    success_pattern = 'Events injected: %d' % self._options.event_count
+    if success_pattern in output and not crashed:
+      result = base_test_result.BaseTestResult(
+          test_name, base_test_result.ResultType.PASS, log=output)
+    else:
+      result = base_test_result.BaseTestResult(
+          test_name, base_test_result.ResultType.FAIL, log=output)
+      if 'chrome' in self._options.package:
+        logging.warning('Starting MinidumpUploadService...')
+        # TODO(jbudorick): Update this after upstreaming.
+        minidump_intent = intent.Intent(
+            action='%s.crash.ACTION_FIND_ALL' % _CHROME_PACKAGE,
+            package=self._package,
+            activity='%s.crash.MinidumpUploadService' % _CHROME_PACKAGE)
+        try:
+          self.device.RunShellCommand(
+              ['am', 'startservice'] + minidump_intent.am_args,
+              as_root=True, check_return=True)
+        except device_errors.CommandFailedError:
+          logging.exception('Failed to start MinidumpUploadService')
+
+    results.AddResult(result)
+    return results, False
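For reference, a standalone sketch of the shell command _LaunchMonkeyTest assembles; the package name and option values are illustrative (the doubled spaces from empty pieces are harmless to the shell):

    import random

    package = 'com.google.android.apps.chrome'  # illustrative
    categories = ['android.intent.category.LAUNCHER']
    throttle, event_count, verbose_count, seed = 100, 10000, 1, None

    cmd = ['monkey',
           '-p %s' % package,
           ' '.join('-c %s' % c for c in categories),
           '--throttle %d' % throttle,
           '-s %d' % (seed or random.randint(1, 100)),
           '-v ' * verbose_count,
           '--monitor-native-crashes',
           '--kill-process-after-error',
           '',                                  # extra_args
           '%d' % event_count]
    print(' '.join(cmd))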
diff --git a/build/android/pylib/perf/__init__.py b/build/android/pylib/perf/__init__.py
new file mode 100644
index 0000000..9228df8
--- /dev/null
+++ b/build/android/pylib/perf/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/perf/cache_control.py b/build/android/pylib/perf/cache_control.py
new file mode 100644
index 0000000..8065cf9
--- /dev/null
+++ b/build/android/pylib/perf/cache_control.py
@@ -0,0 +1,21 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import android_commands
+from pylib.device import device_utils
+
+class CacheControl(object):
+  _DROP_CACHES = '/proc/sys/vm/drop_caches'
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+
+  def DropRamCaches(self):
+    """Drops the filesystem ram caches for performance testing."""
+    self._device.RunShellCommand('sync', as_root=True)
+    self._device.WriteFile(CacheControl._DROP_CACHES, '3', as_root=True)
+
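For comparison, a host-side sketch of the same two steps driven through plain adb and subprocess; this assumes a single attached device and an adbd already running as root, and omits the error handling the DeviceUtils layer provides.

    import subprocess

    # Flush pending writes, then drop pagecache, dentries and inodes.
    subprocess.check_call(['adb', 'shell', 'sync'])
    subprocess.check_call(['adb', 'shell', 'echo 3 > /proc/sys/vm/drop_caches'])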
diff --git a/build/android/pylib/perf/perf_control.py b/build/android/pylib/perf/perf_control.py
new file mode 100644
index 0000000..f89f397
--- /dev/null
+++ b/build/android/pylib/perf/perf_control.py
@@ -0,0 +1,161 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import atexit
+import logging
+
+from pylib import android_commands
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+
+class PerfControl(object):
+  """Provides methods for setting the performance mode of a device."""
+  _CPU_PATH = '/sys/devices/system/cpu'
+  _KERNEL_MAX = '/sys/devices/system/cpu/kernel_max'
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    # this will raise an AdbCommandFailedError if no CPU files are found
+    self._cpu_files = self._device.RunShellCommand(
+        'ls -d cpu[0-9]*', cwd=self._CPU_PATH, check_return=True, as_root=True)
+    assert self._cpu_files, 'Failed to detect CPUs.'
+    self._cpu_file_list = ' '.join(self._cpu_files)
+    logging.info('CPUs found: %s', self._cpu_file_list)
+    self._have_mpdecision = self._device.FileExists('/system/bin/mpdecision')
+
+  def SetHighPerfMode(self):
+    """Sets the highest stable performance mode for the device."""
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError:
+      message = 'Need root for performance mode. Results may be NOISY!!'
+      logging.warning(message)
+      # Add an additional warning at exit, such that it's clear that any results
+      # may be different/noisy (due to the lack of intended performance mode).
+      atexit.register(logging.warning, message)
+      return
+
+    product_model = self._device.product_model
+    # TODO(epenner): Enable on all devices (http://crbug.com/383566)
+    if 'Nexus 4' == product_model:
+      self._ForceAllCpusOnline(True)
+      if not self._AllCpusAreOnline():
+        logging.warning('Failed to force CPUs online. Results may be NOISY!')
+      self._SetScalingGovernorInternal('performance')
+    elif 'Nexus 5' == product_model:
+      self._ForceAllCpusOnline(True)
+      if not self._AllCpusAreOnline():
+        logging.warning('Failed to force CPUs online. Results may be NOISY!')
+      self._SetScalingGovernorInternal('performance')
+      self._SetScalingMaxFreq(1190400)
+      self._SetMaxGpuClock(200000000)
+    else:
+      self._SetScalingGovernorInternal('performance')
+
+  def SetPerfProfilingMode(self):
+    """Enables all cores for reliable perf profiling."""
+    self._ForceAllCpusOnline(True)
+    self._SetScalingGovernorInternal('performance')
+    if not self._AllCpusAreOnline():
+      if not self._device.HasRoot():
+        raise RuntimeError('Need root to force CPUs online.')
+      raise RuntimeError('Failed to force CPUs online.')
+
+  def SetDefaultPerfMode(self):
+    """Sets the performance mode for the device to its default mode."""
+    if not self._device.HasRoot():
+      return
+    product_model = self._device.product_model
+    if 'Nexus 5' == product_model:
+      if self._AllCpusAreOnline():
+        self._SetScalingMaxFreq(2265600)
+        self._SetMaxGpuClock(450000000)
+
+    governor_mode = {
+        'GT-I9300': 'pegasusq',
+        'Galaxy Nexus': 'interactive',
+        'Nexus 4': 'ondemand',
+        'Nexus 5': 'ondemand',
+        'Nexus 7': 'interactive',
+        'Nexus 10': 'interactive'
+    }.get(product_model, 'ondemand')
+    self._SetScalingGovernorInternal(governor_mode)
+    self._ForceAllCpusOnline(False)
+
+  def GetCpuInfo(self):
+    online = (output.rstrip() == '1' and status == 0
+              for (_, output, status) in self._ForEachCpu('cat "$CPU/online"'))
+    governor = (output.rstrip() if status == 0 else None
+                for (_, output, status)
+                in self._ForEachCpu('cat "$CPU/cpufreq/scaling_governor"'))
+    return zip(self._cpu_files, online, governor)
+
+  def _ForEachCpu(self, cmd):
+    script = '; '.join([
+        'for CPU in %s' % self._cpu_file_list,
+        'do %s' % cmd,
+        'echo -n "%~%$?%~%"',
+        'done'
+    ])
+    output = self._device.RunShellCommand(
+        script, cwd=self._CPU_PATH, check_return=True, as_root=True)
+    output = '\n'.join(output).split('%~%')
+    return zip(self._cpu_files, output[0::2], (int(c) for c in output[1::2]))
+
+  def _WriteEachCpuFile(self, path, value):
+    results = self._ForEachCpu(
+        'test -e "$CPU/{path}" && echo {value} > "$CPU/{path}"'.format(
+            path=path, value=value))
+    cpus = ' '.join(cpu for (cpu, _, status) in results if status == 0)
+    if cpus:
+      logging.info('Successfully set %s to %r on: %s', path, value, cpus)
+    else:
+      logging.warning('Failed to set %s to %r on any cpu.', path, value)
+
+  def _SetScalingGovernorInternal(self, value):
+    self._WriteEachCpuFile('cpufreq/scaling_governor', value)
+
+  def _SetScalingMaxFreq(self, value):
+    self._WriteEachCpuFile('cpufreq/scaling_max_freq', '%d' % value)
+
+  def _SetMaxGpuClock(self, value):
+    self._device.WriteFile('/sys/class/kgsl/kgsl-3d0/max_gpuclk',
+                           str(value),
+                           as_root=True)
+
+  def _AllCpusAreOnline(self):
+    results = self._ForEachCpu('cat "$CPU/online"')
+    # TODO(epenner): Investigate why file may be missing
+    # (http://crbug.com/397118)
+    return all(output.rstrip() == '1' and status == 0
+               for (cpu, output, status) in results
+               if cpu != 'cpu0')
+
+  def _ForceAllCpusOnline(self, force_online):
+    """Enable all CPUs on a device.
+
+    Some vendors (or only Qualcomm?) hot-plug their CPUs, which can add noise
+    to measurements:
+    - In perf, samples are only taken for the CPUs that are online when the
+      measurement is started.
+    - The scaling governor can't be set for an offline CPU and frequency scaling
+      on newly enabled CPUs adds noise to both perf and tracing measurements.
+
+    It appears Qualcomm is the only vendor that hot-plugs CPUs, and on Qualcomm
+    this is done by "mpdecision".
+
+    """
+    if self._have_mpdecision:
+      script = 'stop mpdecision' if force_online else 'start mpdecision'
+      self._device.RunShellCommand(script, check_return=True, as_root=True)
+
+    if not self._have_mpdecision and not self._AllCpusAreOnline():
+      logging.warning('Unexpected cpu hot plugging detected.')
+
+    if force_online:
+      self._ForEachCpu('echo 1 > "$CPU/online"')
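
A minimal sketch of wrapping PerfControl around a measurement, following the
methods defined above (run_benchmark is a placeholder for the actual workload):

  from pylib.device import device_utils
  from pylib.perf import perf_control

  device = device_utils.DeviceUtils.HealthyDevices()[0]
  perf = perf_control.PerfControl(device)
  try:
    perf.SetHighPerfMode()
    run_benchmark(device)  # placeholder workload
    for cpu, online, governor in perf.GetCpuInfo():
      print cpu, online, governor
  finally:
    # Always restore default governors and hot-plugging when done.
    perf.SetDefaultPerfMode()
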
diff --git a/build/android/pylib/perf/perf_control_unittest.py b/build/android/pylib/perf/perf_control_unittest.py
new file mode 100644
index 0000000..69b8b46
--- /dev/null
+++ b/build/android/pylib/perf/perf_control_unittest.py
@@ -0,0 +1,37 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# pylint: disable=W0212
+
+import os
+import sys
+import unittest
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+
+from pylib.device import device_utils
+from pylib.perf import perf_control
+
+class TestPerfControl(unittest.TestCase):
+  def setUp(self):
+    if not os.getenv('BUILDTYPE'):
+      os.environ['BUILDTYPE'] = 'Debug'
+
+    devices = device_utils.DeviceUtils.HealthyDevices()
+    self.assertGreater(len(devices), 0, 'No device attached!')
+    self._device = devices[0]
+
+  def testHighPerfMode(self):
+    perf = perf_control.PerfControl(self._device)
+    try:
+      perf.SetPerfProfilingMode()
+      cpu_info = perf.GetCpuInfo()
+      self.assertEquals(len(perf._cpu_files), len(cpu_info))
+      for _, online, governor in cpu_info:
+        self.assertTrue(online)
+        self.assertEquals('performance', governor)
+    finally:
+      perf.SetDefaultPerfMode()
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/perf/setup.py b/build/android/pylib/perf/setup.py
new file mode 100644
index 0000000..8e1fc28
--- /dev/null
+++ b/build/android/pylib/perf/setup.py
@@ -0,0 +1,97 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for performance tests."""
+
+import json
+import fnmatch
+import logging
+import os
+import shutil
+
+from pylib import constants
+from pylib import forwarder
+from pylib.device import device_list
+from pylib.device import device_utils
+from pylib.perf import test_runner
+from pylib.utils import test_environment
+
+
+def _GetAllDevices():
+  devices_path = os.path.join(os.environ.get('CHROMIUM_OUT_DIR', 'out'),
+                              device_list.LAST_DEVICES_FILENAME)
+  try:
+    devices = [device_utils.DeviceUtils(s)
+               for s in device_list.GetPersistentDeviceList(devices_path)]
+  except IOError as e:
+    logging.error('Unable to find %s [%s]', devices_path, e)
+    devices = device_utils.DeviceUtils.HealthyDevices()
+  return sorted(devices)
+
+
+def _GetStepsDictFromSingleStep(test_options):
+  # Running a single command; build the steps dict around it.
+  steps_dict = {
+    'version': 1,
+    'steps': {
+        'single_step': {
+          'device_affinity': 0,
+          'cmd': test_options.single_step
+        },
+    }
+  }
+  return steps_dict
+
+
+def _GetStepsDict(test_options):
+  if test_options.single_step:
+    return _GetStepsDictFromSingleStep(test_options)
+  if test_options.steps:
+    with file(test_options.steps, 'r') as f:
+      steps = json.load(f)
+
+      # Already using the new format.
+      assert steps['version'] == 1
+      return steps
+
+
+def Setup(test_options):
+  """Create and return the test runner factory and tests.
+
+  Args:
+    test_options: A PerformanceOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests, devices).
+  """
+  # TODO(bulach): remove this once the bot side lands. BUG=318369
+  constants.SetBuildType('Release')
+  if os.path.exists(constants.PERF_OUTPUT_DIR):
+    shutil.rmtree(constants.PERF_OUTPUT_DIR)
+  os.makedirs(constants.PERF_OUTPUT_DIR)
+
+  # Before running the tests, kill any leftover server.
+  test_environment.CleanupLeftoverProcesses()
+
+  # We want to keep device affinity, so return all devices ever seen.
+  all_devices = _GetAllDevices()
+
+  steps_dict = _GetStepsDict(test_options)
+  sorted_step_names = sorted(steps_dict['steps'].keys())
+
+  if test_options.test_filter:
+    sorted_step_names = fnmatch.filter(sorted_step_names,
+                                       test_options.test_filter)
+
+  flaky_steps = []
+  if test_options.flaky_steps:
+    with file(test_options.flaky_steps, 'r') as f:
+      flaky_steps = json.load(f)
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index, len(all_devices),
+        steps_dict, flaky_steps)
+
+  return (TestRunnerFactory, sorted_step_names, all_devices)
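
The steps file consumed by Setup() is version-1 JSON with the same shape as the
dict built by _GetStepsDictFromSingleStep; a hypothetical two-step example,
shown as the equivalent Python dict (step names and commands are placeholders):

  EXAMPLE_STEPS = {
      'version': 1,
      'steps': {
          'smoothness_step': {
              'device_affinity': 0,
              'cmd': 'tools/perf/run_benchmark smoothness_step'
          },
          'startup_step': {
              'device_affinity': 1,
              'cmd': 'tools/perf/run_benchmark startup_step'
          },
      }
  }
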
diff --git a/build/android/pylib/perf/surface_stats_collector.py b/build/android/pylib/perf/surface_stats_collector.py
new file mode 100644
index 0000000..c7e7527
--- /dev/null
+++ b/build/android/pylib/perf/surface_stats_collector.py
@@ -0,0 +1,191 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import Queue
+import datetime
+import logging
+import re
+import threading
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+# Log marker containing SurfaceTexture timestamps.
+_SURFACE_TEXTURE_TIMESTAMPS_MESSAGE = 'SurfaceTexture update timestamps'
+_SURFACE_TEXTURE_TIMESTAMP_RE = r'\d+'
+
+
+class SurfaceStatsCollector(object):
+  """Collects surface stats for a SurfaceView from the output of SurfaceFlinger.
+
+  Args:
+    device: A DeviceUtils instance.
+  """
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    self._collector_thread = None
+    self._surface_before = None
+    self._get_data_event = None
+    self._data_queue = None
+    self._stop_event = None
+    self._warn_about_empty_data = True
+
+  def DisableWarningAboutEmptyData(self):
+    self._warn_about_empty_data = False
+
+  def Start(self):
+    assert not self._collector_thread
+
+    if self._ClearSurfaceFlingerLatencyData():
+      self._get_data_event = threading.Event()
+      self._stop_event = threading.Event()
+      self._data_queue = Queue.Queue()
+      self._collector_thread = threading.Thread(target=self._CollectorThread)
+      self._collector_thread.start()
+    else:
+      raise Exception('SurfaceFlinger not supported on this device.')
+
+  def Stop(self):
+    assert self._collector_thread
+    (refresh_period, timestamps) = self._GetDataFromThread()
+    if self._collector_thread:
+      self._stop_event.set()
+      self._collector_thread.join()
+      self._collector_thread = None
+    return (refresh_period, timestamps)
+
+  def _CollectorThread(self):
+    last_timestamp = 0
+    timestamps = []
+    retries = 0
+
+    while not self._stop_event.is_set():
+      self._get_data_event.wait(1)
+      try:
+        refresh_period, new_timestamps = self._GetSurfaceFlingerFrameData()
+        if refresh_period is None or new_timestamps is None:
+          retries += 1
+          if retries < 3:
+            continue
+          if last_timestamp:
+            # Some data has already been collected, but either the app
+            # was closed or there's no new data. Signal the main thread and
+            # wait.
+            self._data_queue.put((None, None))
+            self._stop_event.wait()
+            break
+          raise Exception('Unable to get surface flinger latency data')
+
+        timestamps += [timestamp for timestamp in new_timestamps
+                       if timestamp > last_timestamp]
+        if len(timestamps):
+          last_timestamp = timestamps[-1]
+
+        if self._get_data_event.is_set():
+          self._get_data_event.clear()
+          self._data_queue.put((refresh_period, timestamps))
+          timestamps = []
+      except Exception as e:
+        # On any error, before aborting, put the exception into _data_queue to
+        # prevent the main thread from waiting at _data_queue.get() infinitely.
+        self._data_queue.put(e)
+        raise
+
+  def _GetDataFromThread(self):
+    self._get_data_event.set()
+    ret = self._data_queue.get()
+    if isinstance(ret, Exception):
+      raise ret
+    return ret
+
+  def _ClearSurfaceFlingerLatencyData(self):
+    """Clears the SurfaceFlinger latency data.
+
+    Returns:
+      True if SurfaceFlinger latency is supported by the device, otherwise
+      False.
+    """
+    # The command returns nothing if it is supported, otherwise returns many
+    # lines of result just like 'dumpsys SurfaceFlinger'.
+    results = self._device.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency-clear SurfaceView')
+    return not len(results)
+
+  def GetSurfaceFlingerPid(self):
+    results = self._device.RunShellCommand('ps | grep surfaceflinger')
+    if not results:
+      raise Exception('Unable to get surface flinger process id')
+    pid = results[0].split()[1]
+    return pid
+
+  def _GetSurfaceFlingerFrameData(self):
+    """Returns collected SurfaceFlinger frame timing data.
+
+    Returns:
+      A tuple containing:
+      - The display's nominal refresh period in milliseconds.
+      - A list of timestamps signifying frame presentation times in
+        milliseconds.
+      The return value may be (None, None) if there was no data collected (for
+      example, if the app was closed before the collector thread has finished).
+    """
+    # adb shell dumpsys SurfaceFlinger --latency <window name>
+    # prints some information about the last 128 frames displayed in
+    # that window.
+    # The data returned looks like this:
+    # 16954612
+    # 7657467895508   7657482691352   7657493499756
+    # 7657484466553   7657499645964   7657511077881
+    # 7657500793457   7657516600576   7657527404785
+    # (...)
+    #
+    # The first line is the refresh period (here 16.95 ms), it is followed
+    # by 128 lines w/ 3 timestamps in nanosecond each:
+    # A) when the app started to draw
+    # B) the vsync immediately preceding SF submitting the frame to the h/w
+    # C) timestamp immediately after SF submitted that frame to the h/w
+    #
+    # The difference between the 1st and 3rd timestamp is the frame latency.
+    # An interesting quantity is how many refresh-period boundaries the frame
+    # latency crosses, which can be calculated as:
+    #
+    # ceil((C - A) / refresh-period)
+    #
+    # (each time the number above changes, we have a "jank").
+    # If this happens a lot during an animation, the animation appears
+    # janky, even if it runs at 60 fps on average.
+    #
+    # We use the special "SurfaceView" window name because the statistics for
+    # the activity's main window are not updated when the main web content is
+    # composited into a SurfaceView.
+    results = self._device.RunShellCommand(
+        'dumpsys SurfaceFlinger --latency SurfaceView')
+    if not len(results):
+      return (None, None)
+
+    timestamps = []
+    nanoseconds_per_millisecond = 1e6
+    refresh_period = long(results[0]) / nanoseconds_per_millisecond
+
+    # If a fence associated with a frame is still pending when we query the
+    # latency data, SurfaceFlinger gives the frame a timestamp of INT64_MAX.
+    # Since we only care about completed frames, we will ignore any timestamps
+    # with this value.
+    pending_fence_timestamp = (1 << 63) - 1
+
+    for line in results[1:]:
+      fields = line.split()
+      if len(fields) != 3:
+        continue
+      timestamp = long(fields[1])
+      if timestamp == pending_fence_timestamp:
+        continue
+      timestamp /= nanoseconds_per_millisecond
+      timestamps.append(timestamp)
+
+    return (refresh_period, timestamps)
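
The collector itself only returns the (refresh_period, timestamps) pair in
milliseconds; a minimal sketch of deriving frame lengths and a simple jank
count from it, in the spirit of the ceil((C - A) / refresh-period) comment
above (the 1.5x threshold is an assumption for illustration, not something the
collector defines):

  def SummarizeFrameTimestamps(refresh_period, timestamps):
    """Derives per-frame lengths (ms) and a rough jank count from Stop()."""
    frame_lengths = [t1 - t0 for t0, t1 in zip(timestamps, timestamps[1:])]
    # Count a "jank" whenever a frame took noticeably longer than one vsync.
    jank_count = sum(1 for length in frame_lengths
                     if length > 1.5 * refresh_period)
    avg_fps = (1000.0 * len(frame_lengths) / sum(frame_lengths)
               if frame_lengths else 0)
    return frame_lengths, jank_count, avg_fps
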
diff --git a/build/android/pylib/perf/surface_stats_collector_unittest.py b/build/android/pylib/perf/surface_stats_collector_unittest.py
new file mode 100644
index 0000000..e905d73
--- /dev/null
+++ b/build/android/pylib/perf/surface_stats_collector_unittest.py
@@ -0,0 +1,64 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for SurfaceStatsCollector."""
+# pylint: disable=W0212
+
+import unittest
+
+from pylib.perf.surface_stats_collector import SurfaceStatsCollector
+
+
+class TestSurfaceStatsCollector(unittest.TestCase):
+  @staticmethod
+  def _CreateUniformTimestamps(base, num, delta):
+    return [base + i * delta for i in range(1, num + 1)]
+
+  @staticmethod
+  def _CreateDictionaryFromResults(results):
+    dictionary = {}
+    for result in results:
+      dictionary[result.name] = result
+    return dictionary
+
+  def setUp(self):
+    self.refresh_period = 0.1
+
+  def testOneFrameDelta(self):
+    timestamps = self._CreateUniformTimestamps(0, 10, self.refresh_period)
+    results = self._CreateDictionaryFromResults(
+                  SurfaceStatsCollector._CalculateResults(
+                      self.refresh_period, timestamps, ''))
+
+    self.assertEquals(results['avg_surface_fps'].value,
+                      int(round(1 / self.refresh_period)))
+    self.assertEquals(results['jank_count'].value, 0)
+    self.assertEquals(results['max_frame_delay'].value, 1)
+    self.assertEquals(len(results['frame_lengths'].value), len(timestamps) - 1)
+
+  def testAllFramesTooShort(self):
+    timestamps = self._CreateUniformTimestamps(0, 10, self.refresh_period / 100)
+    self.assertRaises(Exception,
+                      SurfaceStatsCollector._CalculateResults,
+                      self.refresh_period, timestamps, '')
+
+  def testSomeFramesTooShort(self):
+    timestamps = self._CreateUniformTimestamps(0, 5, self.refresh_period)
+    # The following timestamps should be skipped.
+    timestamps += self._CreateUniformTimestamps(timestamps[4],
+                                                5,
+                                                self.refresh_period / 100)
+    timestamps += self._CreateUniformTimestamps(timestamps[4],
+                                                5,
+                                                self.refresh_period)
+
+    results = self._CreateDictionaryFromResults(
+                  SurfaceStatsCollector._CalculateResults(
+                      self.refresh_period, timestamps, ''))
+
+    self.assertEquals(len(results['frame_lengths'].value), 9)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/perf/test_options.py b/build/android/pylib/perf/test_options.py
new file mode 100644
index 0000000..eff928e
--- /dev/null
+++ b/build/android/pylib/perf/test_options.py
@@ -0,0 +1,22 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the PerfOptions named tuple."""
+
+import collections
+
+PerfOptions = collections.namedtuple('PerfOptions', [
+    'steps',
+    'flaky_steps',
+    'output_json_list',
+    'print_step',
+    'no_timeout',
+    'test_filter',
+    'dry_run',
+    'single_step',
+    'collect_chartjson_data',
+    'output_chartjson_data',
+    'max_battery_temp',
+    'min_battery_level',
+])
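
A hedged sketch of filling in PerfOptions for a single-step run and handing it
to pylib.perf.setup (the benchmark command is a placeholder):

  from pylib.perf import setup
  from pylib.perf import test_options

  options = test_options.PerfOptions(
      steps=None, flaky_steps=None, output_json_list=None, print_step=None,
      no_timeout=False, test_filter=None, dry_run=False,
      single_step='tools/perf/run_benchmark smoothness_step',  # placeholder
      collect_chartjson_data=False, output_chartjson_data=None,
      max_battery_temp=None, min_battery_level=None)
  runner_factory, step_names, devices = setup.Setup(options)
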
diff --git a/build/android/pylib/perf/test_runner.py b/build/android/pylib/perf/test_runner.py
new file mode 100644
index 0000000..d21a9b7
--- /dev/null
+++ b/build/android/pylib/perf/test_runner.py
@@ -0,0 +1,374 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs perf tests.
+
+Our buildbot infrastructure requires each slave to run steps serially.
+This is sub-optimal for android, where these steps can run independently on
+multiple connected devices.
+
+The buildbots will run this script multiple times per cycle:
+- First: all steps listed in --steps will be executed in parallel using all
+connected devices. Step results will be pickled to disk. Each step has a unique
+name. The result code will be ignored if the step name is listed in
+--flaky-steps.
+The buildbot will treat this step as a regular step, and will not process any
+graph data.
+
+- Then, with --print-step STEP_NAME: at this stage, we'll simply print the
+file with the step results previously saved. The buildbot will then process
+the graph data accordingly.
+
+The JSON steps file contains a dictionary in the format:
+{ "version": int,
+  "steps": {
+    "foo": {
+      "device_affinity": int,
+      "cmd": "script_to_execute foo"
+    },
+    "bar": {
+      "device_affinity": int,
+      "cmd": "script_to_execute bar"
+    }
+  }
+}
+
+The JSON flaky steps file contains a list of step names whose results should
+be ignored:
+[
+  "step_name_foo",
+  "step_name_bar"
+]
+
+Note that script_to_execute necessarily has to take at least the following
+option:
+  --device: the serial number to be passed to all adb commands.
+"""
+
+import collections
+import datetime
+import json
+import logging
+import os
+import pickle
+import shutil
+import sys
+import tempfile
+import threading
+import time
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib import forwarder
+from pylib.base import base_test_result
+from pylib.base import base_test_runner
+from pylib.device import battery_utils
+from pylib.device import device_errors
+
+
+def GetPersistedResult(test_name):
+  file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+  if not os.path.exists(file_name):
+    logging.error('File not found %s', file_name)
+    return None
+
+  with file(file_name, 'r') as f:
+    return pickle.loads(f.read())
+
+
+def OutputJsonList(json_input, json_output):
+  with file(json_input, 'r') as i:
+    all_steps = json.load(i)
+
+  step_values = []
+  for k, v in all_steps['steps'].iteritems():
+    data = {'test': k, 'device_affinity': v['device_affinity']}
+
+    persisted_result = GetPersistedResult(k)
+    if persisted_result:
+      data['total_time'] = persisted_result['total_time']
+    step_values.append(data)
+
+  with file(json_output, 'w') as o:
+    o.write(json.dumps(step_values))
+  return 0
+
+
+def PrintTestOutput(test_name, json_file_name=None):
+  """Helper method to print the output of previously executed test_name.
+
+  Args:
+    test_name: name of the test that has been previously executed.
+    json_file_name: name of the file to output chartjson data to.
+
+  Returns:
+    exit code generated by the test step.
+  """
+  persisted_result = GetPersistedResult(test_name)
+  if not persisted_result:
+    return 1
+  logging.info('*' * 80)
+  logging.info('Output from:')
+  logging.info(persisted_result['cmd'])
+  logging.info('*' * 80)
+  print persisted_result['output']
+
+  if json_file_name:
+    with file(json_file_name, 'w') as f:
+      f.write(persisted_result['chartjson'])
+
+  return persisted_result['exit_code']
+
+
+def PrintSummary(test_names):
+  logging.info('*' * 80)
+  logging.info('Sharding summary')
+  device_total_time = collections.defaultdict(int)
+  for test_name in test_names:
+    file_name = os.path.join(constants.PERF_OUTPUT_DIR, test_name)
+    if not os.path.exists(file_name):
+      logging.info('%s : No status file found', test_name)
+      continue
+    with file(file_name, 'r') as f:
+      result = pickle.loads(f.read())
+    logging.info('%s : exit_code=%d in %d secs at %s',
+                 result['name'], result['exit_code'], result['total_time'],
+                 result['device'])
+    device_total_time[result['device']] += result['total_time']
+  for device, device_time in device_total_time.iteritems():
+    logging.info('Total for device %s : %d secs', device, device_time)
+  logging.info('Total steps time: %d secs', sum(device_total_time.values()))
+
+
+class _HeartBeatLogger(object):
+  # How often to print the heartbeat on flush().
+  _PRINT_INTERVAL = 30.0
+
+  def __init__(self):
+    """A file-like class for keeping the buildbot alive."""
+    self._len = 0
+    self._tick = time.time()
+    self._stopped = threading.Event()
+    self._timer = threading.Thread(target=self._runner)
+    self._timer.start()
+
+  def _runner(self):
+    while not self._stopped.is_set():
+      self.flush()
+      self._stopped.wait(_HeartBeatLogger._PRINT_INTERVAL)
+
+  def write(self, data):
+    self._len += len(data)
+
+  def flush(self):
+    now = time.time()
+    if now - self._tick >= _HeartBeatLogger._PRINT_INTERVAL:
+      self._tick = now
+      print '--single-step output length %d' % self._len
+      sys.stdout.flush()
+
+  def stop(self):
+    self._stopped.set()
+
+
+class TestRunner(base_test_runner.BaseTestRunner):
+  def __init__(self, test_options, device, shard_index, max_shard, tests,
+      flaky_tests):
+    """A TestRunner instance runs a perf test on a single device.
+
+    Args:
+      test_options: A PerfOptions object.
+      device: Device to run the tests.
+      shard_index: the index of this device.
+      max_shard: the total number of shards.
+      tests: the steps dict; its 'steps' entry maps test names to commands.
+      flaky_tests: a list of flaky test names.
+    """
+    super(TestRunner, self).__init__(device, None)
+    self._options = test_options
+    self._shard_index = shard_index
+    self._max_shard = max_shard
+    self._tests = tests
+    self._flaky_tests = flaky_tests
+    self._output_dir = None
+    self._device_battery = battery_utils.BatteryUtils(self.device)
+
+  @staticmethod
+  def _IsBetter(result):
+    if result['actual_exit_code'] == 0:
+      return True
+    pickled = os.path.join(constants.PERF_OUTPUT_DIR,
+                           result['name'])
+    if not os.path.exists(pickled):
+      return True
+    with file(pickled, 'r') as f:
+      previous = pickle.loads(f.read())
+    return result['actual_exit_code'] < previous['actual_exit_code']
+
+  @staticmethod
+  def _SaveResult(result):
+    if TestRunner._IsBetter(result):
+      with file(os.path.join(constants.PERF_OUTPUT_DIR,
+                             result['name']), 'w') as f:
+        f.write(pickle.dumps(result))
+
+  def _CheckDeviceAffinity(self, test_name):
+    """Returns True if test_name has affinity for this shard."""
+    affinity = (self._tests['steps'][test_name]['device_affinity'] %
+                self._max_shard)
+    if self._shard_index == affinity:
+      return True
+    logging.info('Skipping %s on %s (affinity is %s, shard index is %s)',
+                 test_name, self.device_serial, affinity, self._shard_index)
+    return False
+
+  def _CleanupOutputDirectory(self):
+    if self._output_dir:
+      shutil.rmtree(self._output_dir, ignore_errors=True)
+      self._output_dir = None
+
+  def _ReadChartjsonOutput(self):
+    if not self._output_dir:
+      return ''
+
+    json_output_path = os.path.join(self._output_dir, 'results-chart.json')
+    try:
+      with open(json_output_path) as f:
+        return f.read()
+    except IOError:
+      logging.exception('Exception when reading chartjson.')
+      logging.error('This usually means that telemetry did not run, so it could'
+                    ' not generate the file. Please check the device running'
+                    ' the test.')
+      return ''
+
+  def _LaunchPerfTest(self, test_name):
+    """Runs a perf test.
+
+    Args:
+      test_name: the name of the test to be executed.
+
+    Returns:
+      A tuple containing (Output, base_test_result.ResultType)
+    """
+    if not self._CheckDeviceAffinity(test_name):
+      return '', base_test_result.ResultType.PASS
+
+    try:
+      logging.warning('Unmapping device ports')
+      forwarder.Forwarder.UnmapAllDevicePorts(self.device)
+      self.device.old_interface.RestartAdbdOnDevice()
+    except Exception as e:
+      logging.error('Exception when tearing down device %s', e)
+
+    cmd = ('%s --device %s' %
+           (self._tests['steps'][test_name]['cmd'],
+            self.device_serial))
+
+    if self._options.collect_chartjson_data:
+      self._output_dir = tempfile.mkdtemp()
+      cmd = cmd + ' --output-dir=%s' % self._output_dir
+
+    logging.info(
+        'temperature: %s (0.1 C)',
+        str(self._device_battery.GetBatteryInfo().get('temperature')))
+    if self._options.max_battery_temp:
+      self._device_battery.LetBatteryCoolToTemperature(
+          self._options.max_battery_temp)
+
+    logging.info('Charge level: %s%%',
+        str(self._device_battery.GetBatteryInfo().get('level')))
+    if self._options.min_battery_level:
+      self._device_battery.ChargeDeviceToLevel(
+          self._options.min_battery_level)
+
+    logging.info('%s : %s', test_name, cmd)
+    start_time = datetime.datetime.now()
+
+    timeout = self._tests['steps'][test_name].get('timeout', 5400)
+    if self._options.no_timeout:
+      timeout = None
+    logging.info('Timeout for %s test: %s', test_name, timeout)
+    full_cmd = cmd
+    if self._options.dry_run:
+      full_cmd = 'echo %s' % cmd
+
+    logfile = sys.stdout
+    if self._options.single_step:
+      # Just print a heart-beat so that the outer buildbot scripts won't time
+      # out waiting for output.
+      logfile = _HeartBeatLogger()
+    cwd = os.path.abspath(constants.DIR_SOURCE_ROOT)
+    if full_cmd.startswith('src/'):
+      cwd = os.path.abspath(os.path.join(constants.DIR_SOURCE_ROOT, os.pardir))
+    try:
+      exit_code, output = cmd_helper.GetCmdStatusAndOutputWithTimeout(
+          full_cmd, timeout, cwd=cwd, shell=True, logfile=logfile)
+      json_output = self._ReadChartjsonOutput()
+    except cmd_helper.TimeoutError as e:
+      exit_code = -1
+      output = str(e)
+      json_output = ''
+    finally:
+      self._CleanupOutputDirectory()
+      if self._options.single_step:
+        logfile.stop()
+    end_time = datetime.datetime.now()
+    if exit_code is None:
+      exit_code = -1
+    logging.info('%s : exit_code=%d in %d secs at %s',
+                 test_name, exit_code, (end_time - start_time).seconds,
+                 self.device_serial)
+
+    if exit_code == 0:
+      result_type = base_test_result.ResultType.PASS
+    else:
+      result_type = base_test_result.ResultType.FAIL
+      # Since perf tests use device affinity, give the device a chance to
+      # recover if it is offline after a failure. Otherwise, the master sharder
+      # will remove it from the pool and future tests on this device will fail.
+      try:
+        self.device.WaitUntilFullyBooted(timeout=120)
+      except device_errors.CommandTimeoutError as e:
+        logging.error('Device failed to return after %s: %s', test_name, e)
+
+    actual_exit_code = exit_code
+    if test_name in self._flaky_tests:
+      # The exit_code is used at the second stage when printing the
+      # test output. If the test is flaky, force to "0" to get that step green
+      # whilst still gathering data to the perf dashboards.
+      # The result_type is used by the test_dispatcher to retry the test.
+      exit_code = 0
+
+    persisted_result = {
+        'name': test_name,
+        'output': output,
+        'chartjson': json_output,
+        'exit_code': exit_code,
+        'actual_exit_code': actual_exit_code,
+        'result_type': result_type,
+        'total_time': (end_time - start_time).seconds,
+        'device': self.device_serial,
+        'cmd': cmd,
+    }
+    self._SaveResult(persisted_result)
+
+    return (output, result_type)
+
+  def RunTest(self, test_name):
+    """Run a perf test on the device.
+
+    Args:
+      test_name: String to use for logging the test result.
+
+    Returns:
+      A tuple of (TestRunResults, retry).
+    """
+    _, result_type = self._LaunchPerfTest(test_name)
+    results = base_test_result.TestRunResults()
+    results.AddResult(base_test_result.BaseTestResult(test_name, result_type))
+    retry = None
+    if not results.DidRunPass():
+      retry = test_name
+    return results, retry
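
The device_affinity values in the steps dict map each step onto a shard with a
simple modulo, mirroring _CheckDeviceAffinity above; a small sketch of
computing the steps a given shard will run:

  def StepsForShard(steps_dict, shard_index, num_shards):
    """Returns the step names that the given shard is responsible for."""
    return sorted(
        name for name, step in steps_dict['steps'].iteritems()
        if step['device_affinity'] % num_shards == shard_index)
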
diff --git a/build/android/pylib/perf/thermal_throttle.py b/build/android/pylib/perf/thermal_throttle.py
new file mode 100644
index 0000000..383b6d5
--- /dev/null
+++ b/build/android/pylib/perf/thermal_throttle.py
@@ -0,0 +1,137 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+from pylib import android_commands
+from pylib.device import device_utils
+
+
+class OmapThrottlingDetector(object):
+  """Class to detect and track thermal throttling on an OMAP 4."""
+  OMAP_TEMP_FILE = ('/sys/devices/platform/omap/omap_temp_sensor.0/'
+                    'temperature')
+
+  @staticmethod
+  def IsSupported(device):
+    return device.FileExists(OmapThrottlingDetector.OMAP_TEMP_FILE)
+
+  def __init__(self, device):
+    self._device = device
+
+  @staticmethod
+  def BecameThrottled(log_line):
+    return 'omap_thermal_throttle' in log_line
+
+  @staticmethod
+  def BecameUnthrottled(log_line):
+    return 'omap_thermal_unthrottle' in log_line
+
+  @staticmethod
+  def GetThrottlingTemperature(log_line):
+    if 'throttle_delayed_work_fn' in log_line:
+      return float([s for s in log_line.split() if s.isdigit()][0]) / 1000.0
+
+  def GetCurrentTemperature(self):
+    tempdata = self._device.ReadFile(OmapThrottlingDetector.OMAP_TEMP_FILE)
+    return float(tempdata) / 1000.0
+
+
+class ExynosThrottlingDetector(object):
+  """Class to detect and track thermal throttling on an Exynos 5."""
+  @staticmethod
+  def IsSupported(device):
+    return device.FileExists('/sys/bus/exynos5-core')
+
+  def __init__(self, device):
+    pass
+
+  @staticmethod
+  def BecameThrottled(log_line):
+    return 'exynos_tmu: Throttling interrupt' in log_line
+
+  @staticmethod
+  def BecameUnthrottled(log_line):
+    return 'exynos_thermal_unthrottle: not throttling' in log_line
+
+  @staticmethod
+  def GetThrottlingTemperature(_log_line):
+    return None
+
+  @staticmethod
+  def GetCurrentTemperature():
+    return None
+
+
+class ThermalThrottle(object):
+  """Class to detect and track thermal throttling.
+
+  Usage:
+    Wait for IsThrottled() to be False before running test
+    After running test call HasBeenThrottled() to find out if the
+    test run was affected by thermal throttling.
+  """
+
+  def __init__(self, device):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, android_commands.AndroidCommands):
+      device = device_utils.DeviceUtils(device)
+    self._device = device
+    self._throttled = False
+    self._detector = None
+    if OmapThrottlingDetector.IsSupported(device):
+      self._detector = OmapThrottlingDetector(device)
+    elif ExynosThrottlingDetector.IsSupported(device):
+      self._detector = ExynosThrottlingDetector(device)
+
+  def HasBeenThrottled(self):
+    """True if there has been any throttling since the last call to
+       HasBeenThrottled or IsThrottled.
+    """
+    return self._ReadLog()
+
+  def IsThrottled(self):
+    """True if currently throttled."""
+    self._ReadLog()
+    return self._throttled
+
+  def _ReadLog(self):
+    if not self._detector:
+      return False
+    has_been_throttled = False
+    serial_number = str(self._device)
+    log = self._device.RunShellCommand('dmesg -c')
+    degree_symbol = unichr(0x00B0)
+    for line in log:
+      if self._detector.BecameThrottled(line):
+        if not self._throttled:
+          logging.warning('>>> Device %s thermally throttled', serial_number)
+        self._throttled = True
+        has_been_throttled = True
+      elif self._detector.BecameUnthrottled(line):
+        if self._throttled:
+          logging.warning('>>> Device %s thermally unthrottled', serial_number)
+        self._throttled = False
+        has_been_throttled = True
+      temperature = self._detector.GetThrottlingTemperature(line)
+      if temperature is not None:
+        logging.info(u'Device %s thermally throttled at %3.1f%sC',
+                     serial_number, temperature, degree_symbol)
+
+    if logging.getLogger().isEnabledFor(logging.DEBUG):
+      # Print current temperature of CPU SoC.
+      temperature = self._detector.GetCurrentTemperature()
+      if temperature is not None:
+        logging.debug(u'Current SoC temperature of %s = %3.1f%sC',
+                      serial_number, temperature, degree_symbol)
+
+      # Print temperature of battery, to give a system temperature
+      dumpsys_log = self._device.RunShellCommand('dumpsys battery')
+      for line in dumpsys_log:
+        if 'temperature' in line:
+          btemp = float([s for s in line.split() if s.isdigit()][0]) / 10.0
+          logging.debug(u'Current battery temperature of %s = %3.1f%sC',
+                        serial_number, btemp, degree_symbol)
+
+    return has_been_throttled
+
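
Following the usage spelled out in the ThermalThrottle docstring, a minimal
sketch (the polling interval and run_benchmark are placeholders):

  import time

  from pylib.device import device_utils
  from pylib.perf import thermal_throttle

  device = device_utils.DeviceUtils.HealthyDevices()[0]
  throttle = thermal_throttle.ThermalThrottle(device)
  while throttle.IsThrottled():
    time.sleep(5)  # arbitrary back-off while the device cools down
  run_benchmark(device)  # placeholder workload
  if throttle.HasBeenThrottled():
    print 'Results may have been affected by thermal throttling.'
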
diff --git a/build/android/pylib/pexpect.py b/build/android/pylib/pexpect.py
new file mode 100644
index 0000000..cf59fb0
--- /dev/null
+++ b/build/android/pylib/pexpect.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+from __future__ import absolute_import
+
+import os
+import sys
+
+_CHROME_SRC = os.path.join(
+    os.path.abspath(os.path.dirname(__file__)), '..', '..', '..')
+
+_PEXPECT_PATH = os.path.join(_CHROME_SRC, 'third_party', 'pexpect')
+if _PEXPECT_PATH not in sys.path:
+  sys.path.append(_PEXPECT_PATH)
+
+# pexpect is not available on all platforms. We allow this file to be imported
+# on platforms without pexpect and only fail when pexpect is actually used.
+try:
+  from pexpect import * # pylint: disable=W0401,W0614
+except ImportError:
+  pass
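
Since this shim re-exports pexpect's public names, call sites import it through
pylib; a minimal sketch (the shell prompt pattern is an assumption):

  from pylib import pexpect

  child = pexpect.spawn('adb shell')
  child.expect('\$ ')          # assumed prompt pattern
  child.sendline('ls /sdcard')
  child.expect('\$ ')
  print child.before
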
diff --git a/build/android/pylib/ports.py b/build/android/pylib/ports.py
new file mode 100644
index 0000000..578152c
--- /dev/null
+++ b/build/android/pylib/ports.py
@@ -0,0 +1,172 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Functions that deal with local and device ports."""
+
+import contextlib
+import fcntl
+import httplib
+import logging
+import os
+import socket
+import traceback
+
+from pylib import constants
+
+
+# The following two methods are used to allocate ports for the various types
+# of test servers. Because some net-related tests can run on multiple shards
+# at the same time, it's important to allocate ports in a process-safe way.
+# Here, safe port allocation is implemented by leveraging flock.
+def ResetTestServerPortAllocation():
+  """Resets the port allocation to start from TEST_SERVER_PORT_FIRST.
+
+  Returns:
+    Returns True if the reset succeeds. Otherwise returns False.
+  """
+  try:
+    with open(constants.TEST_SERVER_PORT_FILE, 'w') as fp:
+      fp.write('%d' % constants.TEST_SERVER_PORT_FIRST)
+    if os.path.exists(constants.TEST_SERVER_PORT_LOCKFILE):
+      os.unlink(constants.TEST_SERVER_PORT_LOCKFILE)
+    return True
+  except Exception as e:
+    logging.error(e)
+  return False
+
+
+def AllocateTestServerPort():
+  """Allocates a port incrementally.
+
+  Returns:
+    Returns a valid port which should be in between TEST_SERVER_PORT_FIRST and
+    TEST_SERVER_PORT_LAST. Returning 0 means no more valid port can be used.
+  """
+  port = 0
+  ports_tried = []
+  fp_lock = None
+  try:
+    fp_lock = open(constants.TEST_SERVER_PORT_LOCKFILE, 'w')
+    fcntl.flock(fp_lock, fcntl.LOCK_EX)
+    # Get current valid port and calculate next valid port.
+    if not os.path.exists(constants.TEST_SERVER_PORT_FILE):
+      ResetTestServerPortAllocation()
+    with open(constants.TEST_SERVER_PORT_FILE, 'r+') as fp:
+      port = int(fp.read())
+      ports_tried.append(port)
+      while not IsHostPortAvailable(port):
+        port += 1
+        ports_tried.append(port)
+      if (port > constants.TEST_SERVER_PORT_LAST or
+          port < constants.TEST_SERVER_PORT_FIRST):
+        port = 0
+      else:
+        fp.seek(0, os.SEEK_SET)
+        fp.write('%d' % (port + 1))
+  except Exception as e:
+    logging.error(e)
+  finally:
+    if fp_lock:
+      fcntl.flock(fp_lock, fcntl.LOCK_UN)
+      fp_lock.close()
+  if port:
+    logging.info('Allocated port %d for test server.', port)
+  else:
+    logging.error('Could not allocate port for test server. '
+                  'List of ports tried: %s', str(ports_tried))
+  return port
+
+
+def IsHostPortAvailable(host_port):
+  """Checks whether the specified host port is available.
+
+  Args:
+    host_port: Port on host to check.
+
+  Returns:
+    True if the port on host is available, otherwise returns False.
+  """
+  s = socket.socket()
+  try:
+    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+    s.bind(('', host_port))
+    s.close()
+    return True
+  except socket.error:
+    return False
+
+
+def IsDevicePortUsed(device, device_port, state=''):
+  """Checks whether the specified device port is used or not.
+
+  Args:
+    device: A DeviceUtils instance.
+    device_port: Port on device we want to check.
+    state: String of the specified state. Default is empty string, which
+           means any state.
+
+  Returns:
+    True if the port on device is already used, otherwise returns False.
+  """
+  base_url = '127.0.0.1:%d' % device_port
+  netstat_results = device.RunShellCommand('netstat')
+  for single_connect in netstat_results:
+    # Column 3 is the local address which we want to check with.
+    connect_results = single_connect.split()
+    if connect_results[0] != 'tcp':
+      continue
+    if len(connect_results) < 6:
+      raise Exception('Unexpected format while parsing netstat line: ' +
+                      single_connect)
+    is_state_match = connect_results[5] == state if state else True
+    if connect_results[3] == base_url and is_state_match:
+      return True
+  return False
+
+
+def IsHttpServerConnectable(host, port, tries=3, command='GET', path='/',
+                            expected_read='', timeout=2):
+  """Checks whether the specified http server is ready to serve request or not.
+
+  Args:
+    host: Host name of the HTTP server.
+    port: Port number of the HTTP server.
+    tries: How many times we want to test the connection. The default value is
+           3.
+    command: The http command we use to connect to HTTP server. The default
+             command is 'GET'.
+    path: The path we use when connecting to HTTP server. The default path is
+          '/'.
+    expected_read: The content we expect to read from the response. The default
+                   value is ''.
+    timeout: Timeout (in seconds) for each http connection. The default is 2s.
+
+  Returns:
+    Tuple of (connect status, client error). connect status is a boolean value
+    to indicate whether the server is connectable. client_error is the error
+    message the server returns when connect status is false.
+  """
+  assert tries >= 1
+  for i in xrange(0, tries):
+    client_error = None
+    try:
+      with contextlib.closing(httplib.HTTPConnection(
+          host, port, timeout=timeout)) as http:
+        # Output some debug information when we have tried more than 2 times.
+        http.set_debuglevel(i >= 2)
+        http.request(command, path)
+        r = http.getresponse()
+        content = r.read()
+        if r.status == 200 and r.reason == 'OK' and content == expected_read:
+          return (True, '')
+        client_error = ('Bad response: %s %s version %s\n  ' %
+                        (r.status, r.reason, r.version) +
+                        '\n  '.join([': '.join(h) for h in r.getheaders()]))
+    except (httplib.HTTPException, socket.error) as e:
+      # Probably too quick connecting: try again.
+      exception_error_msgs = traceback.format_exception_only(type(e), e)
+      if exception_error_msgs:
+        client_error = ''.join(exception_error_msgs)
+  # Only returns last client_error.
+  return (False, client_error or 'Timeout')
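
A hedged sketch of the intended flow for these helpers: reset the allocation
once per run, allocate a port for a test server, then poll the server once it
has been started (StartTestServer is a placeholder):

  from pylib import ports

  ports.ResetTestServerPortAllocation()
  port = ports.AllocateTestServerPort()
  if port:
    StartTestServer(port)  # placeholder: launch the server under test
    connectable, error = ports.IsHttpServerConnectable(
        '127.0.0.1', port, tries=3, expected_read='')
    if not connectable:
      print 'Test server did not come up: %s' % error
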
diff --git a/build/android/pylib/remote/__init__.py b/build/android/pylib/remote/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/remote/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/remote/device/__init__.py b/build/android/pylib/remote/device/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/remote/device/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/remote/device/appurify_constants.py b/build/android/pylib/remote/device/appurify_constants.py
new file mode 100644
index 0000000..9343178
--- /dev/null
+++ b/build/android/pylib/remote/device/appurify_constants.py
@@ -0,0 +1,57 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines a set of constants specific to appurify."""
+
+# Appurify network config constants.
+class NETWORK(object):
+  WIFI_1_BAR = 1
+  SPRINT_4G_LTE_4_BARS = 2
+  SPRINT_3G_5_BARS = 3
+  SPRINT_3G_4_BARS = 4
+  SPRINT_3G_3_BARS = 5
+  SPRINT_3G_2_BARS = 6
+  SPRINT_3G_1_BAR = 7
+  SPRING_4G_1_BAR = 8
+  VERIZON_3G_5_BARS = 9
+  VERIZON_3G_4_BARS = 10
+  VERIZON_3G_3_BARS = 11
+  VERIZON_3G_2_BARS = 12
+  VERIZON_3G_1_BAR = 13
+  VERIZON_4G_1_BAR = 14
+  ATANDT_3G_5_BARS = 15
+  ATANDT_3G_4_BARS = 16
+  ATANDT_3G_3_BARS = 17
+  ATANDT_3G_2_BARS = 18
+  ATANDT_3G_1_BAR = 19
+  GENERIC_2G_4_BARS = 20
+  GENERIC_2G_3_BARS = 21
+  GENERIC_EVOLVED_EDGE = 22
+  GENERIC_GPRS = 23
+  GENERIC_ENHANCED_GPRS = 24
+  GENERIC_LTE = 25
+  GENERIC_HIGH_LATENCY_DNS = 26
+  GENERIC_100_PERCENT_PACKET_LOSS = 27
+  ATANDT_HSPA_PLUS = 28
+  ATANDT_4G_LTE_4_BARS = 29
+  VERIZON_4G_LTE_4_BARS = 30
+  GENERIC_DIGITAL_SUBSCRIBE_LINE = 31
+  WIFI_STARBUCKS_3_BARS = 32
+  WIFI_STARBUCKS_4_BARS = 33
+  WIFI_STARBUCKS_HIGH_TRAFFIC = 34
+  WIFI_TARGET_1_BAR = 35
+  WIFI_TARGET_3_BARS = 36
+  WIFI_TARGET_4_BARS = 37
+  PUBLIC_WIFI_MCDONALDS_5_BARS = 38
+  PUBLIC_WIFI_MCDONALDS_4_BARS = 39
+  PUBLIC_WIFI_MCDONALDS_2_BARS = 40
+  PUBLIC_WIFI_MCDONALDS_1_BAR = 41
+  PUBLIC_WIFI_KOHLS_5_BARS = 42
+  PUBLIC_WIFI_KOHLS_4_BARS = 43
+  PUBLIC_WIFI_KOHLS_2_BARS = 44
+  PUBLIC_WIFI_ATANDT_5_BARS = 45
+  PUBLIC_WIFI_ATANDT_4_BARS = 46
+  PUBLIC_WIFI_ATANDT_2_BARS = 47
+  PUBLIC_WIFI_ATANDT_1_BAR = 48
+  BOINGO = 49
\ No newline at end of file
diff --git a/build/android/pylib/remote/device/appurify_sanitized.py b/build/android/pylib/remote/device/appurify_sanitized.py
new file mode 100644
index 0000000..9f6ab40
--- /dev/null
+++ b/build/android/pylib/remote/device/appurify_sanitized.py
@@ -0,0 +1,40 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+import os
+import sys
+
+from pylib import constants
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'requests', 'src'))
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'appurify-python', 'src'))
+handlers_before = list(logging.getLogger().handlers)
+
+import appurify.api
+import appurify.utils
+
+handlers_after = list(logging.getLogger().handlers)
+new_handler = list(set(handlers_after) - set(handlers_before))
+while new_handler:
+  logging.info("Removing logging handler.")
+  logging.getLogger().removeHandler(new_handler.pop())
+
+api = appurify.api
+utils = appurify.utils
+
+# This is not thread safe. If multiple threads are ever supported with appurify
+# this may cause logging messages to go missing.
+@contextlib.contextmanager
+def SanitizeLogging(verbose_count, level):
+  if verbose_count < 2:
+    logging.disable(level)
+    yield True
+    logging.disable(logging.NOTSET)
+  else:
+    yield False
+
diff --git a/build/android/pylib/remote/device/dummy/BUILD.gn b/build/android/pylib/remote/device/dummy/BUILD.gn
new file mode 100644
index 0000000..54ca275
--- /dev/null
+++ b/build/android/pylib/remote/device/dummy/BUILD.gn
@@ -0,0 +1,14 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/android/rules.gni")
+
+# GYP: //build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk
+android_apk("remote_device_dummy_apk") {
+  android_manifest = "//build/android/AndroidManifest.xml"
+  java_files = [ "src/org/chromium/dummy/Dummy.java" ]
+  apk_name = "remote_device_dummy"
+  testonly = true
+}
diff --git a/build/android/pylib/remote/device/dummy/dummy.gyp b/build/android/pylib/remote/device/dummy/dummy.gyp
new file mode 100644
index 0000000..b003edc
--- /dev/null
+++ b/build/android/pylib/remote/device/dummy/dummy.gyp
@@ -0,0 +1,25 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Running gtests on a remote device via am instrument requires both an "app"
+# APK and a "test" APK with different package names. Our gtests only use one
+# APK, so we build a dummy APK to upload as the app.
+
+{
+  'targets': [
+    {
+      # GN: //build/android/pylib/remote/device/dummy:remote_device_dummy_apk
+      'target_name': 'remote_device_dummy_apk',
+      'type': 'none',
+      'variables': {
+        'apk_name': 'remote_device_dummy',
+        'java_in_dir': '.',
+        'android_manifest_path': '../../../../../../build/android/AndroidManifest.xml',
+      },
+      'includes': [
+        '../../../../../../build/java_apk.gypi',
+      ]
+    },
+  ]
+}
diff --git a/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java b/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java
new file mode 100644
index 0000000..1281b39
--- /dev/null
+++ b/build/android/pylib/remote/device/dummy/src/org/chromium/dummy/Dummy.java
@@ -0,0 +1,9 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+package org.chromium.dummy;
+
+/** Does nothing. */
+class Dummy {}
+
diff --git a/build/android/pylib/remote/device/remote_device_environment.py b/build/android/pylib/remote/device/remote_device_environment.py
new file mode 100644
index 0000000..dc11845
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_environment.py
@@ -0,0 +1,368 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Environment setup and teardown for remote devices."""
+
+import distutils.version
+import json
+import logging
+import os
+import random
+import sys
+
+from pylib import constants
+from pylib.base import environment
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_helper
+from pylib.utils import timeout_retry
+from pylib.utils import reraiser_thread
+
+class RemoteDeviceEnvironment(environment.Environment):
+  """An environment for running on remote devices."""
+
+  _ENV_KEY = 'env'
+  _DEVICE_KEY = 'device'
+  _DEFAULT_RETRIES = 0
+
+  def __init__(self, args, error_func):
+    """Constructor.
+
+    Args:
+      args: Command line arguments.
+      error_func: Function called to report bad command line arguments.
+    """
+    super(RemoteDeviceEnvironment, self).__init__()
+    self._access_token = None
+    self._device = None
+    self._device_type = args.device_type
+    self._verbose_count = args.verbose_count
+    self._timeouts = {
+        'queueing': 60 * 10,
+        'installing': 60 * 10,
+        'in-progress': 60 * 30,
+        'unknown': 60 * 5
+    }
+    # Example config file:
+    # {
+    #   "remote_device": ["Galaxy S4", "Galaxy S3"],
+    #   "remote_device_os": ["4.4.2", "4.4.4"],
+    #   "remote_device_minimum_os": "4.4.2",
+    #   "api_address": "www.example.com",
+    #   "api_port": "80",
+    #   "api_protocol": "http",
+    #   "api_secret": "apisecret",
+    #   "api_key": "apikey",
+    #   "timeouts": {
+    #     "queueing": 600,
+    #     "installing": 600,
+    #     "in-progress": 1800,
+    #     "unknown": 300
+    #   }
+    # }
+    if args.remote_device_file:
+      with open(args.remote_device_file) as device_file:
+        device_json = json.load(device_file)
+    else:
+      device_json = {}
+
+    self._api_address = device_json.get('api_address', None)
+    self._api_key = device_json.get('api_key', None)
+    self._api_port = device_json.get('api_port', None)
+    self._api_protocol = device_json.get('api_protocol', None)
+    self._api_secret = device_json.get('api_secret', None)
+    self._device_oem = device_json.get('device_oem', None)
+    self._device_type = device_json.get('device_type', 'Android')
+    self._network_config = device_json.get('network_config', None)
+    self._remote_device = device_json.get('remote_device', None)
+    self._remote_device_minimum_os = device_json.get(
+        'remote_device_minimum_os', None)
+    self._remote_device_os = device_json.get('remote_device_os', None)
+    self._remote_device_timeout = device_json.get(
+        'remote_device_timeout', None)
+    self._results_path = device_json.get('results_path', None)
+    self._runner_package = device_json.get('runner_package', None)
+    self._runner_type = device_json.get('runner_type', None)
+    self._timeouts.update(device_json.get('timeouts', {}))
+
+    def command_line_override(
+        file_value, cmd_line_value, desc, print_value=True):
+      if cmd_line_value:
+        if file_value and file_value != cmd_line_value:
+          if print_value:
+            logging.info('Overriding %s from %s to %s',
+                         desc, file_value, cmd_line_value)
+          else:
+            logging.info('overriding %s', desc)
+        return cmd_line_value
+      return file_value
+
+    self._api_address = command_line_override(
+        self._api_address, args.api_address, 'api_address')
+    self._api_port = command_line_override(
+        self._api_port, args.api_port, 'api_port')
+    self._api_protocol = command_line_override(
+        self._api_protocol, args.api_protocol, 'api_protocol')
+    self._device_oem = command_line_override(
+        self._device_oem, args.device_oem, 'device_oem')
+    self._device_type = command_line_override(
+        self._device_type, args.device_type, 'device_type')
+    self._network_config = command_line_override(
+        self._network_config, args.network_config, 'network_config')
+    self._remote_device = command_line_override(
+        self._remote_device, args.remote_device, 'remote_device')
+    self._remote_device_minimum_os = command_line_override(
+        self._remote_device_minimum_os, args.remote_device_minimum_os,
+        'remote_device_minimum_os')
+    self._remote_device_os = command_line_override(
+        self._remote_device_os, args.remote_device_os, 'remote_device_os')
+    self._remote_device_timeout = command_line_override(
+        self._remote_device_timeout, args.remote_device_timeout,
+        'remote_device_timeout')
+    self._results_path = command_line_override(
+        self._results_path, args.results_path, 'results_path')
+    self._runner_package = command_line_override(
+        self._runner_package, args.runner_package, 'runner_package')
+    self._runner_type = command_line_override(
+        self._runner_type, args.runner_type, 'runner_type')
+
+    if args.api_key_file:
+      with open(args.api_key_file) as api_key_file:
+        temp_key = api_key_file.read().strip()
+        self._api_key = command_line_override(
+            self._api_key, temp_key, 'api_key', print_value=False)
+    self._api_key = command_line_override(
+        self._api_key, args.api_key, 'api_key', print_value=False)
+
+    if args.api_secret_file:
+      with open(args.api_secret_file) as api_secret_file:
+        temp_secret = api_secret_file.read().strip()
+        self._api_secret = command_line_override(
+            self._api_secret, temp_secret, 'api_secret', print_value=False)
+    self._api_secret = command_line_override(
+        self._api_secret, args.api_secret, 'api_secret', print_value=False)
+
+    if not self._api_address:
+      error_func('Must set api address with --api-address'
+                 ' or in --remote-device-file.')
+    if not self._api_key:
+      error_func('Must set api key with --api-key, --api-key-file'
+                 ' or in --remote-device-file')
+    if not self._api_port:
+      error_func('Must set api port with --api-port'
+                 ' or in --remote-device-file')
+    if not self._api_protocol:
+      error_func('Must set api protocol with --api-protocol'
+                 ' or in --remote-device-file. Example: http')
+    if not self._api_secret:
+      error_func('Must set api secret with --api-secret, --api-secret-file'
+                 ' or in --remote-device-file')
+
+    logging.info('Api address: %s', self._api_address)
+    logging.info('Api port: %s', self._api_port)
+    logging.info('Api protocol: %s', self._api_protocol)
+    logging.info('Remote device: %s', self._remote_device)
+    logging.info('Remote device minimum OS: %s',
+                 self._remote_device_minimum_os)
+    logging.info('Remote device OS: %s', self._remote_device_os)
+    logging.info('Remote device OEM: %s', self._device_oem)
+    logging.info('Remote device type: %s', self._device_type)
+    logging.info('Remote device timeout: %s', self._remote_device_timeout)
+    logging.info('Results Path: %s', self._results_path)
+    logging.info('Runner package: %s', self._runner_package)
+    logging.info('Runner type: %s', self._runner_type)
+    logging.info('Timeouts: %s', self._timeouts)
+
+    if not args.trigger and not args.collect:
+      self._trigger = True
+      self._collect = True
+    else:
+      self._trigger = args.trigger
+      self._collect = args.collect
+
+  def SetUp(self):
+    """Set up the test environment."""
+    os.environ['APPURIFY_API_PROTO'] = self._api_protocol
+    os.environ['APPURIFY_API_HOST'] = self._api_address
+    os.environ['APPURIFY_API_PORT'] = self._api_port
+    os.environ['APPURIFY_STATUS_BASE_URL'] = 'none'
+    self._GetAccessToken()
+    if self._trigger:
+      self._SelectDevice()
+
+  def TearDown(self):
+    """Teardown the test environment."""
+    self._RevokeAccessToken()
+
+  def __enter__(self):
+    """Set up the test run when used as a context manager."""
+    try:
+      self.SetUp()
+      return self
+    except:
+      self.__exit__(*sys.exc_info())
+      raise
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Tears down the test run when used as a context manager."""
+    self.TearDown()
+
+  def DumpTo(self, persisted_data):
+    env_data = {
+      self._DEVICE_KEY: self._device,
+    }
+    persisted_data[self._ENV_KEY] = env_data
+
+  def LoadFrom(self, persisted_data):
+    env_data = persisted_data[self._ENV_KEY]
+    self._device = env_data[self._DEVICE_KEY]
+
+  def _GetAccessToken(self):
+    """Generates access token for remote device service."""
+    logging.info('Generating remote service access token')
+    with appurify_sanitized.SanitizeLogging(self._verbose_count,
+                                            logging.WARNING):
+      access_token_results = appurify_sanitized.api.access_token_generate(
+          self._api_key, self._api_secret)
+    remote_device_helper.TestHttpResponse(access_token_results,
+                                          'Unable to generate access token.')
+    self._access_token = access_token_results.json()['response']['access_token']
+
+  def _RevokeAccessToken(self):
+    """Destroys access token for remote device service."""
+    logging.info('Revoking remote service access token')
+    with appurify_sanitized.SanitizeLogging(self._verbose_count,
+                                            logging.WARNING):
+      revoke_token_results = appurify_sanitized.api.access_token_revoke(
+          self._access_token)
+    remote_device_helper.TestHttpResponse(revoke_token_results,
+                                          'Unable to revoke access token.')
+
+  def _SelectDevice(self):
+    if self._remote_device_timeout:
+      try:
+        timeout_retry.Run(self._FindDeviceWithTimeout,
+                          self._remote_device_timeout, self._DEFAULT_RETRIES)
+      except reraiser_thread.TimeoutError:
+        self._NoDeviceFound()
+    else:
+      if not self._FindDevice():
+        self._NoDeviceFound()
+
+  def _FindDevice(self):
+    """Find which device to use."""
+    logging.info('Finding device to run tests on.')
+    device_list = self._GetDeviceList()
+    random.shuffle(device_list)
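+    # Scan the shuffled list and pick the first device that matches every
+    # configured filter (type, name, OS version, OEM, minimum OS) and is
+    # currently available.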
+    for device in device_list:
+      if device['os_name'] != self._device_type:
+        continue
+      if self._remote_device and device['name'] not in self._remote_device:
+        continue
+      if (self._remote_device_os
+          and device['os_version'] not in self._remote_device_os):
+        continue
+      if self._device_oem and device['brand'] not in self._device_oem:
+        continue
+      if (self._remote_device_minimum_os
+          and distutils.version.LooseVersion(device['os_version'])
+          < distutils.version.LooseVersion(self._remote_device_minimum_os)):
+        continue
+      if device['has_available_device']:
+        logging.info('Found device: %s %s',
+                     device['name'], device['os_version'])
+        self._device = device
+        return True
+    return False
+
+  def _FindDeviceWithTimeout(self):
+    """Find which device to use with timeout."""
+    timeout_retry.WaitFor(self._FindDevice, wait_period=1)
+
+  def _PrintAvailableDevices(self, device_list):
+    def compare_devices(a, b):
+      for key in ('os_version', 'name'):
+        c = cmp(a[key], b[key])
+        if c:
+          return c
+      return 0
+
+    logging.critical('Available %s Devices:', self._device_type)
+    logging.critical(
+        '  %s %s %s %s %s',
+        'OS'.ljust(10),
+        'Device Name'.ljust(30),
+        'Available'.ljust(10),
+        'Busy'.ljust(10),
+        'All'.ljust(10))
+    devices = (d for d in device_list if d['os_name'] == self._device_type)
+    for d in sorted(devices, compare_devices):
+      logging.critical(
+          '  %s %s %s %s %s',
+          d['os_version'].ljust(10),
+          d['name'].ljust(30),
+          str(d['available_devices_count']).ljust(10),
+          str(d['busy_devices_count']).ljust(10),
+          str(d['all_devices_count']).ljust(10))
+
+  def _GetDeviceList(self):
+    with appurify_sanitized.SanitizeLogging(self._verbose_count,
+                                            logging.WARNING):
+      dev_list_res = appurify_sanitized.api.devices_list(self._access_token)
+    remote_device_helper.TestHttpResponse(dev_list_res,
+                                          'Unable to get device list.')
+    return dev_list_res.json()['response']
+
+  def _NoDeviceFound(self):
+    self._PrintAvailableDevices(self._GetDeviceList())
+    raise remote_device_helper.RemoteDeviceError(
+        'No device found.', is_infra_error=True)
+
+  @property
+  def collect(self):
+    return self._collect
+
+  @property
+  def device_type_id(self):
+    return self._device['device_type_id']
+
+  @property
+  def network_config(self):
+    return self._network_config
+
+  @property
+  def only_output_failures(self):
+    # TODO(jbudorick): Remove this once b/18981674 is fixed.
+    return True
+
+  @property
+  def results_path(self):
+    return self._results_path
+
+  @property
+  def runner_package(self):
+    return self._runner_package
+
+  @property
+  def runner_type(self):
+    return self._runner_type
+
+  @property
+  def timeouts(self):
+    return self._timeouts
+
+  @property
+  def token(self):
+    return self._access_token
+
+  @property
+  def trigger(self):
+    return self._trigger
+
+  @property
+  def verbose_count(self):
+    return self._verbose_count
+
+  @property
+  def device_type(self):
+    return self._device_type
diff --git a/build/android/pylib/remote/device/remote_device_gtest_run.py b/build/android/pylib/remote/device/remote_device_gtest_run.py
new file mode 100644
index 0000000..98d41e4
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_gtest_run.py
@@ -0,0 +1,81 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import logging
+import os
+import sys
+import tempfile
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_test_run
+from pylib.remote.device import remote_device_helper
+
+
+_EXTRA_COMMAND_LINE_FILE = (
+    'org.chromium.native_test.NativeTestActivity.CommandLineFile')
+
+
+class RemoteDeviceGtestTestRun(remote_device_test_run.RemoteDeviceTestRun):
+  """Run gtests and uirobot tests on a remote device."""
+
+  DEFAULT_RUNNER_PACKAGE = (
+      'org.chromium.native_test.NativeTestInstrumentationTestRunner')
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.suite
+
+  #override
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    logging.info('Triggering test run.')
+
+    if self._env.runner_type:
+      logging.warning('Ignoring configured runner_type "%s"',
+                      self._env.runner_type)
+
+    if not self._env.runner_package:
+      runner_package = self.DEFAULT_RUNNER_PACKAGE
+      logging.info('Using default runner package: %s',
+                   self.DEFAULT_RUNNER_PACKAGE)
+    else:
+      runner_package = self._env.runner_package
+
+    dummy_app_path = os.path.join(
+        constants.GetOutDirectory(), 'apks', 'remote_device_dummy.apk')
+    with tempfile.NamedTemporaryFile(suffix='.flags.txt') as flag_file:
+      env_vars = {}
+      filter_string = self._test_instance._GenerateDisabledFilterString(None)
+      if filter_string:
+        flag_file.write('_ --gtest_filter=%s' % filter_string)
+        flag_file.flush()
+        env_vars[_EXTRA_COMMAND_LINE_FILE] = os.path.basename(flag_file.name)
+        self._test_instance._data_deps.append(
+            (os.path.abspath(flag_file.name), None))
+      self._AmInstrumentTestSetup(
+          dummy_app_path, self._test_instance.apk, runner_package,
+          environment_variables=env_vars)
+
+  _INSTRUMENTATION_STREAM_LEADER = 'INSTRUMENTATION_STATUS: stream='
+
+  #override
+  def _ParseTestResults(self):
+    logging.info('Parsing results from stdout.')
+    results = base_test_result.TestRunResults()
+    output = self._results['results']['output'].splitlines()
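+    # The remote service returns the instrumentation output; keep only the
+    # INSTRUMENTATION_STATUS stream lines and strip the leader prefix before
+    # handing them to the gtest output parser.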
+    output = (l[len(self._INSTRUMENTATION_STREAM_LEADER):] for l in output
+              if l.startswith(self._INSTRUMENTATION_STREAM_LEADER))
+    results_list = self._test_instance.ParseGTestOutput(output)
+    results.AddResults(results_list)
+    if self._env.only_output_failures:
+      logging.info('See logcat for more results information.')
+    if not self._results['results']['pass']:
+      results.AddResult(base_test_result.BaseTestResult(
+          'Remote Service detected error.',
+          base_test_result.ResultType.FAIL))
+    return results
diff --git a/build/android/pylib/remote/device/remote_device_helper.py b/build/android/pylib/remote/device/remote_device_helper.py
new file mode 100644
index 0000000..896ae99
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_helper.py
@@ -0,0 +1,24 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Common functions and Exceptions for remote_device_*"""
+
+from pylib.utils import base_error
+
+
+class RemoteDeviceError(base_error.BaseError):
+  """Exception to throw when problems occur with remote device service."""
+  pass
+
+
+def TestHttpResponse(response, error_msg):
+  """Checks the HTTP response from the remote device service.
+
+  Args:
+      response: HTTP response object returned by the remote device service.
+      error_msg: Error message to display if a bad response is seen.
+  """
+  if response.status_code != 200:
+    raise RemoteDeviceError(
+        '%s (%d: %s)' % (error_msg, response.status_code, response.reason))
diff --git a/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py b/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py
new file mode 100644
index 0000000..bcdb90c
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_instrumentation_test_run.py
@@ -0,0 +1,74 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import logging
+import os
+import tempfile
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.remote.device import remote_device_test_run
+from pylib.utils import apk_helper
+
+
+class RemoteDeviceInstrumentationTestRun(
+    remote_device_test_run.RemoteDeviceTestRun):
+  """Run instrumentation tests on a remote device."""
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.test_package
+
+  #override
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    logging.info('Triggering test run.')
+
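+    # The full test list is written to a temp file that is shipped as a data
+    # dependency; the driver locates it via the environment extras set below.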
+    with tempfile.NamedTemporaryFile(suffix='.txt') as test_list_file:
+      tests = self._test_instance.GetTests()
+      logging.debug('preparing to run %d instrumentation tests remotely:',
+                    len(tests))
+      for t in tests:
+        test_name = '%s#%s' % (t['class'], t['method'])
+        logging.debug('  %s', test_name)
+        test_list_file.write('%s\n' % test_name)
+      test_list_file.flush()
+      self._test_instance._data_deps.append(
+          (os.path.abspath(test_list_file.name), None))
+
+      env_vars = self._test_instance.GetDriverEnvironmentVars(
+          test_list_file_path=test_list_file.name)
+      env_vars.update(self._test_instance.GetHttpServerEnvironmentVars())
+
+      logging.debug('extras:')
+      for k, v in env_vars.iteritems():
+        logging.debug('  %s: %s', k, v)
+
+      self._AmInstrumentTestSetup(
+          self._test_instance.apk_under_test,
+          self._test_instance.driver_apk,
+          self._test_instance.driver_name,
+          environment_variables=env_vars,
+          extra_apks=[self._test_instance.test_apk])
+
+  #override
+  def _ParseTestResults(self):
+    logging.info('Parsing results from stdout.')
+    r = base_test_result.TestRunResults()
+    result_code, result_bundle, statuses = (
+        self._test_instance.ParseAmInstrumentRawOutput(
+            self._results['results']['output'].splitlines()))
+    result = self._test_instance.GenerateTestResults(
+        result_code, result_bundle, statuses, 0, 0)
+
+    if isinstance(result, base_test_result.BaseTestResult):
+      r.AddResult(result)
+    elif isinstance(result, list):
+      r.AddResults(result)
+    else:
+      raise Exception('Unexpected result type: %s' % type(result).__name__)
+
+    return r
diff --git a/build/android/pylib/remote/device/remote_device_test_run.py b/build/android/pylib/remote/device/remote_device_test_run.py
new file mode 100644
index 0000000..60cc735
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_test_run.py
@@ -0,0 +1,308 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import json
+import logging
+import os
+import sys
+import tempfile
+import time
+import zipfile
+
+from pylib import constants
+from pylib.base import test_run
+from pylib.remote.device import appurify_constants
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_helper
+from pylib.utils import zip_utils
+
+class RemoteDeviceTestRun(test_run.TestRun):
+  """Run tests on a remote device."""
+
+  _TEST_RUN_KEY = 'test_run'
+  _TEST_RUN_ID_KEY = 'test_run_id'
+
+  WAIT_TIME = 5
+  COMPLETE = 'complete'
+  HEARTBEAT_INTERVAL = 300
+
+  def __init__(self, env, test_instance):
+    """Constructor.
+
+    Args:
+      env: Environment the tests will run in.
+      test_instance: The test that will be run.
+    """
+    super(RemoteDeviceTestRun, self).__init__(env, test_instance)
+    self._env = env
+    self._test_instance = test_instance
+    self._app_id = ''
+    self._test_id = ''
+    self._results = ''
+    self._test_run_id = ''
+
+  #override
+  def SetUp(self):
+    """Set up a test run."""
+    if self._env.trigger:
+      self._TriggerSetUp()
+    elif self._env.collect:
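+      # When only collecting, restore the environment and test_run_id that the
+      # trigger phase persisted to this file.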
+      assert isinstance(self._env.collect, basestring), (
+                        'File for storing test_run_id must be a string.')
+      with open(self._env.collect, 'r') as persisted_data_file:
+        persisted_data = json.loads(persisted_data_file.read())
+        self._env.LoadFrom(persisted_data)
+        self.LoadFrom(persisted_data)
+
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    raise NotImplementedError
+
+  #override
+  def RunTests(self):
+    """Run the test."""
+    if self._env.trigger:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        test_start_res = appurify_sanitized.api.tests_run(
+            self._env.token, self._env.device_type_id, self._app_id,
+            self._test_id)
+      remote_device_helper.TestHttpResponse(
+        test_start_res, 'Unable to run test.')
+      self._test_run_id = test_start_res.json()['response']['test_run_id']
+      logging.info('Test run id: %s' % self._test_run_id)
+
+    if self._env.collect:
+      current_status = ''
+      timeout_counter = 0
+      heartbeat_counter = 0
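+      # Poll the remote service until the run completes. The per-state timeout
+      # counter resets whenever the detailed status changes; a heartbeat is
+      # logged every HEARTBEAT_INTERVAL seconds of unchanged status.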
+      while self._GetTestStatus(self._test_run_id) != self.COMPLETE:
+        if self._results['detailed_status'] != current_status:
+          logging.info('Test status: %s', self._results['detailed_status'])
+          current_status = self._results['detailed_status']
+          timeout_counter = 0
+          heartbeat_counter = 0
+        if heartbeat_counter > self.HEARTBEAT_INTERVAL:
+          logging.info('Test status: %s', self._results['detailed_status'])
+          heartbeat_counter = 0
+
+        timeout = self._env.timeouts.get(
+            current_status, self._env.timeouts['unknown'])
+        if timeout_counter > timeout:
+          raise remote_device_helper.RemoteDeviceError(
+              'Timeout while in %s state for %s seconds'
+              % (current_status, timeout),
+              is_infra_error=True)
+        time.sleep(self.WAIT_TIME)
+        timeout_counter += self.WAIT_TIME
+        heartbeat_counter += self.WAIT_TIME
+      self._DownloadTestResults(self._env.results_path)
+
+      if self._results['results']['exception']:
+        raise remote_device_helper.RemoteDeviceError(
+            self._results['results']['exception'], is_infra_error=True)
+
+      return self._ParseTestResults()
+
+  #override
+  def TearDown(self):
+    """Tear down the test run."""
+    if self._env.collect:
+      self._CollectTearDown()
+    elif self._env.trigger:
+      assert isinstance(self._env.trigger, basestring), (
+                        'File for storing test_run_id must be a string.')
+      with open(self._env.trigger, 'w') as persisted_data_file:
+        persisted_data = {}
+        self.DumpTo(persisted_data)
+        self._env.DumpTo(persisted_data)
+        persisted_data_file.write(json.dumps(persisted_data))
+
+  def _CollectTearDown(self):
+    if self._GetTestStatus(self._test_run_id) != self.COMPLETE:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        test_abort_res = appurify_sanitized.api.tests_abort(
+            self._env.token, self._test_run_id, reason='Test runner exiting.')
+      remote_device_helper.TestHttpResponse(test_abort_res,
+                                            'Unable to abort test.')
+
+  def __enter__(self):
+    """Set up the test run when used as a context manager."""
+    self.SetUp()
+    return self
+
+  def __exit__(self, exc_type, exc_val, exc_tb):
+    """Tear down the test run when used as a context manager."""
+    self.TearDown()
+
+  def DumpTo(self, persisted_data):
+    test_run_data = {
+      self._TEST_RUN_ID_KEY: self._test_run_id,
+    }
+    persisted_data[self._TEST_RUN_KEY] = test_run_data
+
+  def LoadFrom(self, persisted_data):
+    test_run_data = persisted_data[self._TEST_RUN_KEY]
+    self._test_run_id = test_run_data[self._TEST_RUN_ID_KEY]
+
+  def _ParseTestResults(self):
+    raise NotImplementedError
+
+  def _GetTestByName(self, test_name):
+    """Gets test_id for specific test.
+
+    Args:
+      test_name: Test to find the ID of.
+    """
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_list_res = appurify_sanitized.api.tests_list(self._env.token)
+    remote_device_helper.TestHttpResponse(test_list_res,
+                                          'Unable to get tests list.')
+    for test in test_list_res.json()['response']:
+      if test['test_type'] == test_name:
+        return test['test_id']
+    raise remote_device_helper.RemoteDeviceError(
+        'No test found with name %s' % (test_name))
+
+  def _DownloadTestResults(self, results_path):
+    """Download the test results from remote device service.
+
+    Args:
+      results_path: Path to download appurify results zipfile.
+    """
+    if results_path:
+      logging.info('Downloading results to %s.' % results_path)
+      if not os.path.exists(os.path.dirname(results_path)):
+        os.makedirs(os.path.dirname(results_path))
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        appurify_sanitized.utils.wget(self._results['results']['url'],
+                                      results_path)
+
+  def _GetTestStatus(self, test_run_id):
+    """Checks the state of the test and sets self._results.
+
+    Args:
+      test_run_id: ID of the test run on the remote service.
+    """
+
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_check_res = appurify_sanitized.api.tests_check_result(
+          self._env.token, test_run_id)
+    remote_device_helper.TestHttpResponse(test_check_res,
+                                          'Unable to get test status.')
+    self._results = test_check_res.json()['response']
+    return self._results['status']
+
+  def _AmInstrumentTestSetup(self, app_path, test_path, runner_package,
+                             environment_variables, extra_apks=None):
+    config = {'runner': runner_package}
+    if environment_variables:
+      config['environment_vars'] = ','.join(
+          '%s=%s' % (k, v) for k, v in environment_variables.iteritems())
+
+    self._app_id = self._UploadAppToDevice(app_path)
+
+    data_deps = self._test_instance.GetDataDependencies()
+    if data_deps:
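+      # Bundle the test and its data dependencies into a single zip: directory
+      # deps are flattened onto the device's sdcard, file deps keep their
+      # basenames, and extra APKs are listed separately in the config.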
+      with tempfile.NamedTemporaryFile(suffix='.zip') as test_with_deps:
+        sdcard_files = []
+        additional_apks = []
+        host_test = os.path.basename(test_path)
+        with zipfile.ZipFile(test_with_deps.name, 'w') as zip_file:
+          zip_file.write(test_path, host_test, zipfile.ZIP_DEFLATED)
+          for h, _ in data_deps:
+            if os.path.isdir(h):
+              zip_utils.WriteToZipFile(zip_file, h, '.')
+              sdcard_files.extend(os.listdir(h))
+            else:
+              zip_utils.WriteToZipFile(zip_file, h, os.path.basename(h))
+              sdcard_files.append(os.path.basename(h))
+          for a in extra_apks or ():
+            zip_utils.WriteToZipFile(zip_file, a, os.path.basename(a))
+            additional_apks.append(os.path.basename(a))
+
+        config['sdcard_files'] = ','.join(sdcard_files)
+        config['host_test'] = host_test
+        if additional_apks:
+          config['additional_apks'] = ','.join(additional_apks)
+        self._test_id = self._UploadTestToDevice(
+            'robotium', test_with_deps.name, app_id=self._app_id)
+    else:
+      self._test_id = self._UploadTestToDevice('robotium', test_path)
+
+    logging.info('Setting config: %s' % config)
+    appurify_configs = {}
+    if self._env.network_config:
+      appurify_configs['network'] = self._env.network_config
+    self._SetTestConfig('robotium', config, **appurify_configs)
+
+  def _UploadAppToDevice(self, app_path):
+    """Upload app to device."""
+    logging.info('Uploading %s to remote service as %s.', app_path,
+                 self._test_instance.suite)
+    with open(app_path, 'rb') as apk_src:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        upload_results = appurify_sanitized.api.apps_upload(
+            self._env.token, apk_src, 'raw', name=self._test_instance.suite)
+      remote_device_helper.TestHttpResponse(
+          upload_results, 'Unable to upload %s.' % app_path)
+      return upload_results.json()['response']['app_id']
+
+  def _UploadTestToDevice(self, test_type, test_path, app_id=None):
+    """Uploads a test to the remote device service.
+
+    Args:
+      test_type: Type of test being uploaded (e.g. uirobot, gtest).
+      test_path: Path to the test package to upload.
+      app_id: Optional ID of a previously uploaded app to associate with.
+    """
+    logging.info('Uploading %s to remote service.' % test_path)
+    with open(test_path, 'rb') as test_src:
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        upload_results = appurify_sanitized.api.tests_upload(
+            self._env.token, test_src, 'raw', test_type, app_id=app_id)
+      remote_device_helper.TestHttpResponse(upload_results,
+          'Unable to upload %s.' % test_path)
+      return upload_results.json()['response']['test_id']
+
+  def _SetTestConfig(self, runner_type, runner_configs,
+                     network=appurify_constants.NETWORK.WIFI_1_BAR,
+                     pcap=0, profiler=0, videocapture=0):
+    """Generates and uploads the config file for the test.
+
+    Args:
+      runner_type: Type of test runner the config is for.
+      runner_configs: Configs specific to the runner being used.
+      network: Config to specify the network environment the devices running
+          the tests will be in.
+      pcap: Option to enable recording of network traffic from the device.
+      profiler: Option to enable recording of CPU, memory, and network
+          transfer usage during the tests.
+      videocapture: Option to enable video capture during the tests.
+    """
+    logging.info('Generating config file for test.')
+    with tempfile.TemporaryFile() as config:
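+      # The uploaded config is a simple INI-style file: an [appurify] section
+      # with device/recording options, followed by a [<runner_type>] section
+      # with the runner-specific key=value settings.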
+      config_data = [
+          '[appurify]',
+          'network=%s' % network,
+          'pcap=%s' % pcap,
+          'profiler=%s' % profiler,
+          'videocapture=%s' % videocapture,
+          '[%s]' % runner_type
+      ]
+      config_data.extend(
+          '%s=%s' % (k, v) for k, v in runner_configs.iteritems())
+      config.write(''.join('%s\n' % l for l in config_data))
+      config.flush()
+      config.seek(0)
+      with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                              logging.WARNING):
+        config_response = appurify_sanitized.api.config_upload(
+            self._env.token, config, self._test_id)
+      remote_device_helper.TestHttpResponse(
+          config_response, 'Unable to upload test config.')
diff --git a/build/android/pylib/remote/device/remote_device_uirobot_test_run.py b/build/android/pylib/remote/device/remote_device_uirobot_test_run.py
new file mode 100644
index 0000000..f818c98
--- /dev/null
+++ b/build/android/pylib/remote/device/remote_device_uirobot_test_run.py
@@ -0,0 +1,88 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Run specific test on specific environment."""
+
+import logging
+import os
+import sys
+
+from pylib import constants
+from pylib.base import base_test_result
+from pylib.remote.device import appurify_sanitized
+from pylib.remote.device import remote_device_test_run
+from pylib.remote.device import remote_device_helper
+
+
+class RemoteDeviceUirobotTestRun(remote_device_test_run.RemoteDeviceTestRun):
+  """Run uirobot tests on a remote device."""
+
+
+  def __init__(self, env, test_instance):
+    """Constructor.
+
+    Args:
+      env: Environment the tests will run in.
+      test_instance: The test that will be run.
+    """
+    super(RemoteDeviceUirobotTestRun, self).__init__(env, test_instance)
+
+  #override
+  def TestPackage(self):
+    return self._test_instance.package_name
+
+  #override
+  def _TriggerSetUp(self):
+    """Set up the triggering of a test run."""
+    logging.info('Triggering test run.')
+
+    if self._env.device_type == 'Android':
+      default_runner_type = 'android_robot'
+    elif self._env.device_type == 'iOS':
+      default_runner_type = 'ios_robot'
+    else:
+      raise remote_device_helper.RemoteDeviceError(
+          'Unknown device type: %s' % self._env.device_type)
+
+    self._app_id = self._UploadAppToDevice(self._test_instance.app_under_test)
+    if not self._env.runner_type:
+      runner_type = default_runner_type
+      logging.info('Using default runner type: %s', default_runner_type)
+    else:
+      runner_type = self._env.runner_type
+
+    self._test_id = self._UploadTestToDevice(
+        'android_robot', None, app_id=self._app_id)
+    config_body = {'duration': self._test_instance.minutes}
+    self._SetTestConfig(runner_type, config_body)
+
+
+  # TODO(rnephew): Switch to base class implementation when supported.
+  #override
+  def _UploadTestToDevice(self, test_type, test_path, app_id=None):
+    if test_path:
+      logging.info("Ignoring test path.")
+    data = {
+        'access_token': self._env.token,
+        'test_type': test_type,
+        'app_id': app_id,
+    }
+    with appurify_sanitized.SanitizeLogging(self._env.verbose_count,
+                                            logging.WARNING):
+      test_upload_res = appurify_sanitized.utils.post('tests/upload',
+                                                      data, None)
+    remote_device_helper.TestHttpResponse(
+        test_upload_res, 'Unable to get UiRobot test id.')
+    return test_upload_res.json()['response']['test_id']
+
+  #override
+  def _ParseTestResults(self):
+    logging.info('Parsing results from remote service.')
+    results = base_test_result.TestRunResults()
+    if self._results['results']['pass']:
+      result_type = base_test_result.ResultType.PASS
+    else:
+      result_type = base_test_result.ResultType.FAIL
+    results.AddResult(base_test_result.BaseTestResult('uirobot', result_type))
+    return results
diff --git a/build/android/pylib/restart_adbd.sh b/build/android/pylib/restart_adbd.sh
new file mode 100755
index 0000000..393b2eb
--- /dev/null
+++ b/build/android/pylib/restart_adbd.sh
@@ -0,0 +1,20 @@
+#!/system/bin/sh
+
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Android shell script to restart adbd on the device. This has to be run
+# atomically as a shell script because stopping adbd prevents further commands
+# from running (even if called in the same adb shell).
+
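+# Ignoring HUP/TERM/PIPE keeps the restart sequence running even though
+# stopping adbd tears down the shell session that launched this script.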
+trap '' HUP
+trap '' TERM
+trap '' PIPE
+
+function restart() {
+  stop adbd
+  start adbd
+}
+
+restart &
diff --git a/build/android/pylib/results/__init__.py b/build/android/pylib/results/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/results/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/results/flakiness_dashboard/__init__.py b/build/android/pylib/results/flakiness_dashboard/__init__.py
new file mode 100644
index 0000000..4d6aabb
--- /dev/null
+++ b/build/android/pylib/results/flakiness_dashboard/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/results/flakiness_dashboard/json_results_generator.py b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
new file mode 100644
index 0000000..e5c433d
--- /dev/null
+++ b/build/android/pylib/results/flakiness_dashboard/json_results_generator.py
@@ -0,0 +1,697 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# Tools/Scripts/webkitpy/layout_tests/layout_package/json_results_generator.py
+# Tools/Scripts/webkitpy/common/net/file_uploader.py
+#
+
+import json
+import logging
+import mimetypes
+import os
+import time
+import urllib2
+
+_log = logging.getLogger(__name__)
+
+_JSON_PREFIX = 'ADD_RESULTS('
+_JSON_SUFFIX = ');'
+
+
+def HasJSONWrapper(string):
+  return string.startswith(_JSON_PREFIX) and string.endswith(_JSON_SUFFIX)
+
+
+def StripJSONWrapper(json_content):
+  # FIXME: Kill this code once the server returns json instead of jsonp.
+  if HasJSONWrapper(json_content):
+    return json_content[len(_JSON_PREFIX):len(json_content) - len(_JSON_SUFFIX)]
+  return json_content
+
+
+def WriteJSON(json_object, file_path, callback=None):
+  # Specify separators in order to get compact encoding.
+  json_string = json.dumps(json_object, separators=(',', ':'))
+  if callback:
+    json_string = callback + '(' + json_string + ');'
+  with open(file_path, 'w') as fp:
+    fp.write(json_string)
+
+
+def ConvertTrieToFlatPaths(trie, prefix=None):
+  """Flattens the trie of paths, prepending a prefix to each."""
+  result = {}
+  for name, data in trie.iteritems():
+    if prefix:
+      name = prefix + '/' + name
+
+    if len(data) and not 'results' in data:
+      result.update(ConvertTrieToFlatPaths(data, name))
+    else:
+      result[name] = data
+
+  return result
+
+
+def AddPathToTrie(path, value, trie):
+  """Inserts a single path and value into a directory trie structure."""
+  if not '/' in path:
+    trie[path] = value
+    return
+
+  directory, _slash, rest = path.partition('/')
+  if not directory in trie:
+    trie[directory] = {}
+  AddPathToTrie(rest, value, trie[directory])
+
+
+def TestTimingsTrie(individual_test_timings):
+  """Breaks a test name into dicts by directory
+
+  foo/bar/baz.html: 1ms
+  foo/bar/baz1.html: 3ms
+
+  becomes
+  foo: {
+      bar: {
+          baz.html: 1,
+          baz1.html: 3
+      }
+  }
+  """
+  trie = {}
+  for test_result in individual_test_timings:
+    test = test_result.test_name
+
+    AddPathToTrie(test, int(1000 * test_result.test_run_time), trie)
+
+  return trie
+
+
+class TestResult(object):
+  """A simple class that represents a single test result."""
+
+  # Test modifier constants.
+  (NONE, FAILS, FLAKY, DISABLED) = range(4)
+
+  def __init__(self, test, failed=False, elapsed_time=0):
+    self.test_name = test
+    self.failed = failed
+    self.test_run_time = elapsed_time
+
+    test_name = test
+    try:
+      test_name = test.split('.')[1]
+    except IndexError:
+      _log.warn('Invalid test name: %s.', test)
+
+    if test_name.startswith('FAILS_'):
+      self.modifier = self.FAILS
+    elif test_name.startswith('FLAKY_'):
+      self.modifier = self.FLAKY
+    elif test_name.startswith('DISABLED_'):
+      self.modifier = self.DISABLED
+    else:
+      self.modifier = self.NONE
+
+  def Fixable(self):
+    return self.failed or self.modifier == self.DISABLED
+
+
+class JSONResultsGeneratorBase(object):
+  """A JSON results generator for generic tests."""
+
+  MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG = 750
+  # Min time (seconds) that will be added to the JSON.
+  MIN_TIME = 1
+
+  # Note that in non-chromium tests those chars are used to indicate
+  # test modifiers (FAILS, FLAKY, etc) but not actual test results.
+  PASS_RESULT = 'P'
+  SKIP_RESULT = 'X'
+  FAIL_RESULT = 'F'
+  FLAKY_RESULT = 'L'
+  NO_DATA_RESULT = 'N'
+
+  MODIFIER_TO_CHAR = {TestResult.NONE: PASS_RESULT,
+                      TestResult.DISABLED: SKIP_RESULT,
+                      TestResult.FAILS: FAIL_RESULT,
+                      TestResult.FLAKY: FLAKY_RESULT}
+
+  VERSION = 4
+  VERSION_KEY = 'version'
+  RESULTS = 'results'
+  TIMES = 'times'
+  BUILD_NUMBERS = 'buildNumbers'
+  TIME = 'secondsSinceEpoch'
+  TESTS = 'tests'
+
+  FIXABLE_COUNT = 'fixableCount'
+  FIXABLE = 'fixableCounts'
+  ALL_FIXABLE_COUNT = 'allFixableCount'
+
+  RESULTS_FILENAME = 'results.json'
+  TIMES_MS_FILENAME = 'times_ms.json'
+  INCREMENTAL_RESULTS_FILENAME = 'incremental_results.json'
+
+  # line too long pylint: disable=line-too-long
+  URL_FOR_TEST_LIST_JSON = (
+      'http://%s/testfile?builder=%s&name=%s&testlistjson=1&testtype=%s&master=%s')
+  # pylint: enable=line-too-long
+
+  def __init__(self, builder_name, build_name, build_number,
+               results_file_base_path, builder_base_url,
+               test_results_map, svn_repositories=None,
+               test_results_server=None,
+               test_type='',
+               master_name=''):
+    """Modifies the results.json file. Grabs it off the archive directory
+    if it is not found locally.
+
+    Args:
+      builder_name: the builder name (e.g. Webkit).
+      build_name: the build name (e.g. webkit-rel).
+      build_number: the build number.
+      results_file_base_path: Absolute path to the directory containing the
+          results json file.
+      builder_base_url: the URL where we have the archived test results.
+          If this is None no archived results will be retrieved.
+      test_results_map: A dictionary that maps test_name to TestResult.
+      svn_repositories: A (json_field_name, svn_path) pair for SVN
+          repositories that tests rely on.  The SVN revision will be
+          included in the JSON with the given json_field_name.
+      test_results_server: server that hosts test results json.
+      test_type: test type string (e.g. 'layout-tests').
+      master_name: the name of the buildbot master.
+    """
+    self._builder_name = builder_name
+    self._build_name = build_name
+    self._build_number = build_number
+    self._builder_base_url = builder_base_url
+    self._results_directory = results_file_base_path
+
+    self._test_results_map = test_results_map
+    self._test_results = test_results_map.values()
+
+    self._svn_repositories = svn_repositories
+    if not self._svn_repositories:
+      self._svn_repositories = {}
+
+    self._test_results_server = test_results_server
+    self._test_type = test_type
+    self._master_name = master_name
+
+    self._archived_results = None
+
+  def GenerateJSONOutput(self):
+    json_object = self.GetJSON()
+    if json_object:
+      file_path = (
+          os.path.join(
+              self._results_directory,
+              self.INCREMENTAL_RESULTS_FILENAME))
+      WriteJSON(json_object, file_path)
+
+  def GenerateTimesMSFile(self):
+    times = TestTimingsTrie(self._test_results_map.values())
+    file_path = os.path.join(self._results_directory, self.TIMES_MS_FILENAME)
+    WriteJSON(times, file_path)
+
+  def GetJSON(self):
+    """Gets the results for the results.json file."""
+    results_json, error = self._GetArchivedJSONResults()
+    if error:
+      # If there was an error don't write a results.json
+      # file at all as it would lose all the information on the
+      # bot.
+      _log.error('Archive directory is inaccessible. Not '
+                 'modifying or clobbering the results.json '
+                 'file: ' + str(error))
+      return None
+
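+    # Upgrade the archived JSON to the current format version, then append
+    # this build's metadata, failure summaries, and per-test results/times.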
+    builder_name = self._builder_name
+    if results_json and builder_name not in results_json:
+      _log.debug('Builder name (%s) is not in the results.json file.'
+                 % builder_name)
+
+    self._ConvertJSONToCurrentVersion(results_json)
+
+    if builder_name not in results_json:
+      results_json[builder_name] = (
+          self._CreateResultsForBuilderJSON())
+
+    results_for_builder = results_json[builder_name]
+
+    if builder_name:
+      self._InsertGenericMetaData(results_for_builder)
+
+    self._InsertFailureSummaries(results_for_builder)
+
+    # Update all the failing tests with result type and time.
+    tests = results_for_builder[self.TESTS]
+    all_failing_tests = self._GetFailedTestNames()
+    all_failing_tests.update(ConvertTrieToFlatPaths(tests))
+
+    for test in all_failing_tests:
+      self._InsertTestTimeAndResult(test, tests)
+
+    return results_json
+
+  def SetArchivedResults(self, archived_results):
+    self._archived_results = archived_results
+
+  def UploadJSONFiles(self, json_files):
+    """Uploads the given json_files to the test_results_server (if the
+    test_results_server is given)."""
+    if not self._test_results_server:
+      return
+
+    if not self._master_name:
+      _log.error(
+          '--test-results-server was set, but --master-name was not.  Not '
+          'uploading JSON files.')
+      return
+
+    _log.info('Uploading JSON files for builder: %s', self._builder_name)
+    attrs = [('builder', self._builder_name),
+             ('testtype', self._test_type),
+             ('master', self._master_name)]
+
+    files = [(json_file, os.path.join(self._results_directory, json_file))
+             for json_file in json_files]
+
+    url = 'http://%s/testfile/upload' % self._test_results_server
+    # Set uploading timeout in case appengine server is having problems.
+    # 120 seconds are more than enough to upload test results.
+    uploader = _FileUploader(url, 120)
+    try:
+      response = uploader.UploadAsMultipartFormData(files, attrs)
+      if response:
+        if response.code == 200:
+          _log.info('JSON uploaded.')
+        else:
+          _log.debug(
+              "JSON upload failed, %d: '%s'" %
+              (response.code, response.read()))
+      else:
+        _log.error('JSON upload failed; no response returned')
+    except Exception as err:
+      _log.error('Upload failed: %s', err)
+      return
+
+  def _GetTestTiming(self, test_name):
+    """Returns test timing data (elapsed time) in seconds
+    for the given test_name."""
+    if test_name in self._test_results_map:
+      # Floor for now to get time in seconds.
+      return int(self._test_results_map[test_name].test_run_time)
+    return 0
+
+  def _GetFailedTestNames(self):
+    """Returns a set of failed test names."""
+    return set([r.test_name for r in self._test_results if r.failed])
+
+  def _GetModifierChar(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test modifier
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier in self.MODIFIER_TO_CHAR:
+      return self.MODIFIER_TO_CHAR[test_result.modifier]
+
+    return self.__class__.PASS_RESULT
+
+  def _get_result_char(self, test_name):
+    """Returns a single char (e.g. SKIP_RESULT, FAIL_RESULT,
+    PASS_RESULT, NO_DATA_RESULT, etc) that indicates the test result
+    for the given test_name.
+    """
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    test_result = self._test_results_map[test_name]
+    if test_result.modifier == TestResult.DISABLED:
+      return self.__class__.SKIP_RESULT
+
+    if test_result.failed:
+      return self.__class__.FAIL_RESULT
+
+    return self.__class__.PASS_RESULT
+
+  def _GetSVNRevision(self, in_directory):
+    """Returns the svn revision for the given directory.
+
+    Args:
+      in_directory: The directory where svn is to be run.
+    """
+    # This is overridden in flakiness_dashboard_results_uploader.py.
+    raise NotImplementedError()
+
+  def _GetArchivedJSONResults(self):
+    """Downloads the JSON file that contains only the test name list from the
+    test-results server. This is used to generate incremental JSON, so the
+    generated file has info for tests that failed before but now pass or are
+    skipped in the current run.
+
+    Returns:
+      An (archived_results, error) tuple where error is None if the results
+      were read successfully.
+    """
+    results_json = {}
+    old_results = None
+    error = None
+
+    if not self._test_results_server:
+      return {}, None
+
+    results_file_url = (self.URL_FOR_TEST_LIST_JSON %
+                        (urllib2.quote(self._test_results_server),
+                         urllib2.quote(self._builder_name),
+                         self.RESULTS_FILENAME,
+                         urllib2.quote(self._test_type),
+                         urllib2.quote(self._master_name)))
+
+    try:
+      # FIXME: We should talk to the network via a Host object.
+      results_file = urllib2.urlopen(results_file_url)
+      old_results = results_file.read()
+    except urllib2.HTTPError as http_error:
+      # A non-4xx status code means the bot is hosed for some reason
+      # and we can't grab the results.json file off of it.
+      if http_error.code < 400 or http_error.code >= 500:
+        error = http_error
+    except urllib2.URLError as url_error:
+      error = url_error
+
+    if old_results:
+      # Strip the prefix and suffix so we can get the actual JSON object.
+      old_results = StripJSONWrapper(old_results)
+
+      try:
+        results_json = json.loads(old_results)
+      except Exception:
+        _log.debug('results.json was not valid JSON. Clobbering.')
+        # The JSON file is not valid JSON. Just clobber the results.
+        results_json = {}
+    else:
+      _log.debug('Old JSON results do not exist. Starting fresh.')
+      results_json = {}
+
+    return results_json, error
+
+  def _InsertFailureSummaries(self, results_for_builder):
+    """Inserts aggregate pass/failure statistics into the JSON.
+    This method reads self._test_results and generates
+    FIXABLE, FIXABLE_COUNT and ALL_FIXABLE_COUNT entries.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+    """
+    # Insert the number of tests that failed or skipped.
+    fixable_count = len([r for r in self._test_results if r.Fixable()])
+    self._InsertItemIntoRawList(results_for_builder,
+                                fixable_count, self.FIXABLE_COUNT)
+
+    # Create a test modifiers (FAILS, FLAKY etc) summary dictionary.
+    entry = {}
+    for test_name in self._test_results_map.iterkeys():
+      result_char = self._GetModifierChar(test_name)
+      entry[result_char] = entry.get(result_char, 0) + 1
+
+    # Insert the pass/skip/failure summary dictionary.
+    self._InsertItemIntoRawList(results_for_builder, entry,
+                                self.FIXABLE)
+
+    # Insert the number of all the tests that are supposed to pass.
+    all_test_count = len(self._test_results)
+    self._InsertItemIntoRawList(results_for_builder,
+                                all_test_count, self.ALL_FIXABLE_COUNT)
+
+  def _InsertItemIntoRawList(self, results_for_builder, item, key):
+    """Inserts the item into the list with the given key in the results for
+    this builder. Creates the list if no such list exists.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for a
+          single builder.
+      item: Number or string to insert into the list.
+      key: Key in results_for_builder for the list to insert into.
+    """
+    if key in results_for_builder:
+      raw_list = results_for_builder[key]
+    else:
+      raw_list = []
+
+    raw_list.insert(0, item)
+    raw_list = raw_list[:self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG]
+    results_for_builder[key] = raw_list
+
+  def _InsertItemRunLengthEncoded(self, item, encoded_results):
+    """Inserts the item into the run-length encoded results.
+
+    Args:
+      item: String or number to insert.
+      encoded_results: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    if len(encoded_results) and item == encoded_results[0][1]:
+      num_results = encoded_results[0][0]
+      if num_results <= self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        encoded_results[0][0] = num_results + 1
+    else:
+      # Use a list instead of a class for the run-length encoding since
+      # we want the serialized form to be concise.
+      encoded_results.insert(0, [1, item])
+
+  def _InsertGenericMetaData(self, results_for_builder):
+    """ Inserts generic metadata (such as version number, current time etc)
+    into the JSON.
+
+    Args:
+      results_for_builder: Dictionary containing the test results for
+          a single builder.
+    """
+    self._InsertItemIntoRawList(results_for_builder,
+                                self._build_number, self.BUILD_NUMBERS)
+
+    # Include SVN revisions for the given repositories.
+    for (name, path) in self._svn_repositories:
+      # Note: for JSON file's backward-compatibility we use 'chrome' rather
+      # than 'chromium' here.
+      lowercase_name = name.lower()
+      if lowercase_name == 'chromium':
+        lowercase_name = 'chrome'
+      self._InsertItemIntoRawList(results_for_builder,
+                                  self._GetSVNRevision(path),
+                                  lowercase_name + 'Revision')
+
+    self._InsertItemIntoRawList(results_for_builder,
+                                int(time.time()),
+                                self.TIME)
+
+  def _InsertTestTimeAndResult(self, test_name, tests):
+    """Inserts a test item with its results into the given tests dictionary.
+
+    Args:
+      test_name: Name of the test to insert.
+      tests: Dictionary containing test result entries.
+    """
+
+    result = self._get_result_char(test_name)
+    test_time = self._GetTestTiming(test_name)
+
+    this_test = tests
+    for segment in test_name.split('/'):
+      if segment not in this_test:
+        this_test[segment] = {}
+      this_test = this_test[segment]
+
+    if not len(this_test):
+      self._PopulateResultsAndTimesJSON(this_test)
+
+    if self.RESULTS in this_test:
+      self._InsertItemRunLengthEncoded(result, this_test[self.RESULTS])
+    else:
+      this_test[self.RESULTS] = [[1, result]]
+
+    if self.TIMES in this_test:
+      self._InsertItemRunLengthEncoded(test_time, this_test[self.TIMES])
+    else:
+      this_test[self.TIMES] = [[1, test_time]]
+
+  def _ConvertJSONToCurrentVersion(self, results_json):
+    """If the JSON does not match the current version, converts it to the
+    current version and adds in the new version number.
+    """
+    if self.VERSION_KEY in results_json:
+      archive_version = results_json[self.VERSION_KEY]
+      if archive_version == self.VERSION:
+        return
+    else:
+      archive_version = 3
+
+    # version 3->4
+    if archive_version == 3:
+      for results in results_json.values():
+        self._ConvertTestsToTrie(results)
+
+    results_json[self.VERSION_KEY] = self.VERSION
+
+  def _ConvertTestsToTrie(self, results):
+    if not self.TESTS in results:
+      return
+
+    test_results = results[self.TESTS]
+    test_results_trie = {}
+    for test in test_results.iterkeys():
+      single_test_result = test_results[test]
+      AddPathToTrie(test, single_test_result, test_results_trie)
+
+    results[self.TESTS] = test_results_trie
+
+  def _PopulateResultsAndTimesJSON(self, results_and_times):
+    results_and_times[self.RESULTS] = []
+    results_and_times[self.TIMES] = []
+    return results_and_times
+
+  def _CreateResultsForBuilderJSON(self):
+    results_for_builder = {}
+    results_for_builder[self.TESTS] = {}
+    return results_for_builder
+
+  def _RemoveItemsOverMaxNumberOfBuilds(self, encoded_list):
+    """Removes items from the run-length encoded list after the final
+    item that exceeds the max number of builds to track.
+
+    Args:
+      encoded_list: run-length encoded results. An array of arrays, e.g.
+          [[3,'A'],[1,'Q']] encodes AAAQ.
+    """
+    num_builds = 0
+    index = 0
+    for result in encoded_list:
+      num_builds = num_builds + result[0]
+      index = index + 1
+      if num_builds > self.MAX_NUMBER_OF_BUILD_RESULTS_TO_LOG:
+        return encoded_list[:index]
+    return encoded_list
+
+  def _NormalizeResultsJSON(self, test, test_name, tests):
+    """ Prune tests where all runs pass or tests that no longer exist and
+    truncate all results to maxNumberOfBuilds.
+
+    Args:
+      test: ResultsAndTimes object for this test.
+      test_name: Name of the test.
+      tests: The JSON object with all the test results for this builder.
+    """
+    test[self.RESULTS] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.RESULTS])
+    test[self.TIMES] = self._RemoveItemsOverMaxNumberOfBuilds(
+        test[self.TIMES])
+
+    is_all_pass = self._IsResultsAllOfType(test[self.RESULTS],
+                                           self.PASS_RESULT)
+    is_all_no_data = self._IsResultsAllOfType(test[self.RESULTS],
+                                              self.NO_DATA_RESULT)
+    max_time = max([test_time[1] for test_time in test[self.TIMES]])
+
+    # Remove all passes/no-data from the results to reduce noise and
+    # filesize. If a test passes every run, but takes > MIN_TIME to run,
+    # don't throw away the data.
+    if is_all_no_data or (is_all_pass and max_time <= self.MIN_TIME):
+      del tests[test_name]
+
+  # method could be a function pylint: disable=R0201
+  def _IsResultsAllOfType(self, results, result_type):
+    """Returns whether all the results are of the given type
+    (e.g. all passes)."""
+    return len(results) == 1 and results[0][1] == result_type
+
+
+class _FileUploader(object):
+
+  def __init__(self, url, timeout_seconds):
+    self._url = url
+    self._timeout_seconds = timeout_seconds
+
+  def UploadAsMultipartFormData(self, files, attrs):
+    file_objs = []
+    for filename, path in files:
+      with file(path, 'rb') as fp:
+        file_objs.append(('file', filename, fp.read()))
+
+    # FIXME: We should use the same variable names for the formal and actual
+    # parameters.
+    content_type, data = _EncodeMultipartFormData(attrs, file_objs)
+    return self._UploadData(content_type, data)
+
+  def _UploadData(self, content_type, data):
+    start = time.time()
+    end = start + self._timeout_seconds
+    while time.time() < end:
+      try:
+        request = urllib2.Request(self._url, data,
+                                  {'Content-Type': content_type})
+        return urllib2.urlopen(request)
+      except urllib2.HTTPError as e:
+        _log.warn('Received HTTP status %s loading "%s". '
+                  'Retrying in 10 seconds...' % (e.code, e.filename))
+        time.sleep(10)
+
+
+def _GetMIMEType(filename):
+  return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+# FIXME: Rather than taking tuples, this function should take more
+# structured data.
+def _EncodeMultipartFormData(fields, files):
+  """Encode form fields for multipart/form-data.
+
+  Args:
+    fields: A sequence of (name, value) elements for regular form fields.
+    files: A sequence of (name, filename, value) elements for data to be
+           uploaded as files.
+  Returns:
+    (content_type, body) ready for httplib.HTTP instance.
+
+  Source:
+    http://code.google.com/p/rietveld/source/browse/trunk/upload.py
+  """
+  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
+  CRLF = '\r\n'
+  lines = []
+
+  for key, value in fields:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"' % key)
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  for key, filename, value in files:
+    lines.append('--' + BOUNDARY)
+    lines.append('Content-Disposition: form-data; name="%s"; '
+                 'filename="%s"' % (key, filename))
+    lines.append('Content-Type: %s' % _GetMIMEType(filename))
+    lines.append('')
+    if isinstance(value, unicode):
+      value = value.encode('utf-8')
+    lines.append(value)
+
+  lines.append('--' + BOUNDARY + '--')
+  lines.append('')
+  body = CRLF.join(lines)
+  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
+  return content_type, body
diff --git a/build/android/pylib/results/flakiness_dashboard/results_uploader.py b/build/android/pylib/results/flakiness_dashboard/results_uploader.py
new file mode 100644
index 0000000..b86d7ac
--- /dev/null
+++ b/build/android/pylib/results/flakiness_dashboard/results_uploader.py
@@ -0,0 +1,181 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Uploads the results to the flakiness dashboard server."""
+# pylint: disable=E1002,R0201
+
+import logging
+import os
+import shutil
+import tempfile
+import xml
+
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.results.flakiness_dashboard import json_results_generator
+from pylib.utils import repo_utils
+
+
+
+class JSONResultsGenerator(json_results_generator.JSONResultsGeneratorBase):
+  """Writes test results to a JSON file and handles uploading that file to
+  the test results server.
+  """
+  def __init__(self, builder_name, build_name, build_number, tmp_folder,
+               test_results_map, test_results_server, test_type, master_name):
+    super(JSONResultsGenerator, self).__init__(
+        builder_name=builder_name,
+        build_name=build_name,
+        build_number=build_number,
+        results_file_base_path=tmp_folder,
+        builder_base_url=None,
+        test_results_map=test_results_map,
+        svn_repositories=(('webkit', 'third_party/WebKit'),
+                          ('chrome', '.')),
+        test_results_server=test_results_server,
+        test_type=test_type,
+        master_name=master_name)
+
+  #override
+  def _GetModifierChar(self, test_name):
+    if test_name not in self._test_results_map:
+      return self.__class__.NO_DATA_RESULT
+
+    return self._test_results_map[test_name].modifier
+
+  #override
+  def _GetSVNRevision(self, in_directory):
+    """Returns the git/svn revision for the given directory.
+
+    Args:
+      in_directory: The directory relative to src.
+    """
+    def _is_git_directory(in_directory):
+      """Returns true if the given directory is in a git repository.
+
+      Args:
+        in_directory: The directory path to be tested.
+      """
+      if os.path.exists(os.path.join(in_directory, '.git')):
+        return True
+      parent = os.path.dirname(in_directory)
+      if parent == constants.DIR_SOURCE_ROOT or parent == in_directory:
+        return False
+      return _is_git_directory(parent)
+
+    in_directory = os.path.join(constants.DIR_SOURCE_ROOT, in_directory)
+
+    if not os.path.exists(os.path.join(in_directory, '.svn')):
+      if _is_git_directory(in_directory):
+        return repo_utils.GetGitHeadSHA1(in_directory)
+      else:
+        return ''
+
+    output = cmd_helper.GetCmdOutput(['svn', 'info', '--xml'], cwd=in_directory)
+    try:
+      dom = xml.dom.minidom.parseString(output)
+      return dom.getElementsByTagName('entry')[0].getAttribute('revision')
+    except xml.parsers.expat.ExpatError:
+      return ''
+
+
+class ResultsUploader(object):
+  """Handles uploading buildbot test results to the flakiness dashboard."""
+  def __init__(self, tests_type):
+    self._build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
+    self._builder_name = os.environ.get('BUILDBOT_BUILDERNAME')
+    self._tests_type = tests_type
+
+    if not self._build_number or not self._builder_name:
+      raise Exception('You should not be uploading test results to the server '
+                      'from your local machine.')
+
+    upstream = (tests_type != 'Chromium_Android_Instrumentation')
+    if upstream:
+      # TODO(frankf): Use factory properties (see buildbot/bb_device_steps.py)
+      # This requires passing the actual master name (e.g. 'ChromiumFYI' not
+      # 'chromium.fyi').
+      from slave import slave_utils # pylint: disable=F0401
+      self._build_name = slave_utils.SlaveBuildName(constants.DIR_SOURCE_ROOT)
+      self._master_name = slave_utils.GetActiveMaster()
+    else:
+      self._build_name = 'chromium-android'
+      buildbot_branch = os.environ.get('BUILDBOT_BRANCH')
+      if not buildbot_branch:
+        buildbot_branch = 'master'
+      else:
+        # Ensure there's no leading "origin/"
+        buildbot_branch = buildbot_branch[buildbot_branch.find('/') + 1:]
+      self._master_name = '%s-%s' % (self._build_name, buildbot_branch)
+
+    self._test_results_map = {}
+
+  def AddResults(self, test_results):
+    # TODO(frankf): Differentiate between fail/crash/timeouts.
+    conversion_map = [
+        (test_results.GetPass(), False,
+            json_results_generator.JSONResultsGeneratorBase.PASS_RESULT),
+        (test_results.GetFail(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetCrash(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetTimeout(), True,
+            json_results_generator.JSONResultsGeneratorBase.FAIL_RESULT),
+        (test_results.GetUnknown(), True,
+            json_results_generator.JSONResultsGeneratorBase.NO_DATA_RESULT),
+        ]
+
+    for results_list, failed, modifier in conversion_map:
+      for single_test_result in results_list:
+        test_result = json_results_generator.TestResult(
+            test=single_test_result.GetName(),
+            failed=failed,
+            elapsed_time=single_test_result.GetDuration() / 1000)
+        # The WebKit TestResult object sets the modifier based on the test
+        # name. Since we don't use the same test naming convention as WebKit,
+        # the modifier will be wrong, so we need to overwrite it.
+        test_result.modifier = modifier
+
+        self._test_results_map[single_test_result.GetName()] = test_result
+
+  def Upload(self, test_results_server):
+    if not self._test_results_map:
+      return
+
+    tmp_folder = tempfile.mkdtemp()
+
+    try:
+      results_generator = JSONResultsGenerator(
+          builder_name=self._builder_name,
+          build_name=self._build_name,
+          build_number=self._build_number,
+          tmp_folder=tmp_folder,
+          test_results_map=self._test_results_map,
+          test_results_server=test_results_server,
+          test_type=self._tests_type,
+          master_name=self._master_name)
+
+      json_files = ["incremental_results.json", "times_ms.json"]
+      results_generator.GenerateJSONOutput()
+      results_generator.GenerateTimesMSFile()
+      results_generator.UploadJSONFiles(json_files)
+    except Exception as e:
+      logging.error("Uploading results to test server failed: %s." % e)
+    finally:
+      shutil.rmtree(tmp_folder)
+
+
+def Upload(results, flakiness_dashboard_server, test_type):
+  """Reports test results to the flakiness dashboard for Chrome for Android.
+
+  Args:
+    results: test results.
+    flakiness_dashboard_server: the server to upload the results to.
+    test_type: the type of the tests (as displayed by the flakiness dashboard).
+  """
+  uploader = ResultsUploader(test_type)
+  uploader.AddResults(results)
+  uploader.Upload(flakiness_dashboard_server)
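+
+
+# A hedged sketch of how the module-level entry point might be called; the
+# server and test type values are hypothetical placeholders. On bots the call
+# is made by report_results.LogFull() with real values.
+#
+#   from pylib.results.flakiness_dashboard import results_uploader
+#   results_uploader.Upload(
+#       results,                                # a base_test_result.TestRunResults
+#       'test-results.example.com',             # flakiness dashboard server
+#       'content_shell_instrumentation_tests')  # dashboard test type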
diff --git a/build/android/pylib/results/json_results.py b/build/android/pylib/results/json_results.py
new file mode 100644
index 0000000..65664e3
--- /dev/null
+++ b/build/android/pylib/results/json_results.py
@@ -0,0 +1,139 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import logging
+
+from pylib.base import base_test_result
+
+
+def GenerateResultsDict(test_run_result):
+  """Create a results dict from |test_run_result| suitable for writing to JSON.
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+  Returns:
+    A results dict that mirrors the one generated by
+      base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+  """
+  # Example json output.
+  # {
+  #   "global_tags": [],
+  #   "all_tests": [
+  #     "test1",
+  #     "test2",
+  #    ],
+  #   "disabled_tests": [],
+  #   "per_iteration_data": [
+  #     {
+  #       "test1": [
+  #         {
+  #           "status": "SUCCESS",
+  #           "elapsed_time_ms": 1,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #       "test2": [
+  #         {
+  #           "status": "FAILURE",
+  #           "elapsed_time_ms": 12,
+  #           "output_snippet": "",
+  #           "output_snippet_base64": "",
+  #           "losless_snippet": "",
+  #         },
+  #       ],
+  #     },
+  #   ],
+  # }
+
+  assert isinstance(test_run_result, base_test_result.TestRunResults)
+
+  def status_as_string(s):
+    if s == base_test_result.ResultType.PASS:
+      return 'SUCCESS'
+    elif s == base_test_result.ResultType.SKIP:
+      return 'SKIPPED'
+    elif s == base_test_result.ResultType.FAIL:
+      return 'FAILURE'
+    elif s == base_test_result.ResultType.CRASH:
+      return 'CRASH'
+    elif s == base_test_result.ResultType.TIMEOUT:
+      return 'TIMEOUT'
+    elif s == base_test_result.ResultType.UNKNOWN:
+      return 'UNKNOWN'
+
+  def generate_iteration_data(t):
+    return {
+      t.GetName(): [
+        {
+          'status': status_as_string(t.GetType()),
+          'elapsed_time_ms': t.GetDuration(),
+          'output_snippet': '',
+          'losless_snippet': '',
+          'output_snippet_base64': '',
+        }
+      ]
+    }
+
+  all_tests_tuple, per_iteration_data_tuple = zip(
+      *[(t.GetName(), generate_iteration_data(t))
+        for t in test_run_result.GetAll()])
+
+  return {
+    'global_tags': [],
+    'all_tests': list(all_tests_tuple),
+    # TODO(jbudorick): Add support for disabled tests within base_test_result.
+    'disabled_tests': [],
+    'per_iteration_data': list(per_iteration_data_tuple),
+  }
+
+
+def GenerateJsonResultsFile(test_run_result, file_path):
+  """Write |test_run_result| to JSON.
+
+  This emulates the format of the JSON emitted by
+  base/test/launcher/test_results_tracker.cc:SaveSummaryAsJSON.
+
+  Args:
+    test_run_result: a base_test_result.TestRunResults object.
+    file_path: The path to the JSON file to write.
+  """
+  with open(file_path, 'w') as json_result_file:
+    json_result_file.write(json.dumps(GenerateResultsDict(test_run_result)))
+
+
+def ParseResultsFromJson(json_results):
+  """Creates a list of BaseTestResult objects from JSON.
+
+  Args:
+    json_results: A JSON dict in the format created by
+                  GenerateJsonResultsFile.
+  """
+
+  def string_as_status(s):
+    if s == 'SUCCESS':
+      return base_test_result.ResultType.PASS
+    elif s == 'SKIPPED':
+      return base_test_result.ResultType.SKIP
+    elif s == 'FAILURE':
+      return base_test_result.ResultType.FAIL
+    elif s == 'CRASH':
+      return base_test_result.ResultType.CRASH
+    elif s == 'TIMEOUT':
+      return base_test_result.ResultType.TIMEOUT
+    else:
+      return base_test_result.ResultType.UNKNOWN
+
+  results_list = []
+  testsuite_runs = json_results['per_iteration_data']
+  for testsuite_run in testsuite_runs:
+    for test, test_runs in testsuite_run.iteritems():
+      results_list.extend(
+          [base_test_result.BaseTestResult(test,
+                                           string_as_status(tr['status']),
+                                           duration=tr['elapsed_time_ms'])
+          for tr in test_runs])
+  return results_list
+
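+
+# A round-trip sketch for the format documented above; the test name and file
+# path are hypothetical.
+#
+#   import json
+#
+#   from pylib.base import base_test_result
+#   from pylib.results import json_results
+#
+#   run = base_test_result.TestRunResults()
+#   run.AddResult(base_test_result.BaseTestResult(
+#       'org.chromium.FooTest#testBar', base_test_result.ResultType.PASS,
+#       duration=42))
+#   json_results.GenerateJsonResultsFile(run, '/tmp/results.json')
+#   with open('/tmp/results.json') as f:
+#     parsed = json_results.ParseResultsFromJson(json.load(f))
+#   # parsed[0].GetName() == 'org.chromium.FooTest#testBar'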
diff --git a/build/android/pylib/results/json_results_test.py b/build/android/pylib/results/json_results_test.py
new file mode 100755
index 0000000..1bc730d
--- /dev/null
+++ b/build/android/pylib/results/json_results_test.py
@@ -0,0 +1,133 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import unittest
+
+from pylib.base import base_test_result
+from pylib.results import json_results
+
+
+class JsonResultsTest(unittest.TestCase):
+
+  def testGenerateResultsDict_passedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+  def testGenerateResultsDict_skippedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.SKIP)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('SKIPPED', test_iteration_result['status'])
+
+  def testGenerateResultsDict_failedResult(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.FAIL)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('status' in test_iteration_result)
+    self.assertEquals('FAILURE', test_iteration_result['status'])
+
+  def testGenerateResultsDict_duration(self):
+    result = base_test_result.BaseTestResult(
+        'test.package.TestName', base_test_result.ResultType.PASS, duration=123)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName'],
+        results_dict['all_tests'])
+    self.assertEquals(1, len(results_dict['per_iteration_data']))
+
+    iteration_result = results_dict['per_iteration_data'][0]
+    self.assertTrue('test.package.TestName' in iteration_result)
+    self.assertEquals(1, len(iteration_result['test.package.TestName']))
+
+    test_iteration_result = iteration_result['test.package.TestName'][0]
+    self.assertTrue('elapsed_time_ms' in test_iteration_result)
+    self.assertEquals(123, test_iteration_result['elapsed_time_ms'])
+
+  def testGenerateResultsDict_multipleResults(self):
+    result1 = base_test_result.BaseTestResult(
+        'test.package.TestName1', base_test_result.ResultType.PASS)
+    result2 = base_test_result.BaseTestResult(
+        'test.package.TestName2', base_test_result.ResultType.PASS)
+
+    all_results = base_test_result.TestRunResults()
+    all_results.AddResult(result1)
+    all_results.AddResult(result2)
+
+    results_dict = json_results.GenerateResultsDict(all_results)
+    self.assertEquals(
+        ['test.package.TestName1', 'test.package.TestName2'],
+        results_dict['all_tests'])
+    self.assertEquals(2, len(results_dict['per_iteration_data']))
+
+    expected_tests = set([
+        'test.package.TestName1',
+        'test.package.TestName2',
+    ])
+
+    for iteration_result in results_dict['per_iteration_data']:
+      self.assertEquals(1, len(iteration_result))
+      name = iteration_result.keys()[0]
+      self.assertTrue(name in expected_tests)
+      expected_tests.remove(name)
+      self.assertEquals(1, len(iteration_result[name]))
+
+      test_iteration_result = iteration_result[name][0]
+      self.assertTrue('status' in test_iteration_result)
+      self.assertEquals('SUCCESS', test_iteration_result['status'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/results/report_results.py b/build/android/pylib/results/report_results.py
new file mode 100644
index 0000000..4fc6aa0
--- /dev/null
+++ b/build/android/pylib/results/report_results.py
@@ -0,0 +1,114 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utility functions for reporting results."""
+
+import logging
+import os
+import re
+
+from pylib import constants
+from pylib.results.flakiness_dashboard import results_uploader
+
+
+def _LogToFile(results, test_type, suite_name):
+  """Log results to local files which can be used for aggregation later."""
+  log_file_path = os.path.join(constants.GetOutDirectory(), 'test_logs')
+  if not os.path.exists(log_file_path):
+    os.mkdir(log_file_path)
+  full_file_name = os.path.join(
+      log_file_path, re.sub(r'\W', '_', test_type).lower() + '.log')
+  if not os.path.exists(full_file_name):
+    with open(full_file_name, 'w') as log_file:
+      print >> log_file, '\n%s results for %s build %s:' % (
+          test_type, os.environ.get('BUILDBOT_BUILDERNAME'),
+          os.environ.get('BUILDBOT_BUILDNUMBER'))
+    logging.info('Writing results to %s.' % full_file_name)
+
+  logging.info('Writing results to %s.' % full_file_name)
+  with open(full_file_name, 'a') as log_file:
+    shortened_suite_name = suite_name[:25] + (suite_name[25:] and '...')
+    print >> log_file, '%s%s' % (shortened_suite_name.ljust(30),
+                                 results.GetShortForm())
+
+
+def _LogToFlakinessDashboard(results, test_type, test_package,
+                             flakiness_server):
+  """Uploads results to the flakiness dashboard."""
+  logging.info('Upload results for test type "%s", test package "%s" to %s' %
+               (test_type, test_package, flakiness_server))
+
+  try:
+    if test_type == 'Instrumentation':
+      if flakiness_server == constants.UPSTREAM_FLAKINESS_SERVER:
+        assert test_package in ['ContentShellTest',
+                                'ChromePublicTest',
+                                'ChromeShellTest',
+                                'ChromeSyncShellTest',
+                                'AndroidWebViewTest']
+        dashboard_test_type = ('%s_instrumentation_tests' %
+                               test_package.lower().rstrip('test'))
+      # Downstream server.
+      else:
+        dashboard_test_type = 'Chromium_Android_Instrumentation'
+
+    elif test_type == 'Unit test':
+      dashboard_test_type = test_package
+
+    else:
+      logging.warning('Invalid test type')
+      return
+
+    results_uploader.Upload(
+        results, flakiness_server, dashboard_test_type)
+
+  except Exception as e:
+    logging.error(e)
+
+
+def LogFull(results, test_type, test_package, annotation=None,
+            flakiness_server=None):
+  """Log the test results for the test suite.
+
+  The results will be logged three different ways:
+    1. Log to stdout.
+    2. Log to local files for aggregating multiple test steps
+       (on buildbots only).
+    3. Log to flakiness dashboard (on buildbots only).
+
+  Args:
+    results: An instance of TestRunResults object.
+    test_type: Type of the test (e.g. 'Instrumentation', 'Unit test', etc.).
+    test_package: Test package name (e.g. 'ipc_tests' for gtests,
+                  'ContentShellTest' for instrumentation tests)
+    annotation: If instrumentation test type, this is a list of annotations
+                (e.g. ['Smoke', 'SmallTest']).
+    flakiness_server: If provided, upload the results to the flakiness
+                      dashboard with this URL.
+  """
+  if not results.DidRunPass():
+    logging.critical('*' * 80)
+    logging.critical('Detailed Logs')
+    logging.critical('*' * 80)
+    for line in results.GetLogs().splitlines():
+      logging.critical(line)
+  logging.critical('*' * 80)
+  logging.critical('Summary')
+  logging.critical('*' * 80)
+  for line in results.GetGtestForm().splitlines():
+    logging.critical(line)
+  logging.critical('*' * 80)
+
+  if os.environ.get('BUILDBOT_BUILDERNAME'):
+    # It is possible to have multiple buildbot steps for the same
+    # instrumentation test package using different annotations.
+    if annotation and len(annotation) == 1:
+      suite_name = annotation[0]
+    else:
+      suite_name = test_package
+    _LogToFile(results, test_type, suite_name)
+
+    if flakiness_server:
+      _LogToFlakinessDashboard(results, test_type, test_package,
+                               flakiness_server)
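+
+
+# A hedged sketch of a typical call site; the suite name is hypothetical and
+# flakiness_server is only meaningful on bots.
+#
+#   report_results.LogFull(
+#       results,
+#       test_type='Unit test',
+#       test_package='ipc_tests',
+#       flakiness_server=constants.UPSTREAM_FLAKINESS_SERVER)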
diff --git a/build/android/pylib/screenshot.py b/build/android/pylib/screenshot.py
new file mode 100644
index 0000000..0fcc590
--- /dev/null
+++ b/build/android/pylib/screenshot.py
@@ -0,0 +1,99 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import tempfile
+import time
+
+from pylib import cmd_helper
+from pylib import device_signal
+from pylib.device import device_errors
+
+# TODO(jbudorick) Remove once telemetry gets switched over.
+import pylib.android_commands
+import pylib.device.device_utils
+
+
+class VideoRecorder(object):
+  """Records a screen capture video from an Android Device (KitKat or newer).
+
+  Args:
+    device: DeviceUtils instance.
+    megabits_per_second: Video bitrate in megabits per second. Allowed range
+                         is 0.1 to 100 Mbps.
+    size: Video frame size tuple (width, height) or None to use the device
+          default.
+    rotate: If True, the video will be rotated 90 degrees.
+  """
+  def __init__(self, device, megabits_per_second=4, size=None,
+               rotate=False):
+    # TODO(jbudorick) Remove once telemetry gets switched over.
+    if isinstance(device, pylib.android_commands.AndroidCommands):
+      device = pylib.device.device_utils.DeviceUtils(device)
+    self._device = device
+    self._device_file = (
+        '%s/screen-recording.mp4' % device.GetExternalStoragePath())
+    self._recorder = None
+    self._recorder_stdout = None
+    self._is_started = False
+
+    self._args = ['adb']
+    if str(self._device):
+      self._args += ['-s', str(self._device)]
+    self._args += ['shell', 'screenrecord', '--verbose']
+    self._args += ['--bit-rate', str(megabits_per_second * 1000 * 1000)]
+    if size:
+      self._args += ['--size', '%dx%d' % size]
+    if rotate:
+      self._args += ['--rotate']
+    self._args += [self._device_file]
+
+  def Start(self):
+    """Start recording video."""
+    fd, self._recorder_stdout = tempfile.mkstemp()
+    os.close(fd)  # Only the path is needed; avoid leaking the file descriptor.
+    self._recorder = cmd_helper.Popen(
+        self._args, stdout=open(self._recorder_stdout, 'w'))
+    if not self._device.GetPids('screenrecord'):
+      raise RuntimeError('Recording failed. Is your device running Android '
+                         'KitKat or later?')
+
+  def IsStarted(self):
+    if not self._is_started:
+      for line in open(self._recorder_stdout):
+        self._is_started = line.startswith('Content area is ')
+        if self._is_started:
+          break
+    return self._is_started
+
+  def Stop(self):
+    """Stop recording video."""
+    os.remove(self._recorder_stdout)
+    self._is_started = False
+    if not self._recorder:
+      return
+    if not self._device.KillAll('screenrecord', signum=device_signal.SIGINT,
+                                quiet=True):
+      logging.warning('Nothing to kill: screenrecord was not running')
+    self._recorder.wait()
+
+  def Pull(self, host_file=None):
+    """Pull resulting video file from the device.
+
+    Args:
+      host_file: Path to the video file to store on the host.
+    Returns:
+      Output video file name on the host.
+    """
+    # TODO(jbudorick): Merge filename generation with the logic for doing so in
+    # DeviceUtils.
+    host_file_name = (
+        host_file
+        or 'screen-recording-%s.mp4' % time.strftime('%Y%m%dT%H%M%S',
+                                                     time.localtime()))
+    host_file_name = os.path.abspath(host_file_name)
+    self._device.PullFile(self._device_file, host_file_name)
+    self._device.RunShellCommand('rm -f "%s"' % self._device_file)
+    return host_file_name
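+
+
+# A minimal recording sketch, assuming |device| is an attached DeviceUtils
+# instance running KitKat or newer; the output file name is hypothetical.
+#
+#   recorder = VideoRecorder(device, megabits_per_second=8)
+#   recorder.Start()
+#   ...  # exercise the device while recording
+#   recorder.Stop()
+#   video_path = recorder.Pull('my-recording.mp4')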
diff --git a/build/android/pylib/sdk/__init__.py b/build/android/pylib/sdk/__init__.py
new file mode 100644
index 0000000..50b23df
--- /dev/null
+++ b/build/android/pylib/sdk/__init__.py
@@ -0,0 +1,3 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
diff --git a/build/android/pylib/sdk/aapt.py b/build/android/pylib/sdk/aapt.py
new file mode 100644
index 0000000..3d317ff
--- /dev/null
+++ b/build/android/pylib/sdk/aapt.py
@@ -0,0 +1,42 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps the Android Asset Packaging Tool."""
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import timeout_retry
+
+_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+
+def _RunAaptCmd(args):
+  """Runs an aapt command.
+
+  Args:
+    args: A list of arguments for aapt.
+
+  Returns:
+    The output of the command.
+  """
+  cmd = [_AAPT_PATH] + args
+  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+  if status != 0:
+    raise Exception('Failed running aapt command: "%s" with output "%s".' %
+                    (' '.join(cmd), output))
+  return output
+
+def Dump(what, apk, assets=None):
+  """Returns the output of the aapt dump command.
+
+  Args:
+    what: What you want to dump.
+    apk: Path to apk you want to dump information for.
+    assets: List of assets in apk you want to dump information for.
+  """
+  assets = assets or []
+  if isinstance(assets, basestring):
+    assets = [assets]
+  return _RunAaptCmd(['dump', what, apk] + assets).splitlines()
\ No newline at end of file
diff --git a/build/android/pylib/sdk/dexdump.py b/build/android/pylib/sdk/dexdump.py
new file mode 100644
index 0000000..ec10aba
--- /dev/null
+++ b/build/android/pylib/sdk/dexdump.py
@@ -0,0 +1,30 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+
+_DEXDUMP_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'dexdump')
+
+def DexDump(dexfiles, file_summary=False):
+  """A wrapper around the Android SDK's dexdump tool.
+
+  Args:
+    dexfiles: The dexfile or list of dex files to dump.
+    file_summary: Display summary information from the file header. (-f)
+
+  Returns:
+    An iterable over the output lines.
+  """
+  # TODO(jbudorick): Add support for more options as necessary.
+  if isinstance(dexfiles, basestring):
+    dexfiles = [dexfiles]
+  args = [_DEXDUMP_PATH] + dexfiles
+  if file_summary:
+    args.append('-f')
+
+  return cmd_helper.IterCmdOutputLines(args)
+
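+# A hedged usage sketch; the dex file path below is hypothetical.
+#
+#   for line in DexDump('out/Debug/gen/foo/classes.dex', file_summary=True):
+#     print line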
diff --git a/build/android/pylib/sdk/split_select.py b/build/android/pylib/sdk/split_select.py
new file mode 100644
index 0000000..e204662
--- /dev/null
+++ b/build/android/pylib/sdk/split_select.py
@@ -0,0 +1,58 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This module wraps Android's split-select tool."""
+
+import os
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import timeout_retry
+
+_SPLIT_SELECT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'split-select')
+
+def _RunSplitSelectCmd(args):
+  """Runs a split-select command.
+
+  Args:
+    args: A list of arguments for split-select.
+
+  Returns:
+    The output of the command.
+  """
+  cmd = [_SPLIT_SELECT_PATH] + args
+  status, output = cmd_helper.GetCmdStatusAndOutput(cmd)
+  if status != 0:
+    raise Exception('Failed running command "%s" with output "%s".' %
+                    (' '.join(cmd), output))
+  return output
+
+def _SplitConfig(device):
+  """Returns a config specifying which APK splits are required by the device.
+
+  Args:
+    device: A DeviceUtils object.
+  """
+  return ('%s-r%s-%s:%s' %
+          (device.language,
+           device.country,
+           device.screen_density,
+           device.product_cpu_abi))
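+# For example, with a hypothetical en-US xxhdpi arm device, _SplitConfig()
+# would return 'en-rUS-xxhdpi:armeabi-v7a'.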
+
+def SelectSplits(device, base_apk, split_apks):
+  """Determines which APK splits the device requires.
+
+  Args:
+    device: A DeviceUtils object.
+    base_apk: The path of the base APK.
+    split_apks: A list of paths of APK splits.
+
+  Returns:
+    The list of APK splits that the device requires.
+  """
+  config = _SplitConfig(device)
+  args = ['--target', config, '--base', base_apk]
+  for split in split_apks:
+    args.extend(['--split', split])
+  return _RunSplitSelectCmd(args).splitlines()
\ No newline at end of file
diff --git a/build/android/pylib/symbols/PRESUBMIT.py b/build/android/pylib/symbols/PRESUBMIT.py
new file mode 100644
index 0000000..b4d94ae
--- /dev/null
+++ b/build/android/pylib/symbols/PRESUBMIT.py
@@ -0,0 +1,21 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+def CommonChecks(input_api, output_api):
+  output = []
+  output.extend(input_api.canned_checks.RunPylint(input_api, output_api))
+  output.extend(input_api.canned_checks.RunUnitTestsInDirectory(
+      input_api,
+      output_api,
+      input_api.PresubmitLocalPath(),
+      whitelist=[r'^.+_unittest\.py$']))
+  return output
+
+
+def CheckChangeOnUpload(input_api, output_api):
+  return CommonChecks(input_api, output_api)
+
+
+def CheckChangeOnCommit(input_api, output_api):
+  return CommonChecks(input_api, output_api)
\ No newline at end of file
diff --git a/build/android/pylib/symbols/__init__.py b/build/android/pylib/symbols/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/symbols/__init__.py
diff --git a/build/android/pylib/symbols/elf_symbolizer.py b/build/android/pylib/symbols/elf_symbolizer.py
new file mode 100644
index 0000000..374063a
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer.py
@@ -0,0 +1,467 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import datetime
+import logging
+import multiprocessing
+import os
+import posixpath
+import Queue
+import re
+import subprocess
+import sys
+import threading
+import time
+
+
+# addr2line builds a possibly infinite memory cache that can exhaust
+# the computer's memory if allowed to grow for too long. This constant
+# controls how many lookups we do before restarting the process. 4000
+# gives near peak performance without extreme memory usage.
+ADDR2LINE_RECYCLE_LIMIT = 4000
+
+
+class ELFSymbolizer(object):
+  """An uber-fast (multiprocessing, pipelined and asynchronous) ELF symbolizer.
+
+  This class is a frontend for addr2line (part of GNU binutils), designed to
+  symbolize batches of large numbers of symbols for a given ELF file. It
+  supports sharding symbolization against many addr2line instances and
+  pipelining of multiple requests per each instance (in order to hide addr2line
+  internals and OS pipe latencies).
+
+  This class exposes a simple asynchronous interface based on the following
+  three methods:
+  - SymbolizeAsync(): used to request (enqueue) resolution of a given address.
+  - The |callback| method: used to communicate back the symbol information.
+  - Join(): called to conclude the batch to gather the last outstanding results.
+  In essence, before the Join method returns, this class will have issued as
+  many callbacks as the number of SymbolizeAsync() calls. In this regard, note
+  that due to multiprocess sharding, callbacks can be delivered out of order.
+
+  Some background about addr2line:
+  - it is invoked passing the elf path in the cmdline, piping the addresses in
+    its stdin and getting results on its stdout.
+  - it has pretty large response times for the first requests, but it
+    works very well in streaming mode once it has been warmed up.
+  - it doesn't scale by itself (on more cores). However, spawning multiple
+    instances at the same time on the same file is pretty efficient as they
+    keep hitting the pagecache and become mostly CPU bound.
+  - it might hang or crash, mostly due to OOM. This class deals with both of
+    these problems.
+
+  Despite the "scary" imports and the multi* words above, (almost) no multi-
+  threading/processing is involved from the python viewpoint. Concurrency
+  here is achieved by spawning several addr2line subprocesses and handling their
+  output pipes asynchronously. Therefore, all the code here (with the exception
+  of the Queue instance in Addr2Line) should be free from mind-blowing
+  thread-safety concerns.
+
+  The multiprocess sharding works as follows:
+  The symbolizer tries to use the lowest possible number of addr2line instances
+  (bounded by |max_concurrent_jobs|) and enqueues all the requests in a single
+  addr2line instance. For a handful of symbols (i.e. dozens) sharding isn't
+  worth the startup cost.
+  The multiprocess logic kicks in as soon as the queues for the existing
+  instances grow. Specifically, once all the existing instances reach the
+  |max_queue_size| bound, a new addr2line instance is kicked in.
+  In the case of a very eager producer (i.e. all |max_concurrent_jobs| instances
+  have a backlog of |max_queue_size|), back-pressure is applied on the caller by
+  blocking the SymbolizeAsync method.
+
+  This module has been deliberately designed to be dependency free (w.r.t.
+  other modules in this project), to allow easy reuse in external projects.
+  """
+
+  def __init__(self, elf_file_path, addr2line_path, callback, inlines=False,
+      max_concurrent_jobs=None, addr2line_timeout=30, max_queue_size=50,
+      source_root_path=None, strip_base_path=None):
+    """Args:
+      elf_file_path: path of the elf file to be symbolized.
+      addr2line_path: path of the toolchain's addr2line binary.
+      callback: a callback which will be invoked for each resolved symbol with
+          the two args (sym_info, callback_arg). The former is an instance of
+          |ELFSymbolInfo| and contains the symbol information. The latter is an
+          embedder-provided argument which is passed to SymbolizeAsync().
+      inlines: when True, the ELFSymbolInfo will contain also the details about
+          the outer inlining functions. When False, only the innermost function
+          will be provided.
+      max_concurrent_jobs: Max number of addr2line instances spawned.
+          Parallelize responsibly, addr2line is a memory and I/O monster.
+      max_queue_size: Max number of outstanding requests per addr2line instance.
+      addr2line_timeout: Max time (in seconds) to wait for a addr2line response.
+          After the timeout, the instance will be considered hung and respawned.
+      source_root_path: In some toolchains only the name of the source file is
+          output, without any path information; disambiguation searches
+          through the source directory specified by |source_root_path| argument
+          for files whose name matches, adding the full path information to the
+          output. For example, if the toolchain outputs "unicode.cc" and there
+          is a file called "unicode.cc" located under |source_root_path|/foo,
+          the tool will replace "unicode.cc" with
+          "|source_root_path|/foo/unicode.cc". If there are multiple files with
+          the same name, disambiguation will fail because the tool cannot
+          determine which of the files was the source of the symbol.
+      strip_base_path: Rebases the symbols source paths onto |source_root_path|
+          (i.e. replace |strip_base_path| with |source_root_path|).
+    """
+    assert(os.path.isfile(addr2line_path)), 'Cannot find ' + addr2line_path
+    self.elf_file_path = elf_file_path
+    self.addr2line_path = addr2line_path
+    self.callback = callback
+    self.inlines = inlines
+    self.max_concurrent_jobs = (max_concurrent_jobs or
+                                min(multiprocessing.cpu_count(), 4))
+    self.max_queue_size = max_queue_size
+    self.addr2line_timeout = addr2line_timeout
+    self.requests_counter = 0  # For generating monotonic request IDs.
+    self._a2l_instances = []  # Up to |max_concurrent_jobs| _Addr2Line inst.
+
+    # If necessary, create disambiguation lookup table
+    self.disambiguate = source_root_path is not None
+    self.disambiguation_table = {}
+    self.strip_base_path = strip_base_path
+    if self.disambiguate:
+      self.source_root_path = os.path.abspath(source_root_path)
+      self._CreateDisambiguationTable()
+
+    # Create one addr2line instance. More instances will be created on demand
+    # (up to |max_concurrent_jobs|) depending on the rate of the requests.
+    self._CreateNewA2LInstance()
+
+  def SymbolizeAsync(self, addr, callback_arg=None):
+    """Requests symbolization of a given address.
+
+    This method is not guaranteed to return immediately. It generally does, but
+    in some scenarios (e.g. all addr2line instances have full queues) it can
+    block to create back-pressure.
+
+    Args:
+      addr: address to symbolize.
+      callback_arg: optional argument which will be passed to the |callback|."""
+    assert(isinstance(addr, int))
+
+    # Process all the symbols that have been resolved in the meanwhile.
+    # Essentially, this drains all the addr2line(s) out queues.
+    for a2l_to_purge in self._a2l_instances:
+      a2l_to_purge.ProcessAllResolvedSymbolsInQueue()
+      a2l_to_purge.RecycleIfNecessary()
+
+    # Find the best instance according to this logic:
+    # 1. Find an existing instance with the shortest queue.
+    # 2. If all the instances' queues are full, but there is room in the pool
+    #    (i.e. < |max_concurrent_jobs|), create a new instance.
+    # 3. If there were already |max_concurrent_jobs| instances and all of them
+    #    had full queues, apply back-pressure.
+
+    # 1.
+    def _SortByQueueSizeAndReqID(a2l):
+      return (a2l.queue_size, a2l.first_request_id)
+    a2l = min(self._a2l_instances, key=_SortByQueueSizeAndReqID)
+
+    # 2.
+    if (a2l.queue_size >= self.max_queue_size and
+        len(self._a2l_instances) < self.max_concurrent_jobs):
+      a2l = self._CreateNewA2LInstance()
+
+    # 3.
+    if a2l.queue_size >= self.max_queue_size:
+      a2l.WaitForNextSymbolInQueue()
+
+    a2l.EnqueueRequest(addr, callback_arg)
+
+  def Join(self):
+    """Waits for all the outstanding requests to complete and terminates."""
+    for a2l in self._a2l_instances:
+      a2l.WaitForIdle()
+      a2l.Terminate()
+
+  def _CreateNewA2LInstance(self):
+    assert(len(self._a2l_instances) < self.max_concurrent_jobs)
+    a2l = ELFSymbolizer.Addr2Line(self)
+    self._a2l_instances.append(a2l)
+    return a2l
+
+  def _CreateDisambiguationTable(self):
+      """Non-unique file names will result in None entries."""
+    start_time = time.time()
+    logging.info('Collecting information about available source files...')
+    self.disambiguation_table = {}
+
+    for root, _, filenames in os.walk(self.source_root_path):
+      for f in filenames:
+        self.disambiguation_table[f] = (
+            os.path.join(root, f) if f not in self.disambiguation_table
+            else None)
+    logging.info('Finished collecting information about '
+                 'possible files (took %.1f s).',
+                 (time.time() - start_time))
+
+
+  class Addr2Line(object):
+    """A python wrapper around an addr2line instance.
+
+    The communication with the addr2line process looks as follows:
+      [STDIN]         [STDOUT]  (from addr2line's viewpoint)
+    > f001111
+    > f002222
+                    < Symbol::Name(foo, bar) for f001111
+                    < /path/to/source/file.c:line_number
+    > f003333
+                    < Symbol::Name2() for f002222
+                    < /path/to/source/file.c:line_number
+                    < Symbol::Name3() for f003333
+                    < /path/to/source/file.c:line_number
+    """
+
+    SYM_ADDR_RE = re.compile(r'([^:]+):(\?|\d+).*')
+
+    def __init__(self, symbolizer):
+      self._symbolizer = symbolizer
+      self._lib_file_name = posixpath.basename(symbolizer.elf_file_path)
+
+      # The request queue (i.e. addresses pushed to addr2line's stdin and not
+      # yet retrieved on stdout)
+      self._request_queue = collections.deque()
+
+      # This is essentially len(self._request_queue). It has been optimized to a
+      # separate field because turned out to be a perf hot-spot.
+      self.queue_size = 0
+
+      # Keep track of the number of symbols a process has processed to
+      # avoid a single process growing too big and using all the memory.
+      self._processed_symbols_count = 0
+
+      # Objects required to handle the addr2line subprocess.
+      self._proc = None  # Subprocess.Popen(...) instance.
+      self._thread = None  # Threading.thread instance.
+      self._out_queue = None  # Queue.Queue instance (for buffering a2l stdout).
+      self._RestartAddr2LineProcess()
+
+    def EnqueueRequest(self, addr, callback_arg):
+      """Pushes an address to addr2line's stdin (and keeps track of it)."""
+      self._symbolizer.requests_counter += 1  # For global "age" of requests.
+      req_idx = self._symbolizer.requests_counter
+      self._request_queue.append((addr, callback_arg, req_idx))
+      self.queue_size += 1
+      self._WriteToA2lStdin(addr)
+
+    def WaitForIdle(self):
+      """Waits until all the pending requests have been symbolized."""
+      while self.queue_size > 0:
+        self.WaitForNextSymbolInQueue()
+
+    def WaitForNextSymbolInQueue(self):
+      """Waits for the next pending request to be symbolized."""
+      if not self.queue_size:
+        return
+
+      # This outer loop guards against a2l hanging (detecting stdout timeout).
+      while True:
+        start_time = datetime.datetime.now()
+        timeout = datetime.timedelta(seconds=self._symbolizer.addr2line_timeout)
+
+        # The inner loop guards against a2l crashing (checking if it exited).
+        while (datetime.datetime.now() - start_time < timeout):
+          # poll() returns !None if the process exited. a2l should never exit.
+          if self._proc.poll():
+            logging.warning('addr2line crashed, respawning (lib: %s).' %
+                            self._lib_file_name)
+            self._RestartAddr2LineProcess()
+            # TODO(primiano): the best thing to do in this case would be
+            # shrinking the pool size as, very likely, addr2line is crashed
+            # due to low memory (and the respawned one will die again soon).
+
+          try:
+            lines = self._out_queue.get(block=True, timeout=0.25)
+          except Queue.Empty:
+            # On timeout (1/4 s.), repeat the inner loop and check whether the
+            # addr2line process crashed or we have waited for its output too
+            # long.
+            continue
+
+          # In nominal conditions, we get straight to this point.
+          self._ProcessSymbolOutput(lines)
+          return
+
+        # If this point is reached, we waited more than |addr2line_timeout|.
+        logging.warning('Hung addr2line process, respawning (lib: %s).' %
+                        self._lib_file_name)
+        self._RestartAddr2LineProcess()
+
+    def ProcessAllResolvedSymbolsInQueue(self):
+      """Consumes all the addr2line output lines produced (without blocking)."""
+      if not self.queue_size:
+        return
+      while True:
+        try:
+          lines = self._out_queue.get_nowait()
+        except Queue.Empty:
+          break
+        self._ProcessSymbolOutput(lines)
+
+    def RecycleIfNecessary(self):
+      """Restarts the process if it has been used for too long.
+
+      A long running addr2line process will consume excessive amounts
+      of memory without any gain in performance."""
+      if self._processed_symbols_count >= ADDR2LINE_RECYCLE_LIMIT:
+        self._RestartAddr2LineProcess()
+
+
+    def Terminate(self):
+      """Kills the underlying addr2line process.
+
+      The poller |_thread| will terminate as well due to the broken pipe."""
+      try:
+        self._proc.kill()
+        self._proc.communicate()  # Essentially wait() without risking deadlock.
+      except Exception:  # An exception while terminating? How interesting.
+        pass
+      self._proc = None
+
+    def _WriteToA2lStdin(self, addr):
+      self._proc.stdin.write('%s\n' % hex(addr))
+      if self._symbolizer.inlines:
+        # In the case of inlines we output an extra blank line, which causes
+        # addr2line to emit a (??,??:0) tuple that we use as a boundary marker.
+        self._proc.stdin.write('\n')
+      self._proc.stdin.flush()
+
+    def _ProcessSymbolOutput(self, lines):
+      """Parses an addr2line symbol output and triggers the client callback."""
+      (_, callback_arg, _) = self._request_queue.popleft()
+      self.queue_size -= 1
+
+      innermost_sym_info = None
+      sym_info = None
+      for (line1, line2) in lines:
+        prev_sym_info = sym_info
+        name = line1 if not line1.startswith('?') else None
+        source_path = None
+        source_line = None
+        m = ELFSymbolizer.Addr2Line.SYM_ADDR_RE.match(line2)
+        if m:
+          if not m.group(1).startswith('?'):
+            source_path = m.group(1)
+            if not m.group(2).startswith('?'):
+              source_line = int(m.group(2))
+        else:
+          logging.warning('Got invalid symbol path from addr2line: %s' % line2)
+
+        # In case disambiguation is on, and needed
+        was_ambiguous = False
+        disambiguated = False
+        if self._symbolizer.disambiguate:
+          if source_path and not posixpath.isabs(source_path):
+            path = self._symbolizer.disambiguation_table.get(source_path)
+            was_ambiguous = True
+            disambiguated = path is not None
+            source_path = path if disambiguated else source_path
+
+          # Use absolute paths (so that paths are consistent, as disambiguation
+          # uses absolute paths)
+          if source_path and not was_ambiguous:
+            source_path = os.path.abspath(source_path)
+
+        if source_path and self._symbolizer.strip_base_path:
+          # Strip the base path
+          source_path = re.sub('^' + self._symbolizer.strip_base_path,
+              self._symbolizer.source_root_path or '', source_path)
+
+        sym_info = ELFSymbolInfo(name, source_path, source_line, was_ambiguous,
+                                 disambiguated)
+        if prev_sym_info:
+          prev_sym_info.inlined_by = sym_info
+        if not innermost_sym_info:
+          innermost_sym_info = sym_info
+
+      self._processed_symbols_count += 1
+      self._symbolizer.callback(innermost_sym_info, callback_arg)
+
+    def _RestartAddr2LineProcess(self):
+      if self._proc:
+        self.Terminate()
+
+      # The only reason this Queue (and the corresponding thread below) exists
+      # is the lack of a subprocess.stdout.poll_avail_lines().
+      # Essentially this is a pipe able to extract a couple of lines atomically.
+      self._out_queue = Queue.Queue()
+
+      # Start the underlying addr2line process in line buffered mode.
+      cmd = [self._symbolizer.addr2line_path, '--functions', '--demangle',
+          '--exe=' + self._symbolizer.elf_file_path]
+      if self._symbolizer.inlines:
+        cmd += ['--inlines']
+      self._proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE,
+          stdin=subprocess.PIPE, stderr=sys.stderr, close_fds=True)
+
+      # Start the poller thread, which simply moves atomically the lines read
+      # from the addr2line's stdout to the |_out_queue|.
+      self._thread = threading.Thread(
+          target=ELFSymbolizer.Addr2Line.StdoutReaderThread,
+          args=(self._proc.stdout, self._out_queue, self._symbolizer.inlines))
+      self._thread.daemon = True  # Don't prevent early process exit.
+      self._thread.start()
+
+      self._processed_symbols_count = 0
+
+      # Replay the pending requests on the new process (only needed when a
+      # hung addr2line timed out while requests were still in flight).
+      for (addr, _, _) in self._request_queue:
+        self._WriteToA2lStdin(addr)
+
+    @staticmethod
+    def StdoutReaderThread(process_pipe, queue, inlines):
+      """The poller thread fn, which moves the addr2line stdout to the |queue|.
+
+      This is the only piece of code not running on the main thread. It merely
+      writes to a Queue, which is thread-safe. In the case of inlines, it
+      detects the ??,??:0 marker and sends the lines atomically, such that the
+      main thread always receives all the lines corresponding to one symbol in
+      one shot."""
+      try:
+        lines_for_one_symbol = []
+        while True:
+          line1 = process_pipe.readline().rstrip('\r\n')
+          line2 = process_pipe.readline().rstrip('\r\n')
+          if not line1 or not line2:
+            break
+          inline_has_more_lines = inlines and (len(lines_for_one_symbol) == 0 or
+                                  (line1 != '??' and line2 != '??:0'))
+          if not inlines or inline_has_more_lines:
+            lines_for_one_symbol += [(line1, line2)]
+          if inline_has_more_lines:
+            continue
+          queue.put(lines_for_one_symbol)
+          lines_for_one_symbol = []
+        process_pipe.close()
+
+      # Every addr2line process will die at some point; let it die silently.
+      except (IOError, OSError):
+        pass
+
+    @property
+    def first_request_id(self):
+      """Returns the request_id of the oldest pending request in the queue."""
+      return self._request_queue[0][2] if self._request_queue else 0
+
+
+class ELFSymbolInfo(object):
+  """The result of the symbolization passed as first arg. of each callback."""
+
+  def __init__(self, name, source_path, source_line, was_ambiguous=False,
+               disambiguated=False):
+    """All the fields here can be None (if addr2line replies with '??')."""
+    self.name = name
+    self.source_path = source_path
+    self.source_line = source_line
+    # In the case of |inlines|=True, the |inlined_by| points to the outer
+    # function inlining the current one (and so on, to form a chain).
+    self.inlined_by = None
+    self.disambiguated = disambiguated
+    self.was_ambiguous = was_ambiguous
+
+  def __str__(self):
+    return '%s [%s:%d]' % (
+        self.name or '??', self.source_path or '??', self.source_line or 0)
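+
+
+# A minimal symbolization sketch, assuming a hypothetical libfoo.so and a
+# matching toolchain addr2line binary; the addresses are illustrative only.
+#
+#   def _OnSymbolized(sym_info, addr):
+#     print '%s -> %s' % (hex(addr), sym_info)
+#
+#   symbolizer = ELFSymbolizer('out/Release/lib/libfoo.so',
+#                              '/path/to/toolchain/addr2line',
+#                              _OnSymbolized, inlines=False)
+#   for address in (0x1234, 0x5678):
+#     symbolizer.SymbolizeAsync(address, callback_arg=address)
+#   symbolizer.Join()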
diff --git a/build/android/pylib/symbols/elf_symbolizer_unittest.py b/build/android/pylib/symbols/elf_symbolizer_unittest.py
new file mode 100755
index 0000000..e963a34
--- /dev/null
+++ b/build/android/pylib/symbols/elf_symbolizer_unittest.py
@@ -0,0 +1,173 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import functools
+import logging
+import os
+import sys
+import unittest
+
+sys.path.insert(0, os.path.dirname(__file__))
+import elf_symbolizer
+import mock_addr2line
+
+
+_MOCK_A2L_PATH = os.path.join(os.path.dirname(mock_addr2line.__file__),
+                              'mock_addr2line')
+_INCOMPLETE_MOCK_ADDR = 1024 * 1024
+_UNKNOWN_MOCK_ADDR = 2 * 1024 * 1024
+_INLINE_MOCK_ADDR = 3 * 1024 * 1024
+
+
+class ELFSymbolizerTest(unittest.TestCase):
+  def setUp(self):
+    self._callback = functools.partial(
+        ELFSymbolizerTest._SymbolizeCallback, self)
+    self._resolved_addresses = set()
+    # Mute warnings, we expect them due to the crash/hang tests.
+    logging.getLogger().setLevel(logging.ERROR)
+
+  def testParallelism1(self):
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+
+  def testParallelism4(self):
+    self._RunTest(max_concurrent_jobs=4, num_symbols=100)
+
+  def testParallelism8(self):
+    self._RunTest(max_concurrent_jobs=8, num_symbols=100)
+
+  def testCrash(self):
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_CRASH_EVERY'] = '0'
+
+  def testHang(self):
+    os.environ['MOCK_A2L_HANG_EVERY'] = '99'
+    self._RunTest(max_concurrent_jobs=1, num_symbols=100)
+    os.environ['MOCK_A2L_HANG_EVERY'] = '0'
+
+  def testInlines(self):
+    """Stimulate the inline processing logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        inlines=True,
+        max_concurrent_jobs=4)
+
+    for addr in xrange(1000):
+      exp_inline = False
+      exp_unknown = False
+
+      # First 100 addresses with inlines.
+      if addr < 100:
+        addr += _INLINE_MOCK_ADDR
+        exp_inline = True
+
+      # Followed by 100 without inlines.
+      elif addr < 200:
+        pass
+
+      # Followed by 100 interleaved inlines and not inlines.
+      elif addr < 300:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+
+      # Followed by 100 interleaved inlines and unknowns.
+      elif addr < 400:
+        if addr & 1:
+          addr += _INLINE_MOCK_ADDR
+          exp_inline = True
+        else:
+          addr += _UNKNOWN_MOCK_ADDR
+          exp_unknown = True
+
+      exp_name = 'mock_sym_for_addr_%d' % addr if not exp_unknown else None
+      exp_source_path = 'mock_src/mock_lib1.so.c' if not exp_unknown else None
+      exp_source_line = addr if not exp_unknown else None
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, exp_inline)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def testIncompleteSyminfo(self):
+    """Stimulate the symbol-not-resolved logic."""
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=1)
+
+    # Test symbols with valid name but incomplete path.
+    addr = _INCOMPLETE_MOCK_ADDR
+    exp_name = 'mock_sym_for_addr_%d' % addr
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    # Test symbols with no name or sym info.
+    addr = _UNKNOWN_MOCK_ADDR
+    exp_name = None
+    exp_source_path = None
+    exp_source_line = None
+    cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+    symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+  def _RunTest(self, max_concurrent_jobs, num_symbols):
+    symbolizer = elf_symbolizer.ELFSymbolizer(
+        elf_file_path='/path/doesnt/matter/mock_lib1.so',
+        addr2line_path=_MOCK_A2L_PATH,
+        callback=self._callback,
+        max_concurrent_jobs=max_concurrent_jobs,
+        addr2line_timeout=0.5)
+
+    for addr in xrange(num_symbols):
+      exp_name = 'mock_sym_for_addr_%d' % addr
+      exp_source_path = 'mock_src/mock_lib1.so.c'
+      exp_source_line = addr
+      cb_arg = (addr, exp_name, exp_source_path, exp_source_line, False)
+      symbolizer.SymbolizeAsync(addr, cb_arg)
+
+    symbolizer.Join()
+
+    # Check that all the expected callbacks have been received.
+    for addr in xrange(num_symbols):
+      self.assertIn(addr, self._resolved_addresses)
+      self._resolved_addresses.remove(addr)
+
+    # Check for unexpected callbacks.
+    self.assertEqual(len(self._resolved_addresses), 0)
+
+  def _SymbolizeCallback(self, sym_info, cb_arg):
+    self.assertTrue(isinstance(sym_info, elf_symbolizer.ELFSymbolInfo))
+    self.assertTrue(isinstance(cb_arg, tuple))
+    self.assertEqual(len(cb_arg), 5)
+
+    # Unpack expectations from the callback extra argument.
+    (addr, exp_name, exp_source_path, exp_source_line, exp_inlines) = cb_arg
+    if exp_name is None:
+      self.assertIsNone(sym_info.name)
+    else:
+      self.assertTrue(sym_info.name.startswith(exp_name))
+    self.assertEqual(sym_info.source_path, exp_source_path)
+    self.assertEqual(sym_info.source_line, exp_source_line)
+
+    if exp_inlines:
+      self.assertEqual(sym_info.name, exp_name + '_inner')
+      self.assertEqual(sym_info.inlined_by.name, exp_name + '_middle')
+      self.assertEqual(sym_info.inlined_by.inlined_by.name,
+                       exp_name + '_outer')
+
+    # Check against duplicate callbacks.
+    self.assertNotIn(addr, self._resolved_addresses)
+    self._resolved_addresses.add(addr)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/symbols/mock_addr2line/__init__.py b/build/android/pylib/symbols/mock_addr2line/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/symbols/mock_addr2line/__init__.py
diff --git a/build/android/pylib/symbols/mock_addr2line/mock_addr2line b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
new file mode 100755
index 0000000..cd58f56
--- /dev/null
+++ b/build/android/pylib/symbols/mock_addr2line/mock_addr2line
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Simple mock for addr2line.
+
+Outputs mock symbol information, with each symbol being a function of the
+original address (so it is easy to double-check consistency in unittests).
+"""
+
+import optparse
+import os
+import posixpath
+import sys
+import time
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-e', '--exe', dest='exe')  # Path of the debug-library.so.
+  # Silently swallow the other unnecessary arguments.
+  parser.add_option('-C', '--demangle', action='store_true')
+  parser.add_option('-f', '--functions', action='store_true')
+  parser.add_option('-i', '--inlines', action='store_true')
+  options, _ = parser.parse_args(argv[1:])
+  lib_file_name = posixpath.basename(options.exe)
+  processed_sym_count = 0
+  crash_every = int(os.environ.get('MOCK_A2L_CRASH_EVERY', 0))
+  hang_every = int(os.environ.get('MOCK_A2L_HANG_EVERY', 0))
+
+  while True:
+    line = sys.stdin.readline().rstrip('\r')
+    if not line:
+      break
+
+    # An empty line should generate '??,??:0' (used as a marker for inlines).
+    if line == '\n':
+      print '??'
+      print '??:0'
+      sys.stdout.flush()
+      continue
+
+    addr = int(line, 16)
+    processed_sym_count += 1
+    if crash_every and processed_sym_count % crash_every == 0:
+      sys.exit(1)
+    if hang_every and processed_sym_count % hang_every == 0:
+      time.sleep(1)
+
+    # Addresses < 1M will return good mock symbol information.
+    if addr < 1024 * 1024:
+      print 'mock_sym_for_addr_%d' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+    # Addresses 1M <= x < 2M will return symbols with a name but a missing path.
+    elif addr < 2 * 1024 * 1024:
+      print 'mock_sym_for_addr_%d' % addr
+      print '??:0'
+
+    # Addresses 2M <= x < 3M will return unknown symbol information.
+    elif addr < 3 * 1024 * 1024:
+      print '??'
+      print '??'
+
+    # Addresses 3M <= x < 4M will return inlines.
+    elif addr < 4 * 1024 * 1024:
+      print 'mock_sym_for_addr_%d_inner' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+      print 'mock_sym_for_addr_%d_middle' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+      print 'mock_sym_for_addr_%d_outer' % addr
+      print 'mock_src/%s.c:%d' % (lib_file_name, addr)
+
+    sys.stdout.flush()
+
+
+if __name__ == '__main__':
+  main(sys.argv)
\ No newline at end of file
diff --git a/build/android/pylib/system_properties.py b/build/android/pylib/system_properties.py
new file mode 100644
index 0000000..3f16f86
--- /dev/null
+++ b/build/android/pylib/system_properties.py
@@ -0,0 +1,40 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class SystemProperties(dict):
+
+  """A dict interface to interact with device system properties.
+
+  System properties are key/value pairs as exposed by adb shell getprop/setprop.
+
+  This implementation minimizes interaction with the physical device. It is
+  valid for the lifetime of a boot.
+  """
+
+  def __init__(self, android_commands):
+    super(SystemProperties, self).__init__()
+    self._adb = android_commands
+    self._cached_static_properties = {}
+
+  def __getitem__(self, key):
+    if self._IsStatic(key):
+      if key not in self._cached_static_properties:
+        self._cached_static_properties[key] = self._GetProperty(key)
+      return self._cached_static_properties[key]
+    return self._GetProperty(key)
+
+  def __setitem__(self, key, value):
+    # TODO(tonyg): This can fail with no root. Verify that it succeeds.
+    self._adb.SendShellCommand('setprop %s "%s"' % (key, value), retry_count=3)
+
+  @staticmethod
+  def _IsStatic(key):
+    # TODO(tonyg): This list is conservative and could be expanded as needed.
+    return (key.startswith('ro.boot.') or
+            key.startswith('ro.build.') or
+            key.startswith('ro.product.'))
+
+  def _GetProperty(self, key):
+    return self._adb.SendShellCommand('getprop %s' % key, retry_count=3).strip()
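+
+# Example usage (editor's illustrative sketch, not part of the original change;
+# 'android_commands' stands for an AndroidCommands-style object that exposes
+# SendShellCommand()):
+#
+#   props = SystemProperties(android_commands)
+#   sdk = props['ro.build.version.sdk']      # 'ro.*' keys are cached locally.
+#   props['persist.sys.usb.config'] = 'adb'  # setprop; may require root.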
diff --git a/build/android/pylib/uiautomator/__init__.py b/build/android/pylib/uiautomator/__init__.py
new file mode 100644
index 0000000..cda7672
--- /dev/null
+++ b/build/android/pylib/uiautomator/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/uiautomator/setup.py b/build/android/pylib/uiautomator/setup.py
new file mode 100644
index 0000000..bd8ffc7
--- /dev/null
+++ b/build/android/pylib/uiautomator/setup.py
@@ -0,0 +1,35 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generates test runner factory and tests for uiautomator tests."""
+
+import logging
+
+from pylib.uiautomator import test_package
+from pylib.uiautomator import test_runner
+
+
+def Setup(test_options):
+  """Runs uiautomator tests on connected device(s).
+
+  Args:
+    test_options: A UIAutomatorOptions object.
+
+  Returns:
+    A tuple of (TestRunnerFactory, tests).
+  """
+  test_pkg = test_package.TestPackage(test_options.uiautomator_jar,
+                                      test_options.uiautomator_info_jar)
+  tests = test_pkg.GetAllMatchingTests(test_options.annotations,
+                                       test_options.exclude_annotations,
+                                       test_options.test_filter)
+
+  if not tests:
+    logging.error('No uiautomator tests to run with current args.')
+
+  def TestRunnerFactory(device, shard_index):
+    return test_runner.TestRunner(
+        test_options, device, shard_index, test_pkg)
+
+  return (TestRunnerFactory, tests)
diff --git a/build/android/pylib/uiautomator/test_options.py b/build/android/pylib/uiautomator/test_options.py
new file mode 100644
index 0000000..3f5f950
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_options.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Defines the UIAutomatorOptions named tuple."""
+
+import collections
+
+UIAutomatorOptions = collections.namedtuple('UIAutomatorOptions', [
+    'tool',
+    'annotations',
+    'exclude_annotations',
+    'test_filter',
+    'test_data',
+    'save_perf_json',
+    'screenshot_failures',
+    'uiautomator_jar',
+    'uiautomator_info_jar',
+    'package',
+    'set_asserts'])
diff --git a/build/android/pylib/uiautomator/test_package.py b/build/android/pylib/uiautomator/test_package.py
new file mode 100644
index 0000000..cb51fdf
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_package.py
@@ -0,0 +1,33 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class representing uiautomator test package."""
+
+import os
+
+from pylib import constants
+from pylib.instrumentation import test_jar
+
+
+class TestPackage(test_jar.TestJar):
+
+  UIAUTOMATOR_PATH = 'uiautomator/'
+  UIAUTOMATOR_DEVICE_DIR = os.path.join(constants.TEST_EXECUTABLE_DIR,
+                                        UIAUTOMATOR_PATH)
+
+  def __init__(self, jar_path, jar_info_path):
+    test_jar.TestJar.__init__(self, jar_info_path)
+
+    if not os.path.exists(jar_path):
+      raise Exception('%s not found, please build it' % jar_path)
+    self._jar_path = jar_path
+
+  def GetPackageName(self):
+    """Returns the JAR named that is installed on the device."""
+    return os.path.basename(self._jar_path)
+
+  # Override.
+  def Install(self, device):
+    device.PushChangedFiles([(self._jar_path, self.UIAUTOMATOR_DEVICE_DIR +
+                              self.GetPackageName())])
diff --git a/build/android/pylib/uiautomator/test_runner.py b/build/android/pylib/uiautomator/test_runner.py
new file mode 100644
index 0000000..bda6687
--- /dev/null
+++ b/build/android/pylib/uiautomator/test_runner.py
@@ -0,0 +1,89 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Class for running uiautomator tests on a single device."""
+
+from pylib import constants
+from pylib import flag_changer
+from pylib.device import intent
+from pylib.instrumentation import test_options as instr_test_options
+from pylib.instrumentation import test_runner as instr_test_runner
+
+
+class TestRunner(instr_test_runner.TestRunner):
+  """Responsible for running a series of tests connected to a single device."""
+
+  def __init__(self, test_options, device, shard_index, test_pkg):
+    """Create a new TestRunner.
+
+    Args:
+      test_options: A UIAutomatorOptions object.
+      device: Attached android device.
+      shard_index: Shard index.
+      test_pkg: A TestPackage object.
+    """
+    # Create an InstrumentationOptions object to pass to the super class
+    instrumentation_options = instr_test_options.InstrumentationOptions(
+        test_options.tool,
+        test_options.annotations,
+        test_options.exclude_annotations,
+        test_options.test_filter,
+        test_options.test_data,
+        test_options.save_perf_json,
+        test_options.screenshot_failures,
+        wait_for_debugger=False,
+        coverage_dir=None,
+        test_apk=None,
+        test_apk_path=None,
+        test_apk_jar_path=None,
+        test_runner=None,
+        test_support_apk_path=None,
+        device_flags=None,
+        isolate_file_path=None,
+        set_asserts=test_options.set_asserts,
+        delete_stale_data=False)
+    super(TestRunner, self).__init__(instrumentation_options, device,
+                                     shard_index, test_pkg)
+
+    cmdline_file = constants.PACKAGE_INFO[test_options.package].cmdline_file
+    self.flags = None
+    if cmdline_file:
+      self.flags = flag_changer.FlagChanger(self.device, cmdline_file)
+    self._package = constants.PACKAGE_INFO[test_options.package].package
+    self._activity = constants.PACKAGE_INFO[test_options.package].activity
+
+  #override
+  def InstallTestPackage(self):
+    self.test_pkg.Install(self.device)
+
+  #override
+  def _RunTest(self, test, timeout):
+    self.device.ClearApplicationState(self._package)
+    if self.flags:
+      annotations = self.test_pkg.GetTestAnnotations(test)
+      if 'FirstRunExperience' == annotations.get('Feature', None):
+        self.flags.RemoveFlags(['--disable-fre'])
+      else:
+        self.flags.AddFlags(['--disable-fre'])
+    self.device.StartActivity(
+        intent.Intent(action='android.intent.action.MAIN',
+                      activity=self._activity,
+                      package=self._package),
+        blocking=True,
+        force_stop=True)
+    cmd = ['uiautomator', 'runtest',
+           self.test_pkg.UIAUTOMATOR_PATH + self.test_pkg.GetPackageName(),
+           '-e', 'class', test,
+           '-e', 'test_package', self._package]
+    return self.device.RunShellCommand(cmd, timeout=timeout, retries=0)
+
+  #override
+  def _GenerateTestResult(self, test, _result_code, _result_bundle, statuses,
+                          start_ms, duration_ms):
+    # uiautomator emits its summary status with INSTRUMENTATION_STATUS_CODE,
+    # not INSTRUMENTATION_CODE, so we have to drop it from the list of statuses.
+    summary_code, summary_bundle = statuses[-1]
+    return super(TestRunner, self)._GenerateTestResult(
+        test, summary_code, summary_bundle, statuses[:-1], start_ms,
+        duration_ms)
diff --git a/build/android/pylib/uirobot/__init__.py b/build/android/pylib/uirobot/__init__.py
new file mode 100644
index 0000000..5cac026
--- /dev/null
+++ b/build/android/pylib/uirobot/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
diff --git a/build/android/pylib/uirobot/uirobot_test_instance.py b/build/android/pylib/uirobot/uirobot_test_instance.py
new file mode 100644
index 0000000..e3f6eb7
--- /dev/null
+++ b/build/android/pylib/uirobot/uirobot_test_instance.py
@@ -0,0 +1,79 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import json
+import logging
+
+from pylib import constants
+from pylib.base import test_instance
+from pylib.utils import apk_helper
+
+class UirobotTestInstance(test_instance.TestInstance):
+
+  def __init__(self, args, error_func):
+    """Constructor.
+
+    Args:
+      args: Command line arguments.
+      error_func: Function called with an error message on invalid arguments.
+    """
+    super(UirobotTestInstance, self).__init__()
+    if not args.app_under_test:
+      error_func('Must set --app-under-test.')
+    self._app_under_test = args.app_under_test
+    self._minutes = args.minutes
+
+    if args.remote_device_file:
+      with open(args.remote_device_file) as remote_device_file:
+        device_json = json.load(remote_device_file)
+    else:
+      device_json = {}
+    device_type = device_json.get('device_type', 'Android')
+    if args.device_type:
+      if device_type and device_type != args.device_type:
+        logging.info('Overriding device_type from %s to %s',
+                     device_type, args.device_type)
+      device_type = args.device_type
+
+    if device_type == 'Android':
+      self._suite = 'Android Uirobot'
+      self._package_name = apk_helper.GetPackageName(self._app_under_test)
+    elif device_type == 'iOS':
+      self._suite = 'iOS Uirobot'
+      self._package_name = self._app_under_test
+
+
+  #override
+  def TestType(self):
+    """Returns type of test."""
+    return 'uirobot'
+
+  #override
+  def SetUp(self):
+    """Setup for test."""
+    pass
+
+  #override
+  def TearDown(self):
+    """Teardown for test."""
+    pass
+
+  @property
+  def app_under_test(self):
+    """Returns the app to run the test on."""
+    return self._app_under_test
+
+  @property
+  def minutes(self):
+    """Returns the number of minutes to run the uirobot for."""
+    return self._minutes
+
+  @property
+  def package_name(self):
+    """Returns the name of the package in the APK."""
+    return self._package_name
+
+  @property
+  def suite(self):
+    return self._suite
diff --git a/build/android/pylib/utils/__init__.py b/build/android/pylib/utils/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/android/pylib/utils/__init__.py
diff --git a/build/android/pylib/utils/apk_helper.py b/build/android/pylib/utils/apk_helper.py
new file mode 100644
index 0000000..a556e7b
--- /dev/null
+++ b/build/android/pylib/utils/apk_helper.py
@@ -0,0 +1,131 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Module containing utilities for apk packages."""
+
+import os.path
+import re
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.sdk import aapt
+
+
+_AAPT_PATH = os.path.join(constants.ANDROID_SDK_TOOLS, 'aapt')
+_MANIFEST_ATTRIBUTE_RE = re.compile(
+    r'\s*A: ([^\(\)= ]*)\([^\(\)= ]*\)="(.*)" \(Raw: .*\)$')
+_MANIFEST_ELEMENT_RE = re.compile(r'\s*(?:E|N): (\S*) .*$')
+_PACKAGE_NAME_RE = re.compile(r'package: .*name=\'(\S*)\'')
+_SPLIT_NAME_RE = re.compile(r'package: .*split=\'(\S*)\'')
+
+
+def GetPackageName(apk_path):
+  """Returns the package name of the apk."""
+  return ApkHelper(apk_path).GetPackageName()
+
+
+# TODO(jbudorick): Deprecate and remove this function once callers have been
+# converted to ApkHelper.GetInstrumentationName
+def GetInstrumentationName(apk_path):
+  """Returns the name of the Instrumentation in the apk."""
+  return ApkHelper(apk_path).GetInstrumentationName()
+
+
+def _ParseManifestFromApk(apk_path):
+  aapt_output = aapt.Dump('xmltree', apk_path, 'AndroidManifest.xml')
+
+  parsed_manifest = {}
+  node_stack = [parsed_manifest]
+  indent = '  '
+
+  for line in aapt_output[1:]:
+    if len(line) == 0:
+      continue
+
+    indent_depth = 0
+    while line[(len(indent) * indent_depth):].startswith(indent):
+      indent_depth += 1
+
+    node_stack = node_stack[:indent_depth]
+    node = node_stack[-1]
+
+    m = _MANIFEST_ELEMENT_RE.match(line[len(indent) * indent_depth:])
+    if m:
+      if m.group(1) not in node:
+        node[m.group(1)] = {}
+      node_stack += [node[m.group(1)]]
+      continue
+
+    m = _MANIFEST_ATTRIBUTE_RE.match(line[len(indent) * indent_depth:])
+    if m:
+      if m.group(1) not in node:
+        node[m.group(1)] = []
+      node[m.group(1)].append(m.group(2))
+      continue
+
+  return parsed_manifest
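+
+# Editor's note on the structure built above: element lines ('E:'/'N:') become
+# nested dicts keyed by tag name, and attribute lines ('A:') become lists of
+# string values on their enclosing element, which is why callers index e.g.
+# manifest_info['manifest']['instrumentation']['android:name'][0] below.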
+
+
+class ApkHelper(object):
+  def __init__(self, apk_path):
+    self._apk_path = apk_path
+    self._manifest = None
+    self._package_name = None
+    self._split_name = None
+
+  def GetActivityName(self):
+    """Returns the name of the Activity in the apk."""
+    manifest_info = self._GetManifest()
+    try:
+      activity = (
+          manifest_info['manifest']['application']['activity']
+              ['android:name'][0])
+    except KeyError:
+      return None
+    if '.' not in activity:
+      activity = '%s.%s' % (self.GetPackageName(), activity)
+    elif activity.startswith('.'):
+      activity = '%s%s' % (self.GetPackageName(), activity)
+    return activity
+
+  def GetInstrumentationName(
+      self, default='android.test.InstrumentationTestRunner'):
+    """Returns the name of the Instrumentation in the apk."""
+    manifest_info = self._GetManifest()
+    try:
+      return manifest_info['manifest']['instrumentation']['android:name'][0]
+    except KeyError:
+      return default
+
+  def GetPackageName(self):
+    """Returns the package name of the apk."""
+    if self._package_name:
+      return self._package_name
+
+    aapt_output = aapt.Dump('badging', self._apk_path)
+    for line in aapt_output:
+      m = _PACKAGE_NAME_RE.match(line)
+      if m:
+        self._package_name = m.group(1)
+        return self._package_name
+    raise Exception('Failed to determine package name of %s' % self._apk_path)
+
+  def GetSplitName(self):
+    """Returns the name of the split of the apk."""
+    if self._split_name:
+      return self._split_name
+
+    aapt_output = aapt.Dump('badging', self._apk_path)
+    for line in aapt_output:
+      m = _SPLIT_NAME_RE.match(line)
+      if m:
+        self._split_name = m.group(1)
+        return self._split_name
+    return None
+
+  def _GetManifest(self):
+    if not self._manifest:
+      self._manifest = _ParseManifestFromApk(self._apk_path)
+    return self._manifest
+
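+
+# Example usage (editor's illustrative sketch, not part of the original change;
+# the apk path is hypothetical):
+#
+#   helper = ApkHelper('out/Release/apks/ChromePublic.apk')
+#   package = helper.GetPackageName()
+#   activity = helper.GetActivityName()       # None if no activity is declared.
+#   runner = helper.GetInstrumentationName()  # Falls back to the default runner.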
diff --git a/build/android/pylib/utils/base_error.py b/build/android/pylib/utils/base_error.py
new file mode 100644
index 0000000..31eaa54
--- /dev/null
+++ b/build/android/pylib/utils/base_error.py
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+class BaseError(Exception):
+  """Base error for all test runner errors."""
+
+  def __init__(self, message, is_infra_error=False):
+    super(BaseError, self).__init__(message)
+    self._is_infra_error = is_infra_error
+
+  @property
+  def is_infra_error(self):
+    """Property to indicate if error was caused by an infrastructure issue."""
+    return self._is_infra_error
\ No newline at end of file
diff --git a/build/android/pylib/utils/command_option_parser.py b/build/android/pylib/utils/command_option_parser.py
new file mode 100644
index 0000000..cf501d0
--- /dev/null
+++ b/build/android/pylib/utils/command_option_parser.py
@@ -0,0 +1,75 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""An option parser which handles the first arg as a command.
+
+Add other nice functionality such as printing a list of commands
+and an example in usage.
+"""
+
+import optparse
+import sys
+
+
+class CommandOptionParser(optparse.OptionParser):
+  """Wrapper class for OptionParser to help with listing commands."""
+
+  def __init__(self, *args, **kwargs):
+    """Creates a CommandOptionParser.
+
+    Args:
+      commands_dict: A dictionary mapping command strings to an object defining
+          - add_options_func: Adds options to the option parser
+          - run_command_func: Runs the command itself.
+      example: An example command.
+      everything else: Passed to the optparse.OptionParser constructor.
+    """
+    self.commands_dict = kwargs.pop('commands_dict', {})
+    self.example = kwargs.pop('example', '')
+    if 'usage' not in kwargs:
+      kwargs['usage'] = 'Usage: %prog <command> [options]'
+    optparse.OptionParser.__init__(self, *args, **kwargs)
+
+  #override
+  def get_usage(self):
+    normal_usage = optparse.OptionParser.get_usage(self)
+    command_list = self.get_command_list()
+    example = self.get_example()
+    return self.expand_prog_name(normal_usage + example + command_list)
+
+  #override
+  def get_command_list(self):
+    if self.commands_dict.keys():
+      return '\nCommands:\n  %s\n' % '\n  '.join(
+          sorted(self.commands_dict.keys()))
+    return ''
+
+  def get_example(self):
+    if self.example:
+      return '\nExample:\n  %s\n' % self.example
+    return ''
+
+
+def ParseAndExecute(option_parser, argv=None):
+  """Parses options/args from argv and runs the specified command.
+
+  Args:
+    option_parser: A CommandOptionParser object.
+    argv: Command line arguments. If None, automatically draw from sys.argv.
+
+  Returns:
+    An exit code.
+  """
+  if not argv:
+    argv = sys.argv
+
+  if len(argv) < 2 or argv[1] not in option_parser.commands_dict:
+    # Parse args first; if this is '--help', optparse will print help and exit.
+    option_parser.parse_args(argv)
+    option_parser.error('Invalid command.')
+
+  cmd = option_parser.commands_dict[argv[1]]
+  cmd.add_options_func(option_parser)
+  options, args = option_parser.parse_args(argv)
+  return cmd.run_command_func(argv[1], options, args, option_parser)
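+
+
+# Example usage (editor's illustrative sketch; 'run' and RunCommand are
+# hypothetical names for a caller-defined command object providing
+# add_options_func and run_command_func):
+#
+#   parser = CommandOptionParser(commands_dict={'run': RunCommand()},
+#                                example='%prog run --release')
+#   sys.exit(ParseAndExecute(parser))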
diff --git a/build/android/pylib/utils/device_temp_file.py b/build/android/pylib/utils/device_temp_file.py
new file mode 100644
index 0000000..7d3b95b
--- /dev/null
+++ b/build/android/pylib/utils/device_temp_file.py
@@ -0,0 +1,57 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A temp file that automatically gets pushed and deleted from a device."""
+
+# pylint: disable=W0622
+
+import random
+import time
+
+from pylib import cmd_helper
+from pylib.device import device_errors
+
+
+class DeviceTempFile(object):
+  def __init__(self, adb, suffix='', prefix='temp_file', dir='/data/local/tmp'):
+    """Find an unused temporary file path in the devices external directory.
+
+    When this object is closed, the file will be deleted on the device.
+
+    Args:
+      adb: An instance of AdbWrapper
+      suffix: The suffix of the name of the temp file.
+      prefix: The prefix of the name of the temp file.
+      dir: The directory on the device where to place the temp file.
+    """
+    self._adb = adb
+    # Make sure the temp dir exists.
+    self._adb.Shell('test -d %s' % cmd_helper.SingleQuote(dir))
+    while True:
+      self.name = '{dir}/{prefix}-{time:d}-{nonce:d}{suffix}'.format(
+        dir=dir, prefix=prefix, time=int(time.time()),
+        nonce=random.randint(0, 1000000), suffix=suffix)
+      self.name_quoted = cmd_helper.SingleQuote(self.name)
+      try:
+        self._adb.Shell('test -e %s' % self.name_quoted)
+      except device_errors.AdbCommandFailedError:
+        break  # The file does not exist.
+
+    # Immediately touch the file, so other temp files can't get the same name.
+    self._adb.Shell('touch %s' % self.name_quoted)
+
+  def close(self):
+    """Deletes the temporary file from the device."""
+    # Ignore the exception if the file is already gone.
+    try:
+      self._adb.Shell('rm -f %s' % self.name_quoted)
+    except device_errors.AdbCommandFailedError:
+      # On Android versions without 'rm -f' support (e.g. ICS), rm fails if
+      # the file does not exist.
+      pass
+
+  def __enter__(self):
+    return self
+
+  def __exit__(self, type, value, traceback):
+    self.close()
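+
+
+# Example usage (editor's illustrative sketch; 'adb' is an AdbWrapper instance):
+#
+#   with DeviceTempFile(adb, suffix='.json') as tmp:
+#     adb.Shell('echo hello > %s' % tmp.name)
+#     # ... use tmp.name on the device; the file is deleted on exit.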
diff --git a/build/android/pylib/utils/device_temp_file_test.py b/build/android/pylib/utils/device_temp_file_test.py
new file mode 100755
index 0000000..f839ce0
--- /dev/null
+++ b/build/android/pylib/utils/device_temp_file_test.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of device_temp_file.py.
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.utils import device_temp_file
+from pylib.utils import mock_calls
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+class DeviceTempFileTest(mock_calls.TestCase):
+
+  def setUp(self):
+    test_serial = '0123456789abcdef'
+    self.adb = mock.Mock(spec=adb_wrapper.AdbWrapper)
+    self.adb.__str__ = mock.Mock(return_value=test_serial)
+    self.watchMethodCalls(self.call.adb)
+
+  def mockShellCall(self, cmd_prefix, action=''):
+    """Expect an adb.Shell(cmd) call with cmd_prefix and do some action
+
+    Args:
+      cmd_prefix: A string, the cmd of the received call is expected to have
+          this as a prefix.
+      action: If callable, an action to perform when the expected call is
+          received, otherwise a return value.
+    Returns:
+      An (expected_call, action) pair suitable for use in assertCalls.
+    """
+    def check_and_return(cmd):
+      self.assertTrue(
+          cmd.startswith(cmd_prefix),
+          'command %r does not start with prefix %r' % (cmd, cmd_prefix))
+      if callable(action):
+        return action(cmd)
+      else:
+        return action
+    return (self.call.adb.Shell(mock.ANY), check_and_return)
+
+  def mockExistsTest(self, exists_result):
+    def action(cmd):
+      if exists_result:
+        return ''
+      else:
+        raise device_errors.AdbCommandFailedError(
+            cmd, 'File not found', 1, str(self.adb))
+    return self.mockShellCall('test -e ', action)
+
+  def testTempFileNameAlreadyExists(self):
+    with self.assertCalls(
+        self.mockShellCall('test -d /data/local/tmp'),
+        self.mockExistsTest(True),
+        self.mockExistsTest(True),
+        self.mockExistsTest(True),
+        self.mockExistsTest(False),
+        self.mockShellCall('touch '),
+        self.mockShellCall('rm -f ')):
+      with device_temp_file.DeviceTempFile(self.adb) as tmpfile:
+        logging.debug('Temp file name: %s' % tmpfile.name)
+
+  def testTempFileLifecycle(self):
+    with self.assertCalls(
+        self.mockShellCall('test -d /data/local/tmp'),
+        self.mockExistsTest(False),
+        self.mockShellCall('touch ')):
+      tempFileContextManager = device_temp_file.DeviceTempFile(self.adb)
+    with mock.patch.object(self.adb, 'Shell'):
+      with tempFileContextManager as tmpfile:
+        logging.debug('Temp file name: %s' % tmpfile.name)
+        self.assertEquals(0, self.adb.Shell.call_count)
+      self.assertEquals(1, self.adb.Shell.call_count)
+      args, _ = self.adb.Shell.call_args
+      self.assertTrue(args[0].startswith('rm -f '))
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
diff --git a/build/android/pylib/utils/emulator.py b/build/android/pylib/utils/emulator.py
new file mode 100644
index 0000000..cc07e61
--- /dev/null
+++ b/build/android/pylib/utils/emulator.py
@@ -0,0 +1,444 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Provides an interface to start and stop Android emulator.
+
+  Emulator: The class provides the methods to launch/shutdown the emulator with
+            the android virtual device named 'avd_armeabi' .
+"""
+
+import logging
+import os
+import signal
+import subprocess
+import time
+
+# TODO(craigdh): Move these pylib dependencies to pylib/utils/.
+from pylib import cmd_helper
+from pylib import constants
+from pylib import pexpect
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import time_profile
+
+import errors
+import run_command
+
+# SD card size
+SDCARD_SIZE = '512M'
+
+# Template used to generate config.ini files for the emulator
+CONFIG_TEMPLATE = """avd.ini.encoding=ISO-8859-1
+hw.dPad=no
+hw.lcd.density=320
+sdcard.size=512M
+hw.cpu.arch={hw.cpu.arch}
+hw.device.hash=-708107041
+hw.camera.back=none
+disk.dataPartition.size=800M
+hw.gpu.enabled=yes
+skin.path=720x1280
+skin.dynamic=yes
+hw.keyboard=yes
+hw.ramSize=1024
+hw.device.manufacturer=Google
+hw.sdCard=yes
+hw.mainKeys=no
+hw.accelerometer=yes
+skin.name=720x1280
+abi.type={abi.type}
+hw.trackBall=no
+hw.device.name=Galaxy Nexus
+hw.battery=yes
+hw.sensors.proximity=yes
+image.sysdir.1=system-images/android-{api.level}/{abi.type}/
+hw.sensors.orientation=yes
+hw.audioInput=yes
+hw.camera.front=none
+hw.gps=yes
+vm.heapSize=128
+{extras}"""
+
+CONFIG_REPLACEMENTS = {
+  'x86': {
+    '{hw.cpu.arch}': 'x86',
+    '{abi.type}': 'x86',
+    '{extras}': ''
+  },
+  'arm': {
+    '{hw.cpu.arch}': 'arm',
+    '{abi.type}': 'armeabi-v7a',
+    '{extras}': 'hw.cpu.model=cortex-a8\n'
+  },
+  'mips': {
+    '{hw.cpu.arch}': 'mips',
+    '{abi.type}': 'mips',
+    '{extras}': ''
+  }
+}
+
+class EmulatorLaunchException(Exception):
+  """Emulator failed to launch."""
+  pass
+
+def _KillAllEmulators():
+  """Kill all running emulators that look like ones we started.
+
+  There are odd 'sticky' cases where there can be no emulator process
+  running but a device slot is taken.  A little bot trouble and we're out of
+  room forever.
+  """
+  emulators = [d for d in device_utils.DeviceUtils.HealthyDevices()
+               if d.adb.is_emulator]
+  if not emulators:
+    return
+  for e in emulators:
+    e.adb.Emu(['kill'])
+  logging.info('Emulator killing is async; give a few seconds for all to die.')
+  for _ in range(5):
+    if not any(d.adb.is_emulator for d
+               in device_utils.DeviceUtils.HealthyDevices()):
+      return
+    time.sleep(1)
+
+
+def DeleteAllTempAVDs():
+  """Delete all temporary AVDs which are created for tests.
+
+  If a test exits abnormally, temporary AVDs created during testing may be
+  left on the system. This cleans them up.
+  """
+  avds = device_utils.GetAVDs()
+  if not avds:
+    return
+  for avd_name in avds:
+    if 'run_tests_avd' in avd_name:
+      cmd = ['android', '-s', 'delete', 'avd', '--name', avd_name]
+      cmd_helper.RunCmd(cmd)
+      logging.info('Delete AVD %s' % avd_name)
+
+
+class PortPool(object):
+  """Pool for emulator port starting position that changes over time."""
+  _port_min = 5554
+  _port_max = 5585
+  _port_current_index = 0
+
+  @classmethod
+  def port_range(cls):
+    """Return a range of valid ports for emulator use.
+
+    The port must be an even number between 5554 and 5584.  Sometimes
+    a killed emulator "hangs on" to a port long enough to prevent
+    relaunch.  This is especially true on slow machines (like a bot).
+    Cycling through a port start position helps make us resilient."""
+    ports = range(cls._port_min, cls._port_max, 2)
+    n = cls._port_current_index
+    cls._port_current_index = (n + 1) % len(ports)
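+    # Editor's worked example: ports == [5554, 5556, ..., 5584] (16 values).
+    # With n == 1, the rotation below returns [5556, 5558, ..., 5584, 5554],
+    # so each call starts the scan from a different position in the range.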
+    return ports[n:] + ports[:n]
+
+
+def _GetAvailablePort():
+  """Returns an available TCP port for the console."""
+  used_ports = []
+  emulators = [d for d in device_utils.DeviceUtils.HealthyDevices()
+               if d.adb.is_emulator]
+  for emulator in emulators:
+    used_ports.append(emulator.adb.GetDeviceSerial().split('-')[1])
+  for port in PortPool.port_range():
+    if str(port) not in used_ports:
+      return port
+
+
+def LaunchTempEmulators(emulator_count, abi, api_level, wait_for_boot=True):
+  """Create and launch temporary emulators and wait for them to boot.
+
+  Args:
+    emulator_count: number of emulators to launch.
+    abi: the emulator target platform
+    api_level: the api level (e.g., 19 for Android v4.4 - KitKat release)
+    wait_for_boot: whether or not to wait for emulators to boot up
+
+  Returns:
+    List of emulators.
+  """
+  emulators = []
+  for n in xrange(emulator_count):
+    t = time_profile.TimeProfile('Emulator launch %d' % n)
+    # Creates a temporary AVD.
+    avd_name = 'run_tests_avd_%d' % n
+    logging.info('Emulator launch %d with avd_name=%s and api=%d',
+        n, avd_name, api_level)
+    emulator = Emulator(avd_name, abi)
+    emulator.CreateAVD(api_level)
+    emulator.Launch(kill_all_emulators=n == 0)
+    t.Stop()
+    emulators.append(emulator)
+  # Wait for all emulators to finish booting.
+  if wait_for_boot:
+    for emulator in emulators:
+      emulator.ConfirmLaunch(True)
+  return emulators
+
+
+def LaunchEmulator(avd_name, abi):
+  """Launch an existing emulator with name avd_name.
+
+  Args:
+    avd_name: name of existing emulator
+    abi: the emulator target platform
+
+  Returns:
+    emulator object.
+  """
+  logging.info('Launching specified emulator avd_name=%s', avd_name)
+  emulator = Emulator(avd_name, abi)
+  emulator.Launch(kill_all_emulators=True)
+  emulator.ConfirmLaunch(True)
+  return emulator
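+
+
+# Editor's illustrative sketch of the typical flow (values are hypothetical):
+#
+#   emulators = LaunchTempEmulators(2, 'x86', 19)  # Create, launch, wait for boot.
+#   try:
+#     pass  # Run tests against the emulator devices.
+#   finally:
+#     for e in emulators:
+#       e.Shutdown()  # Kills the emulator process and deletes its AVD.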
+
+
+class Emulator(object):
+  """Provides the methods to launch/shutdown the emulator.
+
+  The emulator runs the Android virtual device given by avd_name.
+
+  The emulator uses an even TCP port between 5554 and 5584 for console
+  communication, and this port becomes part of the device serial, e.g.
+  'emulator-5554'. That serial is how this class identifies the emulator it
+  manages.
+
+  Attributes:
+    emulator: Path of Android's emulator tool.
+    popen: Popen object of the running emulator process.
+    device: Device name of this emulator.
+  """
+
+  # Signals we listen for to kill the emulator on
+  _SIGNALS = (signal.SIGINT, signal.SIGHUP)
+
+  # Time to wait for an emulator launch, in seconds.  This includes
+  # the time to launch the emulator and a wait-for-device command.
+  _LAUNCH_TIMEOUT = 120
+
+  # Timeout interval of wait-for-device command before bouncing to a
+  # process life check.
+  _WAITFORDEVICE_TIMEOUT = 5
+
+  # Time to wait for a "wait for boot complete" (property set on device).
+  _WAITFORBOOT_TIMEOUT = 300
+
+  def __init__(self, avd_name, abi):
+    """Init an Emulator.
+
+    Args:
+      avd_name: name of the AVD to create.
+      abi: target platform of the emulator; anything other than 'arm' or
+          'mips' is treated as 'x86'.
+    """
+    android_sdk_root = os.path.join(constants.EMULATOR_SDK_ROOT, 'sdk')
+    self.emulator = os.path.join(android_sdk_root, 'tools', 'emulator')
+    self.android = os.path.join(android_sdk_root, 'tools', 'android')
+    self.popen = None
+    self.device_serial = None
+    self.abi = abi
+    self.avd_name = avd_name
+
+  @staticmethod
+  def _DeviceName():
+    """Return our device name."""
+    port = _GetAvailablePort()
+    return ('emulator-%d' % port, port)
+
+  def CreateAVD(self, api_level):
+    """Creates an AVD with the given name.
+
+    Args:
+      api_level: the api level of the image
+
+    Returns:
+      avd_name.
+    """
+
+    if self.abi == 'arm':
+      abi_option = 'armeabi-v7a'
+    elif self.abi == 'mips':
+      abi_option = 'mips'
+    else:
+      abi_option = 'x86'
+
+    api_target = 'android-%s' % api_level
+
+    avd_command = [
+        self.android,
+        '--silent',
+        'create', 'avd',
+        '--name', self.avd_name,
+        '--abi', abi_option,
+        '--target', api_target,
+        '--sdcard', SDCARD_SIZE,
+        '--force',
+    ]
+    avd_cmd_str = ' '.join(avd_command)
+    logging.info('Create AVD command: %s', avd_cmd_str)
+    avd_process = pexpect.spawn(avd_cmd_str)
+
+    # Instead of creating a custom profile, we overwrite config files.
+    avd_process.expect('Do you wish to create a custom hardware profile')
+    avd_process.sendline('no\n')
+    avd_process.expect('Created AVD \'%s\'' % self.avd_name)
+
+    # Replace current configuration with default Galaxy Nexus config.
+    avds_dir = os.path.join(os.path.expanduser('~'), '.android', 'avd')
+    ini_file = os.path.join(avds_dir, '%s.ini' % self.avd_name)
+    new_config_ini = os.path.join(avds_dir, '%s.avd' % self.avd_name,
+                                  'config.ini')
+
+    # Remove the default config files; they are replaced with Galaxy Nexus
+    # settings below.
+    os.unlink(ini_file)
+    os.unlink(new_config_ini)
+
+    # Create new configuration files with Galaxy Nexus by Google settings.
+    with open(ini_file, 'w') as new_ini:
+      new_ini.write('avd.ini.encoding=ISO-8859-1\n')
+      new_ini.write('target=%s\n' % api_target)
+      new_ini.write('path=%s/%s.avd\n' % (avds_dir, self.avd_name))
+      new_ini.write('path.rel=avd/%s.avd\n' % self.avd_name)
+
+    custom_config = CONFIG_TEMPLATE
+    replacements = CONFIG_REPLACEMENTS[self.abi]
+    for key in replacements:
+      custom_config = custom_config.replace(key, replacements[key])
+    custom_config = custom_config.replace('{api.level}', str(api_level))
+
+    with open(new_config_ini, 'w') as new_config_ini:
+      new_config_ini.write(custom_config)
+
+    return self.avd_name
+
+
+  def _DeleteAVD(self):
+    """Delete the AVD of this emulator."""
+    avd_command = [
+        self.android,
+        '--silent',
+        'delete',
+        'avd',
+        '--name', self.avd_name,
+    ]
+    logging.info('Delete AVD command: %s', ' '.join(avd_command))
+    cmd_helper.RunCmd(avd_command)
+
+
+  def Launch(self, kill_all_emulators):
+    """Launches the emulator asynchronously. Call ConfirmLaunch() to ensure the
+    emulator is ready for use.
+
+    If fails, an exception will be raised.
+    """
+    if kill_all_emulators:
+      _KillAllEmulators()  # just to be sure
+    self._AggressiveImageCleanup()
+    (self.device_serial, port) = self._DeviceName()
+    emulator_command = [
+        self.emulator,
+        # Speed up emulator launch by 40%.  Really.
+        '-no-boot-anim',
+        # The default /data size is 64M.
+        # That's not enough for 8 unit test bundles and their data.
+        '-partition-size', '512',
+        # Use a familiar name and port.
+        '-avd', self.avd_name,
+        '-port', str(port),
+        # Wipe the data.  We've seen cases where an emulator gets 'stuck' if we
+        # don't do this (every thousand runs or so).
+        '-wipe-data',
+        # Enable GPU by default.
+        '-gpu', 'on',
+        '-qemu', '-m', '1024',
+        ]
+    if self.abi == 'x86':
+      emulator_command.extend([
+          # For x86 emulator --enable-kvm will fail early, avoiding accidental
+          # runs in a slow mode (i.e. without hardware virtualization support).
+          '--enable-kvm',
+          ])
+
+    logging.info('Emulator launch command: %s', ' '.join(emulator_command))
+    self.popen = subprocess.Popen(args=emulator_command,
+                                  stderr=subprocess.STDOUT)
+    self._InstallKillHandler()
+
+  @staticmethod
+  def _AggressiveImageCleanup():
+    """Aggressive cleanup of emulator images.
+
+    Experimentally it looks like our current emulator use on the bot
+    leaves image files around in /tmp/android-$USER.  If a "random"
+    name gets reused, we choke with a 'File exists' error.
+    TODO(jrg): is there a less hacky way to accomplish the same goal?
+    """
+    logging.info('Aggressive Image Cleanup')
+    emulator_imagedir = '/tmp/android-%s' % os.environ['USER']
+    if not os.path.exists(emulator_imagedir):
+      return
+    for image in os.listdir(emulator_imagedir):
+      full_name = os.path.join(emulator_imagedir, image)
+      if 'emulator' in full_name:
+        logging.info('Deleting emulator image %s', full_name)
+        os.unlink(full_name)
+
+  def ConfirmLaunch(self, wait_for_boot=False):
+    """Confirm the emulator launched properly.
+
+    Loop on a wait-for-device with a very small timeout.  On each
+    timeout, check the emulator process is still alive.
+    After confirming a wait-for-device can be successful, make sure
+    it returns the right answer.
+    """
+    seconds_waited = 0
+    number_of_waits = 2  # Make sure we can wfd twice
+
+    device = device_utils.DeviceUtils(self.device_serial)
+    while seconds_waited < self._LAUNCH_TIMEOUT:
+      try:
+        device.adb.WaitForDevice(
+            timeout=self._WAITFORDEVICE_TIMEOUT, retries=1)
+        number_of_waits -= 1
+        if not number_of_waits:
+          break
+      except device_errors.CommandTimeoutError:
+        seconds_waited += self._WAITFORDEVICE_TIMEOUT
+        device.adb.KillServer()
+      self.popen.poll()
+      if self.popen.returncode is not None:
+        raise EmulatorLaunchException('EMULATOR DIED')
+
+    if seconds_waited >= self._LAUNCH_TIMEOUT:
+      raise EmulatorLaunchException('TIMEOUT with wait-for-device')
+
+    logging.info('Seconds waited on wait-for-device: %d', seconds_waited)
+    if wait_for_boot:
+      # Now that we checked for obvious problems, wait for a boot complete.
+      # Waiting for the package manager is sometimes problematic.
+      device.WaitUntilFullyBooted(timeout=self._WAITFORBOOT_TIMEOUT)
+
+  def Shutdown(self):
+    """Shuts down the process started by launch."""
+    self._DeleteAVD()
+    if self.popen:
+      self.popen.poll()
+      if self.popen.returncode is None:
+        self.popen.kill()
+      self.popen = None
+
+  def _ShutdownOnSignal(self, _signum, _frame):
+    logging.critical('emulator _ShutdownOnSignal')
+    for sig in self._SIGNALS:
+      signal.signal(sig, signal.SIG_DFL)
+    self.Shutdown()
+    raise KeyboardInterrupt  # print a stack
+
+  def _InstallKillHandler(self):
+    """Install a handler to kill the emulator when we exit unexpectedly."""
+    for sig in self._SIGNALS:
+      signal.signal(sig, self._ShutdownOnSignal)
diff --git a/build/android/pylib/utils/findbugs.py b/build/android/pylib/utils/findbugs.py
new file mode 100644
index 0000000..8deb0fe
--- /dev/null
+++ b/build/android/pylib/utils/findbugs.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import logging
+import os
+import re
+import shlex
+import sys
+import xml.dom.minidom
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+_FINDBUGS_HOME = os.path.join(constants.DIR_SOURCE_ROOT, 'third_party',
+                              'findbugs')
+_FINDBUGS_JAR = os.path.join(_FINDBUGS_HOME, 'lib', 'findbugs.jar')
+_FINDBUGS_MAX_HEAP = 768
+_FINDBUGS_PLUGIN_PATH = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'android', 'findbugs_plugin', 'lib',
+    'chromiumPlugin.jar')
+
+
+def _ParseXmlResults(results_doc):
+  warnings = set()
+  for en in (n for n in results_doc.documentElement.childNodes
+             if n.nodeType == xml.dom.Node.ELEMENT_NODE):
+    if en.tagName == 'BugInstance':
+      warnings.add(_ParseBugInstance(en))
+  return warnings
+
+
+def _GetMessage(node):
+  for c in (n for n in node.childNodes
+            if n.nodeType == xml.dom.Node.ELEMENT_NODE):
+    if c.tagName == 'Message':
+      if (len(c.childNodes) == 1
+          and c.childNodes[0].nodeType == xml.dom.Node.TEXT_NODE):
+        return c.childNodes[0].data
+  return None
+
+
+def _ParseBugInstance(node):
+  bug = FindBugsWarning(node.getAttribute('type'))
+  msg_parts = []
+  for c in (n for n in node.childNodes
+            if n.nodeType == xml.dom.Node.ELEMENT_NODE):
+    if c.tagName == 'Class':
+      msg_parts.append(_GetMessage(c))
+    elif c.tagName == 'Method':
+      msg_parts.append(_GetMessage(c))
+    elif c.tagName == 'Field':
+      msg_parts.append(_GetMessage(c))
+    elif c.tagName == 'SourceLine':
+      bug.file_name = c.getAttribute('sourcefile')
+      if c.hasAttribute('start'):
+        bug.start_line = int(c.getAttribute('start'))
+      if c.hasAttribute('end'):
+        bug.end_line = int(c.getAttribute('end'))
+      msg_parts.append(_GetMessage(c))
+    elif (c.tagName == 'ShortMessage' and len(c.childNodes) == 1
+          and c.childNodes[0].nodeType == xml.dom.Node.TEXT_NODE):
+      msg_parts.append(c.childNodes[0].data)
+  bug.message = tuple(m for m in msg_parts if m)
+  return bug
+
+
+class FindBugsWarning(object):
+
+  def __init__(self, bug_type='', end_line=0, file_name='', message=None,
+               start_line=0):
+    self.bug_type = bug_type
+    self.end_line = end_line
+    self.file_name = file_name
+    if message is None:
+      self.message = tuple()
+    else:
+      self.message = message
+    self.start_line = start_line
+
+  def __cmp__(self, other):
+    return (cmp(self.file_name, other.file_name)
+            or cmp(self.start_line, other.start_line)
+            or cmp(self.end_line, other.end_line)
+            or cmp(self.bug_type, other.bug_type)
+            or cmp(self.message, other.message))
+
+  def __eq__(self, other):
+    return self.__dict__ == other.__dict__
+
+  def __hash__(self):
+    return hash((self.bug_type, self.end_line, self.file_name, self.message,
+                 self.start_line))
+
+  def __ne__(self, other):
+    return not self == other
+
+  def __str__(self):
+    return '%s: %s' % (self.bug_type, '\n  '.join(self.message))
+
+
+def Run(exclude, classes_to_analyze, auxiliary_classes, output_file,
+        findbug_args, jars):
+  """Run FindBugs.
+
+  Args:
+    exclude: The exclude XML file; see FindBugs's -exclude command line option.
+    classes_to_analyze: The list of classes to analyze; see FindBugs's
+                        -onlyAnalyze command line option.
+    auxiliary_classes: Classes that help the analysis; see FindBugs's
+                       -auxclasspath command line option.
+    output_file: An optional path to dump XML results to.
+    findbug_args: A list of additional command line options to pass to FindBugs.
+    jars: The jars to analyze.
+
+  Returns:
+    A tuple of (command string, set of FindBugsWarning).
+  """
+  # TODO(jbudorick): Get this from the build system.
+  system_classes = [
+    os.path.join(constants.ANDROID_SDK_ROOT, 'platforms',
+                 'android-%s' % constants.ANDROID_SDK_VERSION, 'android.jar')
+  ]
+  system_classes.extend(os.path.abspath(classes)
+                        for classes in auxiliary_classes or [])
+
+  cmd = ['java',
+         '-classpath', '%s:' % _FINDBUGS_JAR,
+         '-Xmx%dm' % _FINDBUGS_MAX_HEAP,
+         '-Dfindbugs.home="%s"' % _FINDBUGS_HOME,
+         '-jar', _FINDBUGS_JAR,
+         '-textui', '-sortByClass',
+         '-pluginList', _FINDBUGS_PLUGIN_PATH, '-xml:withMessages']
+  if system_classes:
+    cmd.extend(['-auxclasspath', ':'.join(system_classes)])
+  if classes_to_analyze:
+    cmd.extend(['-onlyAnalyze', classes_to_analyze])
+  if exclude:
+    cmd.extend(['-exclude', os.path.abspath(exclude)])
+  if output_file:
+    cmd.extend(['-output', output_file])
+  if findbug_args:
+    cmd.extend(findbug_args)
+  cmd.extend(os.path.abspath(j) for j in jars or [])
+
+  if output_file:
+    cmd_helper.RunCmd(cmd)
+    results_doc = xml.dom.minidom.parse(output_file)
+  else:
+    raw_out = cmd_helper.GetCmdOutput(cmd)
+    results_doc = xml.dom.minidom.parseString(raw_out)
+
+  current_warnings_set = _ParseXmlResults(results_doc)
+
+  return (' '.join(cmd), current_warnings_set)
+
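+
+# Example usage (editor's illustrative sketch; the jar path is hypothetical):
+#
+#   cmd, warnings = Run(exclude=None, classes_to_analyze=None,
+#                       auxiliary_classes=None, output_file=None,
+#                       findbug_args=None,
+#                       jars=['out/Release/lib.java/chrome_java.jar'])
+#   for warning in sorted(warnings):
+#     print warning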
diff --git a/build/android/pylib/utils/host_path_finder.py b/build/android/pylib/utils/host_path_finder.py
new file mode 100644
index 0000000..389ac43
--- /dev/null
+++ b/build/android/pylib/utils/host_path_finder.py
@@ -0,0 +1,22 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+from pylib import constants
+
+
+def GetMostRecentHostPath(file_name):
+  """Returns the most recent existing full path for the given file name.
+
+  Returns:
+    The most recently modified matching path, or an empty string if none is
+    found.
+  """
+  out_dir = os.path.join(
+      constants.DIR_SOURCE_ROOT, os.environ.get('CHROMIUM_OUT_DIR', 'out'))
+  candidate_paths = [os.path.join(out_dir, build_type, file_name)
+                     for build_type in ['Debug', 'Release']]
+  candidate_paths = filter(os.path.exists, candidate_paths)
+  candidate_paths = sorted(candidate_paths, key=os.path.getmtime, reverse=True)
+  candidate_paths.append('')
+  return candidate_paths[0]
diff --git a/build/android/pylib/utils/host_utils.py b/build/android/pylib/utils/host_utils.py
new file mode 100644
index 0000000..580721f
--- /dev/null
+++ b/build/android/pylib/utils/host_utils.py
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+
+def GetRecursiveDiskUsage(path):
+  """Returns the disk usage in bytes of |path|. Similar to `du -sb |path|`."""
+  running_size = os.path.getsize(path)
+  if os.path.isdir(path):
+    for root, dirs, files in os.walk(path):
+      running_size += sum([os.path.getsize(os.path.join(root, f))
+                           for f in files + dirs])
+  return running_size
+
diff --git a/build/android/pylib/utils/isolator.py b/build/android/pylib/utils/isolator.py
new file mode 100644
index 0000000..cac39d8
--- /dev/null
+++ b/build/android/pylib/utils/isolator.py
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import fnmatch
+import glob
+import os
+import shutil
+import sys
+
+from pylib import cmd_helper
+from pylib import constants
+
+
+_ISOLATE_SCRIPT = os.path.join(
+    constants.DIR_SOURCE_ROOT, 'tools', 'swarming_client', 'isolate.py')
+
+
+def DefaultPathVariables():
+  return {
+    'DEPTH': constants.DIR_SOURCE_ROOT,
+    'PRODUCT_DIR': constants.GetOutDirectory(),
+  }
+
+
+def DefaultConfigVariables():
+  # Note: This list must match the --config-vars in build/isolate.gypi
+  return {
+    'CONFIGURATION_NAME': constants.GetBuildType(),
+    'OS': 'android',
+    'asan': '0',
+    'branding': 'Chromium',
+    'chromeos': '0',
+    'component': 'static_library',
+    'enable_pepper_cdms': '0',
+    'enable_plugins': '0',
+    'fastbuild': '0',
+    'icu_use_data_file_flag': '1',
+    'kasko': '0',
+    'lsan': '0',
+    'msan': '0',
+    # TODO(maruel): This may not always be true.
+    'target_arch': 'arm',
+    'tsan': '0',
+    'use_custom_libcxx': '0',
+    'use_instrumented_libraries': '0',
+    'use_prebuilt_instrumented_libraries': '0',
+    'use_openssl': '0',
+    'use_ozone': '0',
+    'use_x11': '0',
+    'v8_use_external_startup_data': '1',
+  }
+
+
+class Isolator(object):
+  """Manages calls to isolate.py for the android test runner scripts."""
+
+  def __init__(self, isolate_deps_dir):
+    """
+    Args:
+      isolate_deps_dir: The directory in which dependencies specified by
+        isolate are or should be stored.
+    """
+    self._isolate_deps_dir = isolate_deps_dir
+
+  def Clear(self):
+    """Deletes the isolate dependency directory."""
+    if os.path.exists(self._isolate_deps_dir):
+      shutil.rmtree(self._isolate_deps_dir)
+
+  def Remap(self, isolate_abs_path, isolated_abs_path,
+            path_variables=None, config_variables=None):
+    """Remaps data dependencies into |self._isolate_deps_dir|.
+
+    Args:
+      isolate_abs_path: The absolute path to the .isolate file, which specifies
+        data dependencies in the source tree.
+      isolated_abs_path: The absolute path to the .isolated file, which is
+        generated by isolate.py and specifies data dependencies in
+        |self._isolate_deps_dir| and their digests.
+      path_variables: A dict containing everything that should be passed
+        as a |--path-variable| to the isolate script. Defaults to the return
+        value of |DefaultPathVariables()|.
+      config_variables: A dict containing everything that should be passed
+        as a |--config-variable| to the isolate script. Defaults to the return
+        value of |DefaultConfigVariables()|.
+    Raises:
+      Exception if the isolate command fails for some reason.
+    """
+    if not path_variables:
+      path_variables = DefaultPathVariables()
+    if not config_variables:
+      config_variables = DefaultConfigVariables()
+
+    isolate_cmd = [
+      sys.executable, _ISOLATE_SCRIPT, 'remap',
+      '--isolate', isolate_abs_path,
+      '--isolated', isolated_abs_path,
+      '--outdir', self._isolate_deps_dir,
+    ]
+    for k, v in path_variables.iteritems():
+      isolate_cmd.extend(['--path-variable', k, v])
+    for k, v in config_variables.iteritems():
+      isolate_cmd.extend(['--config-variable', k, v])
+
+    if cmd_helper.RunCmd(isolate_cmd):
+      raise Exception('isolate command failed: %s' % ' '.join(isolate_cmd))
+
+  def VerifyHardlinks(self):
+    """Checks |isolate_deps_dir| for a hardlink.
+
+    Returns:
+      True if a hardlink is found.
+      False if nothing is found.
+    Raises:
+      Exception if a non-hardlink is found.
+    """
+    for root, _, filenames in os.walk(self._isolate_deps_dir):
+      if filenames:
+        linked_file = os.path.join(root, filenames[0])
+        orig_file = os.path.join(
+            self._isolate_deps_dir,
+            os.path.relpath(linked_file, self._isolate_deps_dir))
+        if os.stat(linked_file).st_ino == os.stat(orig_file).st_ino:
+          return True
+        else:
+          raise Exception('isolate remap command did not use hardlinks.')
+    return False
+
+  def PurgeExcluded(self, deps_exclusion_list):
+    """Deletes anything on |deps_exclusion_list| from |self._isolate_deps_dir|.
+
+    Args:
+      deps_exclusion_list: A list of globs to exclude from the isolate
+        dependency directory.
+    """
+    excluded_paths = (
+        x for y in deps_exclusion_list
+        for x in glob.glob(
+            os.path.abspath(os.path.join(self._isolate_deps_dir, y))))
+    for p in excluded_paths:
+      if os.path.isdir(p):
+        shutil.rmtree(p)
+      else:
+        os.remove(p)
+
+  def MoveOutputDeps(self):
+    """Moves files from the output directory to the top level of
+      |self._isolate_deps_dir|.
+
+    Moves pak files from the output directory to <isolate_deps_dir>/paks.
+    Moves files from the product directory to <isolate_deps_dir>.
+    """
+    # On Android, all pak files need to be in the top-level 'paks' directory.
+    paks_dir = os.path.join(self._isolate_deps_dir, 'paks')
+    os.mkdir(paks_dir)
+
+    deps_out_dir = os.path.join(
+        self._isolate_deps_dir,
+        os.path.relpath(os.path.join(constants.GetOutDirectory(), os.pardir),
+                        constants.DIR_SOURCE_ROOT))
+    for root, _, filenames in os.walk(deps_out_dir):
+      for filename in fnmatch.filter(filenames, '*.pak'):
+        shutil.move(os.path.join(root, filename), paks_dir)
+
+    # Move everything in PRODUCT_DIR to top level.
+    deps_product_dir = os.path.join(deps_out_dir, constants.GetBuildType())
+    if os.path.isdir(deps_product_dir):
+      for p in os.listdir(deps_product_dir):
+        shutil.move(os.path.join(deps_product_dir, p), self._isolate_deps_dir)
+      os.rmdir(deps_product_dir)
+      os.rmdir(deps_out_dir)
+
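+# A minimal usage sketch: how a test runner might drive the Isolator helper
+# above. The .isolate/.isolated paths and the exclusion glob are hypothetical
+# placeholders; a real invocation also needs isolate.py and a build output
+# directory to exist.
+#
+#   isolator = Isolator('/tmp/isolate_deps')
+#   isolator.Clear()
+#   isolator.Remap('/abs/src/foo.isolate', '/abs/out/Debug/foo.isolated')
+#   isolator.PurgeExcluded(['*.pdb'])
+#   isolator.MoveOutputDeps()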
diff --git a/build/android/pylib/utils/json_results_generator_unittest.py b/build/android/pylib/utils/json_results_generator_unittest.py
new file mode 100644
index 0000000..41ab77b
--- /dev/null
+++ b/build/android/pylib/utils/json_results_generator_unittest.py
@@ -0,0 +1,213 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Most of this file was ported over from Blink's
+# webkitpy/layout_tests/layout_package/json_results_generator_unittest.py
+#
+
+import unittest
+import json
+
+from pylib.utils import json_results_generator
+
+
+class JSONGeneratorTest(unittest.TestCase):
+
+  def setUp(self):
+    self.builder_name = 'DUMMY_BUILDER_NAME'
+    self.build_name = 'DUMMY_BUILD_NAME'
+    self.build_number = 'DUMMY_BUILDER_NUMBER'
+
+    # For archived results.
+    self._json = None
+    self._num_runs = 0
+    self._tests_set = set([])
+    self._test_timings = {}
+    self._failed_count_map = {}
+
+    self._PASS_count = 0
+    self._DISABLED_count = 0
+    self._FLAKY_count = 0
+    self._FAILS_count = 0
+    self._fixable_count = 0
+
+    self._orig_write_json = json_results_generator.WriteJSON
+
+    # unused arguments ... pylint: disable=W0613
+    def _WriteJSONStub(json_object, file_path, callback=None):
+      pass
+
+    json_results_generator.WriteJSON = _WriteJSONStub
+
+  def tearDown(self):
+    json_results_generator.WriteJSON = self._orig_write_json
+
+  def _TestJSONGeneration(self, passed_tests_list, failed_tests_list):
+    tests_set = set(passed_tests_list) | set(failed_tests_list)
+
+    DISABLED_tests = set([t for t in tests_set
+                          if t.startswith('DISABLED_')])
+    FLAKY_tests = set([t for t in tests_set
+                       if t.startswith('FLAKY_')])
+    FAILS_tests = set([t for t in tests_set
+                       if t.startswith('FAILS_')])
+    PASS_tests = tests_set - (DISABLED_tests | FLAKY_tests | FAILS_tests)
+
+    failed_tests = set(failed_tests_list) - DISABLED_tests
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    test_timings = {}
+    i = 0
+    for test in tests_set:
+      test_timings[test] = float(self._num_runs * 100 + i)
+      i += 1
+
+    test_results_map = dict()
+    for test in tests_set:
+      test_results_map[test] = json_results_generator.TestResult(
+          test, failed=(test in failed_tests),
+          elapsed_time=test_timings[test])
+
+    generator = json_results_generator.JSONResultsGeneratorBase(
+        self.builder_name, self.build_name, self.build_number,
+        '',
+        None,   # don't fetch past json results archive
+        test_results_map)
+
+    failed_count_map = dict([(t, 1) for t in failed_tests])
+
+    # Test incremental json results
+    incremental_json = generator.GetJSON()
+    self._VerifyJSONResults(
+        tests_set,
+        test_timings,
+        failed_count_map,
+        len(PASS_tests),
+        len(DISABLED_tests),
+        len(FLAKY_tests),
+        len(DISABLED_tests | failed_tests),
+        incremental_json,
+        1)
+
+    # We don't verify the results here, but at least we make sure the code
+    # runs without errors.
+    generator.GenerateJSONOutput()
+    generator.GenerateTimesMSFile()
+
+  def _VerifyJSONResults(self, tests_set, test_timings, failed_count_map,
+                         PASS_count, DISABLED_count, FLAKY_count,
+                         fixable_count, json_obj, num_runs):
+    # Aliasing to a short name for better access to its constants.
+    JRG = json_results_generator.JSONResultsGeneratorBase
+
+    self.assertIn(JRG.VERSION_KEY, json_obj)
+    self.assertIn(self.builder_name, json_obj)
+
+    buildinfo = json_obj[self.builder_name]
+    self.assertIn(JRG.FIXABLE, buildinfo)
+    self.assertIn(JRG.TESTS, buildinfo)
+    self.assertEqual(len(buildinfo[JRG.BUILD_NUMBERS]), num_runs)
+    self.assertEqual(buildinfo[JRG.BUILD_NUMBERS][0], self.build_number)
+
+    if tests_set or DISABLED_count:
+      fixable = {}
+      for fixable_items in buildinfo[JRG.FIXABLE]:
+        for (result_type, count) in fixable_items.iteritems():
+          if result_type in fixable:
+            fixable[result_type] = fixable[result_type] + count
+          else:
+            fixable[result_type] = count
+
+      if PASS_count:
+        self.assertEqual(fixable[JRG.PASS_RESULT], PASS_count)
+      else:
+        self.assertTrue(JRG.PASS_RESULT not in fixable or
+                        fixable[JRG.PASS_RESULT] == 0)
+      if DISABLED_count:
+        self.assertEqual(fixable[JRG.SKIP_RESULT], DISABLED_count)
+      else:
+        self.assertTrue(JRG.SKIP_RESULT not in fixable or
+                        fixable[JRG.SKIP_RESULT] == 0)
+      if FLAKY_count:
+        self.assertEqual(fixable[JRG.FLAKY_RESULT], FLAKY_count)
+      else:
+        self.assertTrue(JRG.FLAKY_RESULT not in fixable or
+                        fixable[JRG.FLAKY_RESULT] == 0)
+
+    if failed_count_map:
+      tests = buildinfo[JRG.TESTS]
+      for test_name in failed_count_map.iterkeys():
+        test = self._FindTestInTrie(test_name, tests)
+
+        failed = 0
+        for result in test[JRG.RESULTS]:
+          if result[1] == JRG.FAIL_RESULT:
+            failed += result[0]
+        self.assertEqual(failed_count_map[test_name], failed)
+
+        timing_count = 0
+        for timings in test[JRG.TIMES]:
+          if timings[1] == test_timings[test_name]:
+            timing_count = timings[0]
+        self.assertEqual(1, timing_count)
+
+    if fixable_count:
+      self.assertEqual(sum(buildinfo[JRG.FIXABLE_COUNT]), fixable_count)
+
+  def _FindTestInTrie(self, path, trie):
+    nodes = path.split('/')
+    sub_trie = trie
+    for node in nodes:
+      self.assertIn(node, sub_trie)
+      sub_trie = sub_trie[node]
+    return sub_trie
+
+  def testJSONGeneration(self):
+    self._TestJSONGeneration([], [])
+    self._TestJSONGeneration(['A1', 'B1'], [])
+    self._TestJSONGeneration([], ['FAILS_A2', 'FAILS_B2'])
+    self._TestJSONGeneration(['DISABLED_A3', 'DISABLED_B3'], [])
+    self._TestJSONGeneration(['A4'], ['B4', 'FAILS_C4'])
+    self._TestJSONGeneration(['DISABLED_C5', 'DISABLED_D5'], ['A5', 'B5'])
+    self._TestJSONGeneration(
+        ['A6', 'B6', 'FAILS_C6', 'DISABLED_E6', 'DISABLED_F6'],
+        ['FAILS_D6'])
+
+    # Generate JSON with the same test sets. (Both incremental results and
+    # archived results must be updated appropriately.)
+    self._TestJSONGeneration(
+        ['A', 'FLAKY_B', 'DISABLED_C'],
+        ['FAILS_D', 'FLAKY_E'])
+    self._TestJSONGeneration(
+        ['A', 'DISABLED_C', 'FLAKY_E'],
+        ['FLAKY_B', 'FAILS_D'])
+    self._TestJSONGeneration(
+        ['FLAKY_B', 'DISABLED_C', 'FAILS_D'],
+        ['A', 'FLAKY_E'])
+
+  def testHierarchicalJSNGeneration(self):
+    # FIXME: Re-work tests to be more comprehensible and comprehensive.
+    self._TestJSONGeneration(['foo/A'], ['foo/B', 'bar/C'])
+
+  def testTestTimingsTrie(self):
+    individual_test_timings = []
+    individual_test_timings.append(
+        json_results_generator.TestResult(
+            'foo/bar/baz.html',
+            elapsed_time=1.2))
+    individual_test_timings.append(
+        json_results_generator.TestResult('bar.html', elapsed_time=0.0001))
+    trie = json_results_generator.TestTimingsTrie(individual_test_timings)
+
+    expected_trie = {
+        'bar.html': 0,
+        'foo': {
+            'bar': {
+                'baz.html': 1200,
+            }
+        }
+    }
+
+    self.assertEqual(json.dumps(trie), json.dumps(expected_trie))
diff --git a/build/android/pylib/utils/logging_utils.py b/build/android/pylib/utils/logging_utils.py
new file mode 100644
index 0000000..1e46fa8
--- /dev/null
+++ b/build/android/pylib/utils/logging_utils.py
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import contextlib
+import logging
+
+@contextlib.contextmanager
+def SuppressLogging(level=logging.ERROR):
+  """Momentarilly suppress logging events from all loggers.
+
+  TODO(jbudorick): This is not thread safe. Log events from other threads might
+  also inadvertently dissapear.
+
+  Example:
+
+    with logging_utils.SuppressLogging():
+      # all but CRITICAL logging messages are suppressed
+      logging.info('just doing some thing') # not shown
+      logging.critical('something really bad happened') # still shown
+
+  Args:
+    level: logging events with this or lower levels are suppressed.
+  """
+  logging.disable(level)
+  yield
+  logging.disable(logging.NOTSET)
diff --git a/build/android/pylib/utils/md5sum.py b/build/android/pylib/utils/md5sum.py
new file mode 100644
index 0000000..3e61c8f
--- /dev/null
+++ b/build/android/pylib/utils/md5sum.py
@@ -0,0 +1,91 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import collections
+import logging
+import os
+import re
+import tempfile
+import types
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import device_temp_file
+
+MD5SUM_DEVICE_LIB_PATH = '/data/local/tmp/md5sum/'
+MD5SUM_DEVICE_BIN_PATH = MD5SUM_DEVICE_LIB_PATH + 'md5sum_bin'
+
+MD5SUM_DEVICE_SCRIPT_FORMAT = (
+    'test -f {path} -o -d {path} '
+    '&& LD_LIBRARY_PATH={md5sum_lib} {md5sum_bin} {path}')
+
+_STARTS_WITH_CHECKSUM_RE = re.compile(r'^\s*[0-9a-fA-F]{32}\s+')
+
+
+def CalculateHostMd5Sums(paths):
+  """Calculates the MD5 sum value for all items in |paths|.
+
+  Directories are traversed recursively and the MD5 sum of each file found is
+  reported in the result.
+
+  Args:
+    paths: A list of host paths to md5sum.
+  Returns:
+    A dict mapping file paths to their respective md5sum checksums.
+  """
+  if isinstance(paths, basestring):
+    paths = [paths]
+
+  md5sum_bin_host_path = os.path.join(
+      constants.GetOutDirectory(), 'md5sum_bin_host')
+  if not os.path.exists(md5sum_bin_host_path):
+    raise IOError('File not built: %s' % md5sum_bin_host_path)
+  out = cmd_helper.GetCmdOutput([md5sum_bin_host_path] + [p for p in paths])
+
+  return _ParseMd5SumOutput(out.splitlines())
+
+
+def CalculateDeviceMd5Sums(paths, device):
+  """Calculates the MD5 sum value for all items in |paths|.
+
+  Directories are traversed recursively and the MD5 sum of each file found is
+  reported in the result.
+
+  Args:
+    paths: A list of device paths to md5sum.
+    device: The device on which the md5sums are calculated.
+  Returns:
+    A dict mapping file paths to their respective md5sum checksums.
+  """
+  if isinstance(paths, basestring):
+    paths = [paths]
+
+  if not device.FileExists(MD5SUM_DEVICE_BIN_PATH):
+    md5sum_dist_path = os.path.join(constants.GetOutDirectory(), 'md5sum_dist')
+    if not os.path.exists(md5sum_dist_path):
+      raise IOError('File not built: %s' % md5sum_dist_path)
+    device.adb.Push(md5sum_dist_path, MD5SUM_DEVICE_LIB_PATH)
+
+  out = []
+
+  with tempfile.NamedTemporaryFile() as md5sum_script_file:
+    with device_temp_file.DeviceTempFile(
+        device.adb) as md5sum_device_script_file:
+      md5sum_script = (
+          MD5SUM_DEVICE_SCRIPT_FORMAT.format(
+              path=p, md5sum_lib=MD5SUM_DEVICE_LIB_PATH,
+              md5sum_bin=MD5SUM_DEVICE_BIN_PATH)
+          for p in paths)
+      md5sum_script_file.write('; '.join(md5sum_script))
+      md5sum_script_file.flush()
+      device.adb.Push(md5sum_script_file.name, md5sum_device_script_file.name)
+      out = device.RunShellCommand(['sh', md5sum_device_script_file.name])
+
+  return _ParseMd5SumOutput(out)
+
+
+def _ParseMd5SumOutput(out):
+  hash_and_path = (l.split(None, 1) for l in out
+                   if l and _STARTS_WITH_CHECKSUM_RE.match(l))
+  return dict((p, h) for h, p in hash_and_path)
+
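+# A minimal usage sketch, assuming md5sum_bin_host has been built into the
+# output directory and, for the device variant, that |device| is a connected
+# device object with adb access. The paths below are hypothetical.
+#
+#   host_sums = CalculateHostMd5Sums(['/abs/out/Debug/apks/Example.apk'])
+#   device_sums = CalculateDeviceMd5Sums(
+#       ['/data/local/tmp/Example.apk'], device)
+#   # Both return dicts mapping each path to its 32-character hex digest.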
diff --git a/build/android/pylib/utils/md5sum_test.py b/build/android/pylib/utils/md5sum_test.py
new file mode 100755
index 0000000..c94c19d
--- /dev/null
+++ b/build/android/pylib/utils/md5sum_test.py
@@ -0,0 +1,231 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+
+from pylib import cmd_helper
+from pylib import constants
+from pylib.utils import md5sum
+
+sys.path.append(
+    os.path.join(constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock
+
+TEST_OUT_DIR = os.path.join('test', 'out', 'directory')
+HOST_MD5_EXECUTABLE = os.path.join(TEST_OUT_DIR, 'md5sum_bin_host')
+
+class Md5SumTest(unittest.TestCase):
+
+  def setUp(self):
+    self._patchers = [
+        mock.patch('pylib.constants.GetOutDirectory',
+                   new=mock.Mock(return_value=TEST_OUT_DIR)),
+        mock.patch('os.path.exists',
+                   new=mock.Mock(return_value=True)),
+    ]
+    for p in self._patchers:
+      p.start()
+
+  def tearDown(self):
+    for p in self._patchers:
+      p.stop()
+
+  def testCalculateHostMd5Sums_singlePath(self):
+    test_path = '/test/host/file.dat'
+    mock_get_cmd_output = mock.Mock(
+        return_value='0123456789abcdeffedcba9876543210 /test/host/file.dat')
+    with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
+      out = md5sum.CalculateHostMd5Sums(test_path)
+      self.assertEquals(1, len(out))
+      self.assertTrue('/test/host/file.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/test/host/file.dat'])
+      mock_get_cmd_output.assert_called_once_with(
+          [HOST_MD5_EXECUTABLE, '/test/host/file.dat'])
+
+  def testCalculateHostMd5Sums_list(self):
+    test_paths = ['/test/host/file0.dat', '/test/host/file1.dat']
+    mock_get_cmd_output = mock.Mock(
+        return_value='0123456789abcdeffedcba9876543210 /test/host/file0.dat\n'
+                     '123456789abcdef00fedcba987654321 /test/host/file1.dat\n')
+    with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
+      out = md5sum.CalculateHostMd5Sums(test_paths)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/test/host/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/test/host/file0.dat'])
+      self.assertTrue('/test/host/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/test/host/file1.dat'])
+      mock_get_cmd_output.assert_called_once_with(
+          [HOST_MD5_EXECUTABLE, '/test/host/file0.dat',
+           '/test/host/file1.dat'])
+
+  def testCalculateHostMd5Sums_generator(self):
+    test_paths = ('/test/host/' + p for p in ['file0.dat', 'file1.dat'])
+    mock_get_cmd_output = mock.Mock(
+        return_value='0123456789abcdeffedcba9876543210 /test/host/file0.dat\n'
+                     '123456789abcdef00fedcba987654321 /test/host/file1.dat\n')
+    with mock.patch('pylib.cmd_helper.GetCmdOutput', new=mock_get_cmd_output):
+      out = md5sum.CalculateHostMd5Sums(test_paths)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/test/host/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/test/host/file0.dat'])
+      self.assertTrue('/test/host/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/test/host/file1.dat'])
+      mock_get_cmd_output.assert_called_once_with(
+          [HOST_MD5_EXECUTABLE, '/test/host/file0.dat', '/test/host/file1.dat'])
+
+  def testCalculateDeviceMd5Sums_singlePath(self):
+    test_path = '/storage/emulated/legacy/test/file.dat'
+
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(1, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+  def testCalculateDeviceMd5Sums_list(self):
+    test_path = ['/storage/emulated/legacy/test/file0.dat',
+                 '/storage/emulated/legacy/test/file1.dat']
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file0.dat',
+        '123456789abcdef00fedcba987654321 '
+            '/storage/emulated/legacy/test/file1.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file0.dat'])
+      self.assertTrue('/storage/emulated/legacy/test/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/storage/emulated/legacy/test/file1.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+  def testCalculateDeviceMd5Sums_generator(self):
+    test_path = ('/storage/emulated/legacy/test/file%d.dat' % n
+                 for n in xrange(0, 2))
+
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file0.dat',
+        '123456789abcdef00fedcba987654321 '
+            '/storage/emulated/legacy/test/file1.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(2, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file0.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file0.dat'])
+      self.assertTrue('/storage/emulated/legacy/test/file1.dat' in out)
+      self.assertEquals('123456789abcdef00fedcba987654321',
+                        out['/storage/emulated/legacy/test/file1.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+  def testCalculateDeviceMd5Sums_singlePath_linkerWarning(self):
+    # See crbug/479966
+    test_path = '/storage/emulated/legacy/test/file.dat'
+
+    device = mock.NonCallableMock()
+    device.adb = mock.NonCallableMock()
+    device.adb.Push = mock.Mock()
+    device_md5sum_output = [
+        'WARNING: linker: /data/local/tmp/md5sum/md5sum_bin: '
+            'unused DT entry: type 0x1d arg 0x15db',
+        'THIS_IS_NOT_A_VALID_CHECKSUM_ZZZ some random text',
+        '0123456789abcdeffedcba9876543210 '
+            '/storage/emulated/legacy/test/file.dat',
+    ]
+    device.RunShellCommand = mock.Mock(return_value=device_md5sum_output)
+
+    mock_temp_file = mock.mock_open()
+    mock_temp_file.return_value.name = '/tmp/test/script/file.sh'
+
+    mock_device_temp_file = mock.mock_open()
+    mock_device_temp_file.return_value.name = (
+        '/data/local/tmp/test/script/file.sh')
+
+    with mock.patch('tempfile.NamedTemporaryFile', new=mock_temp_file), (
+         mock.patch('pylib.utils.device_temp_file.DeviceTempFile',
+                    new=mock_device_temp_file)):
+      out = md5sum.CalculateDeviceMd5Sums(test_path, device)
+      self.assertEquals(1, len(out))
+      self.assertTrue('/storage/emulated/legacy/test/file.dat' in out)
+      self.assertEquals('0123456789abcdeffedcba9876543210',
+                        out['/storage/emulated/legacy/test/file.dat'])
+      device.adb.Push.assert_called_once_with(
+          '/tmp/test/script/file.sh', '/data/local/tmp/test/script/file.sh')
+      device.RunShellCommand.assert_called_once_with(
+          ['sh', '/data/local/tmp/test/script/file.sh'])
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/mock_calls.py b/build/android/pylib/utils/mock_calls.py
new file mode 100644
index 0000000..59167ba
--- /dev/null
+++ b/build/android/pylib/utils/mock_calls.py
@@ -0,0 +1,182 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+A test facility to assert call sequences while mocking their behavior.
+"""
+
+import os
+import sys
+import unittest
+
+from pylib import constants
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class TestCase(unittest.TestCase):
+  """Adds assertCalls to TestCase objects."""
+  class _AssertCalls(object):
+    def __init__(self, test_case, expected_calls, watched):
+      def call_action(pair):
+        if isinstance(pair, type(mock.call)):
+          return (pair, None)
+        else:
+          return pair
+
+      def do_check(call):
+        def side_effect(*args, **kwargs):
+          received_call = call(*args, **kwargs)
+          self._test_case.assertTrue(
+              self._expected_calls,
+              msg=('Unexpected call: %s' % str(received_call)))
+          expected_call, action = self._expected_calls.pop(0)
+          self._test_case.assertTrue(
+              received_call == expected_call,
+              msg=('Expected call mismatch:\n'
+                   '  expected: %s\n'
+                   '  received: %s\n'
+                   % (str(expected_call), str(received_call))))
+          if callable(action):
+            return action(*args, **kwargs)
+          else:
+            return action
+        return side_effect
+
+      self._test_case = test_case
+      self._expected_calls = [call_action(pair) for pair in expected_calls]
+      watched = watched.copy() # do not pollute the caller's dict
+      watched.update((call.parent.name, call.parent)
+                     for call, _ in self._expected_calls)
+      self._patched = [test_case.patch_call(call, side_effect=do_check(call))
+                       for call in watched.itervalues()]
+
+    def __enter__(self):
+      for patch in self._patched:
+        patch.__enter__()
+      return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+      for patch in self._patched:
+        patch.__exit__(exc_type, exc_val, exc_tb)
+      if exc_type is None:
+        missing = ''.join('  expected: %s\n' % str(call)
+                          for call, _ in self._expected_calls)
+        self._test_case.assertFalse(
+            missing,
+            msg='Expected calls not found:\n' + missing)
+
+  def __init__(self, *args, **kwargs):
+    super(TestCase, self).__init__(*args, **kwargs)
+    self.call = mock.call.self
+    self._watched = {}
+
+  def call_target(self, call):
+    """Resolve a self.call instance to the target it represents.
+
+    Args:
+      call: a self.call instance, e.g. self.call.adb.Shell
+
+    Returns:
+      The target object represented by the call, e.g. self.adb.Shell
+
+    Raises:
+      ValueError if the path of the call does not start with "self", i.e. the
+          target of the call is external to the self object.
+      AttributeError if the path of the call does not specify a valid
+          chain of attributes (without any calls) starting from "self".
+    """
+    path = call.name.split('.')
+    if path.pop(0) != 'self':
+      raise ValueError("Target %r outside of 'self' object" % call.name)
+    target = self
+    for attr in path:
+      target = getattr(target, attr)
+    return target
+
+  def patch_call(self, call, **kwargs):
+    """Patch the target of a mock.call instance.
+
+    Args:
+      call: a mock.call instance identifying a target to patch
+      Extra keyword arguments are processed by mock.patch
+
+    Returns:
+      A context manager to mock/unmock the target of the call
+    """
+    if call.name.startswith('self.'):
+      target = self.call_target(call.parent)
+      _, attribute = call.name.rsplit('.', 1)
+      if (hasattr(type(target), attribute)
+          and isinstance(getattr(type(target), attribute), property)):
+        return mock.patch.object(
+            type(target), attribute, new_callable=mock.PropertyMock, **kwargs)
+      else:
+        return mock.patch.object(target, attribute, **kwargs)
+    else:
+      return mock.patch(call.name, **kwargs)
+
+  def watchCalls(self, calls):
+    """Add calls to the set of watched calls.
+
+    Args:
+      calls: a sequence of mock.call instances identifying targets to watch
+    """
+    self._watched.update((call.name, call) for call in calls)
+
+  def watchMethodCalls(self, call, ignore=None):
+    """Watch all public methods of the target identified by a self.call.
+
+    Args:
+      call: a self.call instance identifying an object
+      ignore: a list of public methods to ignore when watching for calls
+    """
+    target = self.call_target(call)
+    if ignore is None:
+      ignore = []
+    self.watchCalls(getattr(call, method)
+                    for method in dir(target.__class__)
+                    if not method.startswith('_') and not method in ignore)
+
+  def clearWatched(self):
+    """Clear the set of watched calls."""
+    self._watched = {}
+
+  def assertCalls(self, *calls):
+    """A context manager to assert that a sequence of calls is made.
+
+    During the assertion, a number of functions and methods will be "watched",
+    and any calls made to them are expected to appear---in the exact same order,
+    and with the exact same arguments---as specified by the argument |calls|.
+
+    By default, the targets of all expected calls are watched. Further targets
+    to watch may be added using watchCalls and watchMethodCalls.
+
+    Optionally, each call may be accompanied by an action. If the action is a
+    (non-callable) value, this value will be used as the return value given to
+    the caller when the matching call is found. Alternatively, if the action is
+    a callable, the action will then be called with the same arguments as the
+    intercepted call, so that it can provide a return value or perform other
+    side effects. If the action is missing, a return value of None is assumed.
+
+    Note that mock.Mock objects are often convenient to use as a callable
+    action, e.g. to raise exceptions or return other objects which are
+    themselves callable.
+
+    Args:
+      calls: each argument is either a pair (expected_call, action) or just an
+          expected_call, where expected_call is a mock.call instance.
+
+    Raises:
+      AssertionError if the watched targets do not receive the exact sequence
+          of calls specified. Missing calls, extra calls, and calls with
+          mismatching arguments, all cause the assertion to fail.
+    """
+    return self._AssertCalls(self, calls, self._watched)
+
+  def assertCall(self, call, action=None):
+    return self.assertCalls((call, action))
+
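+# A minimal sketch of the intended usage, assuming a test class that mixes in
+# this TestCase and has a |self.adb| attribute (mock_calls_test.py, added
+# below, exercises the full API). |code_under_test| is a hypothetical function
+# being verified, and 'hi\n' is the canned return value for the first call:
+#
+#   with self.assertCalls(
+#       (self.call.adb.Shell('echo hi'), 'hi\n'),
+#       self.call.adb.Reboot()):
+#     code_under_test(self.adb)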
diff --git a/build/android/pylib/utils/mock_calls_test.py b/build/android/pylib/utils/mock_calls_test.py
new file mode 100755
index 0000000..4dbafd4
--- /dev/null
+++ b/build/android/pylib/utils/mock_calls_test.py
@@ -0,0 +1,175 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Unit tests for the contents of mock_calls.py.
+"""
+
+import logging
+import os
+import sys
+import unittest
+
+from pylib import constants
+from pylib.utils import mock_calls
+
+sys.path.append(os.path.join(
+    constants.DIR_SOURCE_ROOT, 'third_party', 'pymock'))
+import mock # pylint: disable=F0401
+
+
+class _DummyAdb(object):
+  def __str__(self):
+    return '0123456789abcdef'
+
+  def Push(self, host_path, device_path):
+    logging.debug('(device %s) pushing %r to %r', self, host_path, device_path)
+
+  def IsOnline(self):
+    logging.debug('(device %s) checking device online', self)
+    return True
+
+  def Shell(self, cmd):
+    logging.debug('(device %s) running command %r', self, cmd)
+    return "nice output\n"
+
+  def Reboot(self):
+    logging.debug('(device %s) rebooted!', self)
+
+  @property
+  def build_version_sdk(self):
+    logging.debug('(device %s) getting build_version_sdk', self)
+    return constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP
+
+
+class TestCaseWithAssertCallsTest(mock_calls.TestCase):
+  def setUp(self):
+    self.adb = _DummyAdb()
+
+  def ShellError(self):
+    def action(cmd):
+      raise ValueError('(device %s) command %r is not nice' % (self.adb, cmd))
+    return action
+
+  def get_answer(self):
+    logging.debug("called 'get_answer' of %r object", self)
+    return 42
+
+  def echo(self, thing):
+    logging.debug("called 'echo' of %r object", self)
+    return thing
+
+  def testCallTarget_succeds(self):
+    self.assertEquals(self.adb.Shell,
+                      self.call_target(self.call.adb.Shell))
+
+  def testCallTarget_failsExternal(self):
+    with self.assertRaises(ValueError):
+      self.call_target(mock.call.sys.getcwd)
+
+  def testCallTarget_failsUnknownAttribute(self):
+    with self.assertRaises(AttributeError):
+      self.call_target(self.call.adb.Run)
+
+  def testCallTarget_failsIntermediateCalls(self):
+    with self.assertRaises(AttributeError):
+      self.call_target(self.call.adb.RunShell('cmd').append)
+
+  def testPatchCall_method(self):
+    self.assertEquals(42, self.get_answer())
+    with self.patch_call(self.call.get_answer, return_value=123):
+      self.assertEquals(123, self.get_answer())
+    self.assertEquals(42, self.get_answer())
+
+  def testPatchCall_attribute_method(self):
+    with self.patch_call(self.call.adb.Shell, return_value='hello'):
+      self.assertEquals('hello', self.adb.Shell('echo hello'))
+
+  def testPatchCall_global(self):
+    with self.patch_call(mock.call.os.getcwd, return_value='/some/path'):
+      self.assertEquals('/some/path', os.getcwd())
+
+  def testPatchCall_withSideEffect(self):
+    with self.patch_call(self.call.adb.Shell, side_effect=ValueError):
+      with self.assertRaises(ValueError):
+        self.adb.Shell('echo hello')
+
+  def testPatchCall_property(self):
+    self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP,
+                      self.adb.build_version_sdk)
+    with self.patch_call(
+        self.call.adb.build_version_sdk,
+        return_value=constants.ANDROID_SDK_VERSION_CODES.KITKAT):
+      self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.KITKAT,
+                        self.adb.build_version_sdk)
+    self.assertEquals(constants.ANDROID_SDK_VERSION_CODES.LOLLIPOP,
+                      self.adb.build_version_sdk)
+
+  def testAssertCalls_succeeds_simple(self):
+    self.assertEquals(42, self.get_answer())
+    with self.assertCall(self.call.get_answer(), 123):
+      self.assertEquals(123, self.get_answer())
+    self.assertEquals(42, self.get_answer())
+
+  def testAssertCalls_succeeds_multiple(self):
+    with self.assertCalls(
+        (mock.call.os.getcwd(), '/some/path'),
+        (self.call.echo('hello'), 'hello'),
+        (self.call.get_answer(), 11),
+        self.call.adb.Push('this_file', 'that_file'),
+        (self.call.get_answer(), 12)):
+      self.assertEquals(os.getcwd(), '/some/path')
+      self.assertEquals('hello', self.echo('hello'))
+      self.assertEquals(11, self.get_answer())
+      self.adb.Push('this_file', 'that_file')
+      self.assertEquals(12, self.get_answer())
+
+  def testAsserCalls_succeeds_withAction(self):
+    with self.assertCall(
+        self.call.adb.Shell('echo hello'), self.ShellError()):
+      with self.assertRaises(ValueError):
+        self.adb.Shell('echo hello')
+
+  def testAssertCalls_fails_tooManyCalls(self):
+    with self.assertRaises(AssertionError):
+      with self.assertCalls(self.call.adb.IsOnline()):
+        self.adb.IsOnline()
+        self.adb.IsOnline()
+
+  def testAssertCalls_fails_tooFewCalls(self):
+    with self.assertRaises(AssertionError):
+      with self.assertCalls(self.call.adb.IsOnline()):
+        pass
+
+  def testAssertCalls_succeeds_extraCalls(self):
+    # we are not watching Reboot, so the assertion succeeds
+    with self.assertCalls(self.call.adb.IsOnline()):
+      self.adb.IsOnline()
+      self.adb.Reboot()
+
+  def testAssertCalls_fails_extraCalls(self):
+    self.watchCalls([self.call.adb.Reboot])
+    # this time we are also watching Reboot, so the assertion fails
+    with self.assertRaises(AssertionError):
+      with self.assertCalls(self.call.adb.IsOnline()):
+        self.adb.IsOnline()
+        self.adb.Reboot()
+
+  def testAssertCalls_succeeds_NoCalls(self):
+    self.watchMethodCalls(self.call.adb) # we are watching all adb methods
+    with self.assertCalls():
+      pass
+
+  def testAssertCalls_fails_NoCalls(self):
+    self.watchMethodCalls(self.call.adb)
+    with self.assertRaises(AssertionError):
+      with self.assertCalls():
+        self.adb.IsOnline()
+
+
+if __name__ == '__main__':
+  logging.getLogger().setLevel(logging.DEBUG)
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/parallelizer.py b/build/android/pylib/utils/parallelizer.py
new file mode 100644
index 0000000..9a85b54
--- /dev/null
+++ b/build/android/pylib/utils/parallelizer.py
@@ -0,0 +1,242 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+""" Wrapper that allows method execution in parallel.
+
+This class wraps a list of objects of the same type, emulates their
+interface, and executes any functions called on the objects in parallel
+in ReraiserThreads.
+
+This means that, given a list of objects:
+
+  class Foo:
+    def __init__(self):
+      self.baz = Baz()
+
+    def bar(self, my_param):
+      # do something
+
+  list_of_foos = [Foo(), Foo(), Foo()]
+
+we can take a sequential operation on that list of objects:
+
+  for f in list_of_foos:
+    f.bar('Hello')
+
+and run it in parallel across all of the objects:
+
+  Parallelizer(list_of_foos).bar('Hello')
+
+It can also handle (non-method) attributes of objects, so that this:
+
+  for f in list_of_foos:
+    f.baz.myBazMethod()
+
+can be run in parallel with:
+
+  Parallelizer(list_of_foos).baz.myBazMethod()
+
+Because it emulates the interface of the wrapped objects, a Parallelizer
+can be passed to a method or function that takes objects of that type:
+
+  def DoesSomethingWithFoo(the_foo):
+    the_foo.bar('Hello')
+    the_foo.bar('world')
+    the_foo.baz.myBazMethod()
+
+  DoesSomethingWithFoo(Parallelizer(list_of_foos))
+
+Note that this class spins up a thread for each object. Using this class
+to parallelize operations that are already fast will incur a net performance
+penalty.
+
+"""
+# pylint: disable=protected-access
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+_DEFAULT_TIMEOUT = 30
+_DEFAULT_RETRIES = 3
+
+
+class Parallelizer(object):
+  """Allows parallel execution of method calls across a group of objects."""
+
+  def __init__(self, objs):
+    assert (objs is not None and len(objs) > 0), (
+        "Passed empty list to 'Parallelizer'")
+    self._orig_objs = objs
+    self._objs = objs
+
+  def __getattr__(self, name):
+    """Emulate getting the |name| attribute of |self|.
+
+    Args:
+      name: The name of the attribute to retrieve.
+    Returns:
+      A Parallelizer emulating the |name| attribute of |self|.
+    """
+    self.pGet(None)
+
+    r = type(self)(self._orig_objs)
+    r._objs = [getattr(o, name) for o in self._objs]
+    return r
+
+  def __getitem__(self, index):
+    """Emulate getting the value of |self| at |index|.
+
+    Returns:
+      A Parallelizer emulating the value of |self| at |index|.
+    """
+    self.pGet(None)
+
+    r = type(self)(self._orig_objs)
+    r._objs = [o[index] for o in self._objs]
+    return r
+
+  def __call__(self, *args, **kwargs):
+    """Emulate calling |self| with |args| and |kwargs|.
+
+    Note that this call is asynchronous. Call pFinish on the return value to
+    block until the call finishes.
+
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the call in
+      parallel.
+    Raises:
+      AttributeError if the wrapped objects aren't callable.
+    """
+    self.pGet(None)
+
+    if not self._objs:
+      raise AttributeError('Nothing to call.')
+    for o in self._objs:
+      if not callable(o):
+        raise AttributeError("'%s' is not callable" % o.__name__)
+
+    r = type(self)(self._orig_objs)
+    r._objs = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(
+            o, args=args, kwargs=kwargs,
+            name='%s.%s' % (str(d), o.__name__))
+         for d, o in zip(self._orig_objs, self._objs)])
+    r._objs.StartAll() # pylint: disable=W0212
+    return r
+
+  def pFinish(self, timeout):
+    """Finish any outstanding asynchronous operations.
+
+    Args:
+      timeout: The maximum number of seconds to wait for an individual
+               result to return, or None to wait forever.
+    Returns:
+      self, now emulating the return values.
+    """
+    self._assertNoShadow('pFinish')
+    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
+      self._objs.JoinAll()
+      self._objs = self._objs.GetAllReturnValues(
+          watchdog_timer.WatchdogTimer(timeout))
+    return self
+
+  def pGet(self, timeout):
+    """Get the current wrapped objects.
+
+    Args:
+      timeout: Same as |pFinish|.
+    Returns:
+      A list of the results, in order of the provided devices.
+    Raises:
+      Any exception raised by any of the called functions.
+    """
+    self._assertNoShadow('pGet')
+    self.pFinish(timeout)
+    return self._objs
+
+  def pMap(self, f, *args, **kwargs):
+    """Map a function across the current wrapped objects in parallel.
+
+    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+    Note that this call is asynchronous. Call pFinish on the return value to
+    block until the call finishes.
+
+    Args:
+      f: The function to call.
+      args: The positional args to pass to f.
+      kwargs: The keyword args to pass to f.
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the map in
+      parallel.
+    """
+    self._assertNoShadow('pMap')
+    r = type(self)(self._orig_objs)
+    r._objs = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(
+            f, args=tuple([o] + list(args)), kwargs=kwargs,
+            name='%s(%s)' % (f.__name__, d))
+         for d, o in zip(self._orig_objs, self._objs)])
+    r._objs.StartAll() # pylint: disable=W0212
+    return r
+
+  def _assertNoShadow(self, attr_name):
+    """Ensures that |attr_name| isn't shadowing part of the wrapped obejcts.
+
+    If the wrapped objects _do_ have an |attr_name| attribute, it will be
+    inaccessible to clients.
+
+    Args:
+      attr_name: The attribute to check.
+    Raises:
+      AssertionError if the wrapped objects have an attribute named 'attr_name'
+      or '_assertNoShadow'.
+    """
+    if isinstance(self._objs, reraiser_thread.ReraiserThreadGroup):
+      assert not hasattr(self._objs, '_assertNoShadow')
+      assert not hasattr(self._objs, attr_name)
+    else:
+      assert not any(hasattr(o, '_assertNoShadow') for o in self._objs)
+      assert not any(hasattr(o, attr_name) for o in self._objs)
+
+
+class SyncParallelizer(Parallelizer):
+  """A Parallelizer that blocks on function calls."""
+
+  #override
+  def __call__(self, *args, **kwargs):
+    """Emulate calling |self| with |args| and |kwargs|.
+
+    Note that this call is synchronous.
+
+    Returns:
+      A Parallelizer emulating the value returned from calling |self| with
+      |args| and |kwargs|.
+    Raises:
+      AttributeError if the wrapped objects aren't callable.
+    """
+    r = super(SyncParallelizer, self).__call__(*args, **kwargs)
+    r.pFinish(None)
+    return r
+
+  #override
+  def pMap(self, f, *args, **kwargs):
+    """Map a function across the current wrapped objects in parallel.
+
+    This calls f(o, *args, **kwargs) for each o in the set of wrapped objects.
+
+    Note that this call is synchronous.
+
+    Args:
+      f: The function to call.
+      args: The positional args to pass to f.
+      kwargs: The keyword args to pass to f.
+    Returns:
+      A Parallelizer wrapping the ReraiserThreadGroup running the map in
+      parallel.
+    """
+    r = super(SyncParallelizer, self).pMap(f, *args, **kwargs)
+    r.pFinish(None)
+    return r
+
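+
+if __name__ == '__main__':
+  # A small self-contained sketch (assumes pylib is importable, e.g. with
+  # build/android on PYTHONPATH): fan str.upper out across a list of strings
+  # and collect the results in input order.
+  print Parallelizer(['foo', 'bar', 'baz']).upper().pGet(5)
+  # SyncParallelizer blocks on the call itself, so pGet here only collects the
+  # already-finished results.
+  print SyncParallelizer(['foo', 'bar', 'baz']).pMap(len).pGet(0)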
diff --git a/build/android/pylib/utils/parallelizer_test.py b/build/android/pylib/utils/parallelizer_test.py
new file mode 100644
index 0000000..6e0c7e7
--- /dev/null
+++ b/build/android/pylib/utils/parallelizer_test.py
@@ -0,0 +1,166 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unit tests for the contents of parallelizer.py."""
+
+# pylint: disable=W0212
+# pylint: disable=W0613
+
+import os
+import tempfile
+import time
+import unittest
+
+from pylib.utils import parallelizer
+
+
+class ParallelizerTestObject(object):
+  """Class used to test parallelizer.Parallelizer."""
+
+  parallel = parallelizer.Parallelizer
+
+  def __init__(self, thing, completion_file_name=None):
+    self._thing = thing
+    self._completion_file_name = completion_file_name
+    self.helper = ParallelizerTestObjectHelper(thing)
+
+  @staticmethod
+  def doReturn(what):
+    return what
+
+  @classmethod
+  def doRaise(cls, what):
+    raise what
+
+  def doSetTheThing(self, new_thing):
+    self._thing = new_thing
+
+  def doReturnTheThing(self):
+    return self._thing
+
+  def doRaiseTheThing(self):
+    raise self._thing
+
+  def doRaiseIfExceptionElseSleepFor(self, sleep_duration):
+    if isinstance(self._thing, Exception):
+      raise self._thing
+    time.sleep(sleep_duration)
+    self._write_completion_file()
+    return self._thing
+
+  def _write_completion_file(self):
+    if self._completion_file_name and len(self._completion_file_name):
+      with open(self._completion_file_name, 'w+b') as completion_file:
+        completion_file.write('complete')
+
+  def __getitem__(self, index):
+    return self._thing[index]
+
+  def __str__(self):
+    return type(self).__name__
+
+
+class ParallelizerTestObjectHelper(object):
+
+  def __init__(self, thing):
+    self._thing = thing
+
+  def doReturnStringThing(self):
+    return str(self._thing)
+
+
+class ParallelizerTest(unittest.TestCase):
+
+  def testInitWithNone(self):
+    with self.assertRaises(AssertionError):
+      parallelizer.Parallelizer(None)
+
+  def testInitEmptyList(self):
+    with self.assertRaises(AssertionError):
+      parallelizer.Parallelizer([])
+
+  def testMethodCall(self):
+    test_data = ['abc_foo', 'def_foo', 'ghi_foo']
+    expected = ['abc_bar', 'def_bar', 'ghi_bar']
+    r = parallelizer.Parallelizer(test_data).replace('_foo', '_bar').pGet(0.1)
+    self.assertEquals(expected, r)
+
+  def testMutate(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    self.assertTrue(all(d.doReturnTheThing() for d in devices))
+    ParallelizerTestObject.parallel(devices).doSetTheThing(False).pFinish(1)
+    self.assertTrue(not any(d.doReturnTheThing() for d in devices))
+
+  def testAllReturn(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    results = ParallelizerTestObject.parallel(
+        devices).doReturnTheThing().pGet(1)
+    self.assertTrue(isinstance(results, list))
+    self.assertEquals(10, len(results))
+    self.assertTrue(all(results))
+
+  def testAllRaise(self):
+    devices = [ParallelizerTestObject(Exception('thing %d' % i))
+               for i in xrange(0, 10)]
+    p = ParallelizerTestObject.parallel(devices).doRaiseTheThing()
+    with self.assertRaises(Exception):
+      p.pGet(1)
+
+  def testOneFailOthersComplete(self):
+    parallel_device_count = 10
+    exception_index = 7
+    exception_msg = 'thing %d' % exception_index
+
+    try:
+      completion_files = [tempfile.NamedTemporaryFile(delete=False)
+                          for _ in xrange(0, parallel_device_count)]
+      devices = [
+          ParallelizerTestObject(
+              i if i != exception_index else Exception(exception_msg),
+              completion_files[i].name)
+          for i in xrange(0, parallel_device_count)]
+      for f in completion_files:
+        f.close()
+      p = ParallelizerTestObject.parallel(devices)
+      with self.assertRaises(Exception) as e:
+        p.doRaiseIfExceptionElseSleepFor(2).pGet(3)
+      self.assertTrue(exception_msg in str(e.exception))
+      for i in xrange(0, parallel_device_count):
+        with open(completion_files[i].name) as f:
+          if i == exception_index:
+            self.assertEquals('', f.read())
+          else:
+            self.assertEquals('complete', f.read())
+    finally:
+      for f in completion_files:
+        os.remove(f.name)
+
+  def testReusable(self):
+    devices = [ParallelizerTestObject(True) for _ in xrange(0, 10)]
+    p = ParallelizerTestObject.parallel(devices)
+    results = p.doReturn(True).pGet(1)
+    self.assertTrue(all(results))
+    results = p.doReturn(True).pGet(1)
+    self.assertTrue(all(results))
+    with self.assertRaises(Exception):
+      results = p.doRaise(Exception('reusableTest')).pGet(1)
+
+  def testContained(self):
+    devices = [ParallelizerTestObject(i) for i in xrange(0, 10)]
+    results = (ParallelizerTestObject.parallel(devices).helper
+        .doReturnStringThing().pGet(1))
+    self.assertTrue(isinstance(results, list))
+    self.assertEquals(10, len(results))
+    for i in xrange(0, 10):
+      self.assertEquals(str(i), results[i])
+
+  def testGetItem(self):
+    devices = [ParallelizerTestObject(range(i, i+10)) for i in xrange(0, 10)]
+    results = ParallelizerTestObject.parallel(devices)[9].pGet(1)
+    self.assertEquals(range(9, 19), results)
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
+
diff --git a/build/android/pylib/utils/proguard.py b/build/android/pylib/utils/proguard.py
new file mode 100644
index 0000000..34ad5c3
--- /dev/null
+++ b/build/android/pylib/utils/proguard.py
@@ -0,0 +1,148 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import re
+import tempfile
+
+from pylib import constants
+from pylib import cmd_helper
+
+
+_PROGUARD_CLASS_RE = re.compile(r'\s*?- Program class:\s*([\S]+)$')
+_PROGUARD_SUPERCLASS_RE = re.compile(r'\s*?  Superclass:\s*([\S]+)$')
+_PROGUARD_SECTION_RE = re.compile(
+    r'^(?:Interfaces|Constant Pool|Fields|Methods|Class file attributes) '
+    r'\(count = \d+\):$')
+_PROGUARD_METHOD_RE = re.compile(r'\s*?- Method:\s*(\S*)[(].*$')
+_PROGUARD_ANNOTATION_RE = re.compile(r'\s*?- Annotation \[L(\S*);\]:$')
+_PROGUARD_ANNOTATION_CONST_RE = (
+    re.compile(r'\s*?- Constant element value.*$'))
+_PROGUARD_ANNOTATION_VALUE_RE = re.compile(r'\s*?- \S+? \[(.*)\]$')
+
+_PROGUARD_PATH_SDK = os.path.join(
+    constants.ANDROID_SDK_ROOT, 'tools', 'proguard', 'lib', 'proguard.jar')
+_PROGUARD_PATH_BUILT = (
+    os.path.join(os.environ['ANDROID_BUILD_TOP'], 'external', 'proguard',
+                 'lib', 'proguard.jar')
+    if 'ANDROID_BUILD_TOP' in os.environ else None)
+_PROGUARD_PATH = (
+    _PROGUARD_PATH_SDK if os.path.exists(_PROGUARD_PATH_SDK)
+    else _PROGUARD_PATH_BUILT)
+
+
+def Dump(jar_path):
+  """Dumps class and method information from a JAR into a dict via proguard.
+
+  Args:
+    jar_path: An absolute path to the JAR file to dump.
+  Returns:
+    A dict in the following format:
+      {
+        'classes': [
+          {
+            'class': '',
+            'superclass': '',
+            'annotations': {},
+            'methods': [
+              {
+                'method': '',
+                'annotations': {},
+              },
+              ...
+            ],
+          },
+          ...
+        ],
+      }
+  """
+
+  with tempfile.NamedTemporaryFile() as proguard_output:
+    cmd_helper.RunCmd(['java', '-jar',
+                       _PROGUARD_PATH,
+                       '-injars', jar_path,
+                       '-dontshrink',
+                       '-dontoptimize',
+                       '-dontobfuscate',
+                       '-dontpreverify',
+                       '-dump', proguard_output.name])
+
+
+    results = {
+      'classes': [],
+    }
+
+    annotation = None
+    annotation_has_value = False
+    class_result = None
+    method_result = None
+
+    for line in proguard_output:
+      line = line.strip('\r\n')
+
+      m = _PROGUARD_CLASS_RE.match(line)
+      if m:
+        class_result = {
+          'class': m.group(1).replace('/', '.'),
+          'superclass': '',
+          'annotations': {},
+          'methods': [],
+        }
+        results['classes'].append(class_result)
+        annotation = None
+        annotation_has_value = False
+        method_result = None
+        continue
+
+      if not class_result:
+        continue
+
+      m = _PROGUARD_SUPERCLASS_RE.match(line)
+      if m:
+        class_result['superclass'] = m.group(1).replace('/', '.')
+        continue
+
+      m = _PROGUARD_SECTION_RE.match(line)
+      if m:
+        annotation = None
+        annotation_has_value = False
+        method_result = None
+        continue
+
+      m = _PROGUARD_METHOD_RE.match(line)
+      if m:
+        method_result = {
+          'method': m.group(1),
+          'annotations': {},
+        }
+        class_result['methods'].append(method_result)
+        annotation = None
+        annotation_has_value = False
+        continue
+
+      m = _PROGUARD_ANNOTATION_RE.match(line)
+      if m:
+        # Ignore the annotation package.
+        annotation = m.group(1).split('/')[-1]
+        if method_result:
+          method_result['annotations'][annotation] = None
+        else:
+          class_result['annotations'][annotation] = None
+        continue
+
+      if annotation:
+        if not annotation_has_value:
+          m = _PROGUARD_ANNOTATION_CONST_RE.match(line)
+          annotation_has_value = bool(m)
+        else:
+          m = _PROGUARD_ANNOTATION_VALUE_RE.match(line)
+          if m:
+            if method_result:
+              method_result['annotations'][annotation] = m.group(1)
+            else:
+              class_result['annotations'][annotation] = m.group(1)
+          annotation_has_value = False
+
+  return results
+
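+# A minimal usage sketch, assuming the Android SDK's proguard.jar is available
+# (see _PROGUARD_PATH above) and that the jar path is real and absolute; the
+# path shown here is a hypothetical placeholder.
+#
+#   info = Dump('/abs/out/Debug/lib.java/example_javalib.jar')
+#   for clazz in info['classes']:
+#     print clazz['class'], [m['method'] for m in clazz['methods']]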
diff --git a/build/android/pylib/utils/repo_utils.py b/build/android/pylib/utils/repo_utils.py
new file mode 100644
index 0000000..e0c7d2c
--- /dev/null
+++ b/build/android/pylib/utils/repo_utils.py
@@ -0,0 +1,16 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+from pylib import cmd_helper
+
+
+def GetGitHeadSHA1(in_directory):
+  """Returns the git hash tag for the given directory.
+
+  Args:
+    in_directory: The directory where git is to be run.
+  """
+  command_line = ['git', 'log', '-1', '--pretty=format:%H']
+  output = cmd_helper.GetCmdOutput(command_line, cwd=in_directory)
+  return output[0:40]
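+
+
+if __name__ == '__main__':
+  # A trivial sketch (assumes pylib is importable and that the current working
+  # directory is inside a git checkout): print the HEAD revision.
+  print GetGitHeadSHA1('.')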
diff --git a/build/android/pylib/utils/reraiser_thread.py b/build/android/pylib/utils/reraiser_thread.py
new file mode 100644
index 0000000..0ec16b1
--- /dev/null
+++ b/build/android/pylib/utils/reraiser_thread.py
@@ -0,0 +1,158 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Thread and ThreadGroup that reraise exceptions on the main thread."""
+# pylint: disable=W0212
+
+import logging
+import sys
+import threading
+import traceback
+
+from pylib.utils import watchdog_timer
+
+
+class TimeoutError(Exception):
+  """Module-specific timeout exception."""
+  pass
+
+
+def LogThreadStack(thread):
+  """Log the stack for the given thread.
+
+  Args:
+    thread: a threading.Thread instance.
+  """
+  stack = sys._current_frames()[thread.ident]
+  logging.critical('*' * 80)
+  logging.critical('Stack dump for thread %r', thread.name)
+  logging.critical('*' * 80)
+  for filename, lineno, name, line in traceback.extract_stack(stack):
+    logging.critical('File: "%s", line %d, in %s', filename, lineno, name)
+    if line:
+      logging.critical('  %s', line.strip())
+  logging.critical('*' * 80)
+
+
+class ReraiserThread(threading.Thread):
+  """Thread class that can reraise exceptions."""
+
+  def __init__(self, func, args=None, kwargs=None, name=None):
+    """Initialize thread.
+
+    Args:
+      func: callable to call on a new thread.
+      args: list of positional arguments for callable, defaults to empty.
+      kwargs: dictionary of keyword arguments for callable, defaults to empty.
+      name: thread name, defaults to Thread-N.
+    """
+    super(ReraiserThread, self).__init__(name=name)
+    if not args:
+      args = []
+    if not kwargs:
+      kwargs = {}
+    self.daemon = True
+    self._func = func
+    self._args = args
+    self._kwargs = kwargs
+    self._ret = None
+    self._exc_info = None
+
+  def ReraiseIfException(self):
+    """Reraise exception if an exception was raised in the thread."""
+    if self._exc_info:
+      raise self._exc_info[0], self._exc_info[1], self._exc_info[2]
+
+  def GetReturnValue(self):
+    """Reraise exception if present, otherwise get the return value."""
+    self.ReraiseIfException()
+    return self._ret
+
+  #override
+  def run(self):
+    """Overrides Thread.run() to add support for reraising exceptions."""
+    try:
+      self._ret = self._func(*self._args, **self._kwargs)
+    except: # pylint: disable=W0702
+      self._exc_info = sys.exc_info()
+
+
+class ReraiserThreadGroup(object):
+  """A group of ReraiserThread objects."""
+
+  def __init__(self, threads=None):
+    """Initialize thread group.
+
+    Args:
+      threads: a list of ReraiserThread objects; defaults to empty.
+    """
+    if not threads:
+      threads = []
+    self._threads = threads
+
+  def Add(self, thread):
+    """Add a thread to the group.
+
+    Args:
+      thread: a ReraiserThread object.
+    """
+    self._threads.append(thread)
+
+  def StartAll(self):
+    """Start all threads."""
+    for thread in self._threads:
+      thread.start()
+
+  def _JoinAll(self, watcher=None):
+    """Join all threads without stack dumps.
+
+    Reraises exceptions raised by the child threads and supports breaking
+    immediately on exceptions raised on the main thread.
+
+    Args:
+      watcher: Watchdog object providing timeout, by default waits forever.
+    """
+    if watcher is None:
+      watcher = watchdog_timer.WatchdogTimer(None)
+    alive_threads = self._threads[:]
+    while alive_threads:
+      for thread in alive_threads[:]:
+        if watcher.IsTimedOut():
+          raise TimeoutError('Timed out waiting for %d of %d threads.' %
+                             (len(alive_threads), len(self._threads)))
+        # Allow the main thread to periodically check for interrupts.
+        thread.join(0.1)
+        if not thread.isAlive():
+          alive_threads.remove(thread)
+    # All threads are allowed to complete before reraising exceptions.
+    for thread in self._threads:
+      thread.ReraiseIfException()
+
+  def JoinAll(self, watcher=None):
+    """Join all threads.
+
+    Reraises exceptions raised by the child threads and supports breaking
+    immediately on exceptions raised on the main thread. Unfinished threads'
+    stacks will be logged on watchdog timeout.
+
+    Args:
+      watcher: Watchdog object providing timeout, by default waits forever.
+    """
+    try:
+      self._JoinAll(watcher)
+    except TimeoutError:
+      for thread in (t for t in self._threads if t.isAlive()):
+        LogThreadStack(thread)
+      raise
+
+  def GetAllReturnValues(self, watcher=None):
+    """Get all return values, joining all threads if necessary.
+
+    Args:
+      watcher: same as in |JoinAll|. Only used if threads are alive.
+    """
+    if any([t.isAlive() for t in self._threads]):
+      self.JoinAll(watcher)
+    return [t.GetReturnValue() for t in self._threads]
+
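+
+if __name__ == '__main__':
+  # Minimal usage sketch (assumes build/android is on PYTHONPATH so that the
+  # pylib imports above resolve): run two callables on worker threads and
+  # collect their results, reraising any child exception on the main thread.
+  group = ReraiserThreadGroup([ReraiserThread(lambda: 1),
+                               ReraiserThread(lambda: 2)])
+  group.StartAll()
+  print group.GetAllReturnValues()  # Prints [1, 2], or reraises on failure.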
diff --git a/build/android/pylib/utils/reraiser_thread_unittest.py b/build/android/pylib/utils/reraiser_thread_unittest.py
new file mode 100644
index 0000000..2392d0e
--- /dev/null
+++ b/build/android/pylib/utils/reraiser_thread_unittest.py
@@ -0,0 +1,96 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for reraiser_thread.py."""
+
+import threading
+import unittest
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+class TestException(Exception):
+  pass
+
+
+class TestReraiserThread(unittest.TestCase):
+  """Tests for reraiser_thread.ReraiserThread."""
+  def testNominal(self):
+    result = [None, None]
+
+    def f(a, b=None):
+      result[0] = a
+      result[1] = b
+
+    thread = reraiser_thread.ReraiserThread(f, [1], {'b': 2})
+    thread.start()
+    thread.join()
+    self.assertEqual(result[0], 1)
+    self.assertEqual(result[1], 2)
+
+  def testRaise(self):
+    def f():
+      raise TestException
+
+    thread = reraiser_thread.ReraiserThread(f)
+    thread.start()
+    thread.join()
+    with self.assertRaises(TestException):
+      thread.ReraiseIfException()
+
+
+class TestReraiserThreadGroup(unittest.TestCase):
+  """Tests for reraiser_thread.ReraiserThreadGroup."""
+  def testInit(self):
+    ran = [False] * 5
+    def f(i):
+      ran[i] = True
+
+    group = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(f, args=[i]) for i in range(5)])
+    group.StartAll()
+    group.JoinAll()
+    for v in ran:
+      self.assertTrue(v)
+
+  def testAdd(self):
+    ran = [False] * 5
+    def f(i):
+      ran[i] = True
+
+    group = reraiser_thread.ReraiserThreadGroup()
+    for i in xrange(5):
+      group.Add(reraiser_thread.ReraiserThread(f, args=[i]))
+    group.StartAll()
+    group.JoinAll()
+    for v in ran:
+      self.assertTrue(v)
+
+  def testJoinRaise(self):
+    def f():
+      raise TestException
+    group = reraiser_thread.ReraiserThreadGroup(
+      [reraiser_thread.ReraiserThread(f) for _ in xrange(5)])
+    group.StartAll()
+    with self.assertRaises(TestException):
+      group.JoinAll()
+
+  def testJoinTimeout(self):
+    def f():
+      pass
+    event = threading.Event()
+    def g():
+      event.wait()
+    group = reraiser_thread.ReraiserThreadGroup(
+        [reraiser_thread.ReraiserThread(g),
+         reraiser_thread.ReraiserThread(f)])
+    group.StartAll()
+    with self.assertRaises(reraiser_thread.TimeoutError):
+      group.JoinAll(watchdog_timer.WatchdogTimer(0.01))
+    event.set()
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/utils/run_tests_helper.py b/build/android/pylib/utils/run_tests_helper.py
new file mode 100644
index 0000000..43f654d
--- /dev/null
+++ b/build/android/pylib/utils/run_tests_helper.py
@@ -0,0 +1,44 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions common to native, java and host-driven test runners."""
+
+import logging
+import sys
+import time
+
+
+class CustomFormatter(logging.Formatter):
+  """Custom log formatter."""
+
+  #override
+  def __init__(self, fmt='%(threadName)-4s  %(message)s'):
+    # Can't use super() because in older Python versions logging.Formatter does
+    # not inherit from object.
+    logging.Formatter.__init__(self, fmt=fmt)
+    self._creation_time = time.time()
+
+  #override
+  def format(self, record):
+    # Can't use super() because in older Python versions logging.Formatter does
+    # not inherit from object.
+    msg = logging.Formatter.format(self, record)
+    if 'MainThread' in msg[:19]:
+      msg = msg.replace('MainThread', 'Main', 1)
+    timediff = time.time() - self._creation_time
+    return '%s %8.3fs %s' % (record.levelname[0], timediff, msg)
+
+
+def SetLogLevel(verbose_count):
+  """Sets log level as |verbose_count|."""
+  log_level = logging.WARNING  # Default.
+  if verbose_count == 1:
+    log_level = logging.INFO
+  elif verbose_count >= 2:
+    log_level = logging.DEBUG
+  logger = logging.getLogger()
+  logger.setLevel(log_level)
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
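+
+
+if __name__ == '__main__':
+  # Minimal usage sketch: a verbosity count of 2 (e.g. from "-vv") enables
+  # DEBUG logging through the custom formatter, producing lines such as
+  # "D    0.000s Main  hello".
+  SetLogLevel(verbose_count=2)
+  logging.debug('hello')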
diff --git a/build/android/pylib/utils/test_environment.py b/build/android/pylib/utils/test_environment.py
new file mode 100644
index 0000000..e78eb5c
--- /dev/null
+++ b/build/android/pylib/utils/test_environment.py
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import psutil
+import signal
+
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+
+def _KillWebServers():
+  for s in [signal.SIGTERM, signal.SIGINT, signal.SIGQUIT, signal.SIGKILL]:
+    signalled = []
+    for server in ['lighttpd', 'webpagereplay']:
+      for p in psutil.process_iter():
+        try:
+          if server not in ' '.join(p.cmdline):
+            continue
+          logging.info('Killing %s %s %s', s, server, p.pid)
+          p.send_signal(s)
+          signalled.append(p)
+        except Exception as e:
+          logging.warning('Failed killing %s %s %s', server, p.pid, e)
+    for p in signalled:
+      try:
+        p.wait(1)
+      except Exception as e:
+        logging.warning('Failed waiting for %s to die. %s', p.pid, e)
+
+
+def CleanupLeftoverProcesses():
+  """Clean up the test environment, restarting fresh adb and HTTP daemons."""
+  _KillWebServers()
+  device_utils.RestartServer()
+
+  def cleanup_device(d):
+    d.old_interface.RestartAdbdOnDevice()
+    try:
+      d.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      logging.error(str(e))
+    d.WaitUntilFullyBooted()
+
+  device_utils.DeviceUtils.parallel().pMap(cleanup_device)
+
diff --git a/build/android/pylib/utils/time_profile.py b/build/android/pylib/utils/time_profile.py
new file mode 100644
index 0000000..45da7ff
--- /dev/null
+++ b/build/android/pylib/utils/time_profile.py
@@ -0,0 +1,26 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import time
+
+
+class TimeProfile(object):
+  """Class for simple profiling of action, with logging of cost."""
+
+  def __init__(self, description):
+    self._starttime = None
+    self._description = description
+    self.Start()
+
+  def Start(self):
+    self._starttime = time.time()
+
+  def Stop(self):
+    """Stop profiling and dump a log."""
+    if self._starttime:
+      stoptime = time.time()
+      logging.info('%fsec to perform %s',
+                   stoptime - self._starttime, self._description)
+      self._starttime = None
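+
+
+if __name__ == '__main__':
+  # Minimal usage sketch: time an arbitrary operation and log its cost.
+  logging.basicConfig(level=logging.INFO)
+  timer = TimeProfile('sleeping 0.5s')
+  time.sleep(0.5)
+  timer.Stop()  # Logs something like "0.500123sec to perform sleeping 0.5s".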
diff --git a/build/android/pylib/utils/timeout_retry.py b/build/android/pylib/utils/timeout_retry.py
new file mode 100644
index 0000000..61f7c70
--- /dev/null
+++ b/build/android/pylib/utils/timeout_retry.py
@@ -0,0 +1,167 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility to run functions with timeouts and retries."""
+# pylint: disable=W0702
+
+import logging
+import threading
+import time
+import traceback
+
+from pylib.utils import reraiser_thread
+from pylib.utils import watchdog_timer
+
+
+class TimeoutRetryThread(reraiser_thread.ReraiserThread):
+  def __init__(self, func, timeout, name):
+    super(TimeoutRetryThread, self).__init__(func, name=name)
+    self._watcher = watchdog_timer.WatchdogTimer(timeout)
+    self._expired = False
+
+  def GetWatcher(self):
+    """Returns the watchdog keeping track of this thread's time."""
+    return self._watcher
+
+  def GetElapsedTime(self):
+    return self._watcher.GetElapsed()
+
+  def GetRemainingTime(self, required=0, msg=None):
+    """Get the remaining time before the thread times out.
+
+    Useful to send as the |timeout| parameter of async IO operations.
+
+    Args:
+      required: minimum amount of time that will be required to complete, e.g.,
+        some sleep or IO operation.
+      msg: error message to show if timing out.
+
+    Returns:
+      The number of seconds remaining before the thread times out, or None
+      if the thread never times out.
+
+    Raises:
+      reraiser_thread.TimeoutError if the remaining time is less than the
+        required time.
+    """
+    remaining = self._watcher.GetRemaining()
+    if remaining is not None and remaining < required:
+      if msg is None:
+        msg = 'Timeout expired'
+      if remaining > 0:
+        msg += (', wait of %.1f secs required but only %.1f secs left'
+                % (required, remaining))
+      self._expired = True
+      raise reraiser_thread.TimeoutError(msg)
+    return remaining
+
+  def LogTimeoutException(self):
+    """Log the exception that terminated this thread."""
+    if not self._expired:
+      return
+    logging.critical('*' * 80)
+    logging.critical('%s on thread %r', self._exc_info[0].__name__, self.name)
+    logging.critical('*' * 80)
+    fmt_exc = ''.join(traceback.format_exception(*self._exc_info))
+    for line in fmt_exc.splitlines():
+      logging.critical(line.rstrip())
+    logging.critical('*' * 80)
+
+
+def CurrentTimeoutThread():
+  """Get the current thread if it is a TimeoutRetryThread.
+
+  Returns:
+    The current thread if it is a TimeoutRetryThread, otherwise None.
+  """
+  current_thread = threading.current_thread()
+  if isinstance(current_thread, TimeoutRetryThread):
+    return current_thread
+  else:
+    return None
+
+
+def WaitFor(condition, wait_period=5, max_tries=None):
+  """Wait for a condition to become true.
+
+  Repeatedly calls condition(), with no arguments, until it returns
+  a true value.
+
+  If called within a TimeoutRetryThread, it cooperates nicely with it.
+
+  Args:
+    condition: function with the condition to check
+    wait_period: number of seconds to wait before retrying to check the
+      condition
+    max_tries: maximum number of checks to make, the default tries forever
+      or until the TimeoutRetryThread expires.
+
+  Returns:
+    The true value returned by the condition, or None if the condition was
+    not met after max_tries.
+
+  Raises:
+    reraiser_thread.TimeoutError if the current thread is a TimeoutRetryThread
+      and the timeout expires.
+  """
+  condition_name = condition.__name__
+  timeout_thread = CurrentTimeoutThread()
+  while max_tries is None or max_tries > 0:
+    result = condition()
+    if max_tries is not None:
+      max_tries -= 1
+    msg = ['condition', repr(condition_name), 'met' if result else 'not met']
+    if timeout_thread:
+      msg.append('(%.1fs)' % timeout_thread.GetElapsedTime())
+    logging.info(' '.join(msg))
+    if result:
+      return result
+    if timeout_thread:
+      timeout_thread.GetRemainingTime(wait_period,
+          msg='Timed out waiting for %r' % condition_name)
+    time.sleep(wait_period)
+  return None
+
+
+def Run(func, timeout, retries, args=None, kwargs=None):
+  """Runs the passed function in a separate thread with timeouts and retries.
+
+  Args:
+    func: the function to be wrapped.
+    timeout: the timeout in seconds for each try.
+    retries: the number of retries.
+    args: list of positional args to pass to |func|.
+    kwargs: dictionary of keyword args to pass to |func|.
+
+  Returns:
+    The return value of func(*args, **kwargs).
+  """
+  if not args:
+    args = []
+  if not kwargs:
+    kwargs = {}
+
+  # The return value is stored in a one-element list rather than a plain local
+  # variable: assigning to a local inside the nested function would only
+  # rebind the inner name, whereas mutating the list is visible out here.
+  ret = [None]
+  def RunOnTimeoutThread():
+    ret[0] = func(*args, **kwargs)
+
+  num_try = 1
+  while True:
+    child_thread = TimeoutRetryThread(
+      RunOnTimeoutThread, timeout,
+      name='TimeoutThread-%d-for-%s' % (num_try,
+                                        threading.current_thread().name))
+    try:
+      thread_group = reraiser_thread.ReraiserThreadGroup([child_thread])
+      thread_group.StartAll()
+      thread_group.JoinAll(child_thread.GetWatcher())
+      return ret[0]
+    except:
+      child_thread.LogTimeoutException()
+      if num_try > retries:
+        raise
+      num_try += 1
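+
+
+if __name__ == '__main__':
+  # Minimal usage sketch (assumes build/android is on PYTHONPATH so that the
+  # pylib imports above resolve): give each attempt a 5 second budget and up
+  # to 2 retries, and poll a condition cooperatively inside the wrapped
+  # function so that it honours the enclosing timeout.
+  logging.basicConfig(level=logging.INFO)
+  def _ExampleOperation():
+    return WaitFor(lambda: True, wait_period=1)
+  print Run(_ExampleOperation, timeout=5, retries=2)  # Prints True.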
diff --git a/build/android/pylib/utils/timeout_retry_unittest.py b/build/android/pylib/utils/timeout_retry_unittest.py
new file mode 100644
index 0000000..dc36c42
--- /dev/null
+++ b/build/android/pylib/utils/timeout_retry_unittest.py
@@ -0,0 +1,52 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittests for timeout_and_retry.py."""
+
+import unittest
+
+from pylib.utils import reraiser_thread
+from pylib.utils import timeout_retry
+
+
+class TestException(Exception):
+  pass
+
+
+def _NeverEnding(tries):
+  tries[0] += 1
+  while True:
+    pass
+
+
+def _CountTries(tries):
+  tries[0] += 1
+  raise TestException
+
+
+class TestRun(unittest.TestCase):
+  """Tests for timeout_retry.Run."""
+
+  def testRun(self):
+    self.assertTrue(timeout_retry.Run(
+        lambda x: x, 30, 3, [True], {}))
+
+  def testTimeout(self):
+    tries = [0]
+    self.assertRaises(reraiser_thread.TimeoutError,
+        timeout_retry.Run, lambda: _NeverEnding(tries), 0, 3)
+    self.assertEqual(tries[0], 4)
+
+  def testRetries(self):
+    tries = [0]
+    self.assertRaises(TestException,
+        timeout_retry.Run, lambda: _CountTries(tries), 30, 3)
+    self.assertEqual(tries[0], 4)
+
+  def testReturnValue(self):
+    self.assertTrue(timeout_retry.Run(lambda: True, 30, 3))
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/pylib/utils/watchdog_timer.py b/build/android/pylib/utils/watchdog_timer.py
new file mode 100644
index 0000000..2f4c464
--- /dev/null
+++ b/build/android/pylib/utils/watchdog_timer.py
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""WatchdogTimer timeout objects."""
+
+import time
+
+
+class WatchdogTimer(object):
+  """A resetable timeout-based watchdog.
+
+  This object is threadsafe.
+  """
+
+  def __init__(self, timeout):
+    """Initializes the watchdog.
+
+    Args:
+      timeout: The timeout in seconds. If None, the watchdog never times out.
+    """
+    self._start_time = time.time()
+    self._timeout = timeout
+
+  def Reset(self):
+    """Resets the timeout countdown."""
+    self._start_time = time.time()
+
+  def GetElapsed(self):
+    """Returns the elapsed time of the watchdog."""
+    return time.time() - self._start_time
+
+  def GetRemaining(self):
+    """Returns the remaining time of the watchdog."""
+    if self._timeout:
+      return self._timeout - self.GetElapsed()
+    else:
+      return None
+
+  def IsTimedOut(self):
+    """Whether the watchdog has timed out.
+
+    Returns:
+      True if the watchdog has timed out, False otherwise.
+    """
+    remaining = self.GetRemaining()
+    return remaining is not None and remaining < 0
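+
+
+if __name__ == '__main__':
+  # Minimal usage sketch: bound a polling loop with a 2 second watchdog.
+  watchdog = WatchdogTimer(2)
+  while not watchdog.IsTimedOut():
+    # A real caller would poll some condition here and break once it holds.
+    time.sleep(0.5)
+  print 'Gave up after %.1fs.' % watchdog.GetElapsed()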
diff --git a/build/android/pylib/utils/xvfb.py b/build/android/pylib/utils/xvfb.py
new file mode 100644
index 0000000..cb9d50e
--- /dev/null
+++ b/build/android/pylib/utils/xvfb.py
@@ -0,0 +1,58 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# pylint: disable=W0702
+
+import os
+import signal
+import subprocess
+import sys
+import time
+
+
+def _IsLinux():
+  """Return True if on Linux; else False."""
+  return sys.platform.startswith('linux')
+
+
+class Xvfb(object):
+  """Class to start and stop Xvfb if relevant.  Nop if not Linux."""
+
+  def __init__(self):
+    self._pid = 0
+
+  def Start(self):
+    """Start Xvfb and set an appropriate DISPLAY environment.  Linux only.
+
+    Copied from tools/code_coverage/coverage_posix.py
+    """
+    if not _IsLinux():
+      return
+    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
+                             '-ac'],
+                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    self._pid = proc.pid
+    if not self._pid:
+      raise Exception('Could not start Xvfb')
+    os.environ['DISPLAY'] = ':9'
+
+    # Now confirm, giving a chance for it to start if needed.
+    for _ in range(10):
+      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
+      _, retcode = os.waitpid(proc.pid, 0)
+      if retcode == 0:
+        break
+      time.sleep(0.25)
+    if retcode != 0:
+      raise Exception('Could not confirm Xvfb happiness')
+
+  def Stop(self):
+    """Stop Xvfb if needed.  Linux only."""
+    if self._pid:
+      try:
+        os.kill(self._pid, signal.SIGKILL)
+      except:
+        pass
+      del os.environ['DISPLAY']
+      self._pid = 0
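+
+
+# Minimal usage sketch (requires Xvfb and xdpyinfo on a Linux host; both
+# Start() and Stop() are effectively no-ops elsewhere). RunTests() stands in
+# for whatever needs a display:
+#
+#   xvfb = Xvfb()
+#   xvfb.Start()
+#   try:
+#     RunTests()
+#   finally:
+#     xvfb.Stop()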
diff --git a/build/android/pylib/utils/zip_utils.py b/build/android/pylib/utils/zip_utils.py
new file mode 100644
index 0000000..d799463
--- /dev/null
+++ b/build/android/pylib/utils/zip_utils.py
@@ -0,0 +1,31 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import logging
+import os
+import zipfile
+
+
+def WriteToZipFile(zip_file, path, arc_path):
+  """Recursively write |path| to |zip_file| as |arc_path|.
+
+  zip_file: An open instance of zipfile.ZipFile.
+  path: An absolute path to the file or directory to be zipped.
+  arc_path: A relative path within the zip file to which the file or directory
+    located at |path| should be written.
+  """
+  if os.path.isdir(path):
+    for dir_path, _, file_names in os.walk(path):
+      dir_arc_path = os.path.join(arc_path, os.path.relpath(dir_path, path))
+      logging.debug('dir:  %s -> %s', dir_path, dir_arc_path)
+      zip_file.write(dir_path, dir_arc_path, zipfile.ZIP_STORED)
+      for f in file_names:
+        file_path = os.path.join(dir_path, f)
+        file_arc_path = os.path.join(dir_arc_path, f)
+        logging.debug('file: %s -> %s', file_path, file_arc_path)
+        zip_file.write(file_path, file_arc_path, zipfile.ZIP_DEFLATED)
+  else:
+    logging.debug('file: %s -> %s', path, arc_path)
+    zip_file.write(path, arc_path, zipfile.ZIP_DEFLATED)
+
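+
+if __name__ == '__main__':
+  # Minimal usage sketch: archive this script's own directory into a
+  # temporary zip file; directories are STORED and files are DEFLATED.
+  import tempfile
+  logging.basicConfig(level=logging.DEBUG)
+  example_zip = os.path.join(tempfile.mkdtemp(), 'example.zip')
+  with zipfile.ZipFile(example_zip, 'w') as zip_file:
+    WriteToZipFile(zip_file, os.path.dirname(os.path.abspath(__file__)),
+                   'utils')
+  print 'Wrote %s' % example_zip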
diff --git a/build/android/pylib/valgrind_tools.py b/build/android/pylib/valgrind_tools.py
new file mode 100644
index 0000000..99719d0
--- /dev/null
+++ b/build/android/pylib/valgrind_tools.py
@@ -0,0 +1,304 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Classes in this file define additional actions that need to be taken to run a
+test under some kind of runtime error detection tool.
+
+The interface is intended to be used as follows.
+
+1. For tests that simply run a native process (i.e. no activity is spawned):
+
+Call tool.CopyFiles(device).
+Prepend test command line with tool.GetTestWrapper().
+
+2. For tests that spawn an activity:
+
+Call tool.CopyFiles(device).
+Call tool.SetupEnvironment().
+Run the test as usual.
+Call tool.CleanUpEnvironment().
+"""
+# pylint: disable=R0201
+
+import glob
+import logging
+import os.path
+import subprocess
+import sys
+
+from pylib.constants import DIR_SOURCE_ROOT
+from pylib.device import device_errors
+
+
+def SetChromeTimeoutScale(device, scale):
+  """Sets the timeout scale in /data/local/tmp/chrome_timeout_scale to scale."""
+  path = '/data/local/tmp/chrome_timeout_scale'
+  if not scale or scale == 1.0:
+    # Delete if scale is None/0.0/1.0 since the default timeout scale is 1.0
+    device.RunShellCommand('rm %s' % path)
+  else:
+    device.WriteFile(path, '%f' % scale, as_root=True)
+
+
+class BaseTool(object):
+  """A tool that does nothing."""
+
+  def __init__(self):
+    """Does nothing."""
+    pass
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ''
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper name for the utilities.
+
+    Returns:
+       A string that is to be prepended to the command line of utility
+    processes (forwarder, etc.).
+    """
+    return ''
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies tool-specific files to the device, create directories, etc."""
+    pass
+
+  def SetupEnvironment(self):
+    """Sets up the system environment for a test.
+
+    This is a good place to set system properties.
+    """
+    pass
+
+  def CleanUpEnvironment(self):
+    """Cleans up environment."""
+    pass
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 1.0
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns:
+      True if this tool can not work with stripped binaries.
+    """
+    return False
+
+
+class AddressSanitizerTool(BaseTool):
+  """AddressSanitizer tool."""
+
+  WRAPPER_NAME = '/system/bin/asanwrapper'
+  # Disable the memcmp overlap check. There are blobs (GL drivers)
+  # on some Android devices that use memcmp on overlapping regions;
+  # nothing we can do about that.
+  EXTRA_OPTIONS = 'strict_memcmp=0,use_sigaltstack=1'
+
+  def __init__(self, device):
+    super(AddressSanitizerTool, self).__init__()
+    self._device = device
+    # Configure AndroidCommands to run utils (such as md5sum_bin) under ASan.
+    # This is required because ASan is a compiler-based tool, and md5sum
+    # includes instrumented code from base.
+    device.old_interface.SetUtilWrapper(self.GetUtilWrapper())
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies ASan tools to the device."""
+    libs = glob.glob(os.path.join(DIR_SOURCE_ROOT,
+                                  'third_party/llvm-build/Release+Asserts/',
+                                  'lib/clang/*/lib/linux/',
+                                  'libclang_rt.asan-arm-android.so'))
+    assert len(libs) == 1
+    subprocess.call(
+        [os.path.join(
+             DIR_SOURCE_ROOT,
+             'tools/android/asan/third_party/asan_device_setup.sh'),
+         '--device', str(device),
+         '--lib', libs[0],
+         '--extra-options', AddressSanitizerTool.EXTRA_OPTIONS])
+    device.WaitUntilFullyBooted()
+
+  def GetTestWrapper(self):
+    return AddressSanitizerTool.WRAPPER_NAME
+
+  def GetUtilWrapper(self):
+    """Returns the wrapper for utilities, such as forwarder.
+
+    AddressSanitizer wrapper must be added to all instrumented binaries,
+    including forwarder and the like. This can be removed if such binaries
+    were built without instrumentation. """
+    return self.GetTestWrapper()
+
+  def SetupEnvironment(self):
+    try:
+      self._device.EnableRoot()
+    except device_errors.CommandFailedError as e:
+      # Try to set the timeout scale anyway.
+      # TODO(jbudorick) Handle this exception appropriately after interface
+      #                 conversions are finished.
+      logging.error(str(e))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    SetChromeTimeoutScale(self._device, None)
+
+  def GetTimeoutScale(self):
+    # Very slow startup.
+    return 20.0
+
+
+class ValgrindTool(BaseTool):
+  """Base abstract class for Valgrind tools."""
+
+  VG_DIR = '/data/local/tmp/valgrind'
+  VGLOGS_DIR = '/data/local/tmp/vglogs'
+
+  def __init__(self, device):
+    super(ValgrindTool, self).__init__()
+    self._device = device
+    # exactly 31 chars, SystemProperties::PROP_NAME_MAX
+    self._wrap_properties = ['wrap.com.google.android.apps.ch',
+                             'wrap.org.chromium.native_test']
+
+  @classmethod
+  def CopyFiles(cls, device):
+    """Copies Valgrind tools to the device."""
+    device.RunShellCommand(
+        'rm -r %s; mkdir %s' % (ValgrindTool.VG_DIR, ValgrindTool.VG_DIR))
+    device.RunShellCommand(
+        'rm -r %s; mkdir %s' % (ValgrindTool.VGLOGS_DIR,
+                                ValgrindTool.VGLOGS_DIR))
+    files = cls.GetFilesForTool()
+    device.PushChangedFiles(
+        [(os.path.join(DIR_SOURCE_ROOT, f),
+          os.path.join(ValgrindTool.VG_DIR, os.path.basename(f)))
+         for f in files])
+
+  def SetupEnvironment(self):
+    """Sets up device environment."""
+    self._device.RunShellCommand('chmod 777 /data/local/tmp')
+    self._device.RunShellCommand('setenforce 0')
+    for prop in self._wrap_properties:
+      self._device.RunShellCommand(
+          'setprop %s "logwrapper %s"' % (prop, self.GetTestWrapper()))
+    SetChromeTimeoutScale(self._device, self.GetTimeoutScale())
+
+  def CleanUpEnvironment(self):
+    """Cleans up device environment."""
+    for prop in self._wrap_properties:
+      self._device.RunShellCommand('setprop %s ""' % (prop,))
+    SetChromeTimeoutScale(self._device, None)
+
+  @staticmethod
+  def GetFilesForTool():
+    """Returns a list of file names for the tool."""
+    raise NotImplementedError()
+
+  def NeedsDebugInfo(self):
+    """Whether this tool requires debug info.
+
+    Returns:
+      True if this tool can not work with stripped binaries.
+    """
+    return True
+
+
+class MemcheckTool(ValgrindTool):
+  """Memcheck tool."""
+
+  def __init__(self, device):
+    super(MemcheckTool, self).__init__(device)
+
+  @staticmethod
+  def GetFilesForTool():
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper.sh',
+            'tools/valgrind/memcheck/suppressions.txt',
+            'tools/valgrind/memcheck/suppressions_android.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30
+
+
+class TSanTool(ValgrindTool):
+  """ThreadSanitizer tool. See http://code.google.com/p/data-race-test ."""
+
+  def __init__(self, device):
+    super(TSanTool, self).__init__(device)
+
+  @staticmethod
+  def GetFilesForTool():
+    """Returns a list of file names for the tool."""
+    return ['tools/valgrind/android/vg-chrome-wrapper-tsan.sh',
+            'tools/valgrind/tsan/suppressions.txt',
+            'tools/valgrind/tsan/suppressions_android.txt',
+            'tools/valgrind/tsan/ignores.txt']
+
+  def GetTestWrapper(self):
+    """Returns a string that is to be prepended to the test command line."""
+    return ValgrindTool.VG_DIR + '/' + 'vg-chrome-wrapper-tsan.sh'
+
+  def GetTimeoutScale(self):
+    """Returns a multiplier that should be applied to timeout values."""
+    return 30.0
+
+
+TOOL_REGISTRY = {
+    'memcheck': MemcheckTool,
+    'memcheck-renderer': MemcheckTool,
+    'tsan': TSanTool,
+    'tsan-renderer': TSanTool,
+    'asan': AddressSanitizerTool,
+}
+
+
+def CreateTool(tool_name, device):
+  """Creates a tool with the specified tool name.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  Returns:
+    A tool for the specified tool_name.
+  """
+  if not tool_name:
+    return BaseTool()
+
+  ctor = TOOL_REGISTRY.get(tool_name)
+  if ctor:
+    return ctor(device)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+        tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
+
+def PushFilesForTool(tool_name, device):
+  """Pushes the files required for |tool_name| to |device|.
+
+  Args:
+    tool_name: Name of the tool to create.
+    device: A DeviceUtils instance.
+  """
+  if not tool_name:
+    return
+
+  clazz = TOOL_REGISTRY.get(tool_name)
+  if clazz:
+    clazz.CopyFiles(device)
+  else:
+    print 'Unknown tool %s, available tools: %s' % (
+        tool_name, ', '.join(sorted(TOOL_REGISTRY.keys())))
+    sys.exit(1)
+
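+
+# Minimal usage sketch of the interface described in the module docstring,
+# assuming |device| is a connected DeviceUtils instance and the test binary
+# path is hypothetical:
+#
+#   tool = CreateTool('asan', device)
+#   PushFilesForTool('asan', device)
+#   tool.SetupEnvironment()
+#   device.RunShellCommand(
+#       '%s /data/local/tmp/my_test' % tool.GetTestWrapper())
+#   tool.CleanUpEnvironment()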
diff --git a/build/android/rezip.gyp b/build/android/rezip.gyp
new file mode 100644
index 0000000..1115177
--- /dev/null
+++ b/build/android/rezip.gyp
@@ -0,0 +1,45 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Build the rezip build tool.
+{
+  'targets': [
+    {
+      # GN: //build/android/rezip:rezip
+      'target_name': 'rezip_apk_jar',
+      'type': 'none',
+      'variables': {
+        'java_in_dir': 'rezip',
+        'compile_stamp': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/compile.stamp',
+        'javac_jar_path': '<(PRODUCT_DIR)/lib.java/rezip_apk.jar',
+      },
+      'actions': [
+        {
+          'action_name': 'javac_<(_target_name)',
+          'message': 'Compiling <(_target_name) java sources',
+          'variables': {
+            'java_sources': ['>!@(find >(java_in_dir) -name "*.java")'],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/javac.py',
+            '>@(java_sources)',
+          ],
+          'outputs': [
+            '<(compile_stamp)',
+            '<(javac_jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/javac.py',
+            '--classpath=',
+            '--classes-dir=<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+            '--jar-path=<(javac_jar_path)',
+            '--stamp=<(compile_stamp)',
+            '>@(java_sources)',
+          ]
+        },
+      ],
+    }
+  ],
+}
diff --git a/build/android/rezip/BUILD.gn b/build/android/rezip/BUILD.gn
new file mode 100644
index 0000000..8b8f78e
--- /dev/null
+++ b/build/android/rezip/BUILD.gn
@@ -0,0 +1,11 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+# GYP: //build/android/rezip.gyp:rezip_apk_jar
+java_library("rezip") {
+  jar_path = "$root_build_dir/lib.java/rezip_apk.jar"
+  DEPRECATED_java_in_dir = "."
+}
diff --git a/build/android/rezip/RezipApk.java b/build/android/rezip/RezipApk.java
new file mode 100644
index 0000000..43d7544
--- /dev/null
+++ b/build/android/rezip/RezipApk.java
@@ -0,0 +1,448 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Enumeration;
+import java.util.List;
+import java.util.jar.JarEntry;
+import java.util.jar.JarFile;
+import java.util.jar.JarOutputStream;
+import java.util.regex.Pattern;
+import java.util.zip.CRC32;
+
+/**
+ * Command line tool used to build APKs which support loading the native code library
+ * directly from the APK file. To construct the APK we rename the native library by
+ * adding the prefix "crazy." to the filename. This is done to prevent the Android
+ * Package Manager from extracting the library. The native code must be page aligned
+ * and uncompressed. The page alignment is implemented by adding a zero filled file
+ * in front of the native code library. This tool is designed so that running
+ * SignApk and/or zipalign on the resulting APK does not break the page alignment.
+ * This is achieved by outputting the filenames in the same canonical order used
+ * by SignApk and adding the same alignment fields added by zipalign.
+ */
+class RezipApk {
+    // Alignment to use for non-compressed files (must match zipalign).
+    private static final int ALIGNMENT = 4;
+
+    // Alignment to use for non-compressed *.so files
+    private static final int LIBRARY_ALIGNMENT = 4096;
+
+    // Files matching this pattern are not copied to the output when adding alignment.
+    // When reordering and verifying the APK they are copied to the end of the file.
+    private static Pattern sMetaFilePattern =
+            Pattern.compile("^(META-INF/((.*)[.](SF|RSA|DSA)|com/android/otacert))|("
+                    + Pattern.quote(JarFile.MANIFEST_NAME) + ")$");
+
+    // Pattern for matching a shared library in the APK
+    private static Pattern sLibraryPattern = Pattern.compile("^lib/[^/]*/lib.*[.]so$");
+    // Pattern for matching the crazy linker in the APK
+    private static Pattern sCrazyLinkerPattern =
+            Pattern.compile("^lib/[^/]*/libchromium_android_linker.so$");
+    // Pattern for matching a crazy loaded shared library in the APK
+    private static Pattern sCrazyLibraryPattern = Pattern.compile("^lib/[^/]*/crazy.lib.*[.]so$");
+
+    private static boolean isLibraryFilename(String filename) {
+        return sLibraryPattern.matcher(filename).matches()
+                && !sCrazyLinkerPattern.matcher(filename).matches();
+    }
+
+    private static boolean isCrazyLibraryFilename(String filename) {
+        return sCrazyLibraryPattern.matcher(filename).matches();
+    }
+
+    private static String renameLibraryForCrazyLinker(String filename) {
+        int lastSlash = filename.lastIndexOf('/');
+        // We rename the library, so that the Android Package Manager
+        // no longer extracts the library.
+        return filename.substring(0, lastSlash + 1) + "crazy." + filename.substring(lastSlash + 1);
+    }
+
+    /**
+     * Wraps another output stream, counting the number of bytes written.
+     */
+    private static class CountingOutputStream extends OutputStream {
+        private long mCount = 0;
+        private OutputStream mOut;
+
+        public CountingOutputStream(OutputStream out) {
+            this.mOut = out;
+        }
+
+        /** Returns the number of bytes written. */
+        public long getCount() {
+            return mCount;
+        }
+
+        @Override public void write(byte[] b, int off, int len) throws IOException {
+            mOut.write(b, off, len);
+            mCount += len;
+        }
+
+        @Override public void write(int b) throws IOException {
+            mOut.write(b);
+            mCount++;
+        }
+
+        @Override public void close() throws IOException {
+            mOut.close();
+        }
+
+        @Override public void flush() throws IOException {
+            mOut.flush();
+        }
+    }
+
+    private static String outputName(JarEntry entry, boolean rename) {
+        String inName = entry.getName();
+        if (rename && entry.getSize() > 0 && isLibraryFilename(inName)) {
+            return renameLibraryForCrazyLinker(inName);
+        }
+        return inName;
+    }
+
+    /**
+     * Comparator used to sort jar entries from the input file.
+     * Sorting is done based on the output filename (which may be renamed).
+     * Filenames are in natural string order, except that filenames matching
+     * the meta-file pattern are always after other files. This is so the manifest
+     * and signature are at the end of the file after any alignment file.
+     */
+    private static class EntryComparator implements Comparator<JarEntry> {
+        private boolean mRename;
+
+        public EntryComparator(boolean rename) {
+            mRename = rename;
+        }
+
+        @Override
+        public int compare(JarEntry j1, JarEntry j2) {
+            String o1 = outputName(j1, mRename);
+            String o2 = outputName(j2, mRename);
+            boolean o1Matches = sMetaFilePattern.matcher(o1).matches();
+            boolean o2Matches = sMetaFilePattern.matcher(o2).matches();
+            if (o1Matches != o2Matches) {
+                return o1Matches ? 1 : -1;
+            } else {
+                return o1.compareTo(o2);
+            }
+        }
+    }
+
+    // Build an ordered list of jar entries. The jar entries from the input are
+    // sorted based on the output filenames (which may be renamed). If |omitMetaFiles|
+    // is true do not include the jar entries for the META-INF files.
+    // Entries are ordered in the deterministic order used by SignApk.
+    private static List<JarEntry> getOutputFileOrderEntries(
+            JarFile jar, boolean omitMetaFiles, boolean rename) {
+        List<JarEntry> entries = new ArrayList<JarEntry>();
+        for (Enumeration<JarEntry> e = jar.entries(); e.hasMoreElements(); ) {
+            JarEntry entry = e.nextElement();
+            if (entry.isDirectory()) {
+                continue;
+            }
+            if (omitMetaFiles && sMetaFilePattern.matcher(entry.getName()).matches()) {
+                continue;
+            }
+            entries.add(entry);
+        }
+
+        // We sort the input entries by name. When present, META-INF files
+        // are sorted to the end.
+        Collections.sort(entries, new EntryComparator(rename));
+        return entries;
+    }
+
+    /**
+     * Add a zero filled alignment file at this point in the zip file.
+     * The added file will be added before |name| and after |prevName|.
+     * The size of the alignment file is such that the location of the
+     * file |name| will be on a LIBRARY_ALIGNMENT boundary.
+     *
+     * Note this arrangement is devised so that running SignApk and/or zipalign on the resulting
+     * file will not alter the alignment.
+     *
+     * @param offset number of bytes into the output file at this point.
+     * @param timestamp time in millis since the epoch to include in the header.
+     * @param name the name of the library filename.
+     * @param prevName the name of the previous file in the archive (or null).
+     * @param out jar output stream to write the alignment file to.
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void addAlignmentFile(
+            long offset, long timestamp, String name, String prevName,
+            JarOutputStream out) throws IOException {
+
+        // Compute the start and alignment of the library, as if it was next.
+        int headerSize = JarFile.LOCHDR + name.length();
+        long libOffset = offset + headerSize;
+        int libNeeded = LIBRARY_ALIGNMENT - (int) (libOffset % LIBRARY_ALIGNMENT);
+        if (libNeeded == LIBRARY_ALIGNMENT) {
+            // Already aligned, no need to add an alignment file.
+            return;
+        }
+
+        // Check that there is not another file between the library and the
+        // alignment file.
+        String alignName = name.substring(0, name.length() - 2) + "align";
+        if (prevName != null && prevName.compareTo(alignName) >= 0) {
+            throw new UnsupportedOperationException(
+                "Unable to insert alignment file, because there is "
+                + "another file in front of the file to be aligned. "
+                + "Other file: " + prevName + " Alignment file: " + alignName
+                + " file: " + name);
+        }
+
+        // Compute the size of the alignment file header.
+        headerSize = JarFile.LOCHDR + alignName.length();
+        // We are going to add an alignment file of type STORED. This file
+        // will itself induce a zipalign alignment adjustment.
+        int extraNeeded =
+                (ALIGNMENT - (int) ((offset + headerSize) % ALIGNMENT)) % ALIGNMENT;
+        headerSize += extraNeeded;
+
+        if (libNeeded < headerSize + 1) {
+            // The header was bigger than the alignment that we need, add another page.
+            libNeeded += LIBRARY_ALIGNMENT;
+        }
+        // Compute the size of the alignment file.
+        libNeeded -= headerSize;
+
+        // Build the header for the alignment file.
+        byte[] zeroBuffer = new byte[libNeeded];
+        JarEntry alignEntry = new JarEntry(alignName);
+        alignEntry.setMethod(JarEntry.STORED);
+        alignEntry.setSize(libNeeded);
+        alignEntry.setTime(timestamp);
+        CRC32 crc = new CRC32();
+        crc.update(zeroBuffer);
+        alignEntry.setCrc(crc.getValue());
+
+        if (extraNeeded != 0) {
+            alignEntry.setExtra(new byte[extraNeeded]);
+        }
+
+        // Output the alignment file.
+        out.putNextEntry(alignEntry);
+        out.write(zeroBuffer);
+        out.closeEntry();
+        out.flush();
+    }
+
+    // Make a JarEntry for the output file which corresponds to the input
+    // file. The output file will be called |name|. The output file will always
+    // be uncompressed (STORED). If the input is not STORED it is necessary to inflate
+    // it to compute the CRC and size of the output entry.
+    private static JarEntry makeStoredEntry(String name, JarEntry inEntry, JarFile in)
+            throws IOException {
+        JarEntry outEntry = new JarEntry(name);
+        outEntry.setMethod(JarEntry.STORED);
+
+        if (inEntry.getMethod() == JarEntry.STORED) {
+            outEntry.setCrc(inEntry.getCrc());
+            outEntry.setSize(inEntry.getSize());
+        } else {
+            // We are inflating the file. We need to compute the CRC and size.
+            byte[] buffer = new byte[4096];
+            CRC32 crc = new CRC32();
+            int size = 0;
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                crc.update(buffer, 0, num);
+                size += num;
+            }
+            data.close();
+            outEntry.setCrc(crc.getValue());
+            outEntry.setSize(size);
+        }
+        return outEntry;
+    }
+
+    /**
+     * Copy the contents of the input APK file to the output APK file. If |rename| is
+     * true then non-empty libraries (*.so) in the input will be renamed by prefixing
+     * "crazy.". This is done to prevent the Android Package Manager extracting the
+     * library. Note the crazy linker itself is not renamed, for bootstrapping reasons.
+     * Empty libraries are not renamed (they are in the APK to workaround a bug where
+     * the Android Package Manager fails to delete old versions when upgrading).
+     * There must be exactly one "crazy" library in the output stream. The "crazy"
+     * library will be uncompressed and page aligned in the output stream. Page
+     * alignment is implemented by adding a zero filled file, regular alignment is
+     * implemented by adding a zero filled extra field to the zip file header. If
+     * |addAlignment| is true a page alignment file is added, otherwise the "crazy"
+     * library must already be page aligned. Care is taken so that the output is generated
+     * in the same way as SignApk. This is important so that running SignApk and
+     * zipalign on the output does not break the page alignment. The archive may not
+     * contain a "*.apk" as SignApk has special nested signing logic that we do not
+     * support.
+     *
+     * @param in The input APK File.
+     * @param out The output APK stream.
+     * @param countOut Counting output stream (to measure the current offset).
+     * @param addAlignment Whether to add the alignment file or just check.
+     * @param rename Whether to rename libraries to be "crazy".
+     *
+     * @throws IOException if the output file can not be written.
+     */
+    private static void rezip(
+            JarFile in, JarOutputStream out, CountingOutputStream countOut,
+            boolean addAlignment, boolean rename) throws IOException {
+
+        List<JarEntry> entries = getOutputFileOrderEntries(in, addAlignment, rename);
+        long timestamp = System.currentTimeMillis();
+        byte[] buffer = new byte[4096];
+        boolean firstEntry = true;
+        String prevName = null;
+        int numCrazy = 0;
+        for (JarEntry inEntry : entries) {
+            // Rename files, if specified.
+            String name = outputName(inEntry, rename);
+            if (name.endsWith(".apk")) {
+                throw new UnsupportedOperationException(
+                        "Nested APKs are not supported: " + name);
+            }
+
+            // Build the header.
+            JarEntry outEntry = null;
+            boolean isCrazy = isCrazyLibraryFilename(name);
+            if (isCrazy) {
+                // "crazy" libraries are alway output uncompressed (STORED).
+                outEntry = makeStoredEntry(name, inEntry, in);
+                numCrazy++;
+                if (numCrazy > 1) {
+                    throw new UnsupportedOperationException(
+                            "Found more than one library\n"
+                            + "Multiple libraries are not supported for APKs that use "
+                            + "'load_library_from_zip'.\n"
+                            + "See crbug/388223.\n"
+                            + "Note, check that your build is clean.\n"
+                            + "An unclean build can incorrectly incorporate old "
+                            + "libraries in the APK.");
+                }
+            } else if (inEntry.getMethod() == JarEntry.STORED) {
+                // Preserve the STORED method of the input entry.
+                outEntry = new JarEntry(inEntry);
+                outEntry.setExtra(null);
+            } else {
+                // Create a new entry so that the compressed len is recomputed.
+                outEntry = new JarEntry(name);
+            }
+            outEntry.setTime(timestamp);
+
+            // Compute and add alignment
+            long offset = countOut.getCount();
+            if (firstEntry) {
+                // The first entry in a jar file has an extra field of
+                // four bytes that you can't get rid of; any extra
+                // data you specify in the JarEntry is appended to
+                // these forced four bytes.  This is JAR_MAGIC in
+                // JarOutputStream; the bytes are 0xfeca0000.
+                firstEntry = false;
+                offset += 4;
+            }
+            if (outEntry.getMethod() == JarEntry.STORED) {
+                if (isCrazy) {
+                    if (addAlignment) {
+                        addAlignmentFile(offset, timestamp, name, prevName, out);
+                    }
+                    // We check that we did indeed get to a page boundary.
+                    offset = countOut.getCount() + JarFile.LOCHDR + name.length();
+                    if ((offset % LIBRARY_ALIGNMENT) != 0) {
+                        throw new AssertionError(
+                                "Library was not page aligned when verifying page alignment. "
+                                + "Library name: " + name + " Expected alignment: "
+                                + LIBRARY_ALIGNMENT + "Offset: " + offset + " Error: "
+                                + (offset % LIBRARY_ALIGNMENT));
+                    }
+                } else {
+                    // This is equivalent to zipalign.
+                    offset += JarFile.LOCHDR + name.length();
+                    int needed = (ALIGNMENT - (int) (offset % ALIGNMENT)) % ALIGNMENT;
+                    if (needed != 0) {
+                        outEntry.setExtra(new byte[needed]);
+                    }
+                }
+            }
+            out.putNextEntry(outEntry);
+
+            // Copy the data from the input to the output
+            int num;
+            InputStream data = in.getInputStream(inEntry);
+            while ((num = data.read(buffer)) > 0) {
+                out.write(buffer, 0, num);
+            }
+            data.close();
+            out.closeEntry();
+            out.flush();
+            prevName = name;
+        }
+        if (numCrazy == 0) {
+            throw new AssertionError("There was no crazy library in the archive");
+        }
+    }
+
+    private static void usage() {
+        System.err.println("Usage: prealignapk (addalignment|reorder) input.apk output.apk");
+        System.err.println("\"crazy\" libraries are always inflated in the output");
+        System.err.println(
+                "  renamealign  - rename libraries with \"crazy.\" prefix and add alignment file");
+        System.err.println("  align        - add alignment file");
+        System.err.println("  reorder      - re-creates canonical ordering and checks alignment");
+        System.exit(2);
+    }
+
+    public static void main(String[] args) throws IOException {
+        if (args.length != 3) usage();
+
+        boolean addAlignment = false;
+        boolean rename = false;
+        if (args[0].equals("renamealign")) {
+            // Normal case. Before signing we rename the library and add an alignment file.
+            addAlignment = true;
+            rename = true;
+        } else if (args[0].equals("align")) {
+            // LGPL compliance case. Before signing, we add an alignment file to a
+            // reconstructed APK which already contains the "crazy" library.
+            addAlignment = true;
+            rename = false;
+        } else if (args[0].equals("reorder")) {
+            // Normal case. After jarsigning we write the file in the canonical order and check.
+            addAlignment = false;
+        } else {
+            usage();
+        }
+
+        String inputFilename = args[1];
+        String outputFilename = args[2];
+
+        JarFile inputJar = null;
+        FileOutputStream outputFile = null;
+
+        try {
+            inputJar = new JarFile(new File(inputFilename), true);
+            outputFile = new FileOutputStream(outputFilename);
+
+            CountingOutputStream outCount = new CountingOutputStream(outputFile);
+            JarOutputStream outputJar = new JarOutputStream(outCount);
+
+            // Match the compression level used by SignApk.
+            outputJar.setLevel(9);
+
+            rezip(inputJar, outputJar, outCount, addAlignment, rename);
+            outputJar.close();
+        } finally {
+            if (inputJar != null) inputJar.close();
+            if (outputFile != null) outputFile.close();
+        }
+    }
+}
diff --git a/build/android/screenshot.py b/build/android/screenshot.py
new file mode 100755
index 0000000..097739f
--- /dev/null
+++ b/build/android/screenshot.py
@@ -0,0 +1,98 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Takes a screenshot or a screen video capture from an Android device."""
+
+import logging
+import optparse
+import os
+import sys
+
+from pylib import screenshot
+from pylib.device import device_errors
+from pylib.device import device_utils
+
+def _PrintMessage(heading, eol='\n'):
+  sys.stdout.write('%s%s' % (heading, eol))
+  sys.stdout.flush()
+
+
+def _CaptureScreenshot(device, host_file):
+  host_file = device.TakeScreenshot(host_file)
+  _PrintMessage('Screenshot written to %s' % os.path.abspath(host_file))
+
+
+def _CaptureVideo(device, host_file, options):
+  size = tuple(map(int, options.size.split('x'))) if options.size else None
+  recorder = screenshot.VideoRecorder(device,
+                                      megabits_per_second=options.bitrate,
+                                      size=size,
+                                      rotate=options.rotate)
+  try:
+    recorder.Start()
+    _PrintMessage('Recording. Press Enter to stop...', eol='')
+    raw_input()
+  finally:
+    recorder.Stop()
+  host_file = recorder.Pull(host_file)
+  _PrintMessage('Video written to %s' % os.path.abspath(host_file))
+
+
+def main():
+  # Parse options.
+  parser = optparse.OptionParser(description=__doc__,
+                                 usage='screenshot.py [options] [filename]')
+  parser.add_option('-d', '--device', metavar='ANDROID_DEVICE', help='Serial '
+                    'number of Android device to use.', default=None)
+  parser.add_option('-f', '--file', help='Save result to file instead of '
+                    'generating a timestamped file name.', metavar='FILE')
+  parser.add_option('-v', '--verbose', help='Verbose logging.',
+                    action='store_true')
+  video_options = optparse.OptionGroup(parser, 'Video capture')
+  video_options.add_option('--video', help='Enable video capturing. Requires '
+                           'Android KitKat or later', action='store_true')
+  video_options.add_option('-b', '--bitrate', help='Bitrate in megabits/s, '
+                           'from 0.1 to 100 mbps, %default mbps by default.',
+                           default=4, type='float')
+  video_options.add_option('-r', '--rotate', help='Rotate video by 90 degrees.',
+                           default=False, action='store_true')
+  video_options.add_option('-s', '--size', metavar='WIDTHxHEIGHT',
+                           help='Frame size to use instead of the device '
+                           'screen size.', default=None)
+  parser.add_option_group(video_options)
+
+  (options, args) = parser.parse_args()
+
+  if len(args) > 1:
+    parser.error('Too many positional arguments.')
+  host_file = args[0] if args else options.file
+
+  if options.verbose:
+    logging.getLogger().setLevel(logging.DEBUG)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if options.device:
+    device = next((d for d in devices if d == options.device), None)
+    if not device:
+      raise device_errors.DeviceUnreachableError(options.device)
+  else:
+    if len(devices) > 1:
+      parser.error('Multiple devices are attached. '
+                   'Please specify device serial number with --device.')
+    elif len(devices) == 1:
+      device = devices[0]
+    else:
+      raise device_errors.NoDevicesError()
+
+  if options.video:
+    _CaptureVideo(device, host_file, options)
+  else:
+    _CaptureScreenshot(device, host_file)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/setup.gyp b/build/android/setup.gyp
new file mode 100644
index 0000000..0e1c2c4
--- /dev/null
+++ b/build/android/setup.gyp
@@ -0,0 +1,111 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'conditions': [
+    ['component == "shared_library"', {
+      'targets': [
+        {
+          # These libraries from the Android NDK must be packaged with any APK
+          # that is built with them. build/java_apk.gypi expects any libraries
+          # that should be packaged with the APK to be in <(SHARED_LIB_DIR).
+          'target_name': 'copy_system_libraries',
+          'type': 'none',
+          'copies': [
+            {
+              'destination': '<(SHARED_LIB_DIR)/',
+              'files': [
+                '<(android_libcpp_libs_dir)/libc++_shared.so',
+              ],
+            },
+          ],
+        },
+      ],
+    }],
+  ],
+  'targets': [
+    {
+      'target_name': 'get_build_device_configurations',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'get configurations',
+          'inputs': [
+            'gyp/util/build_device.py',
+            'gyp/get_device_configuration.py',
+          ],
+          'outputs': [
+            '<(build_device_config_path)',
+            '<(build_device_config_path).fake',
+          ],
+          'action': [
+            'python', 'gyp/get_device_configuration.py',
+            '--output=<(build_device_config_path)',
+          ],
+        }
+      ],
+    },
+    {
+      # Target for creating common output build directories. Creating output
+      # dirs beforehand ensures that build scripts can assume that these
+      # directories already exist, and avoids race conditions caused by build
+      # scripts trying to create them.
+      # The build/java.gypi target depends on this target.
+      'target_name': 'build_output_dirs',
+      'type': 'none',
+      'actions': [
+        {
+          'action_name': 'create_java_output_dirs',
+          'variables' : {
+            'output_dirs' : [
+              '<(PRODUCT_DIR)/apks',
+              '<(PRODUCT_DIR)/lib.java',
+              '<(PRODUCT_DIR)/test.lib.java',
+            ]
+          },
+          'inputs' : [],
+          # By not specifying any outputs, we ensure that this command isn't
+          # re-run when the output directories are touched (i.e. apks are
+          # written to them).
+          'outputs': [''],
+          'action': [
+            'mkdir',
+            '-p',
+            '<@(output_dirs)',
+          ],
+        },
+      ],
+    }, # build_output_dirs
+    {
+      'target_name': 'sun_tools_java',
+      'type': 'none',
+      'variables': {
+        'found_jar_path': '<(PRODUCT_DIR)/sun_tools_java/tools.jar',
+        'jar_path': '<(found_jar_path)',
+      },
+      'includes': [
+        '../../build/host_prebuilt_jar.gypi',
+      ],
+      'actions': [
+        {
+          'action_name': 'find_sun_tools_jar',
+          'variables' : {
+          },
+          'inputs' : [
+            'gyp/find_sun_tools_jar.py',
+            'gyp/util/build_utils.py',
+          ],
+          'outputs': [
+            '<(found_jar_path)',
+          ],
+          'action': [
+            'python', 'gyp/find_sun_tools_jar.py',
+            '--output', '<(found_jar_path)',
+          ],
+        },
+      ],
+    }, # sun_tools_java
+  ]
+}
+
diff --git a/build/android/strip_native_libraries.gypi b/build/android/strip_native_libraries.gypi
new file mode 100644
index 0000000..bdffcfd
--- /dev/null
+++ b/build/android/strip_native_libraries.gypi
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that strips
+# native libraries.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'action_name': 'strip_native_libraries',
+#    'actions': [
+#      'variables': {
+#        'ordered_libraries_file': 'file generated by write_ordered_libraries'
+#        'input_paths': 'files to be added to the list of inputs'
+#        'stamp': 'file to touch when the action is complete'
+#        'stripped_libraries_dir': 'directory to store stripped libraries',
+#      },
+#      'includes': [ '../../build/android/strip_native_libraries.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'message': 'Stripping libraries for <(_target_name)',
+  'variables': {
+    'input_paths': [],
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '<(ordered_libraries_file)',
+    '>@(input_paths)',
+  ],
+  'outputs': [
+    '<(stamp)',
+  ],
+  'conditions': [
+    ['component == "shared_library"', {
+      # Add a fake output to force the build to always re-run this step. This
+      # is required because the real inputs are not known at gyp-time and
+      # changing base.so may not trigger changes to dependent libraries.
+      'outputs': [ '<(stamp).fake' ]
+    }],
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/strip_library_for_device.py',
+    '--android-strip=<(android_strip)',
+    '--android-strip-arg=--strip-unneeded',
+    '--stripped-libraries-dir=<(stripped_libraries_dir)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+    '--stamp=<(stamp)',
+  ],
+}
diff --git a/build/android/symbolize.py b/build/android/symbolize.py
new file mode 100755
index 0000000..56d3b19
--- /dev/null
+++ b/build/android/symbolize.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Symbolizes stack traces generated by Chromium for Android.
+
+Sample usage:
+  adb logcat chromium:V | symbolize.py
+"""
+
+import os
+import re
+import sys
+
+from pylib import constants
+
+# Uses symbol.py from third_party/android_platform, not python's.
+sys.path.insert(0,
+                os.path.join(constants.DIR_SOURCE_ROOT,
+                            'third_party/android_platform/development/scripts'))
+import symbol
+
+# Sample output from base/debug/stack_trace_android.cc
+#00 0x693cd34f /path/to/some/libfoo.so+0x0007434f
+TRACE_LINE = re.compile(r'(?P<frame>\#[0-9]+ 0x[0-9a-f]{8,8}) '
+                        r'(?P<lib>[^+]+)\+0x(?P<addr>[0-9a-f]{8,8})')
+
+class Symbolizer(object):
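+  """File-like wrapper that symbolizes stack trace lines written to it."""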
+  def __init__(self, output):
+    self._output = output
+
+  def write(self, data):
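+    """Writes |data|, replacing library+offset frames with symbol names."""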
+    while True:
+      match = re.search(TRACE_LINE, data)
+      if not match:
+        self._output.write(data)
+        break
+
+      frame = match.group('frame')
+      lib = match.group('lib')
+      addr = match.group('addr')
+
+      # TODO(scherkus): Doing a single lookup per line is pretty slow,
+      # especially with larger libraries. Consider caching strategies such as:
+      # 1) Have Python load the libraries and do symbol lookups instead of
+      #    calling out to addr2line each time.
+      # 2) Have Python keep multiple addr2line instances open as subprocesses,
+      #    piping addresses and reading back symbols as we find them.
+      # 3) Read ahead the entire stack trace until we find no more, then batch
+      #    the symbol lookups.
+      #
+      # TODO(scherkus): These results are memoized, which could result in
+      # incorrect lookups when running this script on long-lived instances
+      # (e.g., adb logcat) during incremental development. Consider clearing
+      # the cache when the modification timestamps of the libraries change.
+      sym = symbol.SymbolInformation(lib, addr, False)[0][0]
+
+      if not sym:
+        post = match.end('addr')
+        self._output.write(data[:post])
+        data = data[post:]
+        continue
+
+      pre = match.start('frame')
+      post = match.end('addr')
+
+      self._output.write(data[:pre])
+      self._output.write(frame)
+      self._output.write(' ')
+      self._output.write(sym)
+
+      data = data[post:]
+
+  def flush(self):
+    self._output.flush()
+
+
+def main():
+  symbolizer = Symbolizer(sys.stdout)
+  for line in sys.stdin:
+    symbolizer.write(line)
+  symbolizer.flush()
+
+
+if __name__ == '__main__':
+  main()
diff --git a/build/android/symbolize_test.py b/build/android/symbolize_test.py
new file mode 100755
index 0000000..826d852
--- /dev/null
+++ b/build/android/symbolize_test.py
@@ -0,0 +1,130 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Unittest for symbolize.py.
+
+This test uses test libraries generated by the Android g++ toolchain.
+
+Should things break you can recreate the libraries and get the updated
+addresses and demangled names by running the following:
+  cd test/symbolize/
+  make
+  nm -gC *.so
+"""
+
+import StringIO
+import unittest
+
+import symbolize
+
+LIB_A_PATH = '/build/android/tests/symbolize/liba.so'
+LIB_B_PATH = '/build/android/tests/symbolize/libb.so'
+
+def RunSymbolizer(text):
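+  """Runs |text| through a Symbolizer and returns the symbolized output."""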
+  output = StringIO.StringIO()
+  s = symbolize.Symbolizer(output)
+  s.write(text)
+  return output.getvalue()
+
+
+class SymbolizerUnittest(unittest.TestCase):
+  def testSingleLineNoMatch(self):
+    # Leading '#' is required.
+    expected = '00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Whitespace should be exactly one space.
+    expected = '#00  0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000  ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Decimal stack frame numbers are required.
+    expected = '#0a 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Hexadecimal addresses are required.
+    expected = '#00 0xghijklmn ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+0xghijklmn\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Addresses must be exactly 8 characters.
+    expected = '#00 0x0000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x000000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    expected = '#00 0x0000000 ' + LIB_A_PATH + '+0x0000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x000000000 ' + LIB_A_PATH + '+0x000000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Addresses must be prefixed with '0x'.
+    expected = '#00 00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Library name is required.
+    expected = '#00 0x00000000\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 +0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+    # Library name must be followed by offset with no spaces around '+'.
+    expected = '#00 0x00000000 ' + LIB_A_PATH + ' +0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+ 0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + ' 0x00000254\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+    expected = '#00 0x00000000 ' + LIB_A_PATH + '+\n'
+    self.assertEqual(expected, RunSymbolizer(expected))
+
+  def testSingleLine(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testSingleLineWithSurroundingText(self):
+    text = 'LEFT #00 0x00000000 ' + LIB_A_PATH + '+0x00000254 RIGHT\n'
+    expected = 'LEFT #00 0x00000000 A::Bar(char const*) RIGHT\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesSameLibrary(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    text += '#01 0x00000000 ' + LIB_A_PATH + '+0x00000234\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    expected += '#01 0x00000000 A::Foo(int)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesDifferentLibrary(self):
+    text = '#00 0x00000000 ' + LIB_A_PATH + '+0x00000254\n'
+    text += '#01 0x00000000 ' + LIB_B_PATH + '+0x00000234\n'
+    expected = '#00 0x00000000 A::Bar(char const*)\n'
+    expected += '#01 0x00000000 B::Baz(float)\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+  def testMultipleLinesWithSurroundingTextEverywhere(self):
+    text = 'TOP\n'
+    text += 'LEFT #00 0x00000000 ' + LIB_A_PATH + '+0x00000254 RIGHT\n'
+    text += 'LEFT #01 0x00000000 ' + LIB_B_PATH + '+0x00000234 RIGHT\n'
+    text += 'BOTTOM\n'
+    expected = 'TOP\n'
+    expected += 'LEFT #00 0x00000000 A::Bar(char const*) RIGHT\n'
+    expected += 'LEFT #01 0x00000000 B::Baz(float) RIGHT\n'
+    expected += 'BOTTOM\n'
+    actual = RunSymbolizer(text)
+    self.assertEqual(expected, actual)
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/android/test_runner.gypi b/build/android/test_runner.gypi
new file mode 100644
index 0000000..f92b7ce
--- /dev/null
+++ b/build/android/test_runner.gypi
@@ -0,0 +1,81 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Generates a script in the output bin directory which runs the test
+# target using the test runner script in build/android/pylib/test_runner.py.
+#
+# To use this, include this file in a gtest or instrumentation test target.
+# {
+#   'target_name': 'gtest',
+#   'type': 'none',
+#   'variables': {
+#     'test_type': 'gtest',  # string
+#     'test_suite_name': 'gtest_suite'  # string
+#     'isolate_file': 'path/to/gtest.isolate'  # string
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# {
+#   'target_name': 'instrumentation_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_type': 'instrumentation',  # string
+#     'apk_name': 'TestApk'  # string
+#     'isolate_file': 'path/to/instrumentation_test.isolate'  # string
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'variables': {
+    'variables': {
+      'isolate_file%': '',
+      'support_apk_path%': '',
+    },
+    'test_runner_args': ['--output-directory', '<(PRODUCT_DIR)'],
+    'conditions': [
+      ['test_type == "gtest"', {
+        'test_runner_args': ['--suite', '<(test_suite_name)'],
+        'script_name': 'run_<(test_suite_name)',
+      }],
+      ['test_type == "instrumentation"', {
+        'test_runner_args': ['--test-apk', '<(apk_name)'],
+        'script_name': 'run_<(_target_name)',
+        'conditions': [
+          ['support_apk_path != ""', {
+            'test_runner_args': [
+              '--support-apk',
+              '<(support_apk_path)'
+            ],
+          }],
+        ],
+      }],
+      ['isolate_file != ""', {
+        'test_runner_args': ['--isolate-file-path', '<(isolate_file)']
+      }],
+    ],
+  },
+  'actions': [
+    {
+      'action_name': 'create_test_runner_script_<(script_name)',
+      'message': 'Creating test runner script <(script_name)',
+      'variables': {
+        'script_output_path': '<(PRODUCT_DIR)/bin/<(script_name)',
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/create_test_runner_script.py',
+      ],
+      'outputs': [
+        '<(script_output_path)'
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/create_test_runner_script.py',
+        '--script-output-path=<(script_output_path)',
+        '<(test_type)', '<@(test_runner_args)',
+      ],
+    },
+  ],
+}
\ No newline at end of file
diff --git a/build/android/test_runner.py b/build/android/test_runner.py
new file mode 100755
index 0000000..1fc48ec
--- /dev/null
+++ b/build/android/test_runner.py
@@ -0,0 +1,1067 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs all types of tests from one unified interface."""
+
+import argparse
+import collections
+import logging
+import os
+import shutil
+import signal
+import sys
+import threading
+import unittest
+
+from pylib import constants
+from pylib import forwarder
+from pylib import ports
+from pylib.base import base_test_result
+from pylib.base import environment_factory
+from pylib.base import test_dispatcher
+from pylib.base import test_instance_factory
+from pylib.base import test_run_factory
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.gtest import gtest_config
+# TODO(jbudorick): Remove this once we stop selectively enabling platform mode.
+from pylib.gtest import gtest_test_instance
+from pylib.gtest import setup as gtest_setup
+from pylib.gtest import test_options as gtest_test_options
+from pylib.linker import setup as linker_setup
+from pylib.host_driven import setup as host_driven_setup
+from pylib.instrumentation import setup as instrumentation_setup
+from pylib.instrumentation import test_options as instrumentation_test_options
+from pylib.junit import setup as junit_setup
+from pylib.junit import test_dispatcher as junit_dispatcher
+from pylib.monkey import setup as monkey_setup
+from pylib.monkey import test_options as monkey_test_options
+from pylib.perf import setup as perf_setup
+from pylib.perf import test_options as perf_test_options
+from pylib.perf import test_runner as perf_test_runner
+from pylib.results import json_results
+from pylib.results import report_results
+from pylib.uiautomator import setup as uiautomator_setup
+from pylib.uiautomator import test_options as uiautomator_test_options
+from pylib.utils import apk_helper
+from pylib.utils import base_error
+from pylib.utils import reraiser_thread
+from pylib.utils import run_tests_helper
+
+
+def AddCommonOptions(parser):
+  """Adds all common options to |parser|."""
+
+  group = parser.add_argument_group('Common Options')
+
+  default_build_type = os.environ.get('BUILDTYPE', 'Debug')
+
+  debug_or_release_group = group.add_mutually_exclusive_group()
+  debug_or_release_group.add_argument(
+      '--debug', action='store_const', const='Debug', dest='build_type',
+      default=default_build_type,
+      help=('If set, run test suites under out/Debug. '
+            'Default is env var BUILDTYPE or Debug.'))
+  debug_or_release_group.add_argument(
+      '--release', action='store_const', const='Release', dest='build_type',
+      help=('If set, run test suites under out/Release. '
+            'Default is env var BUILDTYPE or Debug.'))
+
+  group.add_argument('--build-directory', dest='build_directory',
+                     help=('Path to the directory in which build files are'
+                           ' located (should not include build type)'))
+  group.add_argument('--output-directory', dest='output_directory',
+                     help=('Path to the directory in which build files are'
+                           ' located (must include build type). This will take'
+                           ' precedence over --debug, --release and'
+                           ' --build-directory'))
+  group.add_argument('--num_retries', dest='num_retries', type=int, default=2,
+                     help=('Number of retries for a test before '
+                           'giving up (default: %(default)s).'))
+  group.add_argument('-v',
+                     '--verbose',
+                     dest='verbose_count',
+                     default=0,
+                     action='count',
+                     help='Verbose level (multiple times for more)')
+  group.add_argument('--flakiness-dashboard-server',
+                     dest='flakiness_dashboard_server',
+                     help=('Address of the server that is hosting the '
+                           'Chrome for Android flakiness dashboard.'))
+  group.add_argument('--enable-platform-mode', action='store_true',
+                     help=('Run the test scripts in platform mode, which '
+                           'conceptually separates the test runner from the '
+                           '"device" (local or remote, real or emulated) on '
+                           'which the tests are running. [experimental]'))
+  group.add_argument('-e', '--environment', default='local',
+                     choices=constants.VALID_ENVIRONMENTS,
+                     help='Test environment to run in (default: %(default)s).')
+  group.add_argument('--adb-path',
+                     help=('Specify the absolute path of the adb binary that '
+                           'should be used.'))
+  group.add_argument('--json-results-file', dest='json_results_file',
+                     help='If set, will dump results in JSON form '
+                          'to specified file.')
+
+def ProcessCommonOptions(args):
+  """Processes and handles all common options."""
+  run_tests_helper.SetLogLevel(args.verbose_count)
+  constants.SetBuildType(args.build_type)
+  if args.build_directory:
+    constants.SetBuildDirectory(args.build_directory)
+  if args.output_directory:
+    constants.SetOutputDirectory(args.output_directory)
+  if args.adb_path:
+    constants.SetAdbPath(args.adb_path)
+  # Some things such as Forwarder require ADB to be in the environment path.
+  adb_dir = os.path.dirname(constants.GetAdbPath())
+  if adb_dir and adb_dir not in os.environ['PATH'].split(os.pathsep):
+    os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH']
+
+
+def AddRemoteDeviceOptions(parser):
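+  """Adds remote device options to |parser|."""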
+  group = parser.add_argument_group('Remote Device Options')
+
+  group.add_argument('--trigger',
+                     help=('Only triggers the test if set. Stores test_run_id '
+                           'in the given file path.'))
+  group.add_argument('--collect',
+                     help=('Only collects the test results if set. '
+                           'Gets test_run_id from the given file path.'))
+  group.add_argument('--remote-device', action='append',
+                     help='Device type to run test on.')
+  group.add_argument('--results-path',
+                     help='File path to download results to.')
+  group.add_argument('--api-protocol',
+                     help='HTTP protocol to use. (http or https)')
+  group.add_argument('--api-address',
+                     help='Address to send HTTP requests to.')
+  group.add_argument('--api-port',
+                     help='Port to send HTTP requests to.')
+  group.add_argument('--runner-type',
+                     help='Type of test to run as.')
+  group.add_argument('--runner-package',
+                     help='Package name of test.')
+  group.add_argument('--device-type',
+                     choices=constants.VALID_DEVICE_TYPES,
+                     help=('Type of device to run on (iOS or Android).'))
+  group.add_argument('--device-oem', action='append',
+                     help='Device OEM to run on.')
+  group.add_argument('--remote-device-file',
+                     help=('File with JSON to select remote device. '
+                           'Overrides all other flags.'))
+  group.add_argument('--remote-device-timeout', type=int,
+                     help='Number of times to retry finding a remote device.')
+  group.add_argument('--network-config', type=int,
+                     help='Integer that specifies the network environment '
+                          'that the tests will be run in.')
+
+  device_os_group = group.add_mutually_exclusive_group()
+  device_os_group.add_argument('--remote-device-minimum-os',
+                               help='Minimum OS on device.')
+  device_os_group.add_argument('--remote-device-os', action='append',
+                               help='OS to have on the device.')
+
+  api_secret_group = group.add_mutually_exclusive_group()
+  api_secret_group.add_argument('--api-secret', default='',
+                                help='API secret for remote devices.')
+  api_secret_group.add_argument('--api-secret-file', default='',
+                                help='Path to file that contains API secret.')
+
+  api_key_group = group.add_mutually_exclusive_group()
+  api_key_group.add_argument('--api-key', default='',
+                             help='API key for remote devices.')
+  api_key_group.add_argument('--api-key-file', default='',
+                             help='Path to file that contains API key.')
+
+
+def AddDeviceOptions(parser):
+  """Adds device options to |parser|."""
+  group = parser.add_argument_group(title='Device Options')
+  group.add_argument('--tool',
+                     dest='tool',
+                     help=('Run the test under a tool '
+                           '(use --tool help to list them)'))
+  group.add_argument('-d', '--device', dest='test_device',
+                     help=('Target device for the test suite '
+                           'to run on.'))
+
+
+def AddGTestOptions(parser):
+  """Adds gtest options to |parser|."""
+
+  gtest_suites = list(gtest_config.STABLE_TEST_SUITES
+                      + gtest_config.EXPERIMENTAL_TEST_SUITES)
+
+  group = parser.add_argument_group('GTest Options')
+  group.add_argument('-s', '--suite', dest='suite_name',
+                     nargs='+', metavar='SUITE_NAME', required=True,
+                     help=('Executable name of the test suite to run. '
+                           'Available suites include (but are not limited to): '
+                            '%s' % ', '.join('"%s"' % s for s in gtest_suites)))
+  group.add_argument('--gtest_also_run_disabled_tests',
+                     '--gtest-also-run-disabled-tests',
+                     dest='run_disabled', action='store_true',
+                     help='Also run disabled tests if applicable.')
+  group.add_argument('-a', '--test-arguments', dest='test_arguments',
+                     default='',
+                     help='Additional arguments to pass to the test.')
+  group.add_argument('-t', dest='timeout', type=int, default=60,
+                     help='Timeout to wait for each test '
+                          '(default: %(default)s).')
+  group.add_argument('--isolate_file_path',
+                     '--isolate-file-path',
+                     dest='isolate_file_path',
+                     help='.isolate file path to override the default '
+                          'path')
+  group.add_argument('--app-data-file', action='append', dest='app_data_files',
+                     help='A file path relative to the app data directory '
+                          'that should be saved to the host.')
+  group.add_argument('--app-data-file-dir',
+                     help='Host directory to which app data files will be'
+                          ' saved. Used with --app-data-file.')
+  group.add_argument('--delete-stale-data', dest='delete_stale_data',
+                     action='store_true',
+                     help='Delete stale test data on the device.')
+
+  filter_group = group.add_mutually_exclusive_group()
+  filter_group.add_argument('-f', '--gtest_filter', '--gtest-filter',
+                            dest='test_filter',
+                            help='googletest-style filter string.')
+  filter_group.add_argument('--gtest-filter-file', dest='test_filter_file',
+                            help='Path to file that contains googletest-style '
+                                  'filter strings. (Lines will be joined with '
+                                  '":" to create a single filter string.)')
+
+  AddDeviceOptions(parser)
+  AddCommonOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+
+def AddLinkerTestOptions(parser):
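+  """Adds linker test options to |parser|."""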
+  group = parser.add_argument_group('Linker Test Options')
+  group.add_argument('-f', '--gtest-filter', dest='test_filter',
+                     help='googletest-style filter string.')
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def AddJavaTestOptions(argument_group):
+  """Adds the Java test options to |option_parser|."""
+
+  argument_group.add_argument(
+      '-f', '--test-filter', dest='test_filter',
+      help=('Test filter (if not fully qualified, will run all matches).'))
+  argument_group.add_argument(
+      '-A', '--annotation', dest='annotation_str',
+      help=('Comma-separated list of annotations. Run only tests with any of '
+            'the given annotations. An annotation can be either a key or a '
+            'key-value pair. A test that has no annotation is considered '
+            '"SmallTest".'))
+  argument_group.add_argument(
+      '-E', '--exclude-annotation', dest='exclude_annotation_str',
+      help=('Comma-separated list of annotations. Exclude tests with these '
+            'annotations.'))
+  argument_group.add_argument(
+      '--screenshot', dest='screenshot_failures', action='store_true',
+      help='Capture screenshots of test failures')
+  argument_group.add_argument(
+      '--save-perf-json', action='store_true',
+      help='Saves the JSON file for each UI Perf test.')
+  argument_group.add_argument(
+      '--official-build', action='store_true', help='Run official build tests.')
+  argument_group.add_argument(
+      '--test_data', '--test-data', action='append', default=[],
+      help=('Each instance defines a directory of test data that should be '
+            'copied to the target(s) before running the tests. The argument '
+            'should be of the form <target>:<source>, where <target> is '
+            'relative to the device data directory, and <source> is relative '
+            'to the chromium build directory.'))
+  argument_group.add_argument(
+      '--disable-dalvik-asserts', dest='set_asserts', action='store_false',
+      default=True, help='Removes the dalvik.vm.enableassertions property')
+
+
+def ProcessJavaTestOptions(args):
+  """Processes options/arguments and populates |options| with defaults."""
+
+  # TODO(jbudorick): Handle most of this function in argparse.
+  if args.annotation_str:
+    args.annotations = args.annotation_str.split(',')
+  elif args.test_filter:
+    args.annotations = []
+  else:
+    args.annotations = ['Smoke', 'SmallTest', 'MediumTest', 'LargeTest',
+                        'EnormousTest', 'IntegrationTest']
+
+  if args.exclude_annotation_str:
+    args.exclude_annotations = args.exclude_annotation_str.split(',')
+  else:
+    args.exclude_annotations = []
+
+
+def AddInstrumentationTestOptions(parser):
+  """Adds Instrumentation test options to |parser|."""
+
+  parser.usage = '%(prog)s [options]'
+
+  group = parser.add_argument_group('Instrumentation Test Options')
+  AddJavaTestOptions(group)
+
+  java_or_python_group = group.add_mutually_exclusive_group()
+  java_or_python_group.add_argument(
+      '-j', '--java-only', action='store_false',
+      dest='run_python_tests', default=True, help='Run only the Java tests.')
+  java_or_python_group.add_argument(
+      '-p', '--python-only', action='store_false',
+      dest='run_java_tests', default=True,
+      help='Run only the host-driven tests.')
+
+  group.add_argument('--host-driven-root',
+                     help='Root of the host-driven tests.')
+  group.add_argument('-w', '--wait_debugger', dest='wait_for_debugger',
+                     action='store_true',
+                     help='Wait for debugger.')
+  group.add_argument('--apk-under-test', dest='apk_under_test',
+                     help=('The name of the apk under test.'))
+  group.add_argument('--test-apk', dest='test_apk', required=True,
+                     help=('The name of the apk containing the tests '
+                           '(without the .apk extension; '
+                           'e.g. "ContentShellTest").'))
+  group.add_argument('--support-apk', dest='test_support_apk_path',
+                     help=('The path to an optional support apk to be '
+                           'installed alongside the test apk. The '
+                           'path should be relative to the output '
+                           'directory (--output-directory).'))
+  group.add_argument('--coverage-dir',
+                     help=('Directory in which to place all generated '
+                           'EMMA coverage files.'))
+  group.add_argument('--device-flags', dest='device_flags', default='',
+                     help='The relative filepath to a file containing '
+                          'command-line flags to set on the device')
+  group.add_argument('--device-flags-file', default='',
+                     help='The relative filepath to a file containing '
+                          'command-line flags to set on the device')
+  group.add_argument('--isolate_file_path',
+                     '--isolate-file-path',
+                     dest='isolate_file_path',
+                     help='.isolate file path to override the default '
+                          'path')
+  group.add_argument('--delete-stale-data', dest='delete_stale_data',
+                     action='store_true',
+                     help='Delete stale test data on the device.')
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+
+def ProcessInstrumentationOptions(args):
+  """Processes options/arguments and populate |options| with defaults.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    An InstrumentationOptions named tuple which contains all options relevant to
+    instrumentation tests.
+  """
+
+  ProcessJavaTestOptions(args)
+
+  if not args.host_driven_root:
+    args.run_python_tests = False
+
+  args.test_apk_path = os.path.join(
+      constants.GetOutDirectory(),
+      constants.SDK_BUILD_APKS_DIR,
+      '%s.apk' % args.test_apk)
+  args.test_apk_jar_path = os.path.join(
+      constants.GetOutDirectory(),
+      constants.SDK_BUILD_TEST_JAVALIB_DIR,
+      '%s.jar' % args.test_apk)
+
+  args.test_runner = apk_helper.GetInstrumentationName(args.test_apk_path)
+
+  # TODO(jbudorick): Get rid of InstrumentationOptions.
+  return instrumentation_test_options.InstrumentationOptions(
+      args.tool,
+      args.annotations,
+      args.exclude_annotations,
+      args.test_filter,
+      args.test_data,
+      args.save_perf_json,
+      args.screenshot_failures,
+      args.wait_for_debugger,
+      args.coverage_dir,
+      args.test_apk,
+      args.test_apk_path,
+      args.test_apk_jar_path,
+      args.test_runner,
+      args.test_support_apk_path,
+      args.device_flags,
+      args.isolate_file_path,
+      args.set_asserts,
+      args.delete_stale_data
+      )
+
+
+def AddUIAutomatorTestOptions(parser):
+  """Adds UI Automator test options to |parser|."""
+
+  group = parser.add_argument_group('UIAutomator Test Options')
+  AddJavaTestOptions(group)
+  group.add_argument(
+      '--package', required=True, choices=constants.PACKAGE_INFO.keys(),
+      metavar='PACKAGE', help='Package under test.')
+  group.add_argument(
+      '--test-jar', dest='test_jar', required=True,
+      help=('The name of the dexed jar containing the tests (without the '
+            '.dex.jar extension). Alternatively, this can be a full path '
+            'to the jar.'))
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def ProcessUIAutomatorOptions(args):
+  """Processes UIAutomator options/arguments.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A UIAutomatorOptions named tuple which contains all options relevant to
+    uiautomator tests.
+  """
+
+  ProcessJavaTestOptions(args)
+
+  if os.path.exists(args.test_jar):
+    # The dexed JAR is fully qualified, assume the info JAR lives along side.
+    args.uiautomator_jar = args.test_jar
+  else:
+    args.uiautomator_jar = os.path.join(
+        constants.GetOutDirectory(),
+        constants.SDK_BUILD_JAVALIB_DIR,
+        '%s.dex.jar' % args.test_jar)
+  args.uiautomator_info_jar = (
+      args.uiautomator_jar[:args.uiautomator_jar.find('.dex.jar')] +
+      '_java.jar')
+
+  return uiautomator_test_options.UIAutomatorOptions(
+      args.tool,
+      args.annotations,
+      args.exclude_annotations,
+      args.test_filter,
+      args.test_data,
+      args.save_perf_json,
+      args.screenshot_failures,
+      args.uiautomator_jar,
+      args.uiautomator_info_jar,
+      args.package,
+      args.set_asserts)
+
+
+def AddJUnitTestOptions(parser):
+  """Adds junit test options to |parser|."""
+
+  group = parser.add_argument_group('JUnit Test Options')
+  group.add_argument(
+      '-s', '--test-suite', dest='test_suite', required=True,
+      help=('JUnit test suite to run.'))
+  group.add_argument(
+      '-f', '--test-filter', dest='test_filter',
+      help='Filters tests googletest-style.')
+  group.add_argument(
+      '--package-filter', dest='package_filter',
+      help='Filters tests by package.')
+  group.add_argument(
+      '--runner-filter', dest='runner_filter',
+      help='Filters tests by runner class. Must be fully qualified.')
+  group.add_argument(
+      '--sdk-version', dest='sdk_version', type=int,
+      help='The Android SDK version.')
+  AddCommonOptions(parser)
+
+
+def AddMonkeyTestOptions(parser):
+  """Adds monkey test options to |parser|."""
+
+  group = parser.add_argument_group('Monkey Test Options')
+  group.add_argument(
+      '--package', required=True, choices=constants.PACKAGE_INFO.keys(),
+      metavar='PACKAGE', help='Package under test.')
+  group.add_argument(
+      '--event-count', default=10000, type=int,
+      help='Number of events to generate (default: %(default)s).')
+  group.add_argument(
+      '--category', default='',
+      help='A list of allowed categories.')
+  group.add_argument(
+      '--throttle', default=100, type=int,
+      help='Delay between events (ms) (default: %(default)s). ')
+  group.add_argument(
+      '--seed', type=int,
+      help=('Seed value for pseudo-random generator. Same seed value generates '
+            'the same sequence of events. Seed is randomized by default.'))
+  group.add_argument(
+      '--extra-args', default='',
+      help=('String of other args to pass to the command verbatim.'))
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+def ProcessMonkeyTestOptions(args):
+  """Processes all monkey test options.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A MonkeyOptions named tuple which contains all options relevant to
+    monkey tests.
+  """
+  # TODO(jbudorick): Handle this directly in argparse with nargs='+'
+  category = args.category
+  if category:
+    category = args.category.split(',')
+
+  # TODO(jbudorick): Get rid of MonkeyOptions.
+  return monkey_test_options.MonkeyOptions(
+      args.verbose_count,
+      args.package,
+      args.event_count,
+      category,
+      args.throttle,
+      args.seed,
+      args.extra_args)
+
+def AddUirobotTestOptions(parser):
+  """Adds uirobot test options to |option_parser|."""
+  group = parser.add_argument_group('Uirobot Test Options')
+
+  group.add_argument('--app-under-test', required=True,
+                     help='APK to run tests on.')
+  group.add_argument(
+      '--minutes', default=5, type=int,
+      help='Number of minutes to run uirobot test [default: %(default)s].')
+
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+  AddRemoteDeviceOptions(parser)
+
+def AddPerfTestOptions(parser):
+  """Adds perf test options to |parser|."""
+
+  group = parser.add_argument_group('Perf Test Options')
+
+  class SingleStepAction(argparse.Action):
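+    """Ensures a command is given if and only if --single-step is used."""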
+    def __call__(self, parser, namespace, values, option_string=None):
+      if values and not namespace.single_step:
+        parser.error('single step command provided, '
+                     'but --single-step not specified.')
+      elif namespace.single_step and not values:
+        parser.error('--single-step specified, '
+                     'but no single step command provided.')
+      setattr(namespace, self.dest, values)
+
+  step_group = group.add_mutually_exclusive_group(required=True)
+  # TODO(jbudorick): Revise --single-step to use argparse.REMAINDER.
+  # This requires removing "--" from client calls.
+  step_group.add_argument(
+      '--single-step', action='store_true',
+      help='Execute the given command with retries, but only print the result '
+           'for the "most successful" round.')
+  step_group.add_argument(
+      '--steps',
+      help='JSON file containing the list of commands to run.')
+  step_group.add_argument(
+      '--print-step',
+      help='The name of a previously executed perf step to print.')
+
+  group.add_argument(
+      '--output-json-list',
+      help='Write a simple list of names from --steps into the given file.')
+  group.add_argument(
+      '--collect-chartjson-data',
+      action='store_true',
+      help='Cache the chartjson output from each step for later use.')
+  group.add_argument(
+      '--output-chartjson-data',
+      default='',
+      help='Write out chartjson into the given file.')
+  group.add_argument(
+      '--flaky-steps',
+      help=('A JSON file containing steps that are flaky '
+            'and will have their exit codes ignored.'))
+  group.add_argument(
+      '--no-timeout', action='store_true',
+      help=('Do not impose a timeout. Each perf step is responsible for '
+            'implementing the timeout logic.'))
+  group.add_argument(
+      '-f', '--test-filter',
+      help=('Test filter (will match against the names listed in --steps).'))
+  group.add_argument(
+      '--dry-run', action='store_true',
+      help='Just print the steps without executing.')
+  # Uses 0.1 degrees C because that's what Android does.
+  group.add_argument(
+      '--max-battery-temp', type=int,
+      help='Only start tests when the battery is at or below the given '
+           'temperature (0.1 C)')
+  group.add_argument('single_step_command', nargs='*', action=SingleStepAction,
+                     help='If --single-step is specified, the command to run.')
+  group.add_argument('--min-battery-level', type=int,
+                     help='Only start tests when the battery is charged above '
+                          'the given level.')
+  AddCommonOptions(parser)
+  AddDeviceOptions(parser)
+
+
+def ProcessPerfTestOptions(args):
+  """Processes all perf test options.
+
+  Args:
+    args: argparse.Namespace object.
+
+  Returns:
+    A PerfOptions named tuple which contains all options relevant to
+    perf tests.
+  """
+  # TODO(jbudorick): Move single_step handling down into the perf tests.
+  if args.single_step:
+    args.single_step = ' '.join(args.single_step_command)
+  # TODO(jbudorick): Get rid of PerfOptions.
+  return perf_test_options.PerfOptions(
+      args.steps, args.flaky_steps, args.output_json_list,
+      args.print_step, args.no_timeout, args.test_filter,
+      args.dry_run, args.single_step, args.collect_chartjson_data,
+      args.output_chartjson_data, args.max_battery_temp, args.min_battery_level)
+
+
+def AddPythonTestOptions(parser):
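+  """Adds Python test options to |parser|."""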
+  group = parser.add_argument_group('Python Test Options')
+  group.add_argument(
+      '-s', '--suite', dest='suite_name', metavar='SUITE_NAME',
+      choices=constants.PYTHON_UNIT_TEST_SUITES.keys(),
+      help='Name of the test suite to run.')
+  AddCommonOptions(parser)
+
+
+def _RunGTests(args, devices):
+  """Subcommand of RunTestsCommands which runs gtests."""
+  exit_code = 0
+  for suite_name in args.suite_name:
+    # TODO(jbudorick): Either deprecate multi-suite or move its handling down
+    # into the gtest code.
+    gtest_options = gtest_test_options.GTestOptions(
+        args.tool,
+        args.test_filter,
+        args.run_disabled,
+        args.test_arguments,
+        args.timeout,
+        args.isolate_file_path,
+        suite_name,
+        args.app_data_files,
+        args.app_data_file_dir,
+        args.delete_stale_data)
+    runner_factory, tests = gtest_setup.Setup(gtest_options, devices)
+
+    results, test_exit_code = test_dispatcher.RunTests(
+        tests, runner_factory, devices, shard=True, test_timeout=None,
+        num_retries=args.num_retries)
+
+    if test_exit_code and exit_code != constants.ERROR_EXIT_CODE:
+      exit_code = test_exit_code
+
+    report_results.LogFull(
+        results=results,
+        test_type='Unit test',
+        test_package=suite_name,
+        flakiness_server=args.flakiness_dashboard_server)
+
+    if args.json_results_file:
+      json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunLinkerTests(args, devices):
+  """Subcommand of RunTestsCommands which runs linker tests."""
+  runner_factory, tests = linker_setup.Setup(args, devices)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=True, test_timeout=60,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Linker test',
+      test_package='ChromiumLinkerTest')
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunInstrumentationTests(args, devices):
+  """Subcommand of RunTestsCommands which runs instrumentation tests."""
+  logging.info('_RunInstrumentationTests(%s, %s)' % (str(args), str(devices)))
+
+  instrumentation_options = ProcessInstrumentationOptions(args)
+
+  if len(devices) > 1 and args.wait_for_debugger:
+    logging.warning('Debugger cannot be sharded, using first available device')
+    devices = devices[:1]
+
+  results = base_test_result.TestRunResults()
+  exit_code = 0
+
+  if args.run_java_tests:
+    runner_factory, tests = instrumentation_setup.Setup(
+        instrumentation_options, devices)
+
+    test_results, exit_code = test_dispatcher.RunTests(
+        tests, runner_factory, devices, shard=True, test_timeout=None,
+        num_retries=args.num_retries)
+
+    results.AddTestRunResults(test_results)
+
+  if args.run_python_tests:
+    runner_factory, tests = host_driven_setup.InstrumentationSetup(
+        args.host_driven_root, args.official_build,
+        instrumentation_options)
+
+    if tests:
+      test_results, test_exit_code = test_dispatcher.RunTests(
+          tests, runner_factory, devices, shard=True, test_timeout=None,
+          num_retries=args.num_retries)
+
+      results.AddTestRunResults(test_results)
+
+      # Only allow exit code escalation
+      if test_exit_code and exit_code != constants.ERROR_EXIT_CODE:
+        exit_code = test_exit_code
+
+  if args.device_flags:
+    args.device_flags = os.path.join(constants.DIR_SOURCE_ROOT,
+                                     args.device_flags)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Instrumentation',
+      test_package=os.path.basename(args.test_apk),
+      annotation=args.annotations,
+      flakiness_server=args.flakiness_dashboard_server)
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunUIAutomatorTests(args, devices):
+  """Subcommand of RunTestsCommands which runs uiautomator tests."""
+  uiautomator_options = ProcessUIAutomatorOptions(args)
+
+  runner_factory, tests = uiautomator_setup.Setup(uiautomator_options)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=True, test_timeout=None,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='UIAutomator',
+      test_package=os.path.basename(args.test_jar),
+      annotation=args.annotations,
+      flakiness_server=args.flakiness_dashboard_server)
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunJUnitTests(args):
+  """Subcommand of RunTestsCommand which runs junit tests."""
+  runner_factory, tests = junit_setup.Setup(args)
+  results, exit_code = junit_dispatcher.RunTests(tests, runner_factory)
+
+  report_results.LogFull(
+      results=results,
+      test_type='JUnit',
+      test_package=args.test_suite)
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunMonkeyTests(args, devices):
+  """Subcommand of RunTestsCommands which runs monkey tests."""
+  monkey_options = ProcessMonkeyTestOptions(args)
+
+  runner_factory, tests = monkey_setup.Setup(monkey_options)
+
+  results, exit_code = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=False, test_timeout=None,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Monkey',
+      test_package='Monkey')
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  return exit_code
+
+
+def _RunPerfTests(args):
+  """Subcommand of RunTestsCommands which runs perf tests."""
+  perf_options = ProcessPerfTestOptions(args)
+
+  # Just save a simple json with a list of test names.
+  if perf_options.output_json_list:
+    return perf_test_runner.OutputJsonList(
+        perf_options.steps, perf_options.output_json_list)
+
+  # Just print the results from a single previously executed step.
+  if perf_options.print_step:
+    return perf_test_runner.PrintTestOutput(
+        perf_options.print_step, perf_options.output_chartjson_data)
+
+  runner_factory, tests, devices = perf_setup.Setup(perf_options)
+
+  # shard=False means that each device will get the full list of tests
+  # and then each one will decide its own affinity.
+  # shard=True means each device will pop the next available test from a queue,
+  # which increases throughput but has no affinity.
+  results, _ = test_dispatcher.RunTests(
+      tests, runner_factory, devices, shard=False, test_timeout=None,
+      num_retries=args.num_retries)
+
+  report_results.LogFull(
+      results=results,
+      test_type='Perf',
+      test_package='Perf')
+
+  if args.json_results_file:
+    json_results.GenerateJsonResultsFile(results, args.json_results_file)
+
+  if perf_options.single_step:
+    return perf_test_runner.PrintTestOutput('single_step')
+
+  perf_test_runner.PrintSummary(tests)
+
+  # Always return 0 on the sharding stage. Individual tests exit_code
+  # will be returned on the print_step stage.
+  return 0
+
+
+def _RunPythonTests(args):
+  """Subcommand of RunTestsCommand which runs python unit tests."""
+  suite_vars = constants.PYTHON_UNIT_TEST_SUITES[args.suite_name]
+  suite_path = suite_vars['path']
+  suite_test_modules = suite_vars['test_modules']
+
+  sys.path = [suite_path] + sys.path
+  try:
+    suite = unittest.TestSuite()
+    suite.addTests(unittest.defaultTestLoader.loadTestsFromName(m)
+                   for m in suite_test_modules)
+    runner = unittest.TextTestRunner(verbosity=1+args.verbose_count)
+    return 0 if runner.run(suite).wasSuccessful() else 1
+  finally:
+    sys.path = sys.path[1:]
+
+
+def _GetAttachedDevices(test_device=None):
+  """Get all attached devices.
+
+  Args:
+    test_device: Name of a specific device to use.
+
+  Returns:
+    A list of attached devices.
+  """
+  attached_devices = device_utils.DeviceUtils.HealthyDevices()
+  if test_device:
+    test_device = [d for d in attached_devices if d == test_device]
+    if not test_device:
+      raise device_errors.DeviceUnreachableError(
+          'Did not find device %s among attached devices. Attached devices: %s'
+          % (test_device, ', '.join(attached_devices)))
+    return test_device
+
+  else:
+    if not attached_devices:
+      raise device_errors.NoDevicesError()
+    return sorted(attached_devices)
+
+
+def RunTestsCommand(args, parser):
+  """Checks test type and dispatches to the appropriate function.
+
+  Args:
+    args: argparse.Namespace object.
+    parser: argparse.ArgumentParser object.
+
+  Returns:
+    Integer indicating the exit code.
+
+  Raises:
+    Exception: Unknown command name passed in, or an exception from an
+        individual test runner.
+  """
+  command = args.command
+
+  ProcessCommonOptions(args)
+
+  if args.enable_platform_mode:
+    return RunTestsInPlatformMode(args, parser)
+
+  if command in constants.LOCAL_MACHINE_TESTS:
+    devices = []
+  else:
+    devices = _GetAttachedDevices(args.test_device)
+
+  forwarder.Forwarder.RemoveHostLog()
+  if not ports.ResetTestServerPortAllocation():
+    raise Exception('Failed to reset test server port.')
+
+  if command == 'gtest':
+    if args.suite_name[0] in gtest_test_instance.BROWSER_TEST_SUITES:
+      return RunTestsInPlatformMode(args, parser)
+    return _RunGTests(args, devices)
+  elif command == 'linker':
+    return _RunLinkerTests(args, devices)
+  elif command == 'instrumentation':
+    return _RunInstrumentationTests(args, devices)
+  elif command == 'uiautomator':
+    return _RunUIAutomatorTests(args, devices)
+  elif command == 'junit':
+    return _RunJUnitTests(args)
+  elif command == 'monkey':
+    return _RunMonkeyTests(args, devices)
+  elif command == 'perf':
+    return _RunPerfTests(args)
+  elif command == 'python':
+    return _RunPythonTests(args)
+  else:
+    raise Exception('Unknown test type.')
+
+
+_SUPPORTED_IN_PLATFORM_MODE = [
+  # TODO(jbudorick): Add support for more test types.
+  'gtest',
+  'instrumentation',
+  'uirobot',
+]
+
+
+def RunTestsInPlatformMode(args, parser):
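+  """Runs tests in platform mode and returns the resulting exit code."""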
+
+  if args.command not in _SUPPORTED_IN_PLATFORM_MODE:
+    parser.error('%s is not yet supported in platform mode' % args.command)
+
+  with environment_factory.CreateEnvironment(args, parser.error) as env:
+    with test_instance_factory.CreateTestInstance(args, parser.error) as test:
+      with test_run_factory.CreateTestRun(
+          args, env, test, parser.error) as test_run:
+        results = test_run.RunTests()
+
+        if args.environment == 'remote_device' and args.trigger:
+          return 0 # Not returning results, only triggering.
+
+        report_results.LogFull(
+            results=results,
+            test_type=test.TestType(),
+            test_package=test_run.TestPackage(),
+            annotation=getattr(args, 'annotations', None),
+            flakiness_server=getattr(args, 'flakiness_dashboard_server', None))
+
+        if args.json_results_file:
+          json_results.GenerateJsonResultsFile(
+              results, args.json_results_file)
+
+  return 0 if results.DidRunPass() else constants.ERROR_EXIT_CODE
+
+
+CommandConfigTuple = collections.namedtuple(
+    'CommandConfigTuple',
+    ['add_options_func', 'help_txt'])
+VALID_COMMANDS = {
+    'gtest': CommandConfigTuple(
+        AddGTestOptions,
+        'googletest-based C++ tests'),
+    'instrumentation': CommandConfigTuple(
+        AddInstrumentationTestOptions,
+        'InstrumentationTestCase-based Java tests'),
+    'uiautomator': CommandConfigTuple(
+        AddUIAutomatorTestOptions,
+        "Tests that run via Android's uiautomator command"),
+    'junit': CommandConfigTuple(
+        AddJUnitTestOptions,
+        'JUnit4-based Java tests'),
+    'monkey': CommandConfigTuple(
+        AddMonkeyTestOptions,
+        "Tests based on Android's monkey"),
+    'perf': CommandConfigTuple(
+        AddPerfTestOptions,
+        'Performance tests'),
+    'python': CommandConfigTuple(
+        AddPythonTestOptions,
+        'Python tests based on unittest.TestCase'),
+    'linker': CommandConfigTuple(
+        AddLinkerTestOptions,
+        'Linker tests'),
+    'uirobot': CommandConfigTuple(
+        AddUirobotTestOptions,
+        'Uirobot test'),
+}
+
+
+def DumpThreadStacks(_signal, _frame):
+  for thread in threading.enumerate():
+    reraiser_thread.LogThreadStack(thread)
+
+
+def main():
+  signal.signal(signal.SIGUSR1, DumpThreadStacks)
+
+  parser = argparse.ArgumentParser()
+  command_parsers = parser.add_subparsers(title='test types',
+                                          dest='command')
+
+  for test_type, config in sorted(VALID_COMMANDS.iteritems(),
+                                  key=lambda x: x[0]):
+    subparser = command_parsers.add_parser(
+        test_type, usage='%(prog)s [options]', help=config.help_txt)
+    config.add_options_func(subparser)
+
+  args = parser.parse_args()
+
+  try:
+    return RunTestsCommand(args, parser)
+  except base_error.BaseError as e:
+    logging.exception('Error occurred.')
+    if e.is_infra_error:
+      return constants.INFRA_EXIT_CODE
+    return constants.ERROR_EXIT_CODE
+  except: # pylint: disable=W0702
+    logging.exception('Unrecognized error occurred.')
+    return constants.ERROR_EXIT_CODE
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/tests/symbolize/Makefile b/build/android/tests/symbolize/Makefile
new file mode 100644
index 0000000..5178a04
--- /dev/null
+++ b/build/android/tests/symbolize/Makefile
@@ -0,0 +1,11 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+TOOLCHAIN=../../../../third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/arm-linux-androideabi-
+CXX=$(TOOLCHAIN)g++
+
+lib%.so: %.cc
+	$(CXX) -nostdlib -g -fPIC -shared $< -o $@
+
+all: liba.so libb.so
diff --git a/build/android/tests/symbolize/a.cc b/build/android/tests/symbolize/a.cc
new file mode 100644
index 0000000..f0c7ca4
--- /dev/null
+++ b/build/android/tests/symbolize/a.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class A {
+ public:
+  A();
+  void Foo(int i);
+  void Bar(const char* c);
+};
+
+A::A() {}
+void A::Foo(int i) {}
+void A::Bar(const char* c) {}
diff --git a/build/android/tests/symbolize/b.cc b/build/android/tests/symbolize/b.cc
new file mode 100644
index 0000000..db87520
--- /dev/null
+++ b/build/android/tests/symbolize/b.cc
@@ -0,0 +1,14 @@
+// Copyright 2013 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+class B {
+ public:
+  B();
+  void Baz(float f);
+  void Qux(double d);
+};
+
+B::B() {}
+void B::Baz(float f) {}
+void B::Qux(double d) {}
diff --git a/build/android/tests/symbolize/liba.so b/build/android/tests/symbolize/liba.so
new file mode 100644
index 0000000..79cb739
--- /dev/null
+++ b/build/android/tests/symbolize/liba.so
Binary files differ
diff --git a/build/android/tests/symbolize/libb.so b/build/android/tests/symbolize/libb.so
new file mode 100644
index 0000000..7cf01d4
--- /dev/null
+++ b/build/android/tests/symbolize/libb.so
Binary files differ
diff --git a/build/android/tombstones.py b/build/android/tombstones.py
new file mode 100755
index 0000000..dbfe3f7
--- /dev/null
+++ b/build/android/tombstones.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Finds the most recent tombstone file(s) on all connected devices
+# and prints their stacks.
+#
+# Assumes tombstone file was created with current symbols.
+
+import datetime
+import itertools
+import logging
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import optparse
+
+from pylib.device import adb_wrapper
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import run_tests_helper
+
+
+_TZ_UTC = {'TZ': 'UTC'}
+
+def _ListTombstones(device):
+  """List the tombstone files on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Yields:
+    Tuples of (tombstone filename, date time of file on device).
+  """
+  try:
+    lines = device.RunShellCommand(
+        ['ls', '-a', '-l', '/data/tombstones'],
+        as_root=True, check_return=True, env=_TZ_UTC, timeout=60)
+    for line in lines:
+      if 'tombstone' in line and not 'No such file or directory' in line:
+        details = line.split()
+        t = datetime.datetime.strptime(details[-3] + ' ' + details[-2],
+                                       '%Y-%m-%d %H:%M')
+        yield details[-1], t
+  except device_errors.CommandFailedError:
+    logging.exception('Could not retrieve tombstones.')
+
+
+def _GetDeviceDateTime(device):
+  """Determine the date time on the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+
+  Returns:
+    A datetime instance.
+  """
+  device_now_string = device.RunShellCommand(
+      ['date'], check_return=True, env=_TZ_UTC)
+  return datetime.datetime.strptime(
+      device_now_string[0], '%a %b %d %H:%M:%S %Z %Y')
+
+
+def _GetTombstoneData(device, tombstone_file):
+  """Retrieve the tombstone data from the device
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to retrieve
+
+  Returns:
+    A list of lines
+  """
+  return device.ReadFile(
+      '/data/tombstones/' + tombstone_file, as_root=True).splitlines()
+
+
+def _EraseTombstone(device, tombstone_file):
+  """Deletes a tombstone from the device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    tombstone_file: the tombstone to delete.
+  """
+  return device.RunShellCommand(
+      ['rm', '/data/tombstones/' + tombstone_file],
+      as_root=True, check_return=True)
+
+
+def _DeviceAbiToArch(device_abi):
+  # The order of this list is significant: it ensures the more specific match
+  # (e.g., arm64) is found before the less specific one (e.g., arm).
+  arches = ['arm64', 'arm', 'x86_64', 'x86', 'mips']
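+  # Illustrative mapping for common Android ABI strings (not exhaustive):
+  #   'arm64-v8a'   -> 'arm64'
+  #   'armeabi-v7a' -> 'arm'
+  #   'x86'         -> 'x86'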
+  for arch in arches:
+    if arch in device_abi:
+      return arch
+  raise RuntimeError('Unknown device ABI: %s' % device_abi)
+
+def _ResolveSymbols(tombstone_data, include_stack, device_abi):
+  """Run the stack tool for given tombstone input.
+
+  Args:
+    tombstone_data: a list of strings of tombstone data.
+    include_stack: boolean whether to include stack data in output.
+    device_abi: the default ABI of the device which generated the tombstone.
+
+  Yields:
+    A string for each line of resolved stack output.
+  """
+  # Check if the tombstone data has an ABI listed; if so, use it in preference
+  # to the device's default ABI.
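+  # For example, a 64-bit device may report a 64-bit default ABI while a
+  # 32-bit process crash writes a header line such as "ABI: 'arm'" into its
+  # tombstone; the loop below would then pick 'arm' (illustrative value).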
+  for line in tombstone_data:
+    found_abi = re.search('ABI: \'(.+?)\'', line)
+    if found_abi:
+      device_abi = found_abi.group(1)
+  arch = _DeviceAbiToArch(device_abi)
+  if not arch:
+    return
+
+  stack_tool = os.path.join(os.path.dirname(__file__), '..', '..',
+                            'third_party', 'android_platform', 'development',
+                            'scripts', 'stack')
+  proc = subprocess.Popen([stack_tool, '--arch', arch], stdin=subprocess.PIPE,
+                          stdout=subprocess.PIPE)
+  output = proc.communicate(input='\n'.join(tombstone_data))[0]
+  for line in output.split('\n'):
+    if not include_stack and 'Stack Data:' in line:
+      break
+    yield line
+
+
+def _ResolveTombstone(tombstone):
+  lines = []
+  lines += [tombstone['file'] + ' created on ' + str(tombstone['time']) +
+            ', about this long ago: ' +
+            (str(tombstone['device_now'] - tombstone['time']) +
+            ' Device: ' + tombstone['serial'])]
+  logging.info('\n'.join(lines))
+  logging.info('Resolving...')
+  lines += _ResolveSymbols(tombstone['data'], tombstone['stack'],
+                           tombstone['device_abi'])
+  return lines
+
+
+def _ResolveTombstones(jobs, tombstones):
+  """Resolve a list of tombstones.
+
+  Args:
+    jobs: the number of jobs to use with multiprocess.
+    tombstones: a list of tombstones.
+  """
+  if not tombstones:
+    logging.warning('No tombstones to resolve.')
+    return
+  if len(tombstones) == 1:
+    data = [_ResolveTombstone(tombstones[0])]
+  else:
+    pool = multiprocessing.Pool(processes=jobs)
+    data = pool.map(_ResolveTombstone, tombstones)
+  for tombstone in data:
+    for line in tombstone:
+      logging.info(line)
+
+
+def _GetTombstonesForDevice(device, options):
+  """Returns a list of tombstones on a given device.
+
+  Args:
+    device: An instance of DeviceUtils.
+    options: command line arguments from optparse.
+  """
+  ret = []
+  all_tombstones = list(_ListTombstones(device))
+  if not all_tombstones:
+    logging.warning('No tombstones.')
+    return ret
+
+  # Sort the tombstones in date order, descending
+  all_tombstones.sort(cmp=lambda a, b: cmp(b[1], a[1]))
+
+  # Only resolve the most recent unless --all-tombstones given.
+  tombstones = all_tombstones if options.all_tombstones else [all_tombstones[0]]
+
+  device_now = _GetDeviceDateTime(device)
+  try:
+    for tombstone_file, tombstone_time in tombstones:
+      ret += [{'serial': str(device),
+               'device_abi': device.product_cpu_abi,
+               'device_now': device_now,
+               'time': tombstone_time,
+               'file': tombstone_file,
+               'stack': options.stack,
+               'data': _GetTombstoneData(device, tombstone_file)}]
+  except device_errors.CommandFailedError:
+    for line in device.RunShellCommand(
+        ['ls', '-a', '-l', '/data/tombstones'],
+        as_root=True, check_return=True, env=_TZ_UTC, timeout=60):
+      logging.info('%s: %s', str(device), line)
+    raise
+
+  # Erase all the tombstones if desired.
+  if options.wipe_tombstones:
+    for tombstone_file, _ in all_tombstones:
+      _EraseTombstone(device, tombstone_file)
+
+  return ret
+
+
+def main():
+  custom_handler = logging.StreamHandler(sys.stdout)
+  custom_handler.setFormatter(run_tests_helper.CustomFormatter())
+  logging.getLogger().addHandler(custom_handler)
+  logging.getLogger().setLevel(logging.INFO)
+
+  parser = optparse.OptionParser()
+  parser.add_option('--device',
+                    help='The serial number of the device. If not specified, '
+                         'all devices will be used.')
+  parser.add_option('-a', '--all-tombstones', action='store_true',
+                    help="""Resolve symbols for all tombstones, rather than just
+                         the most recent""")
+  parser.add_option('-s', '--stack', action='store_true',
+                    help='Also include symbols for stack data')
+  parser.add_option('-w', '--wipe-tombstones', action='store_true',
+                    help='Erase all tombstones from device after processing')
+  parser.add_option('-j', '--jobs', type='int',
+                    default=4,
+                    help='Number of jobs to use when processing multiple '
+                         'crash stacks.')
+  options, _ = parser.parse_args()
+
+  if options.device:
+    devices = [device_utils.DeviceUtils(options.device)]
+  else:
+    devices = device_utils.DeviceUtils.HealthyDevices()
+
+  # This must be done serially because strptime can hit a race condition if
+  # used for the first time in a multithreaded environment.
+  # http://bugs.python.org/issue7980
+  tombstones = []
+  for device in devices:
+    tombstones += _GetTombstonesForDevice(device, options)
+
+  _ResolveTombstones(options.jobs, tombstones)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/update_verification.py b/build/android/update_verification.py
new file mode 100755
index 0000000..05d083b
--- /dev/null
+++ b/build/android/update_verification.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python
+#
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Runs semi-automated update testing on a non-rooted device.
+
+This script will help verify that app data is preserved during an update.
+To use this script first run it with the create_app_data option.
+
+./update_verification.py create_app_data --old-apk <path> --app-data <path>
+
+The script will then install the old apk, prompt you to create some app data
+(bookmarks, etc.), and then save the app data in the path you gave it.
+
+Next, once you have some app data saved, run this script with the test_update
+option.
+
+./update_verification.py test_update --old-apk <path> --new-apk <path>
+--app-data <path>
+
+This will install the old apk, load the saved app data, install the new apk,
+and ask the user to verify that all of the app data was preserved.
+"""
+
+import argparse
+import logging
+import os
+import sys
+import time
+
+from pylib import constants
+from pylib.device import device_errors
+from pylib.device import device_utils
+from pylib.utils import apk_helper
+from pylib.utils import run_tests_helper
+
+def CreateAppData(device, old_apk, app_data, package_name):
+  device.Install(old_apk)
+  raw_input('Set the application state. Once ready, press enter and '
+            'select "Backup my data" on the device.')
+  device.adb.Backup(app_data, packages=[package_name])
+  logging.critical('Application data saved to %s' % app_data)
+
+def TestUpdate(device, old_apk, new_apk, app_data, package_name):
+  device.Install(old_apk)
+  device.adb.Restore(app_data)
+  # The restore command is not synchronous.
+  raw_input('Select "Restore my data" on the device. Then press enter to '
+            'continue.')
+  device_path = device.GetApplicationPaths(package_name)
+  if not device_path:
+    raise Exception('Expected package %s to already be installed. '
+                    'Package name might have changed!' % package_name)
+
+  logging.info('Verifying that %s can be overinstalled.', new_apk)
+  device.adb.Install(new_apk, reinstall=True)
+  logging.critical('Successfully updated to the new apk. Please verify that '
+                   'the application data is preserved.')
+
+def main():
+  parser = argparse.ArgumentParser(
+      description="Script to do semi-automated upgrade testing.")
+  parser.add_argument('-v', '--verbose', action='count',
+                      help='Print verbose log information.')
+  command_parsers = parser.add_subparsers(dest='command')
+
+  subparser = command_parsers.add_parser('create_app_data')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup should be '
+                              'saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  subparser = command_parsers.add_parser('test_update')
+  subparser.add_argument('--old-apk', required=True,
+                         help='Path to apk to update from.')
+  subparser.add_argument('--new-apk', required=True,
+                         help='Path to apk to update to.')
+  subparser.add_argument('--app-data', required=True,
+                         help='Path to where the app data backup is saved.')
+  subparser.add_argument('--package-name',
+                         help='Chrome apk package name.')
+
+  args = parser.parse_args()
+  run_tests_helper.SetLogLevel(args.verbose)
+
+  devices = device_utils.DeviceUtils.HealthyDevices()
+  if not devices:
+    raise device_errors.NoDevicesError()
+  device = devices[0]
+  logging.info('Using device %s for testing.' % str(device))
+
+  package_name = (args.package_name if args.package_name
+                  else apk_helper.GetPackageName(args.old_apk))
+  if args.command == 'create_app_data':
+    CreateAppData(device, args.old_apk, args.app_data, package_name)
+  elif args.command == 'test_update':
+    TestUpdate(
+        device, args.old_apk, args.new_apk, args.app_data, package_name)
+  else:
+    raise Exception('Unknown test command: %s' % args.command)
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/android/write_ordered_libraries.gypi b/build/android/write_ordered_libraries.gypi
new file mode 100644
index 0000000..1b52e71
--- /dev/null
+++ b/build/android/write_ordered_libraries.gypi
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# generates a json file with the list of dependent libraries needed for a given
+# shared library or executable.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'actions': [
+#      'variables': {
+#        'input_libraries': 'shared library or executable to process',
+#        'ordered_libraries_file': 'file to generate'
+#      },
+#      'includes': [ '../../build/android/write_ordered_libraries.gypi' ],
+#    ],
+#  },
+#
+
+{
+  'action_name': 'ordered_libraries_<(_target_name)<(subtarget)',
+  'message': 'Writing dependency ordered libraries for <(_target_name)',
+  'variables': {
+    'input_libraries%': [],
+    'subtarget%': '',
+  },
+  'inputs': [
+    '<(DEPTH)/build/android/gyp/util/build_utils.py',
+    '<(DEPTH)/build/android/gyp/write_ordered_libraries.py',
+    '<@(input_libraries)',
+  ],
+  'outputs': [
+    '<(ordered_libraries_file)',
+  ],
+  'action': [
+    'python', '<(DEPTH)/build/android/gyp/write_ordered_libraries.py',
+    '--input-libraries=<(input_libraries)',
+    '--libraries-dir=<(SHARED_LIB_DIR),<(PRODUCT_DIR)',
+    '--readelf=<(android_readelf)',
+    '--output=<(ordered_libraries_file)',
+  ],
+}
diff --git a/build/android_sdk_extras.json b/build/android_sdk_extras.json
new file mode 100644
index 0000000..25b47c3
--- /dev/null
+++ b/build/android_sdk_extras.json
@@ -0,0 +1,9 @@
+[
+  {
+    "dir_name": "google",
+    "version": "21.0.0",
+    "zip": "google_google_play_services_21.0.0.zip",
+    "package": "google_play_services",
+    "package_id": "extra-google-google_play_services"
+  }
+]
diff --git a/build/apk_browsertest.gypi b/build/apk_browsertest.gypi
new file mode 100644
index 0000000..316f52f
--- /dev/null
+++ b/build/apk_browsertest.gypi
@@ -0,0 +1,43 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK-based browser test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_suite_name': 'test_suite_name',  # string
+#     'java_in_dir': 'path/to/java/dir',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:base_java',
+    '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+    '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk',
+    '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java',
+    '<(DEPTH)/testing/android/native_test.gyp:native_test_java',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'conditions': [
+     ['OS == "android"', {
+       'variables': {
+         # These are used to configure java_apk.gypi included below.
+         'apk_name': '<(test_suite_name)',
+         'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+         'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk',
+         'native_lib_target': 'lib<(test_suite_name)',
+         # TODO(yfriedman, cjhopman): Support managed installs for gtests.
+         'gyp_managed_install': 0,
+       },
+       'includes': [ 'java_apk.gypi' ],
+     }],  # OS == "android"
+  ],  # conditions
+}
diff --git a/build/apk_fake_jar.gypi b/build/apk_fake_jar.gypi
new file mode 100644
index 0000000..128b84c
--- /dev/null
+++ b/build/apk_fake_jar.gypi
@@ -0,0 +1,15 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+
+{
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['>(apk_output_jar_path)'],
+      'library_dexed_jars_paths': ['>(apk_output_jar_path)'],
+    },
+  },
+}
diff --git a/build/apk_test.gypi b/build/apk_test.gypi
new file mode 100644
index 0000000..e0d323f
--- /dev/null
+++ b/build/apk_test.gypi
@@ -0,0 +1,45 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build APK based test suites.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name_apk',
+#   'type': 'none',
+#   'variables': {
+#     'test_suite_name': 'test_suite_name',  # string
+#     'input_jars_paths': ['/path/to/test_suite.jar', ... ],  # list
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/base/base.gyp:base_java',
+    '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+    '<(DEPTH)/build/android/pylib/remote/device/dummy/dummy.gyp:remote_device_dummy_apk',
+    '<(DEPTH)/testing/android/appurify_support.gyp:appurify_support_java',
+    '<(DEPTH)/testing/android/on_device_instrumentation.gyp:reporter_java',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'conditions': [
+     ['OS == "android"', {
+       'variables': {
+         # These are used to configure java_apk.gypi included below.
+         'test_type': 'gtest',
+         'apk_name': '<(test_suite_name)',
+         'intermediate_dir': '<(PRODUCT_DIR)/<(test_suite_name)_apk',
+         'final_apk_path': '<(intermediate_dir)/<(test_suite_name)-debug.apk',
+         'java_in_dir': '<(DEPTH)/testing/android/native_test/java',
+         'native_lib_target': 'lib<(test_suite_name)',
+         # TODO(yfriedman, cjhopman): Support managed installs for gtests.
+         'gyp_managed_install': 0,
+       },
+       'includes': [ 'java_apk.gypi', 'android/test_runner.gypi' ],
+     }],  # OS == "android"
+  ],  # conditions
+}
diff --git a/build/apply_locales.py b/build/apply_locales.py
new file mode 100755
index 0000000..6af7280
--- /dev/null
+++ b/build/apply_locales.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO: remove this script when GYP has for loops
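+#
+# Illustrative usage (file names are made up): given the format string
+# 'locales/ZZLOCALE.pak',
+#   apply_locales.py -d locales/ZZLOCALE.pak en-US fr
+# prints: 'locales/en.pak' 'locales/fr.pak'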
+
+import sys
+import optparse
+
+def main(argv):
+
+  parser = optparse.OptionParser()
+  usage = 'usage: %s [options ...] format_string locale_list'
+  parser.set_usage(usage.replace('%s', '%prog'))
+  parser.add_option('-d', dest='dash_to_underscore', action="store_true",
+                    default=False,
+                    help='map "en-US" to "en" and "-" to "_" in locales')
+
+  (options, arglist) = parser.parse_args(argv)
+
+  if len(arglist) < 3:
+    print 'ERROR: need string and list of locales'
+    return 1
+
+  str_template = arglist[1]
+  locales = arglist[2:]
+
+  results = []
+  for locale in locales:
+    # For Cocoa to find the locale at runtime, it needs to use '_' instead
+    # of '-' (http://crbug.com/20441).  Also, 'en-US' should be represented
+    # simply as 'en' (http://crbug.com/19165, http://crbug.com/25578).
+    if options.dash_to_underscore:
+      if locale == 'en-US':
+        locale = 'en'
+      locale = locale.replace('-', '_')
+    results.append(str_template.replace('ZZLOCALE', locale))
+
+  # Quote each element so filename spaces don't mess up GYP's attempt to parse
+  # it into a list.
+  print ' '.join(["'%s'" % x for x in results])
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/branding_value.sh b/build/branding_value.sh
new file mode 100755
index 0000000..9fcb550
--- /dev/null
+++ b/build/branding_value.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a wrapper for fetching values from the BRANDING files.  Pass the
+# value of GYP's branding variable followed by the key you want and the right
+# file is checked.
+#
+#  branding_value.sh Chromium COPYRIGHT
+#  branding_value.sh Chromium PRODUCT_FULLNAME
+#
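+# A BRANDING file is a simple list of KEY=value lines, e.g. (values shown
+# here are illustrative):
+#
+#   PRODUCT_FULLNAME=Chromium
+#   COPYRIGHT=Copyright The Chromium Authors. All rights reserved.
+#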
+
+set -e
+
+if [ $# -ne 2 ] ;  then
+  echo "error: expect two arguments, branding and key" >&2
+  exit 1
+fi
+
+BUILD_BRANDING=$1
+THE_KEY=$2
+
+pushd $(dirname "${0}") > /dev/null
+BUILD_DIR=$(pwd)
+popd > /dev/null
+
+TOP="${BUILD_DIR}/.."
+
+case ${BUILD_BRANDING} in
+  Chromium)
+    BRANDING_FILE="${TOP}/chrome/app/theme/chromium/BRANDING"
+    ;;
+  Chrome)
+    BRANDING_FILE="${TOP}/chrome/app/theme/google_chrome/BRANDING"
+    ;;
+  *)
+    echo "error: unknown branding: ${BUILD_BRANDING}" >&2
+    exit 1
+    ;;
+esac
+
+BRANDING_VALUE=$(sed -n -e "s/^${THE_KEY}=\(.*\)\$/\1/p" "${BRANDING_FILE}")
+
+if [ -z "${BRANDING_VALUE}" ] ; then
+  echo "error: failed to find key '${THE_KEY}'" >&2
+  exit 1
+fi
+
+echo "${BRANDING_VALUE}"
diff --git a/build/build-ctags.sh b/build/build-ctags.sh
new file mode 100755
index 0000000..61e017e
--- /dev/null
+++ b/build/build-ctags.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if [[ a"`ctags --version | head -1 | grep \"^Exuberant Ctags\"`" == "a" ]]; then
+  cat <<EOF
+  You must be using Exuberant Ctags, not just standard GNU ctags. If you are on
+  Debian or a related flavor of Linux, you may want to try running
+  apt-get install exuberant-ctags.
+EOF
+  exit
+fi
+
+CHROME_SRC_DIR="$PWD"
+
+fail() {
+  echo "Failed to create ctags for $1"
+  exit 1
+}
+
+ctags_cmd() {
+  echo "ctags --languages=C++ $1 --exclude=.git -R -f .tmp_tags"
+}
+
+build_dir() {
+  local extraexcludes=""
+  if [[ a"$1" == "a--extra-excludes" ]]; then
+    extraexcludes="--exclude=third_party --exclude=build --exclude=out"
+    shift
+  fi
+
+  cd "$CHROME_SRC_DIR/$1" || fail $1
+  # Redirect error messages so they aren't seen because they are almost always
+  # errors about components that you just happen to have not built (NaCl, for
+  # example).
+  $(ctags_cmd "$extraexcludes") 2> /dev/null || fail $1
+  mv -f .tmp_tags tags
+}
+
+# We always build the top level but leave all submodules as optional.
+build_dir --extra-excludes "" "top level"
+
+# Build any other directories that are listed on the command line.
+for dir in "$@"; do
+  build_dir "$dir"
+done
diff --git a/build/build_config.h b/build/build_config.h
new file mode 100644
index 0000000..d8c3db6
--- /dev/null
+++ b/build/build_config.h
@@ -0,0 +1,168 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file adds defines about the platform we're currently building on.
+//  Operating System:
+//    OS_WIN / OS_MACOSX / OS_LINUX / OS_POSIX (MACOSX or LINUX) /
+//    OS_NACL (NACL_SFI or NACL_NONSFI) / OS_NACL_SFI / OS_NACL_NONSFI
+//  Compiler:
+//    COMPILER_MSVC / COMPILER_GCC
+//  Processor:
+//    ARCH_CPU_X86 / ARCH_CPU_X86_64 / ARCH_CPU_X86_FAMILY (X86 or X86_64)
+//    ARCH_CPU_32_BITS / ARCH_CPU_64_BITS
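+//
+// A minimal usage sketch (the function and strings below are illustrative,
+// not part of this header):
+//
+//   #include "build/build_config.h"
+//
+//   const char* PlatformName() {
+//   #if defined(OS_WIN)
+//     return "windows";
+//   #elif defined(OS_POSIX)
+//     return "posix";
+//   #else
+//     return "unknown";
+//   #endif
+//   }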
+
+#ifndef BUILD_BUILD_CONFIG_H_
+#define BUILD_BUILD_CONFIG_H_
+
+// A set of macros to use for platform detection.
+#if defined(__native_client__)
+// __native_client__ must be first, so that other OS_ defines are not set.
+#define OS_NACL 1
+// OS_NACL comes in two sandboxing technology flavors, SFI or Non-SFI.
+// PNaCl toolchain defines __native_client_nonsfi__ macro in Non-SFI build
+// mode, while it does not in SFI build mode.
+#if defined(__native_client_nonsfi__)
+#define OS_NACL_NONSFI
+#else
+#define OS_NACL_SFI
+#endif
+#elif defined(ANDROID)
+#define OS_ANDROID 1
+#elif defined(__APPLE__)
+// Only include TargetConditionals after testing ANDROID because some Android
+// builds on Mac don't have this header available and it's not needed unless
+// the target is really Mac/iOS.
+#include <TargetConditionals.h>
+#define OS_MACOSX 1
+#if defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#define OS_IOS 1
+#endif  // defined(TARGET_OS_IPHONE) && TARGET_OS_IPHONE
+#elif defined(__linux__)
+#define OS_LINUX 1
+// include a system header to pull in features.h for glibc/uclibc macros.
+#include <unistd.h>
+#if defined(__GLIBC__) && !defined(__UCLIBC__)
+// we really are using glibc, not uClibc pretending to be glibc
+#define LIBC_GLIBC 1
+#endif
+#elif defined(_WIN32)
+#define OS_WIN 1
+#define TOOLKIT_VIEWS 1
+#elif defined(__FreeBSD__)
+#define OS_FREEBSD 1
+#elif defined(__OpenBSD__)
+#define OS_OPENBSD 1
+#elif defined(__sun)
+#define OS_SOLARIS 1
+#elif defined(__QNXNTO__)
+#define OS_QNX 1
+#else
+#error Please add support for your platform in build/build_config.h
+#endif
+
+#if defined(USE_OPENSSL_CERTS) && defined(USE_NSS_CERTS)
+#error Cannot use both OpenSSL and NSS for certificates
+#endif
+
+// For access to standard BSD features, use OS_BSD instead of a
+// more specific macro.
+#if defined(OS_FREEBSD) || defined(OS_OPENBSD)
+#define OS_BSD 1
+#endif
+
+// For access to standard POSIXish features, use OS_POSIX instead of a
+// more specific macro.
+#if defined(OS_MACOSX) || defined(OS_LINUX) || defined(OS_FREEBSD) ||     \
+    defined(OS_OPENBSD) || defined(OS_SOLARIS) || defined(OS_ANDROID) ||  \
+    defined(OS_NACL) || defined(OS_QNX)
+#define OS_POSIX 1
+#endif
+
+// Use tcmalloc
+#if (defined(OS_WIN) || defined(OS_LINUX) || defined(OS_ANDROID)) && \
+    !defined(NO_TCMALLOC)
+#define USE_TCMALLOC 1
+#endif
+
+// Compiler detection.
+#if defined(__GNUC__)
+#define COMPILER_GCC 1
+#elif defined(_MSC_VER)
+#define COMPILER_MSVC 1
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+// Processor architecture detection.  For more info on what's defined, see:
+//   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
+//   http://www.agner.org/optimize/calling_conventions.pdf
+//   or with gcc, run: "echo | gcc -E -dM -"
+#if defined(_M_X64) || defined(__x86_64__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86_64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(_M_IX86) || defined(__i386__)
+#define ARCH_CPU_X86_FAMILY 1
+#define ARCH_CPU_X86 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__ARMEL__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARMEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__aarch64__)
+#define ARCH_CPU_ARM_FAMILY 1
+#define ARCH_CPU_ARM64 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__pnacl__)
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#elif defined(__MIPSEL__)
+#if defined(__LP64__)
+#define ARCH_CPU_MIPS64_FAMILY 1
+#define ARCH_CPU_MIPS64EL 1
+#define ARCH_CPU_64_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#else
+#define ARCH_CPU_MIPS_FAMILY 1
+#define ARCH_CPU_MIPSEL 1
+#define ARCH_CPU_32_BITS 1
+#define ARCH_CPU_LITTLE_ENDIAN 1
+#endif
+#else
+#error Please add support for your architecture in build/build_config.h
+#endif
+
+// Type detection for wchar_t.
+#if defined(OS_WIN)
+#define WCHAR_T_IS_UTF16
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fffffff || __WCHAR_MAX__ == 0xffffffff)
+#define WCHAR_T_IS_UTF32
+#elif defined(OS_POSIX) && defined(COMPILER_GCC) && \
+    defined(__WCHAR_MAX__) && \
+    (__WCHAR_MAX__ == 0x7fff || __WCHAR_MAX__ == 0xffff)
+// On Posix, we'll detect short wchar_t, but projects aren't guaranteed to
+// compile in this mode (in particular, Chrome doesn't). This is intended for
+// other projects using base that manage their own dependencies and make sure
+// short wchar works for them.
+#define WCHAR_T_IS_UTF16
+#else
+#error Please add support for your compiler in build/build_config.h
+#endif
+
+#if defined(OS_ANDROID)
+// The compiler thinks std::string::const_iterator and "const char*" are
+// equivalent types.
+#define STD_STRING_ITERATOR_IS_CHAR_POINTER
+// The compiler thinks base::string16::const_iterator and "char16*" are
+// equivalent types.
+#define BASE_STRING16_ITERATOR_IS_CHAR16_POINTER
+#endif
+
+#endif  // BUILD_BUILD_CONFIG_H_
diff --git a/build/check_return_value.py b/build/check_return_value.py
new file mode 100755
index 0000000..c659d1e
--- /dev/null
+++ b/build/check_return_value.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This program wraps an arbitrary command and prints "1" if the command ran
+successfully."""
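+#
+# Illustrative usage (the wrapped commands are just examples):
+#   $ python build/check_return_value.py true
+#   1
+#   $ python build/check_return_value.py false
+#   0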
+
+import os
+import subprocess
+import sys
+
+devnull = open(os.devnull, 'wb')
+if not subprocess.call(sys.argv[1:], stdout=devnull, stderr=devnull):
+  print 1
+else:
+  print 0
diff --git a/build/check_sdk_extras_version.py b/build/check_sdk_extras_version.py
new file mode 100755
index 0000000..9b2f10d
--- /dev/null
+++ b/build/check_sdk_extras_version.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+'''Checks the status of an Android SDK package.
+
+Verifies the given package has been installed from the Android SDK Manager and
+that its version is at least the minimum version required by the project
+configuration.
+'''
+
+import argparse
+import json
+import os
+import re
+import sys
+
+
+COLORAMA_ROOT = os.path.join(os.path.dirname(__file__),
+                 os.pardir, 'third_party', 'colorama', 'src')
+
+sys.path.append(COLORAMA_ROOT)
+import colorama
+
+
+UDPATE_SCRIPT_PATH = 'build/install-android-sdks.sh'
+
+SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
+                                    'android_sdk_extras.json')
+
+PACKAGE_VERSION_PATTERN = r'^Pkg\.Revision=(?P<version>\d+).*$'
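+# For example, a source.properties file containing a line like
+#   Pkg.Revision=21.0.0
+# matches the pattern above with major version 21 (only the major number is
+# compared against the required minimum).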
+
+PKG_NOT_FOUND_MSG = ('Error while checking Android SDK extras versions. '
+                     'Could not find the "{package_id}" package in '
+                     '{checked_location}. Please run {script} to download it.')
+UPDATE_NEEDED_MSG = ('Error while checking Android SDK extras versions. '
+                     'Version {minimum_version} or greater is required for the '
+                     'package "{package_id}". Version {actual_version} found. '
+                     'Please run {script} to update it.')
+REQUIRED_VERSION_ERROR_MSG = ('Error while checking Android SDK extras '
+                              'versions. '
+                              'Could not retrieve the required version for '
+                              'package "{package_id}".')
+
+
+def main():
+  parser = argparse.ArgumentParser(description=__doc__)
+  parser.add_argument('--package-id',
+                      help=('id of the package to check for. The list of '
+                            'available packages and their ids can be obtained '
+                            'by running '
+                            'third_party/android_tools/sdk/tools/android list '
+                            'sdk --extended'))
+  parser.add_argument('--package-location',
+                      help='path to the package\'s expected install location.',
+                      metavar='DIR')
+  parser.add_argument('--stamp',
+                      help=('if specified, a stamp file will be created at the '
+                            'provided location.'),
+                      metavar='FILE')
+
+  args = parser.parse_args()
+
+  if not ShouldSkipVersionCheck():
+    minimum_version = GetRequiredMinimumVersion(args.package_id)
+    CheckPackageVersion(args.package_id, args.package_location, minimum_version)
+
+  # Create the stamp file.
+  if args.stamp:
+    with open(args.stamp, 'a'):
+      os.utime(args.stamp, None)
+
+  sys.exit(0)
+
+def ExitError(msg):
+  sys.exit(colorama.Fore.MAGENTA + colorama.Style.BRIGHT + msg +
+           colorama.Style.RESET_ALL)
+
+
+def GetRequiredMinimumVersion(package_id):
+  with open(SDK_EXTRAS_JSON_FILE, 'r') as json_file:
+    packages = json.load(json_file)
+
+  for package in packages:
+    if package['package_id'] == package_id:
+      return int(package['version'].split('.')[0])
+
+  ExitError(REQUIRED_VERSION_ERROR_MSG.format(package_id=package_id))
+
+
+def CheckPackageVersion(pkg_id, location, minimum_version):
+  version_file_path = os.path.join(location, 'source.properties')
+  # Extracts the version of the package described by the property file. We only
+  # care about the major version number here.
+  version_pattern = re.compile(PACKAGE_VERSION_PATTERN, re.MULTILINE)
+
+  if not os.path.isfile(version_file_path):
+    ExitError(PKG_NOT_FOUND_MSG.format(
+      package_id=pkg_id,
+      checked_location=location,
+      script=UDPATE_SCRIPT_PATH))
+
+  with open(version_file_path, 'r') as f:
+    match = version_pattern.search(f.read())
+
+    if not match:
+      ExitError(PKG_NOT_FOUND_MSG.format(
+        package_id=pkg_id,
+        checked_location=location,
+        script=UDPATE_SCRIPT_PATH))
+
+    pkg_version = int(match.group('version'))
+    if pkg_version < minimum_version:
+      ExitError(UPDATE_NEEDED_MSG.format(
+        package_id=pkg_id,
+        minimum_version=minimum_version,
+        actual_version=pkg_version,
+        script=UDPATE_SCRIPT_PATH))
+
+  # Everything looks ok, print nothing.
+
+def ShouldSkipVersionCheck():
+  '''
+  Bots should not run the version check, since they download the sdk extras
+  in a different way.
+  '''
+  return bool(os.environ.get('CHROME_HEADLESS'))
+
+if __name__ == '__main__':
+  main()
diff --git a/build/chrome_settings.gypi b/build/chrome_settings.gypi
new file mode 100644
index 0000000..e9c7535
--- /dev/null
+++ b/build/chrome_settings.gypi
@@ -0,0 +1,30 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains settings for ../chrome/chrome.gyp that other gyp files
+# also use.
+{
+  'variables': {
+    # TODO: remove this helper when we have loops in GYP
+    'apply_locales_cmd': ['python', '<(DEPTH)/build/apply_locales.py'],
+
+    'conditions': [
+      ['OS=="mac"', {
+        'conditions': [
+          ['branding=="Chrome"', {
+            'mac_bundle_id': 'com.google.Chrome',
+            'mac_creator': 'rimZ',
+            # The policy .grd file also needs the bundle id.
+            'grit_defines': ['-D', 'mac_bundle_id=com.google.Chrome'],
+          }, {  # else: branding!="Chrome"
+            'mac_bundle_id': 'org.chromium.Chromium',
+            'mac_creator': 'Cr24',
+            # The policy .grd file also needs the bundle id.
+            'grit_defines': ['-D', 'mac_bundle_id=org.chromium.Chromium'],
+          }],  # branding
+        ],  # conditions
+      }],  # OS=="mac"
+    ],  # conditions
+  },  # variables
+}
diff --git a/build/clobber.py b/build/clobber.py
new file mode 100755
index 0000000..785011a
--- /dev/null
+++ b/build/clobber.py
@@ -0,0 +1,110 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""This script provides methods for clobbering build directories."""
+
+import argparse
+import os
+import shutil
+import sys
+
+
+def extract_gn_build_commands(build_ninja_file):
+  """Extracts from a build.ninja the commands to run GN.
+
+  The commands to run GN are the gn rule and build.ninja build step at the
+  top of the build.ninja file. We want to keep these when deleting GN builds
+  since we want to preserve the command-line flags to GN.
+
+  On error, returns the empty string."""
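+  # For reference, the head of a GN-generated build.ninja looks roughly like
+  # the fallback written by delete_build_dir below (illustrative; the gn
+  # command line varies per build directory):
+  #
+  #   rule gn
+  #   command = gn -q gen //out/Debug/
+  #   description = Regenerating ninja files
+  #
+  #   build build.ninja: gn
+  #   generator = 1
+  #   depfile = build.ninja.d
+  #
+  # Everything up to and including the second blank line is returned.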
+  result = ""
+  with open(build_ninja_file, 'r') as f:
+    # Read until the second blank line. The first thing GN writes to the file
+    # is the "rule gn" and the second is the section for "build build.ninja",
+    # separated by blank lines.
+    num_blank_lines = 0
+    while num_blank_lines < 2:
+      line = f.readline()
+      if len(line) == 0:
+        return ''  # Unexpected EOF.
+      result += line
+      if line[0] == '\n':
+        num_blank_lines = num_blank_lines + 1
+  return result
+
+
+def delete_build_dir(build_dir):
+  # GN writes a build.ninja.d file. Note that not all GN builds have args.gn.
+  build_ninja_d_file = os.path.join(build_dir, 'build.ninja.d')
+  if not os.path.exists(build_ninja_d_file):
+    shutil.rmtree(build_dir)
+    return
+
+  # GN builds aren't automatically regenerated when you sync. To avoid
+  # messing with the GN workflow, erase everything but the args file, and
+  # write a dummy build.ninja file that will automatically rerun GN the next
+  # time Ninja is run.
+  build_ninja_file = os.path.join(build_dir, 'build.ninja')
+  build_commands = extract_gn_build_commands(build_ninja_file)
+
+  try:
+    gn_args_file = os.path.join(build_dir, 'args.gn')
+    with open(gn_args_file, 'r') as f:
+      args_contents = f.read()
+  except IOError:
+    args_contents = ''
+
+  shutil.rmtree(build_dir)
+
+  # Put back the args file (if any).
+  os.mkdir(build_dir)
+  if args_contents != '':
+    with open(gn_args_file, 'w') as f:
+      f.write(args_contents)
+
+  # Write the build.ninja file sufficiently to regenerate itself.
+  with open(os.path.join(build_dir, 'build.ninja'), 'w') as f:
+    if build_commands != '':
+      f.write(build_commands)
+    else:
+      # Couldn't parse the build.ninja file, write a default thing.
+      f.write('''rule gn
+command = gn -q gen //out/%s/
+description = Regenerating ninja files
+
+build build.ninja: gn
+generator = 1
+depfile = build.ninja.d
+''' % (os.path.split(build_dir)[1]))
+
+  # Write a .d file for the build which references a nonexistent file. This
+  # will make Ninja always mark the build as dirty.
+  with open(build_ninja_d_file, 'w') as f:
+    f.write('build.ninja: nonexistant_file.gn\n')
+
+
+def clobber(out_dir):
+  """Clobber contents of build directory.
+
+  Don't delete the directory itself: some checkouts have the build directory
+  mounted."""
+  for f in os.listdir(out_dir):
+    path = os.path.join(out_dir, f)
+    if os.path.isfile(path):
+      os.unlink(path)
+    elif os.path.isdir(path):
+      delete_build_dir(path)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('out_dir', help='The output directory to clobber')
+  args = parser.parse_args()
+  clobber(args.out_dir)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/common.croc b/build/common.croc
new file mode 100644
index 0000000..fde7a8b
--- /dev/null
+++ b/build/common.croc
@@ -0,0 +1,127 @@
+# -*- python -*-
+# Crocodile config file for Chromium - settings common to all platforms
+#
+# This should be specified before the platform-specific config, for example:
+#       croc -c chrome_common.croc -c linux/chrome_linux.croc
+
+{
+  # List of root directories, applied in order
+  'roots' : [
+    # Sub-paths we specifically care about and want to call out
+    {
+      'root' : '_/src',
+      'altname' : 'CHROMIUM',
+    },
+  ],
+
+  # List of rules, applied in order
+  # Note that any 'include':0 rules here will be overridden by the 'include':1
+  # rules in the platform-specific configs.
+  'rules' : [
+    # Don't scan for executable lines in uninstrumented C++ header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '',
+      'group' : 'source',
+    },
+    {
+      'regexp' : '.*_(test|unittest|uitest|browsertest)\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.(c|h)$',
+      'language' : 'C',
+    },
+    {
+      'regexp' : '.*\\.(cc|cpp|hpp)$',
+      'language' : 'C++',
+    },
+
+    # Files/paths to include.  Specify these before the excludes, since rules
+    # are in order.
+    {
+      'regexp' : '^CHROMIUM/(base|media|net|printing|remoting|chrome|content|webkit/glue|native_client)/',
+      'include' : 1,
+    },
+    # Don't include subversion or mercurial SCM dirs
+    {
+      'regexp' : '.*/(\\.svn|\\.hg)/',
+      'include' : 0,
+    },
+    # Don't include output dirs
+    {
+      'regexp' : '.*/(Debug|Release|out|xcodebuild)/',
+      'include' : 0,
+    },
+    # Don't include third-party source
+    {
+      'regexp' : '.*/third_party/',
+      'include' : 0,
+    },
+    # We don't run the V8 test suite, so we don't care about V8 coverage.
+    {
+      'regexp' : '.*/v8/',
+      'include' : 0,
+    },
+  ],
+
+  # Paths to add source from
+  'add_files' : [
+    'CHROMIUM'
+  ],
+
+  # Statistics to print
+  'print_stats' : [
+    {
+      'stat' : 'files_executable',
+      'format' : '*RESULT FilesKnown: files_executable= %d files',
+    },
+    {
+      'stat' : 'files_instrumented',
+      'format' : '*RESULT FilesInstrumented: files_instrumented= %d files',
+    },
+    {
+      'stat' : '100.0 * files_instrumented / files_executable',
+      'format' : '*RESULT FilesInstrumentedPercent: files_instrumented_percent= %g percent',
+    },
+    {
+      'stat' : 'lines_executable',
+      'format' : '*RESULT LinesKnown: lines_known= %d lines',
+    },
+    {
+      'stat' : 'lines_instrumented',
+      'format' : '*RESULT LinesInstrumented: lines_instrumented= %d lines',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredSource: lines_covered_source= %d lines',
+      'group' : 'source',
+    },
+    {
+      'stat' : 'lines_covered',
+      'format' : '*RESULT LinesCoveredTest: lines_covered_test= %d lines',
+      'group' : 'test',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCovered: percent_covered= %g percent',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredSource: percent_covered_source= %g percent',
+      'group' : 'source',
+    },
+    {
+      'stat' : '100.0 * lines_covered / lines_executable',
+      'format' : '*RESULT PercentCoveredTest: percent_covered_test= %g percent',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/common.gypi b/build/common.gypi
new file mode 100644
index 0000000..3a2df58
--- /dev/null
+++ b/build/common.gypi
@@ -0,0 +1,6216 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# IMPORTANT:
+# Please don't directly include this file if you are building via gyp_chromium,
+# since gyp_chromium is automatically forcing its inclusion.
+{
+  # Variables expected to be overriden on the GYP command line (-D) or by
+  # ~/.gyp/include.gypi.
+  'variables': {
+    # Putting a variables dict inside another variables dict looks kind of
+    # weird.  This is done so that 'host_arch', 'chromeos', etc are defined as
+    # variables within the outer variables dict here.  This is necessary
+    # to get these variables defined for the conditions within this variables
+    # dict that operate on these variables.
+    'variables': {
+      'variables': {
+        'variables': {
+          'variables': {
+            # Whether we're building a ChromeOS build.
+            'chromeos%': 0,
+
+            # Whether we're building the cast (chromecast) shell
+            'chromecast%': 0,
+
+            # Whether or not we are using the Aura windowing framework.
+            'use_aura%': 0,
+
+            # Whether or not we are building the Ash shell.
+            'use_ash%': 0,
+
+            # Whether or not we are using CRAS, the ChromeOS Audio Server.
+            'use_cras%': 0,
+
+            # Use a raw surface abstraction.
+            'use_ozone%': 0,
+
+            # Configure the build for small devices. See crbug.com/318413
+            'embedded%': 0,
+
+            'conditions': [
+              # Compute the architecture that we're building on.
+              ['OS=="win" or OS=="ios"', {
+                'host_arch%': 'ia32',
+              }, {
+                'host_arch%': '<!pymod_do_main(detect_host_arch)',
+              }],
+            ],
+          },
+          # Copy conditionally-set variables out one scope.
+          'chromeos%': '<(chromeos)',
+          'chromecast%': '<(chromecast)',
+          'use_aura%': '<(use_aura)',
+          'use_ash%': '<(use_ash)',
+          'use_cras%': '<(use_cras)',
+          'use_ozone%': '<(use_ozone)',
+          'embedded%': '<(embedded)',
+          'host_arch%': '<(host_arch)',
+
+          # Whether we are using Views Toolkit
+          'toolkit_views%': 0,
+
+          # Use the PCI lib to collect GPU information.
+          'use_libpci%': 1,
+
+          # Use OpenSSL instead of NSS as the underlying SSL and crypto
+          # implementation. Certificate verification will in most cases be
+          # handled by the OS. If OpenSSL's struct X509 is used to represent
+          # certificates, use_openssl_certs must be set.
+          'use_openssl%': 1,
+
+          # Use OpenSSL for representing certificates. When targeting Android,
+          # the platform certificate library is used for certificate
+          # verification. On other targets, this flag also enables OpenSSL for
+          # certificate verification, but this configuration is unsupported.
+          'use_openssl_certs%': 0,
+
+          # Disable viewport meta tag by default.
+          'enable_viewport%': 0,
+
+          # Enable HiDPI support.
+          'enable_hidpi%': 0,
+
+          # Enable top chrome material design.
+          'enable_topchrome_md%' : 0,
+
+          # Force building against pre-built sysroot image on linux.  By default
+          # the sysroot image is only used for Official builds or when cross
+          # compiling to arm or mips.
+          'use_sysroot%': 0,
+
+          # Override buildtype to select the desired build flavor.
+          # Dev - everyday build for development/testing
+          # Official - release build (generally implies additional processing)
+          # TODO(mmoss) Once 'buildtype' is fully supported (e.g. Windows gyp
+          # conversion is done), some of the things which are now controlled by
+          # 'branding', such as symbol generation, will need to be refactored
+          # based on 'buildtype' (i.e. we don't care about saving symbols for
+          # non-Official builds).
+          'buildtype%': 'Dev',
+
+          # Override branding to select the desired branding flavor.
+          'branding%': 'Chromium',
+
+          'conditions': [
+            # Windows and Linux (including Chrome OS) use Aura and Ash.
+            ['OS=="win" or OS=="linux"', {
+              'use_ash%': 1,
+              'use_aura%': 1,
+            }],
+
+            ['chromecast==1 and OS!="android"', {
+              'embedded%': 1,
+              'use_ozone%': 1,
+            }],
+
+            # Ozone uses Aura.
+            ['use_ozone==1', {
+              'use_aura%': 1,
+            }],
+
+            # Whether we're a traditional desktop unix.
+            ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris") and chromeos==0', {
+              'desktop_linux%': 1,
+            }, {
+              'desktop_linux%': 0,
+            }],
+
+            # Embedded implies ozone.
+            ['embedded==1', {
+              'use_ozone%': 1,
+            }],
+
+            ['OS=="android"', {
+              'target_arch%': 'arm',
+            }, {
+              # Default architecture we're building for is the architecture we're
+              # building on, and possibly sub-architecture (for iOS builds).
+              'target_arch%': '<(host_arch)',
+            }],
+          ],
+        },
+        # Copy conditionally-set variables out one scope.
+        'chromeos%': '<(chromeos)',
+        'chromecast%': '<(chromecast)',
+        'desktop_linux%': '<(desktop_linux)',
+        'use_aura%': '<(use_aura)',
+        'use_ash%': '<(use_ash)',
+        'use_cras%': '<(use_cras)',
+        'use_ozone%': '<(use_ozone)',
+        'embedded%': '<(embedded)',
+        'use_libpci%': '<(use_libpci)',
+        'use_openssl%': '<(use_openssl)',
+        'use_openssl_certs%': '<(use_openssl_certs)',
+        'enable_viewport%': '<(enable_viewport)',
+        'enable_hidpi%': '<(enable_hidpi)',
+        'enable_topchrome_md%': '<(enable_topchrome_md)',
+        'buildtype%': '<(buildtype)',
+        'branding%': '<(branding)',
+        'branding_path_component%': '<(branding)',
+        'host_arch%': '<(host_arch)',
+        'target_arch%': '<(target_arch)',
+
+        'target_subarch%': '',
+
+        # The channel to build on Android: stable, beta, dev, canary, or
+        # default. "default" should be used on non-official builds.
+        'android_channel%': 'default',
+
+        # Set ARM architecture version.
+        'arm_version%': 7,
+
+        # Use aurax11 for clipboard implementation. This is true on linux_aura.
+        'use_clipboard_aurax11%': 0,
+
+        # goma settings.
+        # 1 to use goma.
+        # If no gomadir is set, it uses the default gomadir.
+        'use_goma%': 0,
+        'gomadir%': '',
+
+        # The system root for cross-compiles. Default: none.
+        'sysroot%': '',
+        'chroot_cmd%': '',
+
+        # The system libdir used for this ABI.
+        'system_libdir%': 'lib',
+
+        # Default MIPS arch variant. This is set in the conditions block
+        # below for MIPS targets.
+        'mips_arch_variant%': '',
+
+        # MIPS DSP ASE revision. Possible values are:
+        #   0: unavailable
+        #   1: revision 1
+        #   2: revision 2
+        'mips_dsp_rev%': 0,
+
+        'conditions': [
+          ['branding == "Chrome"', {
+            'branding_path_component%': 'google_chrome',
+          }],
+
+          ['branding == "Chromium"', {
+            'branding_path_component%': 'chromium',
+          }],
+
+          # Ash needs Aura.
+          ['use_aura==0', {
+            'use_ash%': 0,
+          }],
+
+          # Set default value of toolkit_views based on OS.
+          ['OS=="mac" or OS=="win" or chromeos==1 or use_aura==1', {
+            'toolkit_views%': 1,
+          }, {
+            'toolkit_views%': 0,
+          }],
+
+          # Embedded builds use aura without ash or views.
+          ['embedded==1', {
+            'use_aura%': 1,
+            'use_ash%': 0,
+            'toolkit_views%': 0,
+          }],
+
+          # Enable HiDPI on Mac OS, Windows and Linux (including Chrome OS).
+          ['OS=="mac" or OS=="win" or OS=="linux"', {
+            'enable_hidpi%': 1,
+          }],
+
+          # Enable Top Chrome Material Design on Chrome OS, Windows, and Linux.
+          ['chromeos==1 or OS=="win" or OS=="linux"', {
+            'enable_topchrome_md%': 1,
+          }],
+
+          # On iOS, use NSS rather than OpenSSL. See http://crbug.com/338886.
+          ['OS=="ios"', {
+            'use_openssl%': 0,
+          }],
+
+          # Enable App Launcher everywhere but mobile.
+          ['OS!="ios" and OS!="android"', {
+            'enable_app_list%': 1,
+          }, {
+            'enable_app_list%': 0,
+          }],
+
+          ['use_aura==1 and OS!="android"', {
+            'use_default_render_theme%': 1,
+          }, {
+            'use_default_render_theme%': 0,
+          }],
+
+          ['use_ozone==1', {
+            'use_ozone_evdev%': 1,
+          }, {
+            'use_ozone_evdev%': 0,
+          }],
+
+          # Set default gomadir.
+          ['OS=="win"', {
+            'gomadir': 'c:\\goma\\goma-win',
+          }, {
+            'gomadir': '<!(/bin/echo -n ${HOME}/goma)',
+          }],
+
+          # Set the default "target_subarch" on iOS. Valid values are "arm32",
+          # "arm64" and "both" (meaning a fat binary).
+          ['OS=="ios"', {
+            'target_subarch%': 'arm64',
+          }],
+
+          # Set arch variants for MIPS platforms.
+          ['target_arch=="mips64el"', {
+            'conditions': [
+              ['OS=="android"', {
+                'mips_arch_variant%': 'r6',
+              }, {
+                'mips_arch_variant%': 'r2',
+              }],
+            ],
+          }],
+
+          ['target_arch=="mipsel"', {
+            'mips_arch_variant%': 'r1',
+          }],
+
+          ['OS=="linux" and target_arch=="arm" and chromeos==0', {
+            # sysroot needs to be an absolute path, otherwise it generates
+            # incorrect results when passed to pkg-config.
+            'sysroot%': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_wheezy_arm-sysroot',
+          }], # OS=="linux" and target_arch=="arm" and chromeos==0
+
+          ['OS=="linux" and ((branding=="Chrome" and buildtype=="Official" and chromeos==0) or use_sysroot==1)' , {
+            'conditions': [
+              ['target_arch=="x64"', {
+                'sysroot%': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_wheezy_amd64-sysroot',
+              }],
+              ['target_arch=="ia32"', {
+                'sysroot%': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_wheezy_i386-sysroot',
+              }],
+            ],
+          }], # OS=="linux" and branding=="Chrome" and buildtype=="Official" and chromeos==0
+
+          ['OS=="linux" and target_arch=="mipsel"', {
+            'sysroot%': '<!(cd <(DEPTH) && pwd -P)/mipsel-sysroot/sysroot',
+          }],
+        ],
+      },
+
+      # Copy conditionally-set variables out one scope.
+      'chromeos%': '<(chromeos)',
+      'chromecast%': '<(chromecast)',
+      'host_arch%': '<(host_arch)',
+      'target_arch%': '<(target_arch)',
+      'target_subarch%': '<(target_subarch)',
+      'mips_arch_variant%': '<(mips_arch_variant)',
+      'mips_dsp_rev%': '<(mips_dsp_rev)',
+      'toolkit_views%': '<(toolkit_views)',
+      'desktop_linux%': '<(desktop_linux)',
+      'use_aura%': '<(use_aura)',
+      'use_ash%': '<(use_ash)',
+      'use_cras%': '<(use_cras)',
+      'use_libpci%': '<(use_libpci)',
+      'use_ozone%': '<(use_ozone)',
+      'use_ozone_evdev%': '<(use_ozone_evdev)',
+      'use_clipboard_aurax11%': '<(use_clipboard_aurax11)',
+      'embedded%': '<(embedded)',
+      'use_openssl%': '<(use_openssl)',
+      'use_openssl_certs%': '<(use_openssl_certs)',
+      'enable_viewport%': '<(enable_viewport)',
+      'enable_hidpi%': '<(enable_hidpi)',
+      'enable_topchrome_md%': '<(enable_topchrome_md)',
+      'android_channel%': '<(android_channel)',
+      'use_goma%': '<(use_goma)',
+      'gomadir%': '<(gomadir)',
+      'enable_app_list%': '<(enable_app_list)',
+      'use_default_render_theme%': '<(use_default_render_theme)',
+      'buildtype%': '<(buildtype)',
+      'branding%': '<(branding)',
+      'branding_path_component%': '<(branding_path_component)',
+      'arm_version%': '<(arm_version)',
+      'sysroot%': '<(sysroot)',
+      'chroot_cmd%': '<(chroot_cmd)',
+      'system_libdir%': '<(system_libdir)',
+
+      # Set to 1 to enable fast builds. Set to 2 for even faster builds
+      # (it disables debug info for fastest compilation - only for use
+      # on compile-only bots).
+      'fastbuild%': 0,
+
+      # Set to 1 to not store any build metadata, e.g. ifdef out all __DATE__
+      # and __TIME__. Set to 0 to reenable the use of these macros in the code
+      # base. See http://crbug.com/314403.
+      'dont_embed_build_metadata%': 1,
+
+      # Set to 1 to force Visual C++ to use legacy debug information format /Z7.
+      # This is useful for parallel compilation tools which can't support /Zi.
+      # Only used on Windows.
+      'win_z7%' : 0,
+
+      # Set to 1 to enable dcheck in Release build.
+      'dcheck_always_on%': 0,
+
+      # Set to 1 to make a build that disables unshipped tracing events.
+      # Note: this setting is ignored if buildtype=="Official".
+      'tracing_like_official_build%': 0,
+
+      # Disable image loader component extension by default.
+      'image_loader_extension%': 0,
+
+      # Set NEON compilation flags.
+      'arm_neon%': 1,
+
+      # Detect NEON support at run-time.
+      'arm_neon_optional%': 0,
+
+      # Use libjpeg-turbo as the JPEG codec used by Chromium.
+      'use_libjpeg_turbo%': 1,
+
+      # Use system libjpeg. Note that the system's libjpeg will be used even if
+      # use_libjpeg_turbo is set.
+      'use_system_libjpeg%': 0,
+
+      # By default, component is set to static_library and it can be overridden
+      # by the GYP command line or by ~/.gyp/include.gypi.
+      'component%': 'static_library',
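+      # A minimal sketch of such an override in ~/.gyp/include.gypi (standard
+      # GYP include-file layout; 'shared_library' is just an example value):
+      #   {
+      #     'variables': {
+      #       'component': 'shared_library',
+      #     },
+      #   }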
+
+      # /analyze is off by default on Windows because it is very slow and noisy.
+      # Enable with GYP_DEFINES=win_analyze=1
+      'win_analyze%': 0,
+
+      # Set to select the Title Case versions of strings in GRD files.
+      'use_titlecase_in_grd%': 0,
+
+      # Use translations provided by volunteers at launchpad.net.  This
+      # currently only works on Linux.
+      'use_third_party_translations%': 0,
+
+      # Remoting compilation is enabled by default. Set to 0 to disable.
+      'remoting%': 1,
+
+      # Configuration policy is enabled by default. Set to 0 to disable.
+      'configuration_policy%': 1,
+
+      # The safe_browsing variable controls the build-time configuration of the
+      # safe browsing feature. Safe browsing can be compiled at 4 different
+      # levels: 0 disables it, 1 enables it fully, 2 enables only the UI and
+      # reporting features for use with Data Saver on Mobile, and 3 enables
+      # extended mobile protection via an external API.  When 3 is fully
+      # deployed, it will replace 2.
+      'safe_browsing%': 1,
+
+      # Web speech is enabled by default. Set to 0 to disable.
+      'enable_web_speech%': 1,
+
+      # 'Ok Google' hotwording is disabled by default in open source builds. Set
+      # to 1 to enable. (This will download a closed-source NaCl module at
+      # startup.) Chrome-branded builds have this enabled by default.
+      'enable_hotwording%': 0,
+
+      # Notifications are compiled in by default. Set to 0 to disable.
+      'notifications%' : 1,
+
+      # Use dsymutil to generate real .dSYM files on Mac. The default is 0 for
+      # regular builds and 1 for ASan builds.
+      'mac_want_real_dsym%': 'default',
+
+      # If this is set, the clang plugins used on the buildbot will be used.
+      # Run tools/clang/scripts/update.sh to make sure they are compiled.
+      # This causes 'clang_chrome_plugins_flags' to be set.
+      # Has no effect if 'clang' is not set as well.
+      'clang_use_chrome_plugins%': 1,
+
+      # Enable building with ASAN (Clang's -fsanitize=address option).
+      # -fsanitize=address only works with clang, but asan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/addresssanitizer
+      'asan%': 0,
+      'asan_blacklist%': '<(PRODUCT_DIR)/../../tools/memory/asan/blacklist.txt',
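+      # Illustrative usage only (see the link above for full instructions):
+      # building with ASan typically amounts to setting GYP_DEFINES="asan=1";
+      # as noted above, asan=1 implies clang=1.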
+      # Enable coverage gathering instrumentation in sanitizer tools. This flag
+      # also controls coverage granularity (1 for function-level coverage, 2
+      # for block-level coverage).
+      'sanitizer_coverage%': 0,
+      # Deprecated, only works if |sanitizer_coverage| isn't set.
+      # TODO(glider): remove this flag.
+      'asan_coverage%': 0,
+      # Enable intra-object-overflow detection in ASan (experimental).
+      'asan_field_padding%': 0,
+
+      # Enable Chromium overrides of the default configurations for various
+      # dynamic tools (like ASan).
+      'use_sanitizer_options%': 0,
+
+      # Enable building with SyzyAsan.
+      # See https://code.google.com/p/sawbuck/wiki/SyzyASanHowTo
+      'syzyasan%': 0,
+
+      # Enable crash reporting via Kasko.
+      'kasko%': 0,
+
+      # Enable building with LSan (Clang's -fsanitize=leak option).
+      # -fsanitize=leak only works with clang, but lsan=1 implies clang=1
+      # See https://sites.google.com/a/chromium.org/dev/developers/testing/leaksanitizer
+      'lsan%': 0,
+
+      # Enable building with TSan (Clang's -fsanitize=thread option).
+      # -fsanitize=thread only works with clang, but tsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/ThreadSanitizer.html
+      'tsan%': 0,
+      'tsan_blacklist%': '<(PRODUCT_DIR)/../../tools/memory/tsan_v2/ignores.txt',
+
+      # Enable building with MSan (Clang's -fsanitize=memory option).
+      # MemorySanitizer only works with clang, but msan=1 implies clang=1
+      # See http://clang.llvm.org/docs/MemorySanitizer.html
+      'msan%': 0,
+      'msan_blacklist%': '<(PRODUCT_DIR)/../../tools/msan/blacklist.txt',
+      # Track where uninitialized memory originates from. From fastest to
+      # slowest: 0 - no tracking, 1 - track only the initial allocation site, 2
+      # - track the chain of stores leading from allocation site to use site.
+      'msan_track_origins%': 2,
+
+      # Enable building with UBSan (Clang's -fsanitize=undefined option).
+      # -fsanitize=undefined only works with clang, but ubsan=1 implies clang=1
+      # See http://clang.llvm.org/docs/UsersManual.html
+      'ubsan%': 0,
+      'ubsan_blacklist%': '<(PRODUCT_DIR)/../../tools/ubsan/blacklist.txt',
+      'ubsan_vptr_blacklist%': '<(PRODUCT_DIR)/../../tools/ubsan/vptr_blacklist.txt',
+
+      # Enable building with UBsan's vptr (Clang's -fsanitize=vptr option).
+      # -fsanitize=vptr only works with clang, but ubsan_vptr=1 implies clang=1
+      'ubsan_vptr%': 0,
+
+      # Use dynamic libraries instrumented by one of the sanitizers
+      # instead of the standard system libraries. Set this flag to build the
+      # libraries from source.
+      'use_instrumented_libraries%': 0,
+
+      # Use dynamic libraries instrumented by one of the sanitizers
+      # instead of the standard system libraries. Set this flag to download
+      # prebuilt binaries from GCS.
+      'use_prebuilt_instrumented_libraries%': 0,
+
+      # Use libc++ (third_party/libc++ and third_party/libc++abi) instead of
+      # libstdc++ as the standard library. This is intended for instrumented
+      # builds.
+      'use_custom_libcxx%': 0,
+
+      # Use system libc++ instead of the default C++ library, usually libstdc++.
+      # This is intended for iOS builds only.
+      'use_system_libcxx%': 0,
+
+      # Use a modified version of Clang to intercept allocated types and sizes
+      # for allocated objects. clang_type_profiler=1 implies clang=1.
+      # See http://dev.chromium.org/developers/deep-memory-profiler/cpp-object-type-identifier
+      # TODO(dmikurube): Support mac.  See http://crbug.com/123758#c11
+      'clang_type_profiler%': 0,
+
+      # Set to true to instrument the code with function call logger.
+      # See src/third_party/cygprofile/cyg-profile.cc for details.
+      'order_profiling%': 0,
+
+      # Use the provided profiled order file when linking the Chrome image.
+      # This makes Chrome faster by making better use of the CPU cache when
+      # executing code. This is known as PGO (profile-guided optimization).
+      # See https://sites.google.com/a/google.com/chrome-msk/dev/boot-speed-up-effort
+      'order_text_section%' : "",
+
+      # Set to 1 to compile with the -fPIC cflag on Linux. This is a must for
+      # shared libraries on Linux x86-64 and ARM, plus ASLR.
+      'linux_fpic%': 1,
+
+      # Whether one-click signin is enabled or not.
+      'enable_one_click_signin%': 0,
+
+      # Whether to back up data before sync.
+      'enable_pre_sync_backup%': 0,
+
+      # Enable Chrome browser extensions
+      'enable_extensions%': 1,
+
+      # Enable Google Now.
+      'enable_google_now%': 1,
+
+      # Enable basic printing support and UI.
+      'enable_basic_printing%': 1,
+
+      # Enable printing with print preview. It does not imply
+      # enable_basic_printing. It's possible to build Chrome with preview only.
+      'enable_print_preview%': 1,
+
+      # Set the version of CLD.
+      #   0: Don't specify the version. This option is for Finch testing.
+      #   1: Use only CLD1.
+      #   2: Use only CLD2.
+      'cld_version%': 2,
+
+      # For CLD2, the size of the tables that should be included in the build.
+      # Only evaluated if cld_version == 2 or if building the CLD2 dynamic data
+      # tool explicitly.
+      # See third_party/cld_2/cld_2.gyp for more information.
+      #   0: Small tables, lower accuracy
+      #   2: Large tables, high accuracy
+      'cld2_table_size%': 2,
+
+      # Enable spell checker.
+      'enable_spellcheck%': 1,
+
+      # Use the operating system spellchecker, e.g. NSSpellChecker on Mac or
+      # SpellCheckerSession on Android.
+      'use_platform_spellchecker%': 0,
+
+      # Webrtc compilation is enabled by default. Set to 0 to disable.
+      'enable_webrtc%': 1,
+
+      # Media router support is enabled by default. Set to 0 to disable.
+      'enable_media_router%': 1,
+
+      # Enables use of the session service, which is enabled by default.
+      # Support for disabling depends on the platform.
+      'enable_session_service%': 1,
+
+      # Enables theme support, which is enabled by default.  Support for
+      # disabling depends on the platform.
+      'enable_themes%': 1,
+
+      # Enables autofill dialog and associated features; disabled by default.
+      'enable_autofill_dialog%' : 0,
+
+      # Defaults Wallet integration in Autofill dialog to use production
+      # servers. Unofficial builds won't have the proper API keys.
+      'enable_prod_wallet_service%': 0,
+
+      # Enables support for background apps.
+      'enable_background%': 1,
+
+      # Enable the task manager by default.
+      'enable_task_manager%': 1,
+
+      # Enables used resource whitelist generation; disabled by default.
+      'enable_resource_whitelist_generation%': 0,
+
+      # Enable FILE support by default.
+      'disable_file_support%': 0,
+
+      # Enable FTP support by default.
+      'disable_ftp_support%': 0,
+
+      # Use native Android functions in place of ICU.  Not supported by most
+      # components.
+      'use_icu_alternatives_on_android%': 0,
+
+      # Use of precompiled headers on Windows.
+      #
+      # This variable may be explicitly set to 1 (enabled) or 0
+      # (disabled) in ~/.gyp/include.gypi or via the GYP command line.
+      # This setting will override the default.
+      #
+      # See
+      # http://code.google.com/p/chromium/wiki/WindowsPrecompiledHeaders
+      # for details.
+      'chromium_win_pch%': 0,
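+      # For example (a sketch; either mechanism described above works):
+      #   GYP_DEFINES="chromium_win_pch=0"
+      # or an equivalent 'chromium_win_pch' entry under 'variables' in
+      # ~/.gyp/include.gypi.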
+
+      # Clang stuff.
+      'make_clang_dir%': 'third_party/llvm-build/Release+Asserts',
+      # Set this to true when building with Clang.
+      # See http://code.google.com/p/chromium/wiki/Clang for details.
+      # If this is set, clang is used as both host and target compiler in
+      # cross-compile builds.
+      'clang%': 0,
+
+      # Use experimental lld linker instead of the platform's default linker.
+      'use_lld%': 0,
+
+      # Enable plugin installation by default.
+      'enable_plugin_installation%': 1,
+
+      # Specifies whether to use canvas_skia.cc in place of platform
+      # specific implementations of gfx::Canvas. Affects text drawing in the
+      # Chrome UI.
+      # TODO(asvitkine): Enable this on all platforms and delete this flag.
+      #                  http://crbug.com/105550
+      'use_canvas_skia%': 0,
+
+      # Set to "tsan", "memcheck", or "drmemory" to configure the build to work
+      # with one of those tools.
+      'build_for_tool%': '',
+
+      'wix_path%': '<(DEPTH)/third_party/wix',
+
+      # Supervised users are enabled by default.
+      'enable_supervised_users%': 1,
+
+      # Platform sends memory pressure signals natively.
+      'native_memory_pressure_signals%': 0,
+
+      'enable_mdns%' : 0,
+      'enable_service_discovery%': 0,
+      'enable_wifi_bootstrapping%': 0,
+      'enable_hangout_services_extension%': 0,
+
+      # Enable the Syzygy optimization step.
+      'syzygy_optimize%': 0,
+
+      # Enable hole punching for the protected video.
+      'video_hole%': 0,
+
+      # Automatically select platforms under ozone. Turn this off to
+      # build only explicitly selected platforms.
+      'ozone_auto_platforms%': 1,
+
+      # If this is set, clang is used as the host compiler but not as the
+      # target compiler. Always do this by default.
+      'host_clang%': 1,
+
+      # Variables to control Link-Time Optimization (LTO).
+      # On Android, the variable use_lto enables LTO on code compiled with -Os,
+      # and use_lto_o2 enables LTO on code compiled with -O2. On other
+      # platforms, use_lto enables LTO in all translation units, and use_lto_o2
+      # has no effect.
+      #
+      # On Linux and Android, when using LLVM LTO, the script
+      # build/download_gold_plugin.py must be run to download a linker plugin.
+      # On Mac, LLVM needs to be built from scratch using
+      # tools/clang/scripts/update.py and the absolute path to
+      # third_party/llvm-build/Release+Asserts/lib must be added to
+      # $DYLD_LIBRARY_PATH to pick up the right version of the linker plugin.
+      #
+      # On Android, the variables must *not* be enabled at the same time.
+      # In this case LTO would 'merge' the optimization flags at link-time,
+      # which would lead to all code being optimized with -O2. See
+      # crbug.com/407544.
+      'use_lto%': 0,
+      'use_lto_o2%': 0,
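+      # As an illustrative sketch of the Linux/Android flow described above:
+      # run build/download_gold_plugin.py, then build with
+      # GYP_DEFINES="clang=1 use_lto=1" (both variables are defined in this
+      # file; the exact build invocation depends on your checkout).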
+
+      # Allowed level of identical code folding in the gold linker.
+      'gold_icf_level%': 'all',
+
+      # Libxkbcommon usage.
+      'use_xkbcommon%': 0,
+
+      # Control Flow Integrity for virtual calls and casts.
+      # See http://clang.llvm.org/docs/ControlFlowIntegrity.html
+      'cfi_vptr%': 0,
+
+      'cfi_blacklist%': '<(PRODUCT_DIR)/../../tools/cfi/blacklist.txt',
+
+      # Whether the entire browser uses toolkit-views on Mac instead of Cocoa.
+      'mac_views_browser%': 0,
+
+      # By default, use ICU data file (icudtl.dat).
+      'icu_use_data_file_flag%': 1,
+
+      # Turn on JNI generation optimizations by default.
+      'optimize_jni_generation%': 1,
+
+      'conditions': [
+        # A flag for POSIX platforms
+        ['OS=="win"', {
+          'os_posix%': 0,
+        }, {
+          'os_posix%': 1,
+        }],
+
+        # A flag for BSD platforms
+        ['OS=="freebsd" or OS=="openbsd"', {
+          'os_bsd%': 1,
+        }, {
+          'os_bsd%': 0,
+        }],
+
+        # NSS usage.
+        ['(OS=="linux" or OS=="freebsd" or OS=="openbsd" or OS=="solaris")', {
+          'use_nss_certs%': 1,
+        }, {
+          'use_nss_certs%': 0,
+        }],
+
+        # libudev usage.  This currently only affects the content layer.
+        ['OS=="linux" and embedded==0', {
+          'use_udev%': 1,
+        }, {
+          'use_udev%': 0,
+        }],
+
+        # Flags to use X11 on non-Mac POSIX platforms.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or use_ozone==1', {
+          'use_x11%': 0,
+        }, {
+          'use_x11%': 1,
+        }],
+
+        # Flags to use glib.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or use_ozone==1', {
+          'use_glib%': 0,
+        }, {
+          'use_glib%': 1,
+        }],
+
+        # Flags to use pango and cairo.
+        ['OS=="win" or OS=="mac" or OS=="ios" or OS=="android" or embedded==1', {
+          'use_pango%': 0,
+          'use_cairo%': 0,
+        }, {
+          'use_pango%': 1,
+          'use_cairo%': 1,
+        }],
+
+        # DBus usage.
+        ['OS=="linux" and embedded==0', {
+          'use_dbus%': 1,
+        }, {
+          'use_dbus%': 0,
+        }],
+
+        # We always use skia text rendering in Aura on Windows, since GDI
+        # doesn't agree with our BackingStore.
+        # TODO(beng): remove once skia text rendering is on by default.
+        ['use_aura==1 and OS=="win"', {
+          'enable_skia_text%': 1,
+        }],
+
+        # A flag to enable or disable our compile-time dependency
+        # on gnome-keyring. If that dependency is disabled, no gnome-keyring
+        # support will be available. This option is useful
+        # for Linux distributions and for Aura.
+        ['OS!="linux" or chromeos==1', {
+          'use_gnome_keyring%': 0,
+        }, {
+          'use_gnome_keyring%': 1,
+        }],
+
+        ['OS=="mac" or OS=="ios"', {
+          # Mac and iOS want Title Case strings
+          'use_titlecase_in_grd%': 1,
+        }],
+
+        # Enable loader extensions on Chrome OS.
+        ['chromeos==1', {
+          'image_loader_extension%': 1,
+        }, {
+          'image_loader_extension%': 0,
+        }],
+
+        ['OS=="win" or OS=="mac" or (OS=="linux" and chromeos==0)', {
+          'enable_one_click_signin%': 1,
+          'enable_pre_sync_backup%': 1,
+        }],
+
+        ['OS=="android"', {
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'cld_version%': 1,
+          'enable_spellcheck%': 0,
+          'enable_themes%': 0,
+          'remoting%': 0,
+          'arm_neon%': 0,
+          'arm_neon_optional%': 1,
+          'native_memory_pressure_signals%': 1,
+          'enable_basic_printing%': 1,
+          'enable_print_preview%': 0,
+          'enable_task_manager%':0,
+          'video_hole%': 1,
+        }],
+
+        # OS X has a built-in spellchecker that can be utilized.
+        ['OS=="mac"', {
+          'use_platform_spellchecker%': 1,
+        }],
+
+        # Android OS includes support for proprietary codecs regardless of
+        # whether we are building Chromium or Google Chrome. We also ship
+        # Google Chrome and Chromecast with proprietary codecs.
+        ['OS=="android" or branding=="Chrome" or chromecast==1', {
+          'proprietary_codecs%': 1,
+        }, {
+          'proprietary_codecs%': 0,
+        }],
+
+        ['OS=="mac" or OS=="ios"', {
+          'native_memory_pressure_signals%': 1,
+        }],
+
+        # Enable autofill dialog when not on iOS.
+        ['OS!="ios"', {
+          'enable_autofill_dialog%': 1,
+        }],
+
+        ['buildtype=="Official"', {
+          'enable_prod_wallet_service%': 1,
+        }],
+
+        ['branding=="Chrome"', {
+          'enable_hotwording%': 1,
+        }],
+
+        ['OS=="android"', {
+          'enable_webrtc%': 1,
+        }],
+
+        ['OS=="ios"', {
+          'disable_ftp_support%': 1,
+          'enable_extensions%': 0,
+          'enable_google_now%': 0,
+          'cld_version%': 2,
+          'cld2_table_size%': 0,
+          'enable_basic_printing%': 0,
+          'enable_print_preview%': 0,
+          'enable_session_service%': 0,
+          'enable_spellcheck%': 0,
+          'enable_themes%': 0,
+          'enable_webrtc%': 0,
+          'notifications%': 0,
+          'remoting%': 0,
+          'safe_browsing%': 0,
+          'enable_supervised_users%': 0,
+          'enable_task_manager%': 0,
+          'use_system_libcxx%': 1,
+        }],
+
+        # Use GPU-accelerated cross-process image transport by default on
+        # Linux builds with the Aura window manager.
+        ['use_aura==1 and OS=="linux"', {
+          'ui_compositor_image_transport%': 1,
+        }, {
+          'ui_compositor_image_transport%': 0,
+        }],
+
+        # Turn precompiled headers on by default.
+        ['OS=="win" and buildtype!="Official"', {
+          'chromium_win_pch%': 1
+        }],
+
+        ['chromeos==1 or OS=="android" or OS=="ios" or desktop_linux==1', {
+          'enable_plugin_installation%': 0,
+        }, {
+          'enable_plugin_installation%': 1,
+        }],
+
+        # Whether PPAPI is enabled.
+        ['OS=="android" or OS=="ios" or (embedded==1 and chromecast==0)', {
+          'enable_plugins%': 0,
+        }, {
+          'enable_plugins%': 1,
+        }],
+
+        # linux_use_bundled_gold: whether to use the gold linker binary checked
+        # into third_party/binutils.  Force this off via GYP_DEFINES when you
+        # are using a custom toolchain and need to control -B in ldflags.
+        # Do not use 32-bit gold on 32-bit hosts as it runs out of address
+        # space for component=static_library builds.
+        ['(OS=="linux" or OS=="android") and (target_arch=="x64" or target_arch=="arm")', {
+          'linux_use_bundled_gold%': 1,
+        }, {
+          'linux_use_bundled_gold%': 0,
+        }],
+
+        # linux_use_bundled_binutils: whether to use the binutils binaries
+        # checked into third_party/binutils.  These are not multi-arch so cannot
+        # be used except on x86 and x86-64 (the only two architectures which
+        # are currently checked in).  Force this off via GYP_DEFINES when you
+        # are using a custom toolchain and need to control -B in cflags.
+        ['OS=="linux" and (target_arch=="x64")', {
+          'linux_use_bundled_binutils%': 1,
+        }, {
+          'linux_use_bundled_binutils%': 0,
+        }],
+
+        # linux_use_gold_flags: whether to use build flags that rely on gold.
+        # On by default for x64 Linux.
+        ['OS=="linux" and target_arch=="x64"', {
+          'linux_use_gold_flags%': 1,
+        }, {
+          'linux_use_gold_flags%': 0,
+        }],
+
+        # linux_use_debug_fission: whether to use split DWARF debug info
+        # files. This can reduce link time significantly, but is incompatible
+        # with some utilities such as icecc and ccache. Requires gold and
+        # gcc >= 4.8 or clang.
+        # http://gcc.gnu.org/wiki/DebugFission
+        ['OS=="linux" and target_arch=="x64"', {
+          'linux_use_debug_fission%': 1,
+        }, {
+          'linux_use_debug_fission%': 0,
+        }],
+
+        ['OS=="android" or OS=="ios"', {
+          'enable_captive_portal_detection%': 0,
+          'enable_media_router%': 0,
+        }, {
+          'enable_captive_portal_detection%': 1,
+          'enable_media_router%': 1,
+        }],
+
+        # Enable Skia UI text drawing incrementally on different platforms.
+        # http://crbug.com/105550
+        #
+        # On Aura, this allows per-tile painting to be used in the browser
+        # compositor.
+        ['OS!="android" and OS!="ios"', {
+          'use_canvas_skia%': 1,
+        }],
+
+        ['chromeos==1', {
+          'enable_basic_printing%': 0,
+          'enable_print_preview%': 1,
+        }],
+
+        # Do not enable the Settings App on ChromeOS.
+        ['enable_app_list==1 and chromeos==0', {
+          'enable_settings_app%': 1,
+        }, {
+          'enable_settings_app%': 0,
+        }],
+
+        # Whether test targets should be run, archived or just have their
+        # dependencies verified. All the test targets have the '_run' suffix,
+        # e.g. base_unittests_run runs the target base_unittests. The test
+        # target always calls tools/swarming_client/isolate.py. See the script's
+        # --help for more information. Meant to be overridden with GYP_DEFINES.
+        # TODO(maruel): Remove the conditions as more configurations are
+        # supported.
+        ['OS!="ios" and OS!="android" and chromeos==0', {
+          'test_isolation_mode%': 'check',
+        }, {
+          'test_isolation_mode%': 'noop',
+        }],
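+        # For illustration (using only the variable and values that appear
+        # above), the mode could be overridden with e.g.
+        #   GYP_DEFINES="test_isolation_mode=noop"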
+        # Whether the Android build uses OpenMAX DL FFT.
+        ['OS=="android" and ((target_arch=="arm" and arm_version >= 7) or target_arch=="ia32" or target_arch=="x64" or target_arch=="arm64" or target_arch=="mipsel")', {
+          # Currently only supported on Android ARMv7+, ARM64, ia32, x64 and mipsel.
+          # When enabled, this will also enable WebAudio support on
+          # Android for these architectures.  Default is enabled.  Whether
+          # WebAudio is actually available depends on runtime settings
+          # and flags.
+          'use_openmax_dl_fft%': 1,
+        }, {
+          'use_openmax_dl_fft%': 0,
+        }],
+        ['OS=="win" or OS=="linux"', {
+            'enable_mdns%' : 1,
+        }],
+
+        # Disable various features by default on embedded.
+        ['embedded==1', {
+          'remoting%': 0,
+          'enable_basic_printing%': 0,
+          'enable_print_preview%': 0,
+        }],
+
+        ['OS=="win" or OS=="mac"', {
+          'enable_wifi_bootstrapping%' : 1,
+        }],
+
+        # Path to sas.dll, which provides the SendSAS function.
+        # http://msdn.microsoft.com/en-us/library/windows/desktop/dd979761(v=vs.85).aspx
+        ['target_arch=="x64"', {
+          'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/amd64',
+        }, {
+          'sas_dll_path%': '<(DEPTH)/third_party/platformsdk_win7/files/redist/x86',
+        }],
+
+        ['sysroot!=""', {
+          'pkg-config': '<(chroot_cmd) <(DEPTH)/build/linux/pkg-config-wrapper "<(sysroot)" "<(target_arch)" "<(system_libdir)"',
+        }, {
+          'pkg-config': 'pkg-config'
+        }],
+      ],
+
+      # WebVR support is disabled until platform implementations have been added.
+      'enable_webvr%': 0,
+
+      # Setting this to '0' will cause V8's startup snapshot to be
+      # embedded in the binary instead of being an external file.
+      'v8_use_external_startup_data%': 1,
+
+      # Set this to 1 to enable use of concatenated impulse responses
+      # for the HRTF panner in WebAudio.
+      'use_concatenated_impulse_responses': 1,
+
+      # You can set the variable 'use_official_google_api_keys' to 1
+      # to use the Google-internal file containing official API keys
+      # for Google Chrome even in a developer build.  Setting this
+      # variable explicitly to 1 will cause your build to fail if the
+      # internal file is missing.
+      #
+      # The variable is documented here, but not handled in this file;
+      # see //google_apis/determine_use_official_keys.gypi for the
+      # implementation.
+      #
+      # Set the variable to 0 to not use the internal file, even when
+      # it exists in your checkout.
+      #
+      # Leave it unset in your include.gypi to have the variable
+      # implicitly set to 1 if you have
+      # src/google_apis/internal/google_chrome_api_keys.h in your
+      # checkout, and implicitly set to 0 if not.
+      #
+      # Note that official builds always behave as if the variable
+      # was explicitly set to 1, i.e. they always use official keys,
+      # and will fail to build if the internal file is missing.
+      #
+      # NOTE: You MUST NOT explicitly set the variable to 2 in your
+      # include.gypi or by other means. Due to subtleties of GYP, this
+      # is not the same as leaving the variable unset, even though its
+      # default value in
+      # //google_apis/determine_use_official_keys.gypi is 2.
+
+      # Set these to bake the specified API keys and OAuth client
+      # IDs/secrets into your build.
+      #
+      # If you create a build without values baked in, you can instead
+      # set environment variables to provide the keys at runtime (see
+      # src/google_apis/google_api_keys.h for details).  Features that
+      # require server-side APIs may fail to work if no keys are
+      # provided.
+      #
+      # Note that if you are building an official build or if
+      # use_official_google_api_keys has been set to 1 (explicitly or
+      # implicitly), these values will be ignored and the official
+      # keys will be used instead.
+      'google_api_key%': '',
+      'google_default_client_id%': '',
+      'google_default_client_secret%': '',
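+      # Purely illustrative (placeholder values, not real credentials):
+      #   GYP_DEFINES='google_api_key=PLACEHOLDER google_default_client_id=PLACEHOLDER
+      #                google_default_client_secret=PLACEHOLDER'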
+      # Native Client is enabled by default.
+      'disable_nacl%': '0',
+
+      # Sets the default version name and code for the Android app; by default
+      # we do a developer build.
+      'android_app_version_name%': 'Developer Build',
+      'android_app_version_code%': 1,
+    },
+
+    # Copy conditionally-set variables out one scope.
+    'branding%': '<(branding)',
+    'branding_path_component%': '<(branding_path_component)',
+    'buildtype%': '<(buildtype)',
+    'target_arch%': '<(target_arch)',
+    'target_subarch%': '<(target_subarch)',
+    'mips_arch_variant%': '<(mips_arch_variant)',
+    'mips_dsp_rev%': '<(mips_dsp_rev)',
+    'host_arch%': '<(host_arch)',
+    'toolkit_views%': '<(toolkit_views)',
+    'ui_compositor_image_transport%': '<(ui_compositor_image_transport)',
+    'use_aura%': '<(use_aura)',
+    'use_ash%': '<(use_ash)',
+    'use_cras%': '<(use_cras)',
+    'use_libpci%': '<(use_libpci)',
+    'use_openssl%': '<(use_openssl)',
+    'use_openssl_certs%': '<(use_openssl_certs)',
+    'use_nss_certs%': '<(use_nss_certs)',
+    'use_udev%': '<(use_udev)',
+    'os_bsd%': '<(os_bsd)',
+    'os_posix%': '<(os_posix)',
+    'use_dbus%': '<(use_dbus)',
+    'use_glib%': '<(use_glib)',
+    'use_pango%': '<(use_pango)',
+    'use_cairo%': '<(use_cairo)',
+    'use_ozone%': '<(use_ozone)',
+    'use_ozone_evdev%': '<(use_ozone_evdev)',
+    'use_xkbcommon%': '<(use_xkbcommon)',
+    'use_clipboard_aurax11%': '<(use_clipboard_aurax11)',
+    'desktop_linux%': '<(desktop_linux)',
+    'use_x11%': '<(use_x11)',
+    'use_gnome_keyring%': '<(use_gnome_keyring)',
+    'linux_fpic%': '<(linux_fpic)',
+    'chromeos%': '<(chromeos)',
+    'chromecast%': '<(chromecast)',
+    'enable_viewport%': '<(enable_viewport)',
+    'enable_hidpi%': '<(enable_hidpi)',
+    'enable_topchrome_md%': '<(enable_topchrome_md)',
+    'image_loader_extension%': '<(image_loader_extension)',
+    'fastbuild%': '<(fastbuild)',
+    'dont_embed_build_metadata%': '<(dont_embed_build_metadata)',
+    'win_z7%': '<(win_z7)',
+    'dcheck_always_on%': '<(dcheck_always_on)',
+    'tracing_like_official_build%': '<(tracing_like_official_build)',
+    'arm_version%': '<(arm_version)',
+    'arm_neon%': '<(arm_neon)',
+    'arm_neon_optional%': '<(arm_neon_optional)',
+    'sysroot%': '<(sysroot)',
+    'pkg-config%': '<(pkg-config)',
+    'chroot_cmd%': '<(chroot_cmd)',
+    'system_libdir%': '<(system_libdir)',
+    'component%': '<(component)',
+    'win_analyze%': '<(win_analyze)',
+    'enable_resource_whitelist_generation%': '<(enable_resource_whitelist_generation)',
+    'use_titlecase_in_grd%': '<(use_titlecase_in_grd)',
+    'use_third_party_translations%': '<(use_third_party_translations)',
+    'remoting%': '<(remoting)',
+    'enable_one_click_signin%': '<(enable_one_click_signin)',
+    'enable_pre_sync_backup%': '<(enable_pre_sync_backup)',
+    'enable_media_router%': '<(enable_media_router)',
+    'enable_webrtc%': '<(enable_webrtc)',
+    'chromium_win_pch%': '<(chromium_win_pch)',
+    'configuration_policy%': '<(configuration_policy)',
+    'safe_browsing%': '<(safe_browsing)',
+    'enable_web_speech%': '<(enable_web_speech)',
+    'enable_hotwording%': '<(enable_hotwording)',
+    'notifications%': '<(notifications)',
+    'clang_use_chrome_plugins%': '<(clang_use_chrome_plugins)',
+    'mac_want_real_dsym%': '<(mac_want_real_dsym)',
+    'asan%': '<(asan)',
+    'asan_blacklist%': '<(asan_blacklist)',
+    'asan_coverage%': '<(asan_coverage)',
+    'sanitizer_coverage%': '<(sanitizer_coverage)',
+    'asan_field_padding%': '<(asan_field_padding)',
+    'use_sanitizer_options%': '<(use_sanitizer_options)',
+    'syzyasan%': '<(syzyasan)',
+    'kasko%': '<(kasko)',
+    'syzygy_optimize%': '<(syzygy_optimize)',
+    'lsan%': '<(lsan)',
+    'msan%': '<(msan)',
+    'msan_blacklist%': '<(msan_blacklist)',
+    'msan_track_origins%': '<(msan_track_origins)',
+    'tsan%': '<(tsan)',
+    'tsan_blacklist%': '<(tsan_blacklist)',
+    'ubsan%': '<(ubsan)',
+    'ubsan_blacklist%': '<(ubsan_blacklist)',
+    'ubsan_vptr_blacklist%': '<(ubsan_vptr_blacklist)',
+    'ubsan_vptr%': '<(ubsan_vptr)',
+    'use_instrumented_libraries%': '<(use_instrumented_libraries)',
+    'use_prebuilt_instrumented_libraries%': '<(use_prebuilt_instrumented_libraries)',
+    'use_custom_libcxx%': '<(use_custom_libcxx)',
+    'use_system_libcxx%': '<(use_system_libcxx)',
+    'clang_type_profiler%': '<(clang_type_profiler)',
+    'order_profiling%': '<(order_profiling)',
+    'order_text_section%': '<(order_text_section)',
+    'enable_extensions%': '<(enable_extensions)',
+    'enable_plugin_installation%': '<(enable_plugin_installation)',
+    'enable_plugins%': '<(enable_plugins)',
+    'enable_session_service%': '<(enable_session_service)',
+    'enable_themes%': '<(enable_themes)',
+    'enable_autofill_dialog%': '<(enable_autofill_dialog)',
+    'enable_prod_wallet_service%': '<(enable_prod_wallet_service)',
+    'enable_background%': '<(enable_background)',
+    'linux_use_bundled_gold%': '<(linux_use_bundled_gold)',
+    'linux_use_bundled_binutils%': '<(linux_use_bundled_binutils)',
+    'linux_use_gold_flags%': '<(linux_use_gold_flags)',
+    'linux_use_debug_fission%': '<(linux_use_debug_fission)',
+    'use_canvas_skia%': '<(use_canvas_skia)',
+    'test_isolation_mode%': '<(test_isolation_mode)',
+    'enable_basic_printing%': '<(enable_basic_printing)',
+    'enable_print_preview%': '<(enable_print_preview)',
+    'enable_spellcheck%': '<(enable_spellcheck)',
+    'use_platform_spellchecker%': '<(use_platform_spellchecker)',
+    'enable_google_now%': '<(enable_google_now)',
+    'cld_version%': '<(cld_version)',
+    'cld2_table_size%': '<(cld2_table_size)',
+    'enable_captive_portal_detection%': '<(enable_captive_portal_detection)',
+    'disable_file_support%': '<(disable_file_support)',
+    'disable_ftp_support%': '<(disable_ftp_support)',
+    'use_icu_alternatives_on_android%': '<(use_icu_alternatives_on_android)',
+    'enable_task_manager%': '<(enable_task_manager)',
+    'sas_dll_path%': '<(sas_dll_path)',
+    'wix_path%': '<(wix_path)',
+    'use_libjpeg_turbo%': '<(use_libjpeg_turbo)',
+    'use_system_libjpeg%': '<(use_system_libjpeg)',
+    'android_channel%': '<(android_channel)',
+    'icu_use_data_file_flag%': '<(icu_use_data_file_flag)',
+    'gyp_managed_install%': 0,
+    'create_standalone_apk%': 1,
+    'enable_app_list%': '<(enable_app_list)',
+    'use_default_render_theme%': '<(use_default_render_theme)',
+    'enable_settings_app%': '<(enable_settings_app)',
+    'google_api_key%': '<(google_api_key)',
+    'google_default_client_id%': '<(google_default_client_id)',
+    'google_default_client_secret%': '<(google_default_client_secret)',
+    'enable_supervised_users%': '<(enable_supervised_users)',
+    'native_memory_pressure_signals%': '<(native_memory_pressure_signals)',
+    'enable_mdns%' : '<(enable_mdns)',
+    'enable_service_discovery%' : '<(enable_service_discovery)',
+    'enable_wifi_bootstrapping%': '<(enable_wifi_bootstrapping)',
+    'enable_hangout_services_extension%' : '<(enable_hangout_services_extension)',
+    'proprietary_codecs%': '<(proprietary_codecs)',
+    'use_goma%': '<(use_goma)',
+    'gomadir%': '<(gomadir)',
+    'use_lto%': '<(use_lto)',
+    'use_lto_o2%': '<(use_lto_o2)',
+    'gold_icf_level%': '<(gold_icf_level)',
+    'video_hole%': '<(video_hole)',
+    'v8_use_external_startup_data%': '<(v8_use_external_startup_data)',
+    'cfi_vptr%': '<(cfi_vptr)',
+    'cfi_blacklist%': '<(cfi_blacklist)',
+    'mac_views_browser%': '<(mac_views_browser)',
+    'android_app_version_name%': '<(android_app_version_name)',
+    'android_app_version_code%': '<(android_app_version_code)',
+    'enable_webvr%': '<(enable_webvr)',
+
+    # Turns on compiler optimizations in V8 in Debug build.
+    'v8_optimized_debug%': 1,
+
+    # Use system protobuf instead of bundled one.
+    'use_system_protobuf%': 0,
+
+    # Use system yasm instead of bundled one.
+    'use_system_yasm%': 0,
+
+    # Use system ICU instead of bundled one.
+    'use_system_icu%' : 0,
+
+    # Default to enabled PIE; this is important for ASLR but we may need to be
+    # able to turn it off for various reasons.
+    'linux_disable_pie%': 0,
+
+    # The release channel that this build targets. This is used to restrict
+    # channel-specific build options, like which installer packages to create.
+    # The default is 'all', which does no channel-specific filtering.
+    'channel%': 'all',
+
+    # Override chromium_mac_pch and set it to 0 to suppress the use of
+    # precompiled headers on the Mac.  Prefix header injection may still be
+    # used, but prefix headers will not be precompiled.  This is useful when
+    # using distcc to distribute a build to compile slaves that don't
+    # share the same compiler executable as the system driving the compilation,
+    # because precompiled headers rely on pointers into a specific compiler
+    # executable's image.  Setting this to 0 is needed to use an experimental
+    # Linux-Mac cross compiler distcc farm.
+    'chromium_mac_pch%': 1,
+
+    # The default value for mac_strip in target_defaults. This cannot be
+    # set there, per the comment about variable% in a target_defaults.
+    'mac_strip_release%': 0,
+
+    # Set to 1 to enable java code coverage. Instruments classes during build
+    # to produce .ec files during runtime.
+    'emma_coverage%': 0,
+
+    # EMMA filter string consisting of a list of inclusion/exclusion patterns
+    # separated with whitespace and/or comma. Only has effect if
+    # 'emma_coverage=1'.
+    'emma_filter%': '',
+
+    # Set to 1 to enable running Android lint on java/class files.
+    'android_lint%': 1,
+
+    # Although base/allocator lets you select a heap library via an
+    # environment variable, the libcmt shim it uses sometimes gets in
+    # the way.  To disable it entirely and switch to normal msvcrt, add e.g.
+    #  'win_use_allocator_shim': 0,
+    #  'win_release_RuntimeLibrary': 2
+    # to ~/.gyp/include.gypi, run gclient runhooks --force, and do a release build.
+    'win_use_allocator_shim%': 1, # 1 = shim allocator via libcmt; 0 = msvcrt
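+    # A minimal ~/.gyp/include.gypi sketch of the override described above
+    # (standard GYP include-file layout; names and values are taken from the
+    # comment):
+    #   {
+    #     'variables': {
+    #       'win_use_allocator_shim': 0,
+    #       'win_release_RuntimeLibrary': 2,
+    #     },
+    #   }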
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to prevent release.vsprops from being included.
+    # Yes(1) means include release.vsprops.
+    # Once all vsprops settings are migrated into gyp, this can go away.
+    'msvs_use_common_release%': 1,
+
+    # TODO(bradnelson): eliminate this when possible.
+    # To allow local gyp files to override additional linker options for msvs.
+    # Yes(1) means use the common linker options.
+    'msvs_use_common_linker_extras%': 1,
+
+    # TODO(sgk): eliminate this if possible.
+    # It would be nicer to support this via a setting in 'target_defaults'
+    # in chrome/app/locales/locales.gypi overriding the setting in the
+    # 'Debug' configuration in the 'target_defaults' dict below,
+    # but that doesn't work as we'd like.
+    'msvs_debug_link_incremental%': '2',
+
+    # Needed for some of the largest modules.
+    'msvs_debug_link_nonincremental%': '1',
+
+    # Turns on Use Library Dependency Inputs for linking chrome.dll on Windows
+    # to make incremental linking faster in debug builds.
+    'incremental_chrome_dll%': '0',
+
+    # Experimental setting to break chrome.dll into multiple pieces based on
+    # process type.
+    'chrome_multiple_dll%': '0',
+
+    # Experimental setting to optimize Chrome's DLLs with PGO.
+    'chrome_pgo_phase%': '0',
+
+    # Whether the VS xtree header has been patched to disable warning 4702. If
+    # it has, then we don't need to disable 4702 (unreachable code warning).
+    # The patch is preapplied to the internal toolchain and hence all bots.
+    'msvs_xtree_patched%': '<!pymod_do_main(win_is_xtree_patched)',
+
+    # Clang stuff.
+    'clang%': '<(clang)',
+    'host_clang%': '<(host_clang)',
+    'make_clang_dir%': '<(make_clang_dir)',
+    'use_lld%': '<(use_lld)',
+
+    # Control which version of clang to use when building for iOS.  If set to
+    # '1', uses the version of clang that ships with Xcode.  If set to '0', uses
+    # the version of clang that ships with the Chromium source.  This variable
+    # is automatically set to '1' in Official builds.
+    'clang_xcode%': 0,
+
+    # These two variables can be set in GYP_DEFINES while running
+    # |gclient runhooks| to let clang run a plugin in every compilation.
+    # Only has an effect if 'clang=1' is in GYP_DEFINES as well.
+    # Example:
+    #     GYP_DEFINES='clang=1 clang_load=/abs/path/to/libPrintFunctionNames.dylib clang_add_plugin=print-fns' gclient runhooks
+
+    'clang_load%': '',
+    'clang_add_plugin%': '',
+
+    # Tell ld64 to write map files describing binary layout. Useful
+    # for looking at what contributes to binary size, e.g. with
+    # https://github.com/nico/bloat
+    'mac_write_linker_maps%': 0,
+
+    # The default type of gtest.
+    'gtest_target_type%': 'executable',
+
+    # Enable sampling based profiler.
+    # See http://google-perftools.googlecode.com/svn/trunk/doc/cpuprofile.html
+    'profiling%': '0',
+    # Profile without optimizing out stack frames when profiling==1.
+    'profiling_full_stack_frames%': '0',
+
+    # And if we want to dump symbols for Breakpad-enabled builds.
+    'linux_dump_symbols%': 0,
+    # And if we want to strip the binary after dumping symbols.
+    'linux_strip_binary%': 0,
+    # If we want stack unwind support for backtrace().
+    'debug_unwind_tables%': 1,
+    'release_unwind_tables%': 1,
+
+    # Override where to find binutils
+    'binutils_version%': 0,
+    'binutils_dir%': '',
+
+    # Enable TCMalloc.
+    # The default of 'use_allocator' is set to 'none' later if OS=='android'.
+    'use_allocator%': 'tcmalloc',
+
+    # Set to 1 to link against libgnome-keyring instead of using dlopen().
+    'linux_link_gnome_keyring%': 0,
+    # Set to 1 to link against gsettings APIs instead of using dlopen().
+    'linux_link_gsettings%': 0,
+
+    # Enable use of OpenMAX DL FFT routines.
+    'use_openmax_dl_fft%': '<(use_openmax_dl_fft)',
+
+    # Enable new NPDevice API.
+    'enable_new_npdevice_api%': 0,
+
+    # .gyp files or targets should set chromium_code to 1 if they build
+    # Chromium-specific code, as opposed to external code.  This variable is
+    # used to control such things as the set of warnings to enable, and
+    # whether warnings are treated as errors.
+    'chromium_code%': 0,
+
+    # Disable fatal linker warnings, similarly to how we make it possible
+    # to disable -Werror (e.g. for different toolchain versions).
+    'disable_fatal_linker_warnings%': 0,
+
+    'release_valgrind_build%': 0,
+
+    # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+    'enable_wexit_time_destructors%': 0,
+
+    # Build libpeerconnection as a static library by default.
+    'libpeer_target_type%': 'static_library',
+
+    # Set to 1 to compile with the OpenGL ES 2.0 conformance tests.
+    'internal_gles2_conform_tests%': 0,
+
+    # Set to 1 to compile with the Khronos GL-CTS conformance tests.
+    'internal_khronos_glcts_tests%': 0,
+
+    # Set to 1 to compile the filter fuzzer.
+    'internal_filter_fuzzer%': 0,
+
+    # NOTE: When these end up in the Mac bundle, we need to replace '-' with
+    # '_' so Cocoa is happy (http://crbug.com/20441).
+    'locales': [
+      'am', 'ar', 'bg', 'bn', 'ca', 'cs', 'da', 'de', 'el', 'en-GB',
+      'en-US', 'es-419', 'es', 'et', 'fa', 'fi', 'fil', 'fr', 'gu', 'he',
+      'hi', 'hr', 'hu', 'id', 'it', 'ja', 'kn', 'ko', 'lt', 'lv',
+      'ml', 'mr', 'ms', 'nb', 'nl', 'pl', 'pt-BR', 'pt-PT', 'ro', 'ru',
+      'sk', 'sl', 'sr', 'sv', 'sw', 'ta', 'te', 'th', 'tr', 'uk',
+      'vi', 'zh-CN', 'zh-TW',
+    ],
+
+    # Pseudo locales are special locales which are used for testing and
+    # debugging. They don't get copied to the final app. For more info,
+    # check out https://www.chromium.org/developers/testing/fake-bidi
+    'pseudo_locales': [
+      'fake-bidi',
+    ],
+
+    'grit_defines': [],
+
+    # If debug_devtools is set to 1, JavaScript files for DevTools are
+    # stored as-is and loaded from disk. Otherwise, a concatenated file
+    # is stored in resources.pak. It is still possible to load JS files
+    # from disk by passing the --debug-devtools cmdline switch.
+    'debug_devtools%': 0,
+
+    # The Java Bridge is not compiled in by default.
+    'java_bridge%': 0,
+
+    # Code signing for iOS binaries.  The bots need to be able to disable this.
+    'chromium_ios_signing%': 1,
+
+    # This flag is only used when disable_nacl==0 and disables all those
+    # subcomponents which would require the installation of a native_client
+    # untrusted toolchain.
+    'disable_nacl_untrusted%': 0,
+
+    # The PNaCl toolchain does not support sanitizers. Disabled by default.
+    'enable_nacl_nonsfi_test%': 0,
+
+    # Disable Dart by default.
+    'enable_dart%': 0,
+
+    # Copy out the setting of disable_nacl.
+    'disable_nacl%': '<(disable_nacl)',
+
+    # Portable Native Client is enabled by default.
+    'disable_pnacl%': 0,
+
+    # Whether to build a full debug version for the Debug configuration on
+    # Android. Compared to the full debug version, the default Debug
+    # configuration on Android has no full V8 debug support and uses size
+    # optimization and linker GC sections, so that we can build a debug version
+    # with acceptable size and performance.
+    'android_full_debug%': 0,
+
+    # Contains data about the attached devices for gyp_managed_install.
+    'build_device_config_path': '<(PRODUCT_DIR)/build_devices.cfg',
+
+    'sas_dll_exists': '<!pymod_do_main(dir_exists "<(sas_dll_path)")',
+    'wix_exists': '<!pymod_do_main(dir_exists "<(wix_path)")',
+
+    'windows_sdk_path%': 'C:/Program Files (x86)/Windows Kits/8.1',
+    'directx_sdk_default_path': '<(DEPTH)/third_party/directxsdk/files',
+
+    # Whether we are using the rlz library or not.  Platforms like Android send
+    # rlz codes for searches but do not use the library.
+    'enable_rlz_support%': 0,
+    'enable_rlz%': 0,
+
+    # Turns on the i18n support in V8.
+    'v8_enable_i18n_support': 1,
+
+    # Compile d8 for the host toolset.
+    'v8_toolset_for_d8': 'host',
+
+    # Use brlapi from brltty for braille display support.
+    'use_brlapi%': 0,
+
+    # Relative path to icu.gyp from this file.
+    'icu_gyp_path': '../third_party/icu/icu.gyp',
+
+    # IPC fuzzer is disabled by default.
+    'enable_ipc_fuzzer%': 0,
+
+    # Force disable libstdc++ debug mode.
+    'disable_glibcxx_debug%': 0,
+
+    # Set to 1 to compile with MSE support for MPEG2 TS
+    'enable_mpeg2ts_stream_parser%': 0,
+
+    # Support ChromeOS touchpad gestures with ozone.
+    'use_evdev_gestures%': 0,
+
+    # Default ozone platform (if no --ozone-platform flag).
+    'ozone_platform%': "",
+
+    # Ozone platforms to include in the build.
+    'ozone_platform_caca%': 0,
+    'ozone_platform_cast%': 0,
+    'ozone_platform_drm%': 0,
+    'ozone_platform_egltest%': 0,
+    'ozone_platform_gbm%': 0,
+    'ozone_platform_ozonex%': 0,
+    'ozone_platform_test%': 0,
+
+    # Experiment: http://crbug.com/426914
+    'envoy%': 0,
+
+    # Used to set libjpeg_gyp_path. Chrome OS ui/gfx/gfx.gyp uses the IJG path
+    # for robust login screen decoding.
+    'libjpeg_ijg_gyp_path': '<(DEPTH)/third_party/libjpeg/libjpeg.gyp',
+    'libjpeg_turbo_gyp_path': '<(DEPTH)/third_party/libjpeg_turbo/libjpeg.gyp',
+
+    'conditions': [
+      ['buildtype=="Official"', {
+        # Continue to embed build metadata in Official builds, basically the
+        # time it was built.
+        # TODO(maruel): This decision should be revisited because having an
+        # official deterministic build has high value too, but the MSVC toolset
+        # can't generate anything deterministic with WPO enabled AFAIK.
+        'dont_embed_build_metadata%': 0,
+      }],
+      # Enable the Syzygy optimization step for the official builds.
+      ['OS=="win" and buildtype=="Official" and syzyasan!=1 and clang!=1', {
+        'syzygy_optimize%': 1,
+      }, {
+        'syzygy_optimize%': 0,
+      }],
+      # Get the binutils version so we can enable debug fission where possible.
+      ['os_posix==1 and OS!="mac" and OS!="ios"', {
+        'conditions': [
+          # compiler_version doesn't work with clang
+          # TODO(mithro): Land https://codereview.chromium.org/199793014/ so
+          # compiler_version works with clang.
+          # TODO(glider): set clang to 1 earlier for ASan and TSan builds so
+          # that it takes effect here.
+          ['clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan==0 and ubsan_vptr==0', {
+            'binutils_version%': '<!pymod_do_main(compiler_version target assembler)',
+          }],
+          # On Android we know the binutils version in the toolchain.
+          ['OS=="android"', {
+            'binutils_version%': 222,
+          }],
+          ['host_arch=="x64"', {
+            'binutils_dir%': 'third_party/binutils/Linux_x64/Release/bin',
+          }],
+          ['host_arch=="ia32"', {
+            'binutils_dir%': 'third_party/binutils/Linux_ia32/Release/bin',
+          }],
+          # Our version of binutils in third_party/binutils
+          ['linux_use_bundled_binutils==1', {
+            'binutils_version%': 224,
+          }],
+        ],
+      }, {
+        'binutils_version%': 0,
+      }],
+      # The version of GCC in use, set later in platforms that use GCC and have
+      # not explicitly chosen to build with clang. Currently, this means all
+      # platforms except Windows, Mac and iOS.
+      # TODO(glider): set clang to 1 earlier for ASan and TSan builds so that
+      # it takes effect here.
+      ['os_posix==1 and OS!="mac" and OS!="ios" and clang==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan_vptr==0', {
+        'conditions': [
+          ['OS=="android"', {
+            'host_gcc_version%': '<!pymod_do_main(compiler_version host compiler)',
+            # We directly set the gcc version since we know what we use.
+            'gcc_version%': 49,
+          }, {
+            'host_gcc_version%': '<!pymod_do_main(compiler_version host compiler)',
+            'gcc_version%': '<!pymod_do_main(compiler_version target compiler)',
+          }],
+        ],
+      }, {
+        'host_gcc_version%': 0,
+        'gcc_version%': 0,
+      }],
+      ['OS=="win" and "<!pymod_do_main(dir_exists <(directx_sdk_default_path))"=="True"', {
+        'directx_sdk_path%': '<(directx_sdk_default_path)',
+      }, {
+        'directx_sdk_path%': '$(DXSDK_DIR)',
+      }],
+      ['OS=="win"', {
+        'windows_driver_kit_path%': '$(WDK_DIR)',
+      }],
+      ['os_posix==1 and OS!="mac" and OS!="ios"', {
+        'conditions': [
+          ['target_arch=="mipsel" or target_arch=="mips64el"', {
+            'werror%': '',
+            'disable_nacl%': 1,
+            'nacl_untrusted_build%': 0,
+            'use_allocator%': 'none',
+          }],
+          # Use a 64-bit linker to avoid running out of address space. The
+          # buildbots should have a 64-bit kernel and a 64-bit libc installed.
+          ['host_arch=="ia32" and target_arch=="ia32"', {
+            # TODO(thestig) This is a horrible way to force the desired
+            # configuration. Our gyp variable scoping is likely wrong and
+            # needs to be cleaned up. The GN configuration should be changed
+            # to match.
+            'binutils_version%': 224,
+            'linux_use_bundled_binutils%': '1',
+            'linux_use_bundled_gold%': '1',
+            'binutils_dir%': 'third_party/binutils/Linux_x64/Release/bin',
+          }],
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'linux_dump_symbols%': 1,
+
+            # Omit unwind support in official release builds to save space. We
+            # can use breakpad for these builds.
+            'release_unwind_tables%': 0,
+          }],
+        ],
+      }],  # os_posix==1 and OS!="mac" and OS!="ios"
+      ['OS=="ios"', {
+        'disable_nacl%': 1,
+        'enable_background%': 0,
+        'icu_use_data_file_flag%': 1,
+        'enable_web_speech%': 0,
+        'use_system_libxml%': 1,
+        'use_system_sqlite%': 1,
+        'locales==': [
+          'ar', 'ca', 'cs', 'da', 'de', 'el', 'en-GB', 'en-US', 'es', 'es-MX',
+          'fi', 'fr', 'he', 'hi', 'hr', 'hu', 'id', 'it', 'ja', 'ko', 'ms',
+          'nb', 'nl', 'pl', 'pt', 'pt-PT', 'ro', 'ru', 'sk', 'sv', 'th', 'tr',
+          'uk', 'vi', 'zh-CN', 'zh-TW',
+        ],
+
+        # iOS SDK and deployment target support.  The |ios_sdk| value is left
+        # blank so that when it is set in the project files it will be the
+        # "current" iOS SDK.  Forcing a specific SDK even if it is "current"
+        # causes Xcode to spit out a warning for every single project file for
+        # not using the "current" SDK.
+        'ios_sdk%': '',
+        'ios_sdk_path%': '',
+        'ios_deployment_target%': '7.0',
+
+        'conditions': [
+          # ios_product_name is set to the name of the .app bundle as it should
+          # appear on disk.
+          ['branding=="Chrome"', {
+            'ios_product_name%': 'Chrome',
+          }, { # else: branding!="Chrome"
+            'ios_product_name%': 'Chromium',
+          }],
+          ['branding=="Chrome" and buildtype=="Official"', {
+            'ios_breakpad%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'ios_breakpad%': 0,
+          }],
+        ],
+      }],  # OS=="ios"
+      ['OS=="android"', {
+        # Location of Android NDK.
+        'variables': {
+          'variables': {
+            # Standard libraries can use the relative path to the NDK.
+            'android_ndk_root%': '../../third_party/android_tools/ndk/',
+            # Unfortunately, it is required to use the absolute path to the SDK
+            # because it is passed to ant, which uses a different relative path
+            # from GYP.
+            'android_sdk_root%': '<!(cd <(DEPTH) && pwd -P)/third_party/android_tools/sdk/',
+            # Similarly, gdbserver and the Android toolchain need to use the
+            # absolute path to the NDK because they are used at different levels
+            # in the GYP files.
+            'android_ndk_absolute_root%': '<!(cd <(DEPTH) && pwd -P)/third_party/android_tools/ndk/',
+            'android_host_arch%': '<!(uname -m)',
+            # Android API-level of the SDK used for compilation.
+            'android_sdk_version%': '22',
+            'android_sdk_build_tools_version%': '22.0.1',
+            'host_os%': "<!(uname -s | sed -e 's/Linux/linux/;s/Darwin/mac/')",
+          },
+          # Copy conditionally-set variables out one scope.
+          'android_ndk_root%': '<(android_ndk_root)',
+          'android_ndk_absolute_root%': '<(android_ndk_absolute_root)',
+          'android_sdk_root%': '<(android_sdk_root)',
+          'android_sdk_version%': '<(android_sdk_version)',
+          'android_libcpp_root': '<(android_ndk_root)/sources/cxx-stl/llvm-libc++',
+          'host_os%': '<(host_os)',
+
+          'android_sdk%': '<(android_sdk_root)/platforms/android-<(android_sdk_version)',
+          # Android SDK build tools (e.g. dx, aidl)
+          'android_sdk_tools%': '<(android_sdk_root)/build-tools/<(android_sdk_build_tools_version)',
+
+          # Android API level 16 is JB (Android 4.1), the minimum platform
+          # requirement for Chrome on Android; we use it for native code
+          # compilation.
+          'conditions': [
+            ['target_arch == "ia32"', {
+              'android_app_abi%': 'x86',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-x86/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-16/arch-x86',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/x86-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "x64"', {
+              'android_app_abi%': 'x86_64',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-x86_64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-x86_64',
+              'android_ndk_lib_dir%': 'usr/lib64',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/x86_64-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch=="arm"', {
+              'conditions': [
+                ['arm_version<7', {
+                  'android_app_abi%': 'armeabi',
+                }, {
+                  'android_app_abi%': 'armeabi-v7a',
+                }],
+              ],
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-arm/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-16/arch-arm',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/arm-linux-androideabi-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "arm64"', {
+              'android_app_abi%': 'arm64-v8a',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-arm64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-arm64',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/aarch64-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "mipsel"', {
+              'android_app_abi%': 'mips',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-mips/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-16/arch-mips',
+              'android_ndk_lib_dir%': 'usr/lib',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/mipsel-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+            ['target_arch == "mips64el"', {
+              'android_app_abi%': 'mips64',
+              'android_gdbserver%': '<(android_ndk_absolute_root)/prebuilt/android-mips64/gdbserver/gdbserver',
+              'android_ndk_sysroot%': '<(android_ndk_root)/platforms/android-21/arch-mips64',
+              'android_ndk_lib_dir%': 'usr/lib64',
+              'android_toolchain%': '<(android_ndk_absolute_root)/toolchains/mips64el-linux-android-4.9/prebuilt/<(host_os)-<(android_host_arch)/bin',
+            }],
+          ],
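+          # Example (illustrative): for target_arch=="arm" on a Linux x86_64
+          # host, android_toolchain resolves to
+          # <ndk>/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64/bin.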
+        },
+        # Copy conditionally-set variables out one scope.
+        'android_app_abi%': '<(android_app_abi)',
+        'android_gdbserver%': '<(android_gdbserver)',
+        'android_ndk_root%': '<(android_ndk_root)',
+        'android_ndk_sysroot%': '<(android_ndk_sysroot)',
+        'android_sdk_root%': '<(android_sdk_root)',
+        'android_sdk_version%': '<(android_sdk_version)',
+        'android_toolchain%': '<(android_toolchain)',
+
+        'android_ndk_include': '<(android_ndk_sysroot)/usr/include',
+        'android_ndk_lib': '<(android_ndk_sysroot)/<(android_ndk_lib_dir)',
+        'android_sdk_tools%': '<(android_sdk_tools)',
+        'android_aapt_path%': '<(android_sdk_tools)/aapt',
+        'android_sdk%': '<(android_sdk)',
+        'android_sdk_jar%': '<(android_sdk)/android.jar',
+
+        'android_libcpp_root': '<(android_libcpp_root)',
+        'android_libcpp_include': '<(android_libcpp_root)/libcxx/include',
+        'android_libcpp_libs_dir%': '<(android_libcpp_root)/libs/<(android_app_abi)',
+        'host_os%': '<(host_os)',
+
+        # Location of the "objcopy" binary, used by both gyp and scripts.
+        'android_objcopy%' : '<!(/bin/echo -n <(android_toolchain)/*-objcopy)',
+
+        # Location of the "strip" binary, used by both gyp and scripts.
+        'android_strip%' : '<!(/bin/echo -n <(android_toolchain)/*-strip)',
+
+        # Location of the "readelf" binary.
+        'android_readelf%' : '<!(/bin/echo -n <(android_toolchain)/*-readelf)',
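+
+        # Example (illustrative): for target_arch=="arm" the globs above match
+        # the prefixed NDK binutils, e.g. arm-linux-androideabi-objcopy,
+        # arm-linux-androideabi-strip and arm-linux-androideabi-readelf.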
+
+        # Determines whether we should optimize JNI generation at the cost of
+        # breaking assumptions in the build system that when inputs have changed
+        # the outputs should always change as well.  This is meant purely for
+        # developer builds, to avoid spurious re-linking of native files.
+        'optimize_jni_generation%': '<(optimize_jni_generation)',
+
+        # Use OpenSSL's struct X509 to represent certificates.
+        'use_openssl_certs%': 1,
+
+        'proprietary_codecs%': '<(proprietary_codecs)',
+        'safe_browsing%': 2,
+        'enable_web_speech%': 0,
+        'java_bridge%': 1,
+        'build_ffmpegsumo%': 0,
+        'use_allocator%': 'none',
+
+        # Disable Native Client.
+        'disable_nacl%': 1,
+
+        # Android does not support background apps.
+        'enable_background%': 0,
+
+        # Sessions are stored separately on the Java side.
+        'enable_session_service%': 0,
+
+        'p2p_apis%' : 0,
+
+        'gtest_target_type%': 'shared_library',
+      }],  # OS=="android"
+      ['embedded==1', {
+        'use_system_fontconfig%': 0,
+      }, {
+        'use_system_fontconfig%': 1,
+      }],
+      ['chromecast==1', {
+        'enable_mpeg2ts_stream_parser%': 1,
+        'ffmpeg_branding%': 'ChromeOS',
+        'ozone_platform_ozonex%': 1,
+        'use_custom_freetype%': 0,
+        'use_playready%': 0,
+        'conditions': [
+          ['target_arch=="arm"', {
+            'arm_arch%': '',
+            'arm_tune%': 'cortex-a9',
+            'arm_thumb%': 1,
+            'video_hole%': 1,
+          }],
+        ],
+      }],
+      ['chromecast==1 and OS!="android"', {
+        'ozone_platform_cast%': 1
+      }],
+      ['OS=="linux" and target_arch!="mipsel"', {
+        'clang%': 1,
+      }],  # OS=="mac"
+      ['OS=="mac"', {
+        'conditions': [
+          # All Chrome builds have breakpad symbols, but only process the
+          # symbols from official builds.
+          ['(branding=="Chrome" and buildtype=="Official")', {
+            'mac_strip_release%': 1,
+          }],
+        ],
+      }],  # OS=="mac"
+      ['OS=="mac" or OS=="ios"', {
+        'clang%': 1,
+
+        'variables': {
+          # Mac OS X SDK and deployment target support.  The SDK identifies
+          # the version of the system headers that will be used, and
+          # corresponds to the MAC_OS_X_VERSION_MAX_ALLOWED compile-time
+          # macro.  "Maximum allowed" refers to the operating system version
+          # whose APIs are available in the headers.  The deployment target
+          # identifies the minimum system version that the built products are
+          # expected to function on.  It corresponds to the
+          # MAC_OS_X_VERSION_MIN_REQUIRED compile-time macro.  To ensure these
+          # macros are available, #include <AvailabilityMacros.h>.  Additional
+          # documentation on these macros is available at
+          # http://developer.apple.com/mac/library/technotes/tn2002/tn2064.html#SECTION3
+          # Chrome normally builds with the Mac OS X 10.6 SDK and sets the
+          # deployment target to 10.6.  Other projects, such as O3D, may
+          # override these defaults.
+
+          # Normally, mac_sdk_min is used to find an SDK that Xcode knows
+          # about that is at least the specified version. In official builds,
+          # the SDK must match mac_sdk_min exactly. If the SDK is installed
+          # someplace that Xcode doesn't know about, set mac_sdk_path to the
+          # path to the SDK; when set to a non-empty string, SDK detection
+          # based on mac_sdk_min will be bypassed entirely.
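+          #
+          # Example (illustrative, using gyp's standard -D variable override):
+          #   gyp ... -Dmac_sdk_min=10.9 -Dmac_sdk_path=/path/to/MacOSX10.9.sdk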
+          'conditions': [
+            ['OS=="ios"', {
+              'mac_sdk_min%': '10.8',
+            }, {  # else OS!="ios"
+              'mac_sdk_min%': '10.6',
+            }],
+          ],
+          'mac_sdk_path%': '',
+
+          'mac_deployment_target%': '10.6',
+        },
+
+        'mac_sdk_min': '<(mac_sdk_min)',
+        'mac_sdk_path': '<(mac_sdk_path)',
+        'mac_deployment_target': '<(mac_deployment_target)',
+
+        # Compile in Breakpad support by default so that it can be
+        # tested, even if it is not enabled by default at runtime.
+        'mac_breakpad_compiled_in%': 1,
+        'conditions': [
+          # mac_product_name is set to the name of the .app bundle as it should
+          # appear on disk.  This duplicates data from
+          # chrome/app/theme/chromium/BRANDING and
+          # chrome/app/theme/google_chrome/BRANDING, but is necessary to get
+          # these names into the build system.
+          ['branding=="Chrome"', {
+            'mac_product_name%': 'Google Chrome',
+          }, { # else: branding!="Chrome"
+            'mac_product_name%': 'Chromium',
+          }],
+          # Official mac builds require a specific OS X SDK, but iOS and
+          # non-official mac builds do not.
+          ['branding=="Chrome" and buildtype=="Official" and OS=="mac"', {
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py --verify <(mac_sdk_min) --sdk_path=<(mac_sdk_path))',
+          }, {
+            'mac_sdk%': '<!(python <(DEPTH)/build/mac/find_sdk.py <(mac_sdk_min))',
+          }],
+          ['branding=="Chrome" and buildtype=="Official"', {
+            # Enable uploading crash dumps.
+            'mac_breakpad_uploads%': 1,
+            # Enable dumping symbols at build time for use by Mac Breakpad.
+            'mac_breakpad%': 1,
+            # Enable Keystone auto-update support.
+            'mac_keystone%': 1,
+          }, { # else: branding!="Chrome" or buildtype!="Official"
+            'mac_breakpad_uploads%': 0,
+            'mac_breakpad%': 0,
+            'mac_keystone%': 0,
+          }],
+        ],
+      }],  # OS=="mac" or OS=="ios"
+      ['OS=="win"', {
+        'conditions': [
+          # This is the architecture convention used in WinSDK paths.
+          ['target_arch=="ia32"', {
+            'winsdk_arch%': 'x86',
+          },{
+            'winsdk_arch%': '<(target_arch)',
+          }],
+          ['component=="shared_library" or MSVS_VERSION == "2015"', {
+            # TODO(scottmg): The allocator shimming doesn't work on the 2015 CRT
+            # and we are hoping to be able to remove it if an additional feature
+            # lands in the 2015 CRT API. For now, don't shim and revisit once
+            # VS2015 is RTM: http://crbug.com/481611.
+            'win_use_allocator_shim%': 0,
+          }],
+          ['component=="static_library"', {
+            # Turn on multiple dll by default on Windows when in static_library.
+            'chrome_multiple_dll%': 1,
+          }],
+          ['asan==1 or syzyasan==1', {
+            'win_use_allocator_shim%': 0,
+          }],
+          ['syzyasan==1', {
+            'kasko%': 1,
+          }],
+          ['component=="shared_library" and "<(GENERATOR)"=="ninja"', {
+            # Only enabled by default for ninja because it's buggy in VS.
+            # Not enabled for component=static_library because some targets
+            # are too large and the toolchain fails due to the size of the
+            # .obj files.
+            'incremental_chrome_dll%': 1,
+          }],
+          # Don't do incremental linking for large modules on 32-bit or when
+          # component=static_library as the toolchain fails due to the size of
+          # the .ilk files.
+          ['MSVS_OS_BITS==32 or component=="static_library"', {
+            'msvs_large_module_debug_link_mode%': '1',  # No
+          },{
+            'msvs_large_module_debug_link_mode%': '2',  # Yes
+          }],
+        ],
+        'nacl_win64_defines': [
+          # This flag is used to minimize dependencies when building
+          # Native Client loader for 64-bit Windows.
+          'NACL_WIN64',
+        ],
+        # Need to include allocator target, but exclude tcmalloc files.
+        'use_allocator%': 'winheap',
+      }],
+
+      ['os_posix==1 and chromeos==0 and OS!="android" and OS!="ios" and embedded==0', {
+        'use_cups%': 1,
+      }, {
+        'use_cups%': 0,
+      }],
+
+      ['enable_plugins==1 and (OS=="linux" or OS=="mac" or OS=="win") and chromecast==0', {
+        'enable_pepper_cdms%': 1,
+      }, {
+        'enable_pepper_cdms%': 0,
+      }],
+
+      ['OS=="android" or chromecast==1', {
+        'enable_browser_cdms%': 1,
+      }, {
+        'enable_browser_cdms%': 0,
+      }],
+
+      # Native Client glibc toolchain is enabled
+      # by default except on arm, mips and mips64.
+      ['target_arch=="arm" or target_arch=="mipsel" or target_arch=="mips64el"', {
+        'disable_glibc%': 1,
+      }, {
+        'disable_glibc%': 0,
+      }],
+
+      # Set the relative path from this file to the GYP file of the JPEG
+      # library used by Chromium.
+      ['use_system_libjpeg==1 or use_libjpeg_turbo==0', {
+        # Configuration for using the system libjpeg is here.
+        'libjpeg_gyp_path': '<(libjpeg_ijg_gyp_path)',
+      }, {
+        'libjpeg_gyp_path': '<(libjpeg_turbo_gyp_path)',
+      }],
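+      # Illustrative use in a dependent target (assuming the usual libjpeg
+      # target name): 'dependencies': ['<(libjpeg_gyp_path):libjpeg'].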
+
+      # Options controlling the use of GConf (the classic GNOME configuration
+      # system) and GIO, which contains GSettings (the new GNOME config system).
+      ['chromeos==1 or embedded==1', {
+        'use_gconf%': 0,
+        'use_gio%': 0,
+      }, {
+        'use_gconf%': 1,
+        'use_gio%': 1,
+      }],
+
+      # Set up -D and -E flags passed into grit.
+      ['branding=="Chrome"', {
+        # TODO(mmoss) The .grd files look for _google_chrome, but for
+        # consistency they should look for google_chrome_build like C++.
+        'grit_defines': ['-D', '_google_chrome',
+                         '-E', 'CHROMIUM_BUILD=google_chrome'],
+      }, {
+        'grit_defines': ['-D', '_chromium',
+                         '-E', 'CHROMIUM_BUILD=chromium'],
+      }],
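+      # Illustrative use: the -D defines above become booleans in .grd
+      # conditionals, e.g. <if expr="_google_chrome"> ... </if>.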
+      ['chromeos==1', {
+        'grit_defines': ['-D', 'chromeos', '-D', 'scale_factors=2x'],
+      }],
+      ['desktop_linux==1', {
+        'grit_defines': ['-D', 'desktop_linux'],
+      }],
+      ['toolkit_views==1', {
+        'grit_defines': ['-D', 'toolkit_views'],
+      }],
+      ['use_aura==1', {
+        'grit_defines': ['-D', 'use_aura'],
+      }],
+      ['use_ash==1', {
+        'grit_defines': ['-D', 'use_ash'],
+      }],
+      ['use_nss_certs==1', {
+        'grit_defines': ['-D', 'use_nss_certs'],
+      }],
+      ['use_ozone==1', {
+        'grit_defines': ['-D', 'use_ozone'],
+      }],
+      ['image_loader_extension==1', {
+        'grit_defines': ['-D', 'image_loader_extension'],
+      }],
+      ['remoting==1', {
+        'grit_defines': ['-D', 'remoting'],
+      }],
+      ['use_titlecase_in_grd==1', {
+        'grit_defines': ['-D', 'use_titlecase'],
+      }],
+      ['use_third_party_translations==1', {
+        'grit_defines': ['-D', 'use_third_party_translations'],
+        'locales': [
+          'ast', 'bs', 'ca@valencia', 'en-AU', 'eo', 'eu', 'gl', 'hy', 'ia',
+          'ka', 'ku', 'kw', 'ms', 'ug'
+        ],
+      }],
+      ['OS=="android"', {
+        'grit_defines': [
+          '-t', 'android',
+          '-E', 'ANDROID_JAVA_TAGGED_ONLY=true',
+          '--no-output-all-resource-defines',
+        ],
+      }],
+      ['OS=="mac" or OS=="ios"', {
+        'grit_defines': ['-D', 'scale_factors=2x'],
+      }],
+      ['OS == "ios"', {
+        'variables': {
+          'enable_coverage%': 0,
+        },
+        'grit_defines': [
+          '-t', 'ios',
+          '--no-output-all-resource-defines',
+        ],
+        # iOS uses a whitelist to filter resources.
+        'grit_whitelist%': '<(DEPTH)/build/ios/grit_whitelist.txt',
+
+        # Enable host builds when generating with ninja-ios.
+        'conditions': [
+          ['"<(GENERATOR)"=="ninja"', {
+            'host_os%': "mac",
+          }],
+
+          # Use the version of clang shipped with Xcode when building official
+          # version of Chrome for iOS.
+          #
+          # TODO(eugenebut): Remove enable_coverage check once
+          # libclang_rt.profile_ios.a is bundled with Chromium's clang.
+          # http://crbug.com/450379
+          #
+          # TODO(sdefresne): Remove xcodebuild version check once clang ToT
+          # supports "nullable" and related. https://crbug.com/499448
+          ['buildtype=="Official" or enable_coverage or '
+            '<!(xcodebuild -version|awk \'/Xcode/{print ($2 >= 7.0)}\')==1', {
+            'clang_xcode%': 1,
+          }],
+        ],
+      }],
+      ['enable_extensions==1', {
+        'grit_defines': ['-D', 'enable_extensions'],
+      }],
+      ['enable_plugins!=0', {
+        'grit_defines': ['-D', 'enable_plugins'],
+      }],
+      ['enable_basic_printing==1 or enable_print_preview==1', {
+        'grit_defines': ['-D', 'enable_printing'],
+      }],
+      ['enable_print_preview==1', {
+        'grit_defines': ['-D', 'enable_print_preview'],
+      }],
+      ['enable_themes==1', {
+        'grit_defines': ['-D', 'enable_themes'],
+      }],
+      ['enable_app_list==1', {
+        'grit_defines': ['-D', 'enable_app_list'],
+      }],
+      ['enable_settings_app==1', {
+        'grit_defines': ['-D', 'enable_settings_app'],
+      }],
+      ['enable_google_now==1', {
+        'grit_defines': ['-D', 'enable_google_now'],
+      }],
+      ['use_concatenated_impulse_responses==1', {
+        'grit_defines': ['-D', 'use_concatenated_impulse_responses'],
+      }],
+      ['enable_media_router==1', {
+        'grit_defines': ['-D', 'enable_media_router'],
+      }],
+      ['enable_webrtc==1', {
+        'grit_defines': ['-D', 'enable_webrtc'],
+      }],
+      ['enable_hangout_services_extension==1', {
+        'grit_defines': ['-D', 'enable_hangout_services_extension'],
+      }],
+      ['enable_task_manager==1', {
+        'grit_defines': ['-D', 'enable_task_manager'],
+      }],
+      ['notifications==1', {
+        'grit_defines': ['-D', 'enable_notifications'],
+      }],
+      ['enable_wifi_bootstrapping==1', {
+        'grit_defines': ['-D', 'enable_wifi_bootstrapping'],
+      }],
+      ['mac_views_browser==1', {
+        'grit_defines': ['-D', 'mac_views_browser'],
+      }],
+      ['enable_resource_whitelist_generation==1 and OS!="win"', {
+        'grit_rc_header_format': ['-h', '#define {textual_id} _Pragma("whitelisted_resource_{numeric_id}") {numeric_id}'],
+      }],
+      ['enable_resource_whitelist_generation==1 and OS=="win"', {
+        'grit_rc_header_format': ['-h', '#define {textual_id} __pragma(message("whitelisted_resource_{numeric_id}")) {numeric_id}'],
+      }],
+      ['enable_mdns==1 or OS=="mac"', {
+        'grit_defines': ['-D', 'enable_service_discovery'],
+        'enable_service_discovery%': 1
+      }],
+      ['clang_use_chrome_plugins==1', {
+        'variables': {
+          'conditions': [
+            ['OS!="win"', {
+              'variables': {
+                'conditions': [
+                  ['OS=="mac" or OS=="ios"', {
+                    'clang_lib_path%': '<!(cd <(DEPTH) && pwd -P)/third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.dylib',
+                  }, { # OS != "mac" or OS != "ios"
+                    'clang_lib_path%': '<!(cd <(DEPTH) && pwd -P)/third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.so',
+                  }],
+                ],
+              },
+              'clang_dynlib_flags%': '-Xclang -load -Xclang <(clang_lib_path) ',
+            }, { # OS == "win"
+              # On Windows, the plugin is built directly into clang, so there's
+              # no need to load it dynamically.
+              'clang_dynlib_flags%': '',
+            }],
+            # https://crbug.com/441916
+            ['OS=="android" or OS=="linux" or OS=="mac"', {
+              'clang_plugin_args%': '-Xclang -plugin-arg-find-bad-constructs -Xclang check-templates ',
+            }, { # OS != "linux"
+              'clang_plugin_args%': ''
+            }],
+          ],
+        },
+        # If you change these, also change build/config/clang/BUILD.gn.
+        'clang_chrome_plugins_flags%':
+          '<(clang_dynlib_flags)'
+          '-Xclang -add-plugin -Xclang find-bad-constructs <(clang_plugin_args)',
+      }],
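+      # For example, on Linux the assembled plugin flags are roughly:
+      #   -Xclang -load -Xclang .../libFindBadConstructs.so
+      #   -Xclang -add-plugin -Xclang find-bad-constructs
+      #   -Xclang -plugin-arg-find-bad-constructs -Xclang check-templates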
+      ['asan==1 or msan==1 or lsan==1 or tsan==1', {
+        'clang%': 1,
+        'use_allocator%': 'none',
+        'use_sanitizer_options%': 1,
+      }],
+
+      ['OS=="linux" and asan==0 and msan==0 and lsan==0 and tsan==0', {
+        # PNaCl toolchain Non-SFI build only supports linux OS build.
+        # Also, it does not support sanitizers.
+        'enable_nacl_nonsfi_test%': 1,
+      }],
+      ['asan==1 and OS=="linux" and chromeos==0', {
+        'use_custom_libcxx%': 1,
+      }],
+      ['ubsan==1', {
+        'clang%': 1,
+      }],
+      ['ubsan_vptr==1', {
+        'clang%': 1,
+      }],
+      ['asan==1 and OS=="mac"', {
+        'mac_strip_release': 1,
+      }],
+      ['tsan==1', {
+        'use_custom_libcxx%': 1,
+      }],
+      ['msan==1', {
+        # Use a just-built, MSan-instrumented libc++ instead of the system-wide
+        # libstdc++. This is required to avoid false positive reports whenever
+        # the C++ standard library is used.
+        'use_custom_libcxx%': 1,
+        # Running the V8-generated code on an ARM simulator is a powerful hack
+        # that allows the tool to see the memory accesses from JITted code.
+        # Without this flag, JS code causes false positive reports from MSan.
+        'v8_target_arch': 'arm64',
+      }],
+
+      ['OS=="linux" and clang_type_profiler==1', {
+        'clang%': 1,
+        'clang_use_chrome_plugins%': 0,
+        'conditions': [
+          ['host_arch=="x64"', {
+            'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_x64',
+          }],
+          ['host_arch=="ia32"', {
+            # 32-bit Clang is unsupported.  It may not build.  Put your 32-bit
+            # Clang in this directory at your own risk if needed for some
+            # purpose (e.g. to compare 32-bit and 64-bit behavior like memory
+            # usage).  Any failure by this compiler should not close the tree.
+            'make_clang_dir%': 'third_party/llvm-allocated-type/Linux_ia32',
+          }],
+        ],
+      }],
+
+      # On valgrind bots, override the optimizer settings so we don't inline too
+      # much and make the stacks harder to figure out.
+      #
+      # TODO(rnk): Kill off variables that no one else uses and just implement
+      # them under a build_for_tool== condition.
+      ['build_for_tool=="memcheck" or build_for_tool=="tsan"', {
+        # gcc flags
+        'mac_debug_optimization': '1',
+        'mac_release_optimization': '1',
+        'release_optimize': '1',
+        'no_gc_sections': 1,
+        'debug_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                              '-fno-builtin -fno-optimize-sibling-calls',
+        'release_extra_cflags': '-g -fno-inline -fno-omit-frame-pointer '
+                                '-fno-builtin -fno-optimize-sibling-calls',
+
+        # MSVS flags for TSan on Pin and Windows.
+        'win_debug_RuntimeChecks': '0',
+        'win_debug_disable_iterator_debugging': '1',
+        'win_debug_Optimization': '1',
+        'win_debug_InlineFunctionExpansion': '0',
+        'win_release_InlineFunctionExpansion': '0',
+        'win_release_OmitFramePointers': '0',
+
+        'use_allocator': 'tcmalloc',
+        'release_valgrind_build': 1,
+        'werror': '',
+        'component': 'static_library',
+        'use_system_zlib': 0,
+      }],
+
+      # Build tweaks for DrMemory.
+      # TODO(rnk): Combine with tsan config to share the builder.
+      # http://crbug.com/108155
+      ['build_for_tool=="drmemory"', {
+        # These runtime checks force initialization of stack vars which blocks
+        # DrMemory's uninit detection.
+        'win_debug_RuntimeChecks': '0',
+        # Iterator debugging is slow.
+        'win_debug_disable_iterator_debugging': '1',
+        # Try to disable optimizations that mess up stacks in a release build.
+        # DrM-i#1054 (https://github.com/DynamoRIO/drmemory/issues/1054)
+        # /O2 and /Ob0 (disable inline) cannot be used together because of a
+        # compiler bug, so we use /Ob1 instead.
+        'win_release_InlineFunctionExpansion': '1',
+        'win_release_OmitFramePointers': '0',
+        # Ditto for debug, to support bumping win_debug_Optimization.
+        'win_debug_InlineFunctionExpansion': 0,
+        'win_debug_OmitFramePointers': 0,
+        # Keep the code under #ifndef NVALGRIND.
+        'release_valgrind_build': 1,
+      }],
+
+      # RLZ library is used on Win, Mac, iOS and ChromeOS.
+      ['OS=="win" or OS=="mac" or OS=="ios" or chromeos==1', {
+        'enable_rlz_support%': 1,
+        'conditions': [
+          # RLZ is enabled for "Chrome" builds.
+          ['branding=="Chrome"', {
+            'enable_rlz%': 1,
+          }],
+        ],
+      }],
+
+      # Set default compiler flags depending on ARM version.
+      ['arm_version==6', {
+        'arm_arch%': 'armv6',
+        'arm_tune%': '',
+        'arm_fpu%': 'vfp',
+        'arm_float_abi%': 'softfp',
+        'arm_thumb%': 0,
+      }],
+      ['arm_version==7', {
+        'arm_arch%': 'armv7-a',
+        'arm_tune%': 'generic-armv7-a',
+        'conditions': [
+          ['arm_neon==1', {
+            'arm_fpu%': 'neon',
+          }, {
+            'arm_fpu%': 'vfpv3-d16',
+          }],
+          ['OS=="android"', {
+            'arm_float_abi%': 'softfp',
+          }, {
+            'arm_float_abi%': 'hard',
+          }],
+        ],
+        'arm_thumb%': 1,
+      }],
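+      # Illustrative mapping (assuming the usual GCC/clang flag names):
+      # arm_arch, arm_tune, arm_fpu, arm_float_abi and arm_thumb translate to
+      # -march=, -mtune=, -mfpu=, -mfloat-abi= and -mthumb, so armv7 with NEON
+      # gives roughly:
+      #   -march=armv7-a -mtune=generic-armv7-a -mfpu=neon -mthumb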
+
+      # Set default compiler flags for MIPS floating-point support.
+      ['target_arch=="mipsel"', {
+        'mips_float_abi%': 'hard',
+      }],
+      ['target_arch=="mipsel" and mips_arch_variant=="r2"', {
+        'mips_fpu_mode%': 'fp32',
+      }],
+
+      # Enable brlapi by default for chromeos.
+      [ 'chromeos==1', {
+        'use_brlapi%': 1,
+      }],
+
+      ['use_ozone==1 and ozone_auto_platforms==1', {
+        # Use test as the default platform.
+        'ozone_platform%': 'test',
+
+        # Build all platforms whose deps are in install-build-deps.sh.
+        # Only these platforms will be compile tested by buildbots.
+        'ozone_platform_drm%': 1,
+        'ozone_platform_test%': 1,
+        'ozone_platform_egltest%': 1,
+      }],
+
+      ['desktop_linux==1 and use_aura==1 and use_x11==1', {
+        'use_clipboard_aurax11%': 1,
+      }],
+
+      ['OS=="win" and use_goma==1', {
+        # goma doesn't support pch yet.
+        'chromium_win_pch': 0,
+        # goma doesn't support PDB yet, so win_z7=1 or fastbuild=1.
+        'conditions': [
+          ['win_z7==0 and fastbuild==0', {
+            'fastbuild': 1,
+          }],
+        ],
+      }],
+
+      ['OS=="win" and (clang==1 or asan==1)', {
+        'chromium_win_pch': 0,
+      }],
+
+      ['host_clang==1', {
+        'host_cc': '<(make_clang_dir)/bin/clang',
+        'host_cxx': '<(make_clang_dir)/bin/clang++',
+      }, {
+        'host_cc': '<!(which gcc)',
+        'host_cxx': '<!(which g++)',
+      }],
+
+      # The seccomp-bpf sandbox is only supported on five architectures
+      # currently.
+      # Do not disable seccomp_bpf anywhere without talking to
+      # security@chromium.org!
+      ['((OS=="linux" or OS=="android") and '
+           '(target_arch=="ia32" or target_arch=="x64" or '
+             'target_arch=="arm" or target_arch=="mipsel" or '
+             'target_arch=="arm64"))', {
+         'use_seccomp_bpf%': 1,
+      }, {
+         'use_seccomp_bpf%': 0,
+      }],
+
+      ['cfi_vptr==1', {
+        'use_lto%': 1,
+      }],
+
+      ['branding=="Chrome" and buildtype=="Official"', {
+        'enable_hangout_services_extension%': 1,
+      }, {
+        'enable_hangout_services_extension%': 0,
+      }],
+    ],
+
+    # The path to the ANGLE library.
+    'angle_path': '<(DEPTH)/third_party/angle',
+
+    # List of default apps to install in new profiles.  The first list contains
+    # the source files as found in svn.  The second list, used only for linux,
+    # contains the destination location for each of the files.  When a crx
+    # is added or removed from the list, the chrome/browser/resources/
+    # default_apps/external_extensions.json file must also be updated.
+    #
+    # README: GN version of these is in the target //chrome:default_apps
+    # (there's no global variable like in GYP). Be sure to update that target
+    # if you change these lists!
+    'default_apps_list': [
+      'browser/resources/default_apps/external_extensions.json',
+      'browser/resources/default_apps/gmail.crx',
+      'browser/resources/default_apps/search.crx',
+      'browser/resources/default_apps/youtube.crx',
+      'browser/resources/default_apps/drive.crx',
+      'browser/resources/default_apps/docs.crx',
+    ],
+    'default_apps_list_linux_dest': [
+      '<(PRODUCT_DIR)/default_apps/external_extensions.json',
+      '<(PRODUCT_DIR)/default_apps/gmail.crx',
+      '<(PRODUCT_DIR)/default_apps/search.crx',
+      '<(PRODUCT_DIR)/default_apps/youtube.crx',
+      '<(PRODUCT_DIR)/default_apps/drive.crx',
+      '<(PRODUCT_DIR)/default_apps/docs.crx',
+    ],
+
+    # Whether to allow building of the GPU-related isolates.
+    'archive_gpu_tests%': 0,
+
+    # Whether to allow building of chromoting-related isolates.
+    'archive_chromoting_tests%': 0,
+  },
+  'target_defaults': {
+    'variables': {
+      # The condition that operates on chromium_code is in a target_conditions
+      # section, and will not have access to the default fallback value of
+      # chromium_code at the top of this file, or to the chromium_code
+      # variable placed at the root variables scope of .gyp files, because
+      # those variables are not set at target scope.  As a workaround,
+      # if chromium_code is not set at target scope, define it in target scope
+      # to contain whatever value it has during early variable expansion.
+      # That's enough to make it available during target conditional
+      # processing.
+      'chromium_code%': '<(chromium_code)',
+
+      'component%': '<(component)',
+
+      'chromecast%': '<(chromecast)',
+
+      # See http://msdn.microsoft.com/en-us/library/aa652360(VS.71).aspx
+      'win_release_Optimization%': '2', # 2 = /O2
+      'win_debug_Optimization%': '0',   # 0 = /Od
+
+      # See http://msdn.microsoft.com/en-us/library/2kxx5t2c(v=vs.80).aspx
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_release_OmitFramePointers%': '0',
+      # Tri-state: blank is default, 1 on, 0 off
+      'win_debug_OmitFramePointers%': '',
+
+      # See http://msdn.microsoft.com/en-us/library/8wtf2dfz(VS.71).aspx
+      'win_debug_RuntimeChecks%': '3',    # 3 = all checks enabled, 0 = off
+
+      # See http://msdn.microsoft.com/en-us/library/47238hez(VS.71).aspx
+      'win_debug_InlineFunctionExpansion%': '',    # empty = default, 0 = off,
+      'win_release_InlineFunctionExpansion%': '2', # 1 = only __inline, 2 = max
+
+      # VS inserts quite a lot of extra checks to algorithms like
+      # std::partial_sort in Debug build which make them O(N^2)
+      # instead of O(N*logN). This is particularly slow under memory
+      # tools like ThreadSanitizer, so we want to be able to disable it.
+      # See http://msdn.microsoft.com/en-us/library/aa985982(v=VS.80).aspx
+      'win_debug_disable_iterator_debugging%': '0',
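+      # (Illustrative: disabling iterator debugging typically amounts to
+      # defining _HAS_ITERATOR_DEBUGGING=0 for debug builds.)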
+
+      # An application manifest fragment to declare compatibility settings for
+      # 'executable' targets. Ignored for other target types.
+      'win_exe_compatibility_manifest%':
+          '<(DEPTH)\\build\\win\\compatibility.manifest',
+
+      'release_extra_cflags%': '',
+      'debug_extra_cflags%': '',
+
+      'release_valgrind_build%': '<(release_valgrind_build)',
+
+      # The non-qualified versions are widely assumed to be *nix-only.
+      'win_release_extra_cflags%': '',
+      'win_debug_extra_cflags%': '',
+
+      # TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+      'enable_wexit_time_destructors%': '<(enable_wexit_time_destructors)',
+
+      # Only used by the Windows build for now.  Can be used to build into a
+      # different output directory, e.g., a build_dir_prefix of VS2010_ would
+      # output files in src/build/VS2010_{Debug,Release}.
+      'build_dir_prefix%': '',
+
+      # Targets are by default not nacl untrusted code.
+      'nacl_untrusted_build%': 0,
+
+      'pnacl_compile_flags': [
+        # pnacl uses the clang compiler so we need to suppress all the
+        # same warnings as we do for clang.
+        # TODO(sbc): Remove these if/when they are removed from the clang
+        # build.
+        '-Wno-unused-function',
+        '-Wno-char-subscripts',
+        '-Wno-c++11-extensions',
+        '-Wno-unnamed-type-template-args',
+      ],
+
+      # By default, Android targets have their exported JNI symbols stripped,
+      # so we test the manual JNI registration code paths that are required
+      # when using the crazy linker. To allow use of native JNI exports (lazily
+      # resolved by the JVM), targets can enable this variable, which will stop
+      # the stripping from happening. Only targets which do not need to be
+      # compatible with the crazy linker are permitted to set this.
+      'use_native_jni_exports%': 0,
+
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+        ['OS=="ios"', {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': 's', # Use -Os unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }, {
+          # See http://gcc.gnu.org/onlinedocs/gcc-4.4.2/gcc/Optimize-Options.html
+          'mac_release_optimization%': '2', # Use -O2 unless overridden
+          'mac_debug_optimization%': '0',   # Use -O0 unless overridden
+        }],
+        ['OS=="android"', {
+          'host_os%': '<(host_os)',  # See comment above chromium_code.
+        }],
+      ],
+      'clang_warning_flags': [
+        '-Wheader-hygiene',
+
+        # Don't die on dtoa code that uses a char as an array index.
+        # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+        '-Wno-char-subscripts',
+
+        # TODO(thakis): This used to be implied by -Wno-unused-function,
+        # which we no longer use. Check if it makes sense to remove
+        # this as well. http://crbug.com/316352
+        '-Wno-unneeded-internal-declaration',
+
+        # Warns on switches on enums that cover all enum values but
+        # also contain a default: branch. Chrome is full of that.
+        '-Wno-covered-switch-default',
+
+        # Warns when a const char[] is converted to bool.
+        '-Wstring-conversion',
+
+        # C++11-related flags:
+
+        # This warns on using ints as initializers for floats in
+        # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+        # which happens in several places in chrome code. Not sure if
+        # this is worth fixing.
+        '-Wno-c++11-narrowing',
+
+        # Clang considers the `register` keyword as deprecated, but e.g.
+        # code generated by flex (used in angle) contains that keyword.
+        # http://crbug.com/255186
+        '-Wno-deprecated-register',
+
+        # TODO(hans): Get this cleaned up, http://crbug.com/428099
+        '-Wno-inconsistent-missing-override',
+
+        # TODO(thakis): Enable this, crbug.com/507717
+        '-Wno-shift-negative-value',
+      ],
+    },
+    'includes': [ 'set_clang_warning_flags.gypi', ],
+    'defines': [
+      # Don't use deprecated V8 APIs anywhere.
+      'V8_DEPRECATION_WARNINGS',
+    ],
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ],
+    'conditions': [
+      ['OS=="mac"', {
+        # When compiling Objective C, warns if a method is used whose
+        # availability is newer than the deployment target.
+        'xcode_settings': { 'WARNING_CFLAGS': ['-Wpartial-availability']},
+      }],
+      ['(OS=="mac" or OS=="ios") and asan==1', {
+        'dependencies': [
+          '<(DEPTH)/build/mac/asan.gyp:asan_dynamic_runtime',
+        ],
+      }],
+      ['OS=="win" and asan==1 and component=="shared_library"', {
+        'dependencies': [
+          '<(DEPTH)/build/win/asan.gyp:asan_dynamic_runtime',
+        ],
+      }],
+      ['OS=="linux" and use_allocator!="none" and clang_type_profiler==1', {
+        'cflags_cc!': ['-fno-rtti'],
+        'cflags_cc+': [
+          '-frtti',
+          '-gline-tables-only',
+          '-fintercept-allocation-functions',
+        ],
+        'defines': ['TYPE_PROFILING'],
+        'dependencies': [
+          '<(DEPTH)/base/allocator/allocator.gyp:type_profiler',
+        ],
+      }],
+      ['branding=="Chrome"', {
+        'defines': ['GOOGLE_CHROME_BUILD'],
+      }, {  # else: branding!="Chrome"
+        'defines': ['CHROMIUM_BUILD'],
+      }],
+      ['OS=="mac" and component=="shared_library"', {
+        'xcode_settings': {
+          'DYLIB_INSTALL_NAME_BASE': '@rpath',
+          'LD_RUNPATH_SEARCH_PATHS': [
+            # For unbundled binaries.
+            '@loader_path/.',
+            # For bundled binaries, to get back from Binary.app/Contents/MacOS.
+            '@loader_path/../../..',
+          ],
+        },
+      }],
+      ['clang==1 or host_clang==1', {
+        # This is here so that all files get recompiled after a clang roll and
+        # when turning clang on or off.
+        # (defines are passed via the command line, and build systems rebuild
+        # things when their commandline changes). Nothing should ever read this
+        # define.
+        'defines': ['CR_CLANG_REVISION=<!(python <(DEPTH)/tools/clang/scripts/update.py --print-revision)'],
+      }],
+      ['enable_rlz==1', {
+        'defines': ['ENABLE_RLZ'],
+      }],
+      ['component=="shared_library"', {
+        'defines': ['COMPONENT_BUILD'],
+      }],
+      ['ui_compositor_image_transport==1', {
+        'defines': ['UI_COMPOSITOR_IMAGE_TRANSPORT'],
+      }],
+      ['use_aura==1', {
+        'defines': ['USE_AURA=1'],
+      }],
+      ['use_ash==1', {
+        'defines': ['USE_ASH=1'],
+      }],
+      ['use_pango==1', {
+        'defines': ['USE_PANGO=1'],
+      }],
+      ['use_cairo==1', {
+        'defines': ['USE_CAIRO=1'],
+      }],
+      ['use_cras==1', {
+        'defines': ['USE_CRAS=1'],
+      }],
+      ['use_ozone==1', {
+        'defines': ['USE_OZONE=1'],
+      }],
+      ['use_default_render_theme==1', {
+        'defines': ['USE_DEFAULT_RENDER_THEME=1'],
+      }],
+      ['use_libjpeg_turbo==1', {
+        'defines': ['USE_LIBJPEG_TURBO=1'],
+      }],
+      ['use_x11==1', {
+        'defines': ['USE_X11=1'],
+      }],
+      ['use_clipboard_aurax11==1', {
+        'defines': ['USE_CLIPBOARD_AURAX11=1'],
+      }],
+      ['enable_one_click_signin==1', {
+        'defines': ['ENABLE_ONE_CLICK_SIGNIN'],
+      }],
+      ['enable_pre_sync_backup==1', {
+        'defines': ['ENABLE_PRE_SYNC_BACKUP'],
+      }],
+      ['image_loader_extension==1', {
+        'defines': ['IMAGE_LOADER_EXTENSION=1'],
+      }],
+      ['profiling==1', {
+        'defines': ['ENABLE_PROFILING=1'],
+      }],
+      ['remoting==1', {
+        'defines': ['ENABLE_REMOTING=1'],
+      }],
+      ['enable_webrtc==1', {
+        'defines': ['ENABLE_WEBRTC=1'],
+      }],
+      ['enable_media_router==1', {
+        'defines': ['ENABLE_MEDIA_ROUTER=1'],
+      }],
+      ['proprietary_codecs==1', {
+        'defines': ['USE_PROPRIETARY_CODECS'],
+        'conditions': [
+          ['enable_mpeg2ts_stream_parser==1', {
+            'defines': ['ENABLE_MPEG2TS_STREAM_PARSER'],
+          }],
+        ],
+      }],
+      ['enable_viewport==1', {
+        'defines': ['ENABLE_VIEWPORT'],
+      }],
+      ['enable_pepper_cdms==1', {
+        'defines': ['ENABLE_PEPPER_CDMS'],
+      }],
+      ['enable_browser_cdms==1', {
+        'defines': ['ENABLE_BROWSER_CDMS'],
+      }],
+      ['configuration_policy==1', {
+        'defines': ['ENABLE_CONFIGURATION_POLICY'],
+      }],
+      ['notifications==1', {
+        'defines': ['ENABLE_NOTIFICATIONS'],
+      }],
+      ['enable_hidpi==1', {
+        'defines': ['ENABLE_HIDPI=1'],
+      }],
+      ['enable_topchrome_md==1', {
+        'defines': ['ENABLE_TOPCHROME_MD=1'],
+      }],
+      ['native_memory_pressure_signals==1', {
+        'defines': ['SYSTEM_NATIVELY_SIGNALS_MEMORY_PRESSURE'],
+      }],
+      ['use_udev==1', {
+        'defines': ['USE_UDEV'],
+      }],
+      ['fastbuild!=0', {
+        'xcode_settings': {
+          'GCC_GENERATE_DEBUGGING_SYMBOLS': 'NO',
+        },
+        'conditions': [
+          ['OS=="win" and fastbuild==2', {
+            # Completely disable debug information.
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'false',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '0',
+              },
+            },
+          }],
+          ['OS=="win" and fastbuild==1', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # This tells the linker to generate .pdbs, so that
+                # we can get meaningful stack traces.
+                'GenerateDebugInformation': 'true',
+              },
+              'VCCLCompilerTool': {
+                # No debug info to be generated by compiler.
+                'DebugInformationFormat': '0',
+              },
+            },
+          }],
+          ['(OS=="android" or OS=="linux") and fastbuild==2', {
+            'variables': { 'debug_extra_cflags': '-g0', },
+          }],
+          ['(OS=="android" or OS=="linux") and fastbuild==1', {
+            # TODO(thakis): Change this to -g1 once http://crbug.com/456947 is
+            # fixed.
+            'variables': { 'debug_extra_cflags': '-g0', },
+          }],
+          # Android builds symbols on release by default, disable them.
+          ['OS=="android" and fastbuild==2', {
+            'variables': { 'release_extra_cflags': '-g0', },
+          }],
+          ['OS=="android" and fastbuild==1', {
+            # TODO(thakis): Change this to -g1 once http://crbug.com/456947 is
+            # fixed.
+            'variables': { 'release_extra_cflags': '-g0', },
+          }],
+        ],
+      }],  # fastbuild!=0
+      ['dont_embed_build_metadata==1', {
+        'defines': [
+          'DONT_EMBED_BUILD_METADATA',
+        ],
+      }],  # dont_embed_build_metadata==1
+      ['dcheck_always_on!=0', {
+        'defines': ['DCHECK_ALWAYS_ON=1'],
+      }],  # dcheck_always_on!=0
+      ['tracing_like_official_build!=0', {
+        'defines': ['TRACING_IS_OFFICIAL_BUILD=1'],
+      }],  # tracing_like_official_build!=0
+      ['OS=="win"', {
+        'defines': ['NO_TCMALLOC'],
+        'conditions': [
+          ['win_use_allocator_shim==1', {
+            'defines': ['ALLOCATOR_SHIM'],
+          }],
+        ],
+      }],
+      ['asan==1', {
+        'defines': [
+          'ADDRESS_SANITIZER',
+          'MEMORY_TOOL_REPLACES_ALLOCATOR',
+          'MEMORY_SANITIZER_INITIAL_SIZE',
+        ],
+      }],
+      ['syzyasan==1', {
+        # SyzyAsan needs /PROFILE turned on to produce appropriate pdbs.
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'Profile': 'true',
+          },
+        },
+        'defines': [
+            'SYZYASAN',
+            'MEMORY_TOOL_REPLACES_ALLOCATOR',
+            'MEMORY_SANITIZER_INITIAL_SIZE',
+        ],
+      }],
+      ['kasko==1', {
+        'defines': [
+            'KASKO',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/kasko/include',
+        ],
+      }],
+      ['OS=="win"', {
+        'defines': [
+          '__STD_C',
+          '_CRT_SECURE_NO_DEPRECATE',
+          '_SCL_SECURE_NO_DEPRECATE',
+          # This define is required to pull in the new Win8 interfaces from
+          # system headers like ShObjIdl.h.
+          'NTDDI_VERSION=0x06030000',
+          # This is required for ATL to use XP-safe versions of its functions.
+          '_USING_V110_SDK71_',
+        ],
+        'include_dirs': [
+          '<(DEPTH)/third_party/wtl/include',
+        ],
+        'conditions': [
+          ['win_z7!=0', {
+            'msvs_settings': {
+              # Generate debug info when win_z7=1, even if fastbuild=1 (which
+              # would otherwise set GenerateDebugInformation to false).
+              'VCLinkerTool': {
+                'GenerateDebugInformation': 'true',
+              },
+              'VCCLCompilerTool': {
+                'DebugInformationFormat': '1',
+              }
+            }
+          }],  # win_z7!=0
+          ['win_analyze', {
+            'defines!': [
+              # This is prohibited when running /analyze.
+              '_USING_V110_SDK71_',
+            ],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                # Set WarnAsError to false to disable this setting for most
+                # projects so that compilation continues.
+                'WarnAsError': 'false',
+                # When win_analyze is specified add the /analyze switch.
+                # Also add /WX- to force-disable WarnAsError for projects that
+                # override WarnAsError.
+                # Also, disable various noisy warnings that have low value.
+                'AdditionalOptions': [
+                  '/analyze:WX-',
+                  '/wd6011',  # Dereferencing NULL pointer
+                  '/wd6312',  # Possible infinite loop: use of the constant
+                    # EXCEPTION_CONTINUE_EXECUTION in the exception-filter
+                  '/wd6326',  # Potential comparison of constant with constant
+                  '/wd28159', # Consider using 'GetTickCount64'
+                  '/wd28204', # Inconsistent SAL annotations
+                  '/wd28251', # Inconsistent SAL annotations
+                  '/wd28252', # Inconsistent SAL annotations
+                  '/wd28253', # Inconsistent SAL annotations
+                  '/wd28196', # The precondition is not satisfied
+                  '/wd28301', # Inconsistent SAL annotations
+                  '/wd6340',  # Sign mismatch in function parameter
+                  '/wd28182', # Dereferencing NULL pointer
+                  # C6285 is ~16% of raw warnings and has low value
+                  '/wd6285',  # non-zero constant || non-zero constant
+                  # C6334 is ~80% of raw warnings and has low value
+                  '/wd6334',  # sizeof applied to an expression with an operator
+                ],
+              },
+            },
+          }],  # win_analyze
+        ],
+      }],  # OS==win
+      ['chromecast==1', {
+        'defines': [
+          'LOG_DISABLED=0',
+        ],
+        'conditions': [
+          ['use_playready==1', {
+            'defines': [
+              'PLAYREADY_CDM_AVAILABLE',
+            ],
+          }],
+        ],
+      }],
+      ['enable_task_manager==1', {
+        'defines': [
+          'ENABLE_TASK_MANAGER=1',
+        ],
+      }],
+      ['enable_extensions==1', {
+        'defines': [
+          'ENABLE_EXTENSIONS=1',
+        ],
+      }],
+      ['OS=="win" and branding=="Chrome"', {
+        'defines': ['ENABLE_SWIFTSHADER'],
+      }],
+      ['enable_dart==1', {
+        'defines': ['WEBKIT_USING_DART=1'],
+      }],
+      ['enable_plugin_installation==1', {
+        'defines': ['ENABLE_PLUGIN_INSTALLATION=1'],
+      }],
+      ['enable_plugins==1', {
+        'defines': ['ENABLE_PLUGINS=1'],
+      }],
+      ['enable_session_service==1', {
+        'defines': ['ENABLE_SESSION_SERVICE=1'],
+      }],
+      ['enable_themes==1', {
+        'defines': ['ENABLE_THEMES=1'],
+      }],
+      ['enable_autofill_dialog==1', {
+        'defines': ['ENABLE_AUTOFILL_DIALOG=1'],
+      }],
+      ['enable_prod_wallet_service==1', {
+        # In GN, this is set on the autofill targets only. See
+        # //components/autofill/core/browser:wallet_service
+        'defines': ['ENABLE_PROD_WALLET_SERVICE=1'],
+      }],
+      ['enable_background==1', {
+        'defines': ['ENABLE_BACKGROUND=1'],
+      }],
+      ['enable_google_now==1', {
+        'defines': ['ENABLE_GOOGLE_NOW=1'],
+      }],
+      ['cld_version!=0', {
+        'defines': ['CLD_VERSION=<(cld_version)'],
+      }],
+      ['enable_basic_printing==1 or enable_print_preview==1', {
+        # Convenience define for ENABLE_BASIC_PRINTING || ENABLE_PRINT_PREVIEW.
+        'defines': ['ENABLE_PRINTING=1'],
+      }],
+      ['enable_basic_printing==1', {
+        # Enable basic printing support and UI.
+        'defines': ['ENABLE_BASIC_PRINTING=1'],
+      }],
+      ['enable_print_preview==1', {
+        # Enable printing with print preview.
+        # Can be defined without ENABLE_BASIC_PRINTING.
+        'defines': ['ENABLE_PRINT_PREVIEW=1'],
+      }],
+      ['enable_spellcheck==1', {
+        'defines': ['ENABLE_SPELLCHECK=1'],
+      }],
+      ['use_platform_spellchecker', {
+        'defines': ['USE_PLATFORM_SPELLCHECKER=1'],
+      }],
+      ['enable_captive_portal_detection==1', {
+        'defines': ['ENABLE_CAPTIVE_PORTAL_DETECTION=1'],
+      }],
+      ['enable_app_list==1', {
+        'defines': ['ENABLE_APP_LIST=1'],
+      }],
+      ['enable_settings_app==1', {
+        'defines': ['ENABLE_SETTINGS_APP=1'],
+      }],
+      ['disable_file_support==1', {
+        'defines': ['DISABLE_FILE_SUPPORT=1'],
+      }],
+      ['disable_ftp_support==1', {
+        'defines': ['DISABLE_FTP_SUPPORT=1'],
+      }],
+      ['enable_supervised_users==1', {
+        'defines': ['ENABLE_SUPERVISED_USERS=1'],
+      }],
+      ['enable_mdns==1', {
+        'defines': ['ENABLE_MDNS=1'],
+      }],
+      ['enable_service_discovery==1', {
+        'defines' : [ 'ENABLE_SERVICE_DISCOVERY=1' ],
+      }],
+      ['enable_wifi_bootstrapping==1', {
+        'defines' : [ 'ENABLE_WIFI_BOOTSTRAPPING=1' ],
+      }],
+      ['enable_hangout_services_extension==1', {
+        'defines': ['ENABLE_HANGOUT_SERVICES_EXTENSION=1'],
+      }],
+      ['enable_ipc_fuzzer==1', {
+        'defines': ['ENABLE_IPC_FUZZER=1'],
+      }],
+      ['video_hole==1', {
+        'defines': ['VIDEO_HOLE=1'],
+      }],
+      ['v8_use_external_startup_data==1', {
+       'defines': ['V8_USE_EXTERNAL_STARTUP_DATA'],
+      }],
+      ['enable_webvr==1', {
+        'defines': ['ENABLE_WEBVR'],
+      }],
+
+      # SAFE_BROWSING_SERVICE - browser manages a safe-browsing service.
+      # SAFE_BROWSING_DB_LOCAL - service manages a local database.
+      # SAFE_BROWSING_DB_REMOTE - service talks via API to a database.
+      # SAFE_BROWSING_CSD - enable client-side phishing detection.
+      ['safe_browsing==1', {
+        'defines': [
+          # TODO(nparker): Remove existing uses of FULL_SAFE_BROWSING
+          'FULL_SAFE_BROWSING',
+          'SAFE_BROWSING_CSD',
+          'SAFE_BROWSING_DB_LOCAL',
+          'SAFE_BROWSING_SERVICE',
+        ],
+      }],
+      ['safe_browsing==2', {
+        'defines': [
+          # TODO(nparker): Remove existing uses of MOBILE_SAFE_BROWSING
+          'MOBILE_SAFE_BROWSING',
+          'SAFE_BROWSING_SERVICE',
+        ],
+      }],
+      ['safe_browsing==3', {
+        'defines': [
+          # TODO(nparker): Remove existing uses of MOBILE_SAFE_BROWSING
+          'MOBILE_SAFE_BROWSING',
+          'SAFE_BROWSING_DB_REMOTE',
+          'SAFE_BROWSING_SERVICE',
+        ],
+      }],
+    ],  # conditions for 'target_defaults'
+    'target_conditions': [
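+      # Note: 'target_conditions' are evaluated late, after per-target
+      # variables have been merged, which is why the late-expansion form
+      # '>(nacl_untrusted_build)' is used below; '<(...)' variables are
+      # expanded early, when the .gyp files are loaded.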
+      ['<(use_libpci)==1', {
+        'defines': ['USE_LIBPCI=1'],
+      }],
+      ['<(use_openssl)==1', {
+        'defines': ['USE_OPENSSL=1'],
+      }],
+      ['<(use_openssl_certs)==1', {
+        'defines': ['USE_OPENSSL_CERTS=1'],
+      }],
+      ['>(nacl_untrusted_build)==1', {
+        'defines': [
+          'USE_OPENSSL=1',
+          'USE_OPENSSL_CERTS=1',
+        ],
+      }],
+      ['<(use_glib)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['USE_GLIB=1'],
+      }],
+      ['<(use_nss_certs)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['USE_NSS_CERTS=1'],
+      }],
+      ['<(chromeos)==1 and >(nacl_untrusted_build)==0', {
+        'defines': ['OS_CHROMEOS=1'],
+      }],
+      ['enable_wexit_time_destructors==1 and OS!="win"', {
+        # TODO: Enable on Windows too, http://crbug.com/404525
+        'variables': { 'clang_warning_flags': ['-Wexit-time-destructors']},
+      }],
+      ['chromium_code==0', {
+        'variables': {
+          'clang_warning_flags': [
+            # TODO(mgiuca): Move this suppression into individual third-party
+            # libraries as required. http://crbug.com/505301.
+            '-Wno-overloaded-virtual',
+            # Lots of third-party libraries have unused variables. Instead of
+            # suppressing them individually, we just blanket suppress them here.
+            '-Wno-unused-variable',
+          ],
+        },
+        'conditions': [
+          [ 'os_posix==1 and OS!="mac" and OS!="ios"', {
+            # We don't want to get warnings from third-party code,
+            # so remove any existing warning-enabling flags like -Wall.
+            'cflags!': [
+              '-Wall',
+              '-Wextra',
+            ],
+            'cflags_cc': [
+              # Don't warn about hash_map in third-party code.
+              '-Wno-deprecated',
+            ],
+            'cflags': [
+              # Don't warn about printf format problems.
+              # This is off by default in gcc but on in Ubuntu's gcc(!).
+              '-Wno-format',
+            ],
+            'cflags_cc!': [
+              # Necessary because llvm.org/PR10448 is WONTFIX (crbug.com/90453).
+              '-Wsign-compare',
+            ]
+          }],
+          # TODO: Fix all warnings on chromeos too.
+          [ 'os_posix==1 and OS!="mac" and OS!="ios" and (clang!=1 or chromeos==1)', {
+            'cflags!': [
+              '-Werror',
+            ],
+          }],
+          [ 'os_posix==1 and os_bsd!=1 and OS!="mac" and OS!="android"', {
+            'cflags': [
+              # Don't warn about ignoring the return value from e.g. close().
+              # This is off by default in some gccs but on by default in others.
+              # BSD systems do not support this option, since they are usually
+              # using gcc 4.2.1, which does not have this flag yet.
+              '-Wno-unused-result',
+            ],
+          }],
+          [ 'OS=="win"', {
+            'defines': [
+              '_CRT_SECURE_NO_DEPRECATE',
+              '_CRT_NONSTDC_NO_WARNINGS',
+              '_CRT_NONSTDC_NO_DEPRECATE',
+              '_SCL_SECURE_NO_DEPRECATE',
+            ],
+            'msvs_disabled_warnings': [
+              4800,  # Forcing value to bool 'true' or 'false' (performance warning).
+            ],
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'WarningLevel': '3',
+                'WarnAsError': 'true',
+                'Detect64BitPortabilityProblems': 'false',
+              },
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'msvs_settings': {
+                  'VCCLCompilerTool': { 'WarnAsError': 'false' },
+                }
+              }],
+              [ 'component=="shared_library"', {
+                # TODO(darin): Unfortunately, some third_party code depends on base.
+                'msvs_disabled_warnings': [
+                  4251,  # class 'std::xx' needs to have dll-interface.
+                ],
+              }],
+            ],
+          }],
+
+          [ 'OS=="mac" or OS=="ios"', {
+            'xcode_settings': {
+              'WARNING_CFLAGS!': ['-Wall', '-Wextra'],
+            },
+            'conditions': [
+              ['buildtype=="Official"', {
+                'xcode_settings': {
+                  'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',    # -Werror
+                },
+              }],
+            ],
+          }],
+          [ 'OS=="ios"', {
+            'xcode_settings': {
+              # TODO(ios): Fix remaining warnings in third-party code, then
+              # remove this; the Mac cleanup didn't get everything that's
+              # flagged in an iOS build.
+              'GCC_TREAT_WARNINGS_AS_ERRORS': 'NO',
+              'RUN_CLANG_STATIC_ANALYZER': 'NO',
+              # Several internal ios directories generate numerous warnings for
+              # -Wobjc-missing-property-synthesis.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'NO',
+            },
+          }],
+        ],
+      }, {
+        'includes': [
+          # Rules for excluding e.g. foo_win.cc from the build on non-Windows.
+          'filename_rules.gypi',
+        ],
+        # In Chromium code, we define __STDC_foo_MACROS in order to get the
+        # C99 macros on Mac and Linux.
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          '__STDC_FORMAT_MACROS',
+        ],
+        'conditions': [
+          ['OS=="win"', {
+            # Turn on warnings for signed/unsigned mismatch in Chromium code.
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': ['/we4389'],
+              },
+            },
+          }],
+          ['OS=="win" and component=="shared_library"', {
+            'msvs_disabled_warnings': [
+              4251,  # class 'std::xx' needs to have dll-interface.
+            ],
+          }],
+        ],
+      }],
+    ],  # target_conditions for 'target_defaults'
+    'default_configuration': 'Debug',
+    'configurations': {
+      # VCLinkerTool LinkIncremental values below:
+      #   0 == default
+      #   1 == /INCREMENTAL:NO
+      #   2 == /INCREMENTAL
+      # Debug links incremental, Release does not.
+      #
+      # Abstract base configurations to cover common attributes.
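+      # ('abstract': 1 marks a configuration that is never built directly;
+      # it only contributes settings to the concrete configurations that
+      # list it in 'inherit_from', such as 'Debug' and 'Release' below.)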
+      #
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\build\\<(build_dir_prefix)$(ConfigurationName)',
+          'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+          'CharacterSet': '1',  # Unicode.
+        },
+        'msvs_settings':{
+          'VCCLCompilerTool': {
+            'AdditionalOptions': [
+              '/bigobj',  # Increase the number of sections an .obj file can contain.
+            ],
+          },
+          'VCLinkerTool': {
+            # Add the default import libs.
+            'AdditionalDependencies': [
+              'kernel32.lib',
+              'gdi32.lib',
+              'winspool.lib',
+              'comdlg32.lib',
+              'advapi32.lib',
+              'shell32.lib',
+              'ole32.lib',
+              'oleaut32.lib',
+              'user32.lib',
+              'uuid.lib',
+              'odbc32.lib',
+              'odbccp32.lib',
+              'delayimp.lib',
+              'credui.lib',
+              'netapi32.lib',
+            ],
+            'AdditionalOptions': [
+              # Suggested by Microsoft Devrel to avoid
+              #   LINK : fatal error LNK1248: image size (80000000) exceeds maximum allowable size (80000000)
+              # which started happening more regularly after VS2013 Update 4.
+              # Needs to be a bit lower for VS2015, or else errors out.
+              '/maxilksize:0x7ff00000',
+            ],
+          },
+        },
+      },
+      'x86_Base': {
+        'abstract': 1,
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'MinimumRequiredVersion': '5.01',  # XP.
+            'TargetMachine': '1',  # x86.
+          },
+          'VCLibrarianTool': {
+            'TargetMachine': '1',  # x86.
+          },
+        },
+        'msvs_configuration_platform': 'Win32',
+      },
+      'x64_Base': {
+        'abstract': 1,
+        'msvs_configuration_platform': 'x64',
+        'msvs_settings': {
+          'VCLinkerTool': {
+            # Make sure to understand http://crbug.com/361720 if you want to
+            # increase this.
+            'MinimumRequiredVersion': '5.02',  # Server 2003.
+            'TargetMachine': '17', # x64
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+            # Doesn't exist in the x64 SDK. Should use oleaut32 in any case.
+            'IgnoreDefaultLibraryNames': [ 'olepro32.lib' ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalLibraryDirectories!':
+              ['<(windows_sdk_path)/Lib/win8/um/x86'],
+            'AdditionalLibraryDirectories':
+              ['<(windows_sdk_path)/Lib/win8/um/x64'],
+            'TargetMachine': '17', # x64
+          },
+        },
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'defines': [
+          'DYNAMIC_ANNOTATIONS_ENABLED=1',
+          'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+        ],
+        'xcode_settings': {
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_debug_optimization)',
+          'OTHER_CFLAGS': [
+            '<@(debug_extra_cflags)',
+          ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'Optimization': '<(win_debug_Optimization)',
+            'PreprocessorDefinitions': ['_DEBUG'],
+            'BasicRuntimeChecks': '<(win_debug_RuntimeChecks)',
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)',
+            'conditions': [
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_debug_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_debug_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_debug_InlineFunctionExpansion)',
+              }],
+              ['win_debug_disable_iterator_debugging==1', {
+                'PreprocessorDefinitions': ['_HAS_ITERATOR_DEBUGGING=0'],
+              }],
+
+              # if win_debug_OmitFramePointers is blank, leave as default
+              ['win_debug_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_debug_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+            ],
+            'AdditionalOptions': [ '<@(win_debug_extra_cflags)', ],
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': '<(msvs_debug_link_incremental)',
+            # ASLR makes debugging with windbg difficult because Chrome.exe and
+            # Chrome.dll share the same base name. As a result, windbg will
+            # name the Chrome.dll module like chrome_<base address>, where
+            # <base address> typically changes with each launch. This in turn
+            # means that breakpoints in Chrome.dll don't stick from one launch
+            # to the next. For this reason, we turn ASLR off in debug builds.
+            # Note that this is a three-way bool, where 0 means to pick up
+            # the default setting, 1 is off and 2 is on.
+            'RandomizedBaseAddress': 1,
+          },
+          'VCResourceCompilerTool': {
+            'PreprocessorDefinitions': ['_DEBUG'],
+          },
+        },
+        'conditions': [
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(debug_extra_cflags)',
+                ],
+              }],
+            ],
+          }],
+          ['OS=="linux" and target_arch!="ia32" and disable_glibcxx_debug==0', {
+            # Enable libstdc++ debugging facilities to help catch problems
+            # early, see http://crbug.com/65151 .
+            # TODO(phajdan.jr): Should we enable this for all of POSIX?
+            'defines': ['_GLIBCXX_DEBUG=1',],
+          }],
+          ['release_valgrind_build==0', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fstack-protector-all',  # Implies -fstack-protector
+              ],
+            },
+          }],
+          ['clang==1', {
+            'cflags': [
+              # Allow comparing the address of references and 'this' against 0
+              # in debug builds. Technically, these can never be null in
+              # well-defined C/C++ and Clang can optimize such checks away in
+              # release builds, but they may be used in asserts in debug builds.
+              '-Wno-undefined-bool-conversion',
+              '-Wno-tautological-undefined-compare',
+            ],
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-Wno-undefined-bool-conversion',
+                '-Wno-tautological-undefined-compare',
+              ],
+            },
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-Wno-undefined-bool-conversion',
+                  '-Wno-tautological-undefined-compare',
+                ],
+              },
+            },
+          }],
+        ],
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'xcode_settings': {
+          'DEAD_CODE_STRIPPING': 'YES',  # -Wl,-dead_strip
+          'GCC_OPTIMIZATION_LEVEL': '<(mac_release_optimization)',
+          'OTHER_CFLAGS': [ '<@(release_extra_cflags)', ],
+        },
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)',
+            'conditions': [
+              # In official builds, each target will self-select
+              # an optimization level.
+              ['buildtype!="Official"', {
+                  'Optimization': '<(win_release_Optimization)',
+                },
+              ],
+              # According to MSVS, InlineFunctionExpansion=0 means
+              # "default inlining", not "/Ob0".
+              # Thus, we have to handle InlineFunctionExpansion==0 separately.
+              ['win_release_InlineFunctionExpansion==0', {
+                'AdditionalOptions': ['/Ob0'],
+              }],
+              ['win_release_InlineFunctionExpansion!=""', {
+                'InlineFunctionExpansion':
+                  '<(win_release_InlineFunctionExpansion)',
+              }],
+
+              # if win_release_OmitFramePointers is blank, leave as default
+              ['win_release_OmitFramePointers==1', {
+                'OmitFramePointers': 'true',
+              }],
+              ['win_release_OmitFramePointers==0', {
+                'OmitFramePointers': 'false',
+                # The above is not sufficient (http://crbug.com/106711): it
+                # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+                # perform FPO regardless, so we must explicitly disable.
+                # We still want the false setting above to avoid having
+                # "/Oy /Oy-" and warnings about overriding.
+                'AdditionalOptions': ['/Oy-'],
+              }],
+              ['asan==0', {
+                # Put data in separate COMDATs. This allows the linker
+                # to put bit-identical constants at the same address even if
+                # they're unrelated constants, which saves binary size.
+                # This optimization can't be used when ASan is enabled because
+                # it is not compatible with the ASan ODR checker.
+                'AdditionalOptions': ['/Gw'],
+              }],
+            ],
+            'AdditionalOptions': [
+                '/d2Zi+',  # Improve debugging of Release builds.
+                '/Zc:inline',  # Remove unreferenced COMDAT (faster links).
+                '<@(win_release_extra_cflags)',
+            ],
+          },
+          'VCLinkerTool': {
+            # LinkIncremental is a tri-state boolean, where 0 means default
+            # (i.e., inherit from parent solution), 1 means false, and
+            # 2 means true.
+            'LinkIncremental': '1',
+            # This corresponds to the /PROFILE flag which ensures the PDB
+            # file contains FIXUP information (growing the PDB file by about
+            # 5%) but does not otherwise alter the output binary. This
+            # information is used by the Syzygy optimization tool when
+            # decomposing the release image.
+            'Profile': 'true',
+          },
+        },
+        'conditions': [
+          ['msvs_use_common_release', {
+            'includes': ['release.gypi'],
+          }],
+          ['release_valgrind_build==0 and tsan==0', {
+            'defines': [
+              'NVALGRIND',
+              'DYNAMIC_ANNOTATIONS_ENABLED=0',
+            ],
+          }, {
+            'defines': [
+              'MEMORY_TOOL_REPLACES_ALLOCATOR',
+              'MEMORY_SANITIZER_INITIAL_SIZE',
+              'DYNAMIC_ANNOTATIONS_ENABLED=1',
+              'WTF_USE_DYNAMIC_ANNOTATIONS=1',
+            ],
+          }],
+          ['OS=="win"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          # _FORTIFY_SOURCE isn't really supported by Clang now, see
+          # http://llvm.org/bugs/show_bug.cgi?id=16821.
+          # It seems to work fine with Ubuntu 12 headers though, so use it
+          # in official builds.
+          ['os_posix==1 and (asan!=1 and msan!=1 and tsan!=1 and lsan!=1 and ubsan!=1) and (OS!="linux" or clang!=1 or buildtype=="Official")', {
+            'target_conditions': [
+              ['chromium_code==1', {
+                # Non-chromium code is not guaranteed to compile cleanly
+                # with _FORTIFY_SOURCE. Also, a fortified build may fail
+                # when optimizations are disabled, so only do this for
+                # Release builds.
+                'defines': [
+                  '_FORTIFY_SOURCE=2',
+                ],
+              }],
+            ],
+          }],
+          ['OS=="linux" or OS=="android"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '<@(release_extra_cflags)',
+                ],
+                'conditions': [
+                  ['enable_resource_whitelist_generation==1', {
+                    'cflags': [
+                      '-Wunknown-pragmas -Wno-error=unknown-pragmas',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['OS=="ios"', {
+            'defines': [
+              'NS_BLOCK_ASSERTIONS=1',
+            ],
+          }],
+        ],
+      },
+      #
+      # Concrete configurations
+      #
+      'Debug': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Debug_Base'],
+      },
+      'Release': {
+        'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+      },
+      'conditions': [
+        [ 'OS=="ios"', {
+          'Profile': {
+            'inherit_from': ['Common_Base', 'x86_Base', 'Release_Base'],
+            'target_conditions': [
+              [ '_type=="executable"', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'xcode_settings': {
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              }],
+            ],
+          },
+        }],
+        [ 'OS=="win"', {
+          # TODO(bradnelson): add a gyp mechanism to make this more graceful.
+          'Debug_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Debug_Base'],
+          },
+          'Release_x64': {
+            'inherit_from': ['Common_Base', 'x64_Base', 'Release_Base'],
+          },
+        }],
+      ],
+    },
+  },
+  'conditions': [
+    ['os_posix==1', {
+      'target_defaults': {
+        'ldflags': [
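+          # ('-z,relro' together with '-z,now' gives full RELRO: relocations
+          # are resolved at load time and the GOT is then mapped read-only,
+          # which hardens against GOT-overwrite attacks.)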
+          '-Wl,-z,now',
+          '-Wl,-z,relro',
+        ],
+        # TODO(glider): enable the default options on other systems.
+        'conditions': [
+          ['use_sanitizer_options==1 and ((OS=="linux" and (chromeos==0 or target_arch!="ia32")) or OS=="mac")', {
+            'dependencies': [
+              '<(DEPTH)/build/sanitizers/sanitizers.gyp:sanitizer_options',
+            ],
+          }],
+        ],
+      },
+    }],
+    # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+    ['os_posix==1 and disable_fatal_linker_warnings==0 and use_evdev_gestures==0 and (chromeos==0 or target_arch!="arm")', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--fatal-warnings',
+        ],
+      },
+    }],
+    # -Wl,-z,defs doesn't work with the sanitizers, http://crbug.com/452065
+    ['(OS=="linux" or OS=="android") and asan==0 and msan==0 and tsan==0 and ubsan==0 and ubsan_vptr==0', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,-z,defs',
+        ],
+      },
+    }],
+    ['os_posix==1 and chromeos==0', {
+      # Chrome OS enables -fstack-protector-strong via its build wrapper,
+      # and we want to avoid overriding this, so stack-protector is only
+      # enabled when not building on Chrome OS.
+      # TODO(phajdan.jr): Use -fstack-protector-strong when our gcc
+      # supports it.
+      'target_defaults': {
+        'cflags': [
+          '-fstack-protector',
+          '--param=ssp-buffer-size=4',
+        ],
+      },
+    }],
+    ['os_posix==1 and OS=="linux"', {
+      'defines': [
+        '_LARGEFILE_SOURCE',
+        '_LARGEFILE64_SOURCE',
+        '_FILE_OFFSET_BITS=64',
+      ],
+    }],
+    ['os_posix==1 and OS!="mac" and OS!="ios"', {
+      'target_defaults': {
+        # Enable -Werror by default, but put it in a variable so it can
+        # be disabled in ~/.gyp/include.gypi on the valgrind builders.
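+        # (For example, a ~/.gyp/include.gypi containing
+        #   {'variables': {'werror%': ''}}
+        # should turn it off, since the 'werror%' default below only applies
+        # when the variable is not already set.)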
+        'variables': {
+          'werror%': '-Werror',
+          'libraries_for_target%': '',
+        },
+        'defines': [
+          '_FILE_OFFSET_BITS=64',
+        ],
+        'cflags': [
+          '<(werror)',  # See note above about the werror variable.
+          '-pthread',
+          '-fno-strict-aliasing',  # See http://crbug.com/32204
+          '-Wall',
+          # Don't warn about unused function params.  We use those everywhere.
+          '-Wno-unused-parameter',
+          # Don't warn about the "struct foo f = {0};" initialization pattern.
+          '-Wno-missing-field-initializers',
+          # Don't export any symbols (for example, to plugins we dlopen()).
+          # Note: this is *required* to make some plugins work.
+          '-fvisibility=hidden',
+          '-pipe',
+        ],
+        'cflags_cc': [
+          '-fno-exceptions',
+          '-fno-rtti',
+          '-fno-threadsafe-statics',
+          # Make inline functions have hidden visibility by default.
+          # Surprisingly, not covered by -fvisibility=hidden.
+          '-fvisibility-inlines-hidden',
+          # GCC turns on -Wsign-compare for C++ under -Wall, but clang doesn't,
+          # so we specify it explicitly.  (llvm.org/PR10448, crbug.com/90453)
+          '-Wsign-compare',
+        ],
+        'ldflags': [
+          '-pthread', '-Wl,-z,noexecstack',
+        ],
+        'libraries' : [
+          '<(libraries_for_target)',
+        ],
+        'configurations': {
+          'Debug_Base': {
+            'variables': {
+              'debug_optimize%': '0',
+            },
+            'defines': [
+              '_DEBUG',
+            ],
+            'cflags': [
+              '-O>(debug_optimize)',
+              '-g',
+            ],
+            'conditions' : [
+              ['OS=="android" and target_arch!="mipsel" and target_arch!="mips64el"', {
+                # TODO(jdduke) Re-enable on mips after resolving linking
+                # issues with libc++ (crbug.com/456380).
+                'ldflags': [
+                  # Warn in case of text relocations.
+                  '-Wl,--warn-shared-textrel',
+                ],
+              }],
+              ['OS=="android" and android_full_debug==0', {
+                # Some configurations are copied from Release_Base to reduce
+                # the binary size.
+                'variables': {
+                  'debug_optimize%': 's',
+                },
+                'cflags': [
+                  '-fdata-sections',
+                  '-ffunction-sections',
+                ],
+                'ldflags': [
+                  '-Wl,-O1',
+                  '-Wl,--as-needed',
+                ],
+              }],
+              ['OS=="android" and android_full_debug==0 and target_arch!="arm64"', {
+                # We don't omit frame pointers on arm64 since they are required
+                # to correctly unwind stackframes which contain system library
+                # function frames (crbug.com/391706).
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+              ['OS=="linux" and target_arch=="ia32"', {
+                'ldflags': [
+                  '-Wl,--no-as-needed',
+                ],
+              }],
+              ['debug_unwind_tables==1', {
+                'cflags': ['-funwind-tables'],
+              }, {
+                'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'],
+                'defines': ['NO_UNWIND_TABLES'],
+              }],
+              # TODO(mostynb): shuffle clang/gcc_version/binutils_version
+              # definitions in to the right scope to use them when setting
+              # linux_use_debug_fission, so it can be used here alone.
+              ['linux_use_debug_fission==1 and linux_use_gold_flags==1 and (clang==1 or gcc_version>=48) and binutils_version>=223', {
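+                # (-gsplit-dwarf moves most debug info into separate .dwo
+                # files, which keeps object files and link inputs smaller.)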
+                'cflags': ['-gsplit-dwarf'],
+              }],
+            ],
+          },
+          'Release_Base': {
+            'variables': {
+              'release_optimize%': '2',
+              # Binaries become big and gold is unable to perform GC
+              # and remove unused sections for some test targets
+              # on 32-bit platforms.
+              # (This is currently observed only on the chromeos valgrind
+              # bots.)
+              # The following flag is to disable --gc-sections linker
+              # option for these bots.
+              'no_gc_sections%': 0,
+
+              # TODO(bradnelson): reexamine how this is done if we change the
+              # expansion of configurations
+              'release_valgrind_build%': 0,
+            },
+            'cflags': [
+              '-O<(release_optimize)',
+              # Don't emit the GCC version ident directives; they just end up
+              # in the .comment section, taking up binary size.
+              '-fno-ident',
+              # Put data and code in their own sections, so that unused symbols
+              # can be removed at link time with --gc-sections.
+              '-fdata-sections',
+              '-ffunction-sections',
+            ],
+            'ldflags': [
+              # Specifically tell the linker to perform optimizations.
+              # See http://lwn.net/Articles/192624/ .
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+            ],
+            'conditions' : [
+              ['no_gc_sections==0', {
+                'ldflags': [
+                  '-Wl,--gc-sections',
+                ],
+              }],
+              ['OS=="android" and target_arch!="arm64"', {
+                # We don't omit frame pointers on arm64 since they are required
+                # to correctly unwind stackframes which contain system library
+                # function frames (crbug.com/391706).
+                'cflags': [
+                  '-fomit-frame-pointer',
+                ]
+              }],
+              ['OS=="android" and target_arch!="mipsel" and target_arch!="mips64el"', {
+                # TODO(jdduke) Re-enable on mips after resolving linking
+                # issues with libc++ (crbug.com/456380).
+                'ldflags': [
+                  # Warn in case of text relocations.
+                  '-Wl,--warn-shared-textrel',
+                ],
+              }],
+              ['OS=="android"', {
+                'variables': {
+                  'release_optimize%': 's',
+                },
+              }],
+              ['profiling==1', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  '-g',
+                ],
+                'conditions' : [
+                  ['profiling_full_stack_frames==1', {
+                    'cflags': [
+                      '-fno-inline',
+                      '-fno-optimize-sibling-calls',
+                    ],
+                  }],
+                ],
+              }],
+              ['release_unwind_tables==1', {
+                'cflags': ['-funwind-tables'],
+              }, {
+                'cflags': ['-fno-unwind-tables', '-fno-asynchronous-unwind-tables'],
+                'defines': ['NO_UNWIND_TABLES'],
+              }],
+            ],
+          },
+        },
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'asflags': [
+                  # Needed so that libs with .s files (e.g. libicudata.a)
+                  # are compatible with the general 32-bit-ness.
+                  '-32',
+                ],
+                # All floating-point computations on x87 happen in 80-bit
+                # precision.  Because the C and C++ language standards allow
+                # the compiler to keep floating-point values in higher
+                # precision than what's specified in the source, and doing so
+                # is more efficient than constantly rounding down to the
+                # 64-bit or 32-bit precision specified in the source, the
+                # compiler, especially in optimized mode, tries very hard to
+                # keep values on the x87 floating-point stack (in 80-bit
+                # precision) as long as possible. This has an important side
+                # effect: the real value used in a computation may change
+                # depending on how the compiler optimized the code - that is,
+                # the value kept in 80-bit precision differs from the value
+                # rounded down to 64-bit or 32-bit. There are compiler options
+                # that can make this behavior consistent (e.g. -ffloat-store
+                # keeps all floating-point values in memory, forcing them to
+                # be rounded to their original precision), but they carry a
+                # significant runtime performance penalty.
+                #
+                # -mfpmath=sse -msse2 makes the compiler use SSE instructions
+                # which keep floating-point values in SSE registers in its
+                # native precision (32-bit for single precision, and 64-bit
+                # for double precision values). This means the floating-point
+                # value used during computation does not change depending on
+                # how the compiler optimized the code, since the value is
+                # always kept in its specified precision.
+                #
+                # Refer to http://crbug.com/348761 for rationale behind SSE2
+                # being a minimum requirement for 32-bit Linux builds and
+                # http://crbug.com/313032 for an example where this has "bit"
+                # us in the past.
+                'cflags': [
+                  '-msse2',
+                  '-mfpmath=sse',
+                  '-mmmx',  # Allows mmintrin.h for MMX intrinsics.
+                  '-m32',
+                ],
+                'ldflags': [
+                  '-m32',
+                ],
+                'conditions': [
+                  # Use gold linker for Android ia32 target.
+                  ['OS=="android"', {
+                    'ldflags': [
+                      '-fuse-ld=gold',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="x64"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  # Use gold linker for Android x64 target.
+                  ['OS=="android"', {
+                    'ldflags': [
+                      '-fuse-ld=gold',
+                    ],
+                  }],
+                ],
+                'cflags': [
+                  '-m64',
+                  '-march=x86-64',
+                ],
+                'ldflags': [
+                  '-m64',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['clang==0', {
+                    'cflags_cc': [
+                      # The codesourcery arm-2009q3 toolchain warns that the ABI
+                      # has changed whenever it encounters a varargs function. This
+                      # silences those warnings, as they are not helpful and
+                      # clutter legitimate warnings.
+                      '-Wno-abi',
+                    ],
+                  }],
+                  ['clang==1 and arm_arch!="" and OS!="android"', {
+                    'cflags': [
+                      '-target arm-linux-gnueabihf',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-gnueabihf',
+                    ],
+                  }],
+                  ['arm_arch!=""', {
+                    'cflags': [
+                      '-march=<(arm_arch)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-march=<(arm_arch)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['clang==1 and OS!="android"', {
+                    'cflags': [
+                      # We need to disable clang's builtin assembler as it can't
+                      # handle several asm files, crbug.com/124610
+                      '-no-integrated-as',
+                    ],
+                  }],
+                  ['arm_tune!=""', {
+                    'cflags': [
+                      '-mtune=<(arm_tune)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mtune=<(arm_tune)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['arm_fpu!=""', {
+                    'cflags': [
+                      '-mfpu=<(arm_fpu)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mfpu=<(arm_fpu)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['arm_float_abi!=""', {
+                    'cflags': [
+                      '-mfloat-abi=<(arm_float_abi)',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mfloat-abi=<(arm_float_abi)',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['arm_thumb==1', {
+                    'cflags': [
+                      '-mthumb',
+                    ],
+                    'conditions': [
+                      ['use_lto==1 or use_lto_o2==1', {
+                        'ldflags': [
+                          '-mthumb',
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['OS=="android"', {
+                    # Most of the following flags are derived from what Android
+                    # uses by default when building for arm, reference for which
+                    # can be found in the following file in the Android NDK:
+                    # toolchains/arm-linux-androideabi-4.9/setup.mk
+                    'cflags': [
+                      # The tree-sra optimization (scalar replacement for
+                      # aggregates enabling subsequent optimizations) leads to
+                      # invalid code generation when using the Android NDK's
+                      # compiler (r5-r7). This can be verified using
+                      # webkit_unit_tests' WTF.Checked_int8_t test.
+                      '-fno-tree-sra',
+                      # The following option is disabled to improve binary
+                      # size and performance in gcc 4.9.
+                      '-fno-caller-saves',
+                      '-Wno-psabi',
+                    ],
+                    # Android now supports .relro sections properly.
+                    # NOTE: While these flags enable the generation of .relro
+                    # sections, the generated libraries can still be loaded on
+                    # older Android platform versions.
+                    'ldflags': [
+                        '-Wl,-z,relro',
+                        '-Wl,-z,now',
+                        '-fuse-ld=gold',
+                    ],
+                    'conditions': [
+                      ['gcc_version==48 and clang==0', {
+                        'cflags': [
+                          # The following 5 options are disabled to save on
+                          # binary size in GCC 4.8.
+                          '-fno-partial-inlining',
+                          '-fno-early-inlining',
+                          '-fno-tree-copy-prop',
+                          '-fno-tree-loop-optimize',
+                          '-fno-move-loop-invariants',
+                        ],
+                      }],
+                      ['arm_thumb==1', {
+                        'cflags': [ '-mthumb-interwork' ],
+                      }],
+                      ['profiling==1', {
+                        'cflags': [
+                          # Thumb code with frame pointer makes chrome crash
+                          # early.
+                          '-marm',
+                          '-mapcs-frame', # Required by -fno-omit-frame-pointer.
+                          # The perf report sometimes incorrectly attributes
+                          # code from tail calls.
+                          '-fno-optimize-sibling-calls',
+                        ],
+                        'cflags!': [
+                          '-fomit-frame-pointer',
+                        ],
+                      }],
+                      ['clang==1', {
+                        'cflags!': [
+                          # Clang does not support the following options.
+                          '-mapcs-frame',
+                          '-mthumb-interwork',
+                          '-finline-limit=64',
+                          '-fno-tree-sra',
+                          '-fno-caller-saves',
+                          '-Wno-psabi',
+                        ],
+                        'cflags': [
+                          # TODO(hans) Enable integrated-as (crbug.com/124610).
+                          '-no-integrated-as',
+                          '-B<(android_toolchain)',  # Else /usr/bin/as gets picked up.
+                        ],
+                      }],
+                      ['clang==1 and linux_use_bundled_gold==0', {
+                        'ldflags': [
+                          # Let clang find the ld.gold in the NDK.
+                          '--gcc-toolchain=<(android_toolchain)/..',
+                        ],
+                      }],
+                      ['asan==1', {
+                        'cflags': [
+                          '-marm', # Required for frame pointer based stack traces.
+                        ],
+                      }],
+                    ],
+                  }],
+                  ['chromecast==1', {
+                    'cflags': [
+                      # We set arm_arch to "" so that -march compiler option
+                      # is not set.  Otherwise a gcc bug that would complain
+                      # about it conflicting with '-mcpu=cortex-a9'. The flag
+                      # '-march=armv7-a' is actually redundant anyway because
+                      # it is enabled by default when we built the toolchain.
+                      # And using '-mcpu=cortex-a9' should be sufficient.
+                      '-mcpu=cortex-a9',
+                      '-funwind-tables',
+                      # Breakpad requires symbols with debugging information
+                      '-g',
+                    ],
+                    'ldflags': [
+                      # We want to statically link libstdc++/libgcc_s.
+                      '-static-libstdc++',
+                      '-static-libgcc',
+                    ],
+                    'cflags!': [
+                      # Some components in Chromium (e.g. v8, skia, ffmpeg)
+                      # define their own cflags for arm builds that could
+                      # conflict with the flags we set here (e.g.
+                      # '-mcpu=cortex-a9'). Remove these flags explicitly.
+                      '-march=armv7-a',
+                      '-mtune=cortex-a8',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="arm64"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['OS=="android"', {
+                    'cflags!': [
+                       '-fstack-protector',  # stack protector is always enabled on arm64.
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="mipsel"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['mips_arch_variant=="r6"', {
+                    'conditions': [
+                      ['clang==1', {
+                        'cflags': [ '-target mipsel-linux-gnu', '-march=mips32r6', ],
+                        'ldflags': [ '-target mipsel-linux-gnu', ],
+                      }, { # clang==0
+                        'cflags': ['-mips32r6', '-Wa,-mips32r6', ],
+                      }],
+                      ['clang==0 and OS=="android"', {
+                        'ldflags': ['-mips32r6', '-Wl,-melf32ltsmip',],
+                      }],
+                    ],
+                  }],
+                  ['mips_arch_variant=="r2"', {
+                    'conditions': [
+                      ['mips_float_abi=="hard" and mips_fpu_mode!=""', {
+                        'cflags': ['-m<(mips_fpu_mode)'],
+                      }],
+                      ['clang==1', {
+                         'conditions': [
+                          ['OS=="android"', {
+                            'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32r2'],
+                            'ldflags': [ '-target mipsel-linux-android', ],
+                          }, {
+                            'cflags': [ '-target mipsel-linux-gnu', '-march=mipsel', '-mcpu=mips32r2'],
+                            'ldflags': [ '-target mipsel-linux-gnu', ],
+                          }],
+                         ],
+                      }, { # clang==0
+                        'cflags': ['-mips32r2', '-Wa,-mips32r2', ],
+                      }],
+                    ],
+                  }],
+                  ['mips_arch_variant=="r1"', {
+                    'conditions': [
+                      ['clang==1', {
+                        'conditions': [
+                          ['OS=="android"', {
+                            'cflags': [ '-target mipsel-linux-android', '-march=mipsel', '-mcpu=mips32'],
+                            'ldflags': [ '-target mipsel-linux-android', ],
+                          }, {
+                            'cflags': [ '-target mipsel-linux-gnu', '-march=mipsel', '-mcpu=mips32'],
+                            'ldflags': [ '-target mipsel-linux-gnu', ],
+                          }],
+                        ],
+                      }, { # clang==0
+                        'cflags': ['-mips32', '-Wa,-mips32', ],
+                      }],
+                    ],
+                  }],
+                  ['clang==1', {
+                    'cflags!': [
+                      # Clang does not support the following options.
+                      '-finline-limit=64',
+                    ],
+                    'cflags': [
+                      # TODO(gordanac) Enable integrated-as.
+                      '-no-integrated-as',
+                    ],
+                  }],
+                  ['clang==1 and OS=="android"', {
+                    'cflags': [
+                      '-B<(android_toolchain)',  # Else /usr/bin/as gets picked up.
+                    ],
+                    'ldflags': [
+                      # Let clang find the ld in the NDK.
+                      '--gcc-toolchain=<(android_toolchain)/..',
+                    ],
+                  }],
+                  ['mips_dsp_rev==1', {
+                    'cflags': ['-mdsp'],
+                  }],
+                  ['mips_dsp_rev==2', {
+                    'cflags': ['-mdspr2'],
+                  }],
+                ],
+                'cflags': [
+                  '-m<(mips_float_abi)-float'
+                ],
+                'ldflags': [
+                  '-Wl,--no-keep-memory'
+                ],
+                'cflags_cc': [
+                  '-Wno-uninitialized',
+                ],
+              }],
+            ],
+          }],
+          ['target_arch=="mips64el"', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'conditions': [
+                  ['mips_arch_variant=="r6"', {
+                    'cflags': ['-mips64r6', '-Wa,-mips64r6'],
+                    'ldflags': ['-mips64r6'],
+                  }],
+                  ['mips_arch_variant=="r2"', {
+                    'cflags': ['-mips64r2', '-Wa,-mips64r2'],
+                    'ldflags': ['-mips64r2'],
+                  }],
+                ],
+                'cflags_cc': [
+                  '-Wno-uninitialized',
+                ],
+              }],
+            ],
+          }],
+          ['linux_fpic==1', {
+            'cflags': [
+              '-fPIC',
+            ],
+            'ldflags': [
+              '-fPIC',
+            ],
+          }],
+          ['sysroot!=""', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '--sysroot=<(sysroot)',
+                ],
+                'ldflags': [
+                  '--sysroot=<(sysroot)',
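+                  # (gyp's '<!(...)' form runs the command at gyp time and
+                  # splices its output into this list; the script is expected
+                  # to emit extra linker search-path flags for the sysroot.)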
+                  '<!(<(DEPTH)/build/linux/sysroot_ld_path.sh <(sysroot))',
+                ],
+              }],
+            ],
+          }],
+          ['clang==1', {
+            'cflags': [
+              # TODO(thakis): Remove, http://crbug.com/263960
+              '-Wno-reserved-user-defined-literal',
+            ],
+            'cflags_cc': [
+              # gnu++11 instead of c++11 is needed because some code uses
+              # typeof() (a GNU extension).
+              # TODO(thakis): Eventually switch this to c++11 instead,
+              # http://crbug.com/427584
+              '-std=gnu++11',
+            ],
+          }],
+          ['clang==0 and host_clang==1', {
+            'target_conditions': [
+              ['_toolset=="host"', {
+                'cflags_cc': [ '-std=gnu++11', ],
+              }],
+            ],
+          }],
+          ['clang==1 and clang_use_chrome_plugins==1', {
+            'cflags': [
+              '<@(clang_chrome_plugins_flags)',
+            ],
+          }],
+          ['clang==1 and clang_load!=""', {
+            'cflags': [
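+              # (-Xclang forwards the following argument to the clang
+              # frontend, so this loads the plugin library named by
+              # clang_load into the compiler proper.)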
+              '-Xclang', '-load', '-Xclang', '<(clang_load)',
+            ],
+          }],
+          ['clang==1 and clang_add_plugin!=""', {
+            'cflags': [
+              '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+            ],
+          }],
+          ['clang==1 and target_arch=="ia32"', {
+            'cflags': [
+              # Otherwise building libyuv hits clang register allocator
+              # issues, see llvm.org/PR15798 / crbug.com/233709
+              '-momit-leaf-frame-pointer',
+              # Align the stack on 16-byte boundaries, http://crbug.com/418554.
+              '-mstack-alignment=16',
+              '-mstackrealign',
+            ],
+          }],
+          ['clang==1 and "<(GENERATOR)"=="ninja"', {
+            'cflags': [
+              # See http://crbug.com/110262
+              '-fcolor-diagnostics',
+            ],
+          }],
+          # Common options for AddressSanitizer, LeakSanitizer,
+          # ThreadSanitizer and MemorySanitizer.
+          ['asan==1 or lsan==1 or tsan==1 or msan==1 or ubsan==1 or ubsan_vptr==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fno-omit-frame-pointer',
+                  # Line tables are enough for symbolized sanitizer reports.
+                  '-gline-tables-only',
+                ],
+                'cflags!': [
+                  '-fomit-frame-pointer',
+                ],
+              }],
+            ],
+          }],
+          ['asan==1 or lsan==1 or tsan==1 or msan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'ldflags!': [
+                  # Functions interposed by the sanitizers can make ld think
+                  # that some libraries aren't needed when they actually are,
+                  # http://crbug.com/234010. As a workaround, disable --as-needed.
+                  '-Wl,--as-needed',
+                ],
+                'defines': [
+                  'MEMORY_TOOL_REPLACES_ALLOCATOR',
+                  'MEMORY_SANITIZER_INITIAL_SIZE',
+                ],
+              }],
+            ],
+          }],
+          ['asan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=address',
+                  # TODO(earthdok): Re-enable. http://crbug.com/427202
+                  #'-fsanitize-blacklist=<(asan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=address',
+                ],
+              }],
+            ],
+            'conditions': [
+              ['OS=="mac"', {
+                'cflags': [
+                  '-mllvm -asan-globals=0',  # http://crbug.com/352073
+                ],
+              }],
+            ],
+          }],
+          ['ubsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  # FIXME: work on enabling more flags and getting rid of false
+                  # positives. http://crbug.com/174801.
+                  '-fsanitize=bounds',
+                  '-fsanitize=float-divide-by-zero',
+                  '-fsanitize=integer-divide-by-zero',
+                  '-fsanitize=null',
+                  '-fsanitize=object-size',
+                  '-fsanitize=return',
+                  '-fsanitize=returns-nonnull-attribute',
+                  '-fsanitize=shift-exponent',
+                  '-fsanitize=signed-integer-overflow',
+                  '-fsanitize=unreachable',
+                  '-fsanitize=vla-bound',
+                  '-fsanitize-blacklist=<(ubsan_blacklist)',
+                  # Employ the experimental PBQP register allocator to avoid
+                  # slow compilation on files with too many basic blocks.
+                  # See http://crbug.com/426271.
+                  '-mllvm -regalloc=pbqp',
+                  # Speculatively use coalescing to slightly improve the code
+                  # generated by the PBQP register allocator. May increase
+                  # compile time.
+                  '-mllvm -pbqp-coalescing',
+                ],
+                'cflags_cc!': [
+                  '-fno-rtti',
+                ],
+                'cflags!': [
+                  '-fno-rtti',
+                ],
+                'ldflags': [
+                  '-fsanitize=undefined',
+                ],
+                'defines': [
+                  'UNDEFINED_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['ubsan_vptr==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=vptr',
+                  '-fsanitize-blacklist=<(ubsan_vptr_blacklist)',
+                ],
+                'cflags_cc!': [
+                  '-fno-rtti',
+                ],
+                'cflags!': [
+                  '-fno-rtti',
+                ],
+                'ldflags': [
+                  '-fsanitize=vptr',
+                ],
+                'defines': [
+                  'UNDEFINED_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['asan_coverage!=0 and sanitizer_coverage==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(asan_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+          ['sanitizer_coverage!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(sanitizer_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+          ['asan_field_padding!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-address-field-padding=<(asan_field_padding)',
+                ],
+              }],
+            ],
+          }],
+          ['lsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=leak',
+                ],
+                'ldflags': [
+                  '-fsanitize=leak',
+                ],
+                'defines': [
+                  'LEAK_SANITIZER',
+                  'WTF_USE_LEAK_SANITIZER=1',
+                ],
+              }],
+            ],
+          }],
+          ['tsan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=thread',
+                  '-fsanitize-blacklist=<(tsan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=thread',
+                ],
+                'defines': [
+                  'THREAD_SANITIZER',
+                  'DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1',
+                  'WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1',
+                ],
+              }],
+            ],
+          }],
+          ['msan==1', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize=memory',
+                  '-fsanitize-memory-track-origins=<(msan_track_origins)',
+                  '-fsanitize-blacklist=<(msan_blacklist)',
+                ],
+                'ldflags': [
+                  '-fsanitize=memory',
+                ],
+                'defines': [
+                  'MEMORY_SANITIZER',
+                ],
+              }],
+            ],
+          }],
+          ['use_instrumented_libraries==1', {
+            'dependencies': [
+              '<(DEPTH)/third_party/instrumented_libraries/instrumented_libraries.gyp:instrumented_libraries',
+            ],
+          }],
+          ['use_prebuilt_instrumented_libraries==1', {
+            'dependencies': [
+              '<(DEPTH)/third_party/instrumented_libraries/instrumented_libraries.gyp:prebuilt_instrumented_libraries',
+            ],
+          }],
+          ['use_custom_libcxx==1', {
+            'dependencies': [
+              '<(DEPTH)/buildtools/third_party/libc++/libc++.gyp:libcxx_proxy',
+            ],
+          }],
+          ['order_profiling!=0 and (chromeos==1 or OS=="linux" or OS=="android")', {
+            'target_conditions' : [
+              # crazy_linker has an upstream gyp file we can't edit, and we
+              # don't want to instrument it.
+              ['_toolset=="target" and _target_name!="crazy_linker"', {
+                'cflags': [
+                  '-finstrument-functions',
+                  # Allow mmx intrinsics to inline, so that the
+                  # compiler can expand the intrinsics.
+                  '-finstrument-functions-exclude-file-list=mmintrin.h',
+                ],
+              }],
+              ['_toolset=="target" and OS=="android"', {
+                'cflags': [
+                  # Avoids errors with current NDK:
+                  # "third_party/android_tools/ndk/toolchains/arm-linux-androideabi-4.6/prebuilt/linux-x86_64/bin/../lib/gcc/arm-linux-androideabi/4.6/include/arm_neon.h:3426:3: error: argument must be a constant"
+                  '-finstrument-functions-exclude-file-list=arm_neon.h,SaturatedArithmeticARM.h',
+                ],
+              }],
+            ],
+          }],
+          ['linux_dump_symbols==1', {
+            'cflags': [ '-g' ],
+            'conditions': [
+              ['OS=="linux" and host_arch=="ia32" and linux_use_bundled_gold==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
+                      # Attempt to use less memory to prevent the linker from
+                      # running out of address space. Consider installing a
+                      # 64-bit kernel and switching to a 64-bit linker.
+                      '-Wl,--no-keep-memory',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['use_allocator!="tcmalloc"', {
+            'defines': ['NO_TCMALLOC'],
+          }],
+          ['linux_use_gold_flags==1', {
+            # Newer gccs and clangs support -fuse-ld; use the flag to force
+            # gold selection.
+            # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
+            'ldflags': [ '-fuse-ld=gold', ],
+
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'ldflags': [
+                  # Experimentation found that using four linking threads
+                  # saved ~20% of link time.
+                  # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+                  # Only apply this to the target linker, since the host
+                  # linker might not be gold, but isn't used much anyway.
+                  # TODO(raymes): Disable threading because gold is frequently
+                  # crashing on the bots: crbug.com/161942.
+                  # '-Wl,--threads',
+                  # '-Wl,--thread-count=4',
+                ],
+                'conditions': [
+                  # TODO(thestig): Enable this for disabled cases.
+                  [ 'buildtype!="Official" and chromeos==0 and release_valgrind_build==0 and asan==0 and lsan==0 and tsan==0 and msan==0 and ubsan==0 and ubsan_vptr==0', {
+                    'ldflags': [
+                      '-Wl,--detect-odr-violations',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+            'conditions': [
+              ['release_valgrind_build==0 and order_profiling==0 and asan==0 and msan==0 and lsan==0 and tsan==0', {
+                'target_conditions': [
+                  ['_toolset=="target"', {
+                    'ldflags': [
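+                      # Identical code folding: let gold merge functions with
+                      # identical machine code to reduce binary size (the same
+                      # --icf flag is used for the Android link further below).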
+                      '-Wl,--icf=<(gold_icf_level)',
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+          ['linux_use_bundled_binutils==1', {
+            'cflags': [
+              '-B<!(cd <(DEPTH) && pwd -P)/<(binutils_dir)',
+            ],
+          }],
+          ['linux_use_bundled_gold==1 and '
+           'not (clang==0 and (use_lto==1 or use_lto_o2==1))', {
+            # Put our binutils, which contains gold, in the search path. We pass
+            # the path to gold to the compiler. gyp leaves unspecified what the
+            # cwd is when running the compiler, so the normal gyp path-munging
+            # fails us. This hack gets the right path.
+            #
+            # Disabled when using GCC LTO because GCC also uses the -B search
+            # path at link time to find "as", and our bundled "as" can only
+            # target x86.
+            'ldflags': [
+              '-B<!(cd <(DEPTH) && pwd -P)/<(binutils_dir)',
+            ],
+          }],
+          # Some binutils 2.23 releases may or may not have new dtags enabled,
+          # but they are all compatible with --disable-new-dtags,
+          # because the new dynamic tags are not created by default.
+          ['binutils_version>=223', {
+            # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+            # and the new DT_RUNPATH doesn't work without the --no-as-needed flag.
+            # FIXME(mithro): Figure out the --as-needed/--no-as-needed flags
+            # inside this file to allow usage of --no-as-needed and removal of
+            # this flag.
+            'ldflags': [
+              '-Wl,--disable-new-dtags',
+            ],
+          }],
+          ['gcc_version>=47 and clang==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags_cc': [
+                  '-std=gnu++11',
+                  # See comment for -Wno-c++11-narrowing.
+                  '-Wno-narrowing',
+                  # TODO(thakis): Remove, http://crbug.com/263960
+                  '-Wno-literal-suffix',
+                ],
+              }],
+            ],
+          }],
+          ['host_gcc_version>=47 and clang==0 and host_clang==0', {
+            'target_conditions': [
+              ['_toolset=="host"', {
+                'cflags_cc': [
+                  '-std=gnu++11',
+                  # See comment for -Wno-c++11-narrowing.
+                  '-Wno-narrowing',
+                  # TODO(thakis): Remove, http://crbug.com/263960
+                  '-Wno-literal-suffix',
+                ],
+              }],
+            ],
+          }],
+        ],
+      },
+    }],
+    # FreeBSD-specific options; note that most FreeBSD options are set above,
+    # with Linux.
+    ['OS=="freebsd"', {
+      'target_defaults': {
+        'ldflags': [
+          '-Wl,--no-keep-memory',
+        ],
+      },
+    }],
+    # Android-specific options; note that most are set above with Linux.
+    ['OS=="android"', {
+      'variables': {
+        # This is a unique identifier for a given build. It's used for
+        # identifying various build artifacts corresponding to a particular
+        # build of chrome (e.g. where to find archived symbols).
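+        # The value is embedded into target builds via the CHROME_BUILD_ID
+        # define in the 'defines' list further below.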
+        'chrome_build_id%': '',
+        'conditions': [
+          # Figure this out early since it needs symbols from libgcc.a, so it
+          # has to be before that in the set of libraries.
+          ['component=="shared_library"', {
+              'android_libcpp_library': 'c++_shared',
+          }, {
+              'android_libcpp_library': 'c++_static',
+          }],
+        ],
+
+        # Placing this variable here prevents us from forking libvpx, which is
+        # used by remoting. Remoting is off, so it needn't be built, and
+        # forking its deps seems like overkill. But this variable needs to be
+        # defined for gyp to run properly. A proper solution would be an
+        # OS==android conditional in third_party/libvpx/libvpx.gyp that
+        # defines it.
+        'libvpx_path': 'lib/linux/arm',
+      },
+      'target_defaults': {
+        'variables': {
+          'release_extra_cflags%': '',
+          'conditions': [
+            # If we're using the components build, append "cr" to all shared
+            # libraries to avoid naming collisions with android system library
+            # versions with the same name (e.g. skia, icu).
+            ['component=="shared_library"', {
+              'android_product_extension': 'cr.so',
+            }, {
+              'android_product_extension': 'so',
+            } ],
+          ],
+        },
+        'target_conditions': [
+          ['_type=="shared_library"', {
+            'product_extension': '<(android_product_extension)',
+          }],
+
+          # Settings for building device targets using Android's toolchain.
+          # These are based on the setup.mk file from the Android NDK.
+          #
+          # The NDK Android executable link step looks as follows:
+          #  $LDFLAGS
+          #  $(TARGET_CRTBEGIN_DYNAMIC_O)  <-- crtbegin.o
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #  $(TARGET_CRTEND_O)            <-- crtend.o
+          #
+          # For now the above are approximated for executables by adding
+          # crtbegin.o to the end of the ldflags and 'crtend.o' to the end
+          # of 'libraries'.
+          #
+          # The NDK Android shared library link step looks as follows:
+          #  $LDFLAGS
+          #  $(PRIVATE_OBJECTS)            <-- The .o that we built
+          #  -Wl,--whole-archive
+          #  $(PRIVATE_WHOLE_STATIC_LIBRARIES)
+          #  -Wl,--no-whole-archive
+          #  $(PRIVATE_STATIC_LIBRARIES)   <-- The .a that we built
+          #  $(TARGET_LIBGCC)              <-- libgcc.a
+          #  $(PRIVATE_SHARED_LIBRARIES)   <-- The .so that we built
+          #  $(PRIVATE_LDLIBS)             <-- System .so
+          #
+          # For now, assume that whole static libraries are not needed.
+          #
+          # For both executables and shared libraries, add the proper
+          # libgcc.a to the start of libraries which puts it in the
+          # proper spot after .o and .a files get linked in.
+          #
+          # TODO: The proper thing to do longer-term would be proper gyp
+          # support for a custom link command line.
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'cflags': [
+              '-ffunction-sections',
+              '-funwind-tables',
+              '-g',
+              '-fstack-protector',
+              '-fno-short-enums',
+              '-finline-limit=64',
+              '<@(release_extra_cflags)',
+              '--sysroot=<(android_ndk_sysroot)',
+              # NOTE: The libc++ header include paths below are specified in
+              # cflags rather than include_dirs because they need to come
+              # after include_dirs.
+              # The include ordering here is important; change with caution.
+              '-isystem<(android_libcpp_include)',
+              '-isystem<(android_ndk_root)/sources/cxx-stl/llvm-libc++abi/libcxxabi/include',
+              '-isystem<(android_ndk_root)/sources/android/support/include',
+            ],
+            'defines': [
+              'ANDROID',
+              '__GNU_SOURCE=1',  # Necessary for clone()
+              'CHROME_BUILD_ID="<(chrome_build_id)"',
+              # The NDK has these things, but doesn't define the constants
+              # to say that it does. Define them here instead.
+              'HAVE_SYS_UIO_H',
+            ],
+            'ldflags!': [
+              '-pthread',  # Not supported by Android toolchain.
+            ],
+            'ldflags': [
+              '-Wl,--build-id=sha1',
+              '-Wl,--no-undefined',
+              '--sysroot=<(android_ndk_sysroot)',
+              '-nostdlib',
+              '-L<(android_libcpp_libs_dir)',
+              # Don't allow visible symbols from libgcc or libc++ to be
+              # re-exported.
+              '-Wl,--exclude-libs=libgcc.a',
+              '-Wl,--exclude-libs=libc++_static.a',
+              # Don't allow visible symbols from libraries that contain
+              # assembly code with symbols that aren't hidden properly.
+              # http://crbug.com/448386
+              '-Wl,--exclude-libs=libcommon_audio.a',
+              '-Wl,--exclude-libs=libcommon_audio_neon.a',
+              '-Wl,--exclude-libs=libcommon_audio_sse2.a',
+              '-Wl,--exclude-libs=libiSACFix.a',
+              '-Wl,--exclude-libs=libisac_neon.a',
+              '-Wl,--exclude-libs=libopus.a',
+              '-Wl,--exclude-libs=libvpx.a',
+            ],
+            'libraries': [
+              '-l<(android_libcpp_library)',
+              '-latomic',
+              # Manually link the libgcc.a that the cross compiler uses.
+              '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
+              '-lc',
+              '-ldl',
+              '-lm',
+            ],
+            'conditions': [
+              ['component=="static_library"', {
+                'target_conditions': [
+                  ['use_native_jni_exports==0', {
+                    # Use a linker version script to strip JNI exports from
+                    # binaries which have not specifically asked to use them.
+                    'ldflags': [
+                      '-Wl,--version-script=<!(cd <(DEPTH) && pwd -P)/build/android/android_no_jni_exports.lst',
+                    ],
+                  }],
+                ],
+              }],
+              ['clang==1', {
+                'libraries!': [
+                  # Clang with libc++ does not require an explicit atomic
+                  # library reference.
+                  '-latomic',
+                ],
+                'cflags': [
+                  # Work around incompatibilities between bionic and clang
+                  # headers.
+                  '-D__compiler_offsetof=__builtin_offsetof',
+                  '-Dnan=__builtin_nan',
+                ],
+                'conditions': [
+                  ['target_arch=="arm"', {
+                    'cflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target arm-linux-androideabi',
+                    ],
+                  }],
+                  ['target_arch=="ia32"', {
+                    'cflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86-linux-androideabi',
+                    ],
+                  }],
+                  # Placeholder for x64 support, not tested.
+                  # TODO: Enable clang support for Android x64. http://crbug.com/346626
+                  ['target_arch=="x64"', {
+                    'cflags': [
+                      '-target x86_64-linux-androideabi',
+                    ],
+                    'ldflags': [
+                      '-target x86_64-linux-androideabi',
+                    ],
+                  }],
+                ],
+              }],
+              ['asan==1', {
+                'cflags': [
+                  # The Android build relies on -Wl,--gc-sections removing
+                  # unreachable code. ASan instrumentation for globals inhibits
+                  # this and results in a library with unresolvable relocations.
+                  # TODO(eugenis): find a way to reenable this.
+                  '-mllvm -asan-globals=0',
+                ],
+              }],
+              ['target_arch == "arm" and order_profiling==0', {
+                'ldflags': [
+                  # Enable identical code folding to reduce size.
+                  '-Wl,--icf=<(gold_icf_level)',
+                ],
+              }],
+              ['target_arch=="ia32"', {
+                # The x86 toolchain currently has problems with stack-protector.
+                'cflags!': [
+                  '-fstack-protector',
+                ],
+                'cflags': [
+                  '-fno-stack-protector',
+                ],
+              }],
+            ],
+            'target_conditions': [
+              ['_type=="executable"', {
+                # Force android tools to export the "main" symbol so they can be
+                # loaded on ICS using the run_pie wrapper. See crbug.com/373219.
+                # TODO(primiano): remove -fvisibility and -rdynamic flags below
+                # when ICS support is dropped.
+                'cflags': [
+                  '-fPIE',
+                  '-fvisibility=default',
+                ],
+                'ldflags': [
+                  '-Bdynamic',
+                  '-Wl,--gc-sections',
+                  '-Wl,-z,nocopyreloc',
+                  '-pie',
+                  '-rdynamic',
+                  # crtbegin_dynamic.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_dynamic.o',
+                ],
+                'libraries': [
+                  # crtend_android.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_android.o',
+                ],
+              }],
+              ['_type=="shared_library" or _type=="loadable_module"', {
+                'ldflags': [
+                  '-Wl,-shared,-Bsymbolic',
+                  # crtbegin_so.o should be the last item in ldflags.
+                  '<(android_ndk_lib)/crtbegin_so.o',
+                ],
+                'libraries': [
+                  # crtend_so.o needs to be the last item in libraries.
+                  # Do not add any libraries after this!
+                  '<(android_ndk_lib)/crtend_so.o',
+                ],
+              }],
+            ],
+          }],
+          # Settings for building host targets using the system toolchain.
+          ['_toolset=="host"', {
+            'cflags!': [
+              # Due to issues in the Clang build system, using ASan on 32-bit
+              # binaries on an x86_64 host is problematic.
+              # TODO(eugenis): re-enable.
+              '-fsanitize=address',
+            ],
+            'ldflags!': [
+              '-fsanitize=address',
+              '-Wl,-z,noexecstack',
+              '-Wl,--gc-sections',
+              '-Wl,-O1',
+              '-Wl,--as-needed',
+              '-Wl,--warn-shared-textrel',
+              '-Wl,--fatal-warnings',
+            ],
+          }],
+          # Settings for building host targets on mac.
+          ['_toolset=="host" and host_os=="mac"', {
+            'ldflags!': [
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['OS=="solaris"', {
+      'cflags!': ['-fvisibility=hidden'],
+      'cflags_cc!': ['-fvisibility-inlines-hidden'],
+    }],
+    ['OS=="mac" or OS=="ios"', {
+      'target_defaults': {
+        'mac_bundle': 0,
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          # Don't link in libarclite_macosx.a, see http://crbug.com/156530.
+          'CLANG_LINK_OBJC_RUNTIME': 'NO',          # -fno-objc-link-runtime
+          'COPY_PHASE_STRIP': 'NO',
+          'GCC_C_LANGUAGE_STANDARD': 'c99',         # -std=c99
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          # GCC_INLINES_ARE_PRIVATE_EXTERN maps to -fvisibility-inlines-hidden
+          'GCC_INLINES_ARE_PRIVATE_EXTERN': 'YES',
+          'GCC_OBJC_CALL_CXX_CDTORS': 'YES',        # -fobjc-call-cxx-cdtors
+          'GCC_SYMBOLS_PRIVATE_EXTERN': 'YES',      # -fvisibility=hidden
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'GCC_TREAT_WARNINGS_AS_ERRORS': 'YES',    # -Werror
+          'GCC_VERSION': '4.2',
+          'GCC_WARN_ABOUT_MISSING_NEWLINE': 'YES',  # -Wnewline-eof
+          'USE_HEADERMAP': 'NO',
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-Wextra',
+            # Don't warn about unused function parameters.
+            '-Wno-unused-parameter',
+            # Don't warn about the "struct foo f = {0};" initialization
+            # pattern.
+            '-Wno-missing-field-initializers',
+          ],
+          'conditions': [
+            ['chromium_mac_pch', {'GCC_PRECOMPILE_PREFIX_HEADER': 'YES'},
+                                 {'GCC_PRECOMPILE_PREFIX_HEADER': 'NO'}
+            ],
+            # Note that the prebuilt Clang binaries should not be used for iOS
+            # development except for ASan builds.
+            ['clang==1', {
+              'CLANG_CXX_LANGUAGE_STANDARD': 'c++11',  # -std=c++11
+              # Warn if automatic synthesis is triggered with
+              # the -Wobjc-missing-property-synthesis flag.
+              'CLANG_WARN_OBJC_MISSING_PROPERTY_SYNTHESIS': 'YES',
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'WARNING_CFLAGS': [
+                # This warns on selectors from Cocoa headers (-length, -set).
+                # cfe-dev is currently discussing the merits of this warning.
+                # TODO(thakis): Reevaluate what to do with this, based on the
+                # cfe-dev discussion.
+                '-Wno-selector-type-mismatch',
+              ],
+              'conditions': [
+                ['clang_xcode==0', {
+                  'CC': '$(SOURCE_ROOT)/<(clang_dir)/clang',
+                  'LDPLUSPLUS': '$(SOURCE_ROOT)/<(clang_dir)/clang++',
+                }],
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_use_chrome_plugins==1', {
+              'OTHER_CFLAGS': [
+                '<@(clang_chrome_plugins_flags)',
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_load!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-load', '-Xclang', '<(clang_load)',
+              ],
+            }],
+            ['clang==1 and clang_xcode==0 and clang_add_plugin!=""', {
+              'OTHER_CFLAGS': [
+                '-Xclang', '-add-plugin', '-Xclang', '<(clang_add_plugin)',
+              ],
+            }],
+            ['clang==1 and "<(GENERATOR)"=="ninja"', {
+              'OTHER_CFLAGS': [
+                # See http://crbug.com/110262
+                '-fcolor-diagnostics',
+              ],
+            }],
+            ['OS=="ios" and target_subarch!="arm32" and \
+              "<(GENERATOR)"=="xcode"', {
+              'OTHER_CFLAGS': [
+                # TODO(ios): when building Chrome for iOS on a 64-bit platform
+                # with Xcode, the -Wshorten-64-to-32 warning is automatically
+                # enabled. This causes failures when compiling protobuf code,
+                # so disable the warning. http://crbug.com/359107
+                '-Wno-shorten-64-to-32',
+              ],
+            }],
+          ],
+        },
+        'conditions': [
+          ['clang==1', {
+            'variables': {
+              'clang_dir': '../third_party/llvm-build/Release+Asserts/bin',
+            },
+          }],
+          ['asan==1', {
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fsanitize=address',
+                '-mllvm -asan-globals=0',  # http://crbug.com/352073
+                '-gline-tables-only',
+              ],
+            },
+          }],
+          ['asan_coverage!=0 and sanitizer_coverage==0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(asan_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+          ['sanitizer_coverage!=0', {
+            'target_conditions': [
+              ['_toolset=="target"', {
+                'cflags': [
+                  '-fsanitize-coverage=<(sanitizer_coverage)',
+                ],
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          }],
+        ],
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-search_paths_first']},
+            'conditions': [
+              ['asan==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-fsanitize=address',
+                  ],
+                },
+              }],
+              ['mac_write_linker_maps==1', {
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-map,>(_target_name).map',
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['_mac_bundle', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-Wl,-ObjC']},
+            'target_conditions': [
+              ['_type=="executable"', {
+                'conditions': [
+                  ['asan==1', {
+                    'postbuilds': [
+                      {
+                        'variables': {
+                          # Define copy_asan_dylib_path in a variable ending in
+                          # _path so that gyp understands it's a path and
+                          # performs proper relativization during dict merging.
+                          'copy_asan_dylib_path':
+                            'mac/copy_asan_runtime_dylib.sh',
+                        },
+                        'postbuild_name': 'Copy ASan runtime dylib',
+                        'action': [
+                          '<(copy_asan_dylib_path)',
+                        ],
+                      },
+                    ],
+                  }],
+                ],
+              }],
+            ],
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac" or OS=="ios"
+    ['OS=="mac"', {
+      'target_defaults': {
+        'defines': [
+          # Prevent Mac OS X AssertMacros.h from defining macros that collide
+          # with common names, like 'check', 'require', and 'verify'.
+          # (Included by system header. Also exists on iOS but not included.)
+          # http://opensource.apple.com/source/CarbonHeaders/CarbonHeaders-18.1/AssertMacros.h
+          '__ASSERT_MACROS_DEFINE_VERSIONS_WITHOUT_UNDERSCORE=0',
+        ],
+        'variables': {
+          # These should end with %, but there seems to be a bug with % in
+          # variables that are intended to be set to different values in
+          # different targets, like these.
+          'mac_pie': 1,        # Most executables can be position-independent.
+          # Strip debugging symbols from the target.
+          'mac_strip': '<(mac_strip_release)',
+          'conditions': [
+            ['asan==1', {
+              'conditions': [
+                ['mac_want_real_dsym=="default"', {
+                  'mac_real_dsym': 1,
+                }, {
+                  'mac_real_dsym': '<(mac_want_real_dsym)'
+                }],
+              ],
+            }, {
+              'conditions': [
+                ['mac_want_real_dsym=="default"', {
+                  'mac_real_dsym': 0, # Fake .dSYMs are fine in most cases.
+                }, {
+                  'mac_real_dsym': '<(mac_want_real_dsym)'
+                }],
+              ],
+            }],
+          ],
+        },
+        'configurations': {
+          'Release_Base': {
+            'conditions': [
+              ['branding=="Chrome" and buildtype=="Official"', {
+                'xcode_settings': {
+                  'OTHER_CFLAGS': [
+                    # The Google Chrome Framework dSYM generated by dsymutil has
+                    # grown larger than 4GB, which dsymutil can't handle. Reduce
+                    # the amount of debug symbols.
+                    '-fno-standalone-debug',  # See http://crbug.com/479841
+                  ]
+                },
+              }],
+            ],
+          },  # configuration "Release"
+        },  # configurations
+        'xcode_settings': {
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          # MACOSX_DEPLOYMENT_TARGET maps to -mmacosx-version-min
+          'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+          # Keep pch files below xcodebuild/.
+          'SHARED_PRECOMPS_DIR': '$(CONFIGURATION_BUILD_DIR)/SharedPrecompiledHeaders',
+          'OTHER_CFLAGS': [
+            # Someday this can be replaced by a 'GCC_STRICT_ALIASING': 'NO'
+            # xcode_setting, but not until all downstream projects' mac bots are
+            # using xcode >= 4.6, because that's when the default value of the
+            # flag in the compiler switched.  Pre-4.6, the value 'NO' for that
+            # setting is a no-op as far as xcode is concerned, but the compiler
+            # behaves differently based on whether -fno-strict-aliasing is
+            # specified or not.
+            '-fno-strict-aliasing',  # See http://crbug.com/32204.
+          ],
+        },
+        'target_conditions': [
+          ['_type=="executable"', {
+            'postbuilds': [
+              {
+                # Arranges for data (heap) pages to be protected against
+                # code execution when running on Mac OS X 10.7 ("Lion"), and
+                # ensures that the position-independent executable (PIE) bit
+                # is set for ASLR when running on Mac OS X 10.5 ("Leopard").
+                'variables': {
+                  # Define change_mach_o_flags in a variable ending in _path
+                  # so that GYP understands it's a path and performs proper
+                  # relativization during dict merging.
+                  'change_mach_o_flags_path':
+                      'mac/change_mach_o_flags_from_xcode.sh',
+                  'change_mach_o_flags_options%': [
+                  ],
+                  'target_conditions': [
+                    ['mac_pie==0 or release_valgrind_build==1', {
+                      # Don't enable PIE if it's unwanted. It's unwanted if
+                      # the target specifies mac_pie=0 or if building for
+                      # Valgrind, because Valgrind doesn't understand slide.
+                      # See the similar mac_pie/release_valgrind_build check
+                      # below.
+                      'change_mach_o_flags_options': [
+                        '--no-pie',
+                      ],
+                    }],
+                  ],
+                },
+                'postbuild_name': 'Change Mach-O Flags',
+                'action': [
+                  '<(change_mach_o_flags_path)',
+                  '>@(change_mach_o_flags_options)',
+                ],
+              },
+            ],
+            'target_conditions': [
+              ['mac_pie==1 and release_valgrind_build==0', {
+                # Turn on position-independence (ASLR) for executables. When
+                # PIE is on for the Chrome executables, the framework will
+                # also be subject to ASLR.
+                # Don't do this when building for Valgrind, because Valgrind
+                # doesn't understand slide. TODO: Make Valgrind on Mac OS X
+                # understand slide, and get rid of the Valgrind check.
+                'xcode_settings': {
+                  'OTHER_LDFLAGS': [
+                    '-Wl,-pie',  # Position-independent executable (MH_PIE)
+                  ],
+                },
+              }],
+            ],
+          }],
+          ['(_type=="executable" or _type=="shared_library" or \
+             _type=="loadable_module") and mac_strip!=0', {
+            'target_conditions': [
+              ['mac_real_dsym == 1', {
+                # To get a real .dSYM bundle produced by dsymutil, set the
+                # debug information format to dwarf-with-dsym.  Since
+                # strip_from_xcode will not be used, set Xcode to do the
+                # stripping as well.
+                'configurations': {
+                  'Release_Base': {
+                    'xcode_settings': {
+                      'DEBUG_INFORMATION_FORMAT': 'dwarf-with-dsym',
+                      'DEPLOYMENT_POSTPROCESSING': 'YES',
+                      'STRIP_INSTALLED_PRODUCT': 'YES',
+                      'conditions': [
+                        # Only strip non-ASan builds.
+                        ['asan==0', {
+                          'target_conditions': [
+                            ['_type=="shared_library" or _type=="loadable_module"', {
+                              # The Xcode default is to strip debugging symbols
+                              # only (-S).  Local symbols should be stripped as
+                              # well, which will be handled by -x.  Xcode will
+                              # continue to insert -S when stripping even when
+                              # additional flags are added with STRIPFLAGS.
+                              'STRIPFLAGS': '-x',
+                            }],  # _type=="shared_library" or _type=="loadable_module"
+                          ],  # target_conditions
+                        }, {  # asan != 0
+                          'STRIPFLAGS': '-S',
+                        }],
+                      ],
+                    },  # xcode_settings
+                  },  # configuration "Release"
+                },  # configurations
+              }, {  # mac_real_dsym != 1
+                # To get a fast fake .dSYM bundle, use a post-build step to
+                # produce the .dSYM and strip the executable.  strip_from_xcode
+                # only operates in the Release configuration.
+                'postbuilds': [
+                  {
+                    'variables': {
+                      # Define strip_from_xcode in a variable ending in _path
+                      # so that gyp understands it's a path and performs proper
+                      # relativization during dict merging.
+                      'strip_from_xcode_path': 'mac/strip_from_xcode',
+                    },
+                    'postbuild_name': 'Strip If Needed',
+                    'action': ['<(strip_from_xcode_path)'],
+                  },
+                ],  # postbuilds
+              }],  # mac_real_dsym
+            ],  # target_conditions
+          }],  # (_type=="executable" or _type=="shared_library" or
+               #  _type=="loadable_module") and mac_strip!=0
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="mac"
+    ['OS=="ios"', {
+      'includes': [
+        'ios/coverage.gypi',
+      ],
+      'target_defaults': {
+        'xcode_settings' : {
+          'CLANG_CXX_LANGUAGE_STANDARD': 'c++11',
+
+          'conditions': [
+            # Older Xcodes do not support -Wno-deprecated-register, so pass an
+            # additional flag to suppress the "unknown compiler option" error.
+            # Restrict this flag to builds that are either compiling with Xcode
+            # or compiling with Xcode's Clang.  This will allow Ninja builds to
+            # continue failing on unknown compiler options.
+            # TODO(rohitrao): This flag is temporary and should be removed as
+            # soon as the iOS bots are updated to use Xcode 5.1.
+            ['clang_xcode==1', {
+              'WARNING_CFLAGS': [
+                '-Wno-unknown-warning-option',
+                # It's not possible to achieve nullability completeness before
+                # all builders are running Xcode 7. crbug.com/499809
+                '-Wno-nullability-completeness',
+              ],
+            }],
+
+            # Limit the valid architectures depending on "target_subarch".
+            # This needs to include the "arm" architectures but also the "x86"
+            # ones (they are used when building for the simulator).
+            ['target_subarch=="arm32"', {
+              'VALID_ARCHS': ['armv7', 'i386'],
+            }],
+            ['target_subarch=="arm64"', {
+              'VALID_ARCHS': ['arm64', 'x86_64'],
+            }],
+            ['target_subarch=="both"', {
+              'VALID_ARCHS': ['arm64', 'armv7', 'x86_64', 'i386'],
+            }],
+            ['use_system_libcxx==1', {
+              'target_conditions': [
+                # Only use libc++ when building the target for iOS, not when
+                # building tools for the host (OS X), since the Mac targets
+                # OS X SDK 10.6, which does not support libc++.
+                ['_toolset=="target"', {
+                  'CLANG_CXX_LIBRARY': 'libc++',  # -stdlib=libc++
+                }]
+              ],
+            }, {
+              # The default for deployment target of 7.0+ is libc++, so force
+              # the old behavior unless libc++ is enabled.
+              'CLANG_CXX_LIBRARY': 'libstdc++',  # -stdlib=libstdc++
+            }],
+          ],
+        },
+        'target_conditions': [
+          ['_toolset=="host"', {
+            'xcode_settings': {
+              'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+              'MACOSX_DEPLOYMENT_TARGET': '<(mac_deployment_target)',
+              'VALID_ARCHS': [
+                'x86_64',
+              ],
+              'ARCHS': [
+                'x86_64',
+              ],
+            },
+          }],
+          ['_toolset=="target"', {
+            'xcode_settings': {
+              # This section should be for overriding host settings. But,
+              # since we can't negate the iphone deployment target above, we
+              # instead set it here for target only.
+              'IPHONEOS_DEPLOYMENT_TARGET': '<(ios_deployment_target)',
+              'ARCHS': ['$(ARCHS_STANDARD_INCLUDING_64_BIT)'],
+            },
+          }],
+          ['_type=="executable"', {
+            'configurations': {
+              'Release_Base': {
+                'xcode_settings': {
+                  'DEPLOYMENT_POSTPROCESSING': 'YES',
+                  'STRIP_INSTALLED_PRODUCT': 'YES',
+                },
+              },
+              'Debug_Base': {
+                'xcode_settings': {
+                  # Remove dSYM to reduce build time.
+                  'DEBUG_INFORMATION_FORMAT': 'dwarf',
+                },
+              },
+            },
+            'xcode_settings': {
+              'conditions': [
+                ['chromium_ios_signing', {
+                  # iOS SDK wants everything for device signed.
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': 'iPhone Developer',
+                }, {
+                  'CODE_SIGNING_REQUIRED': 'NO',
+                  'CODE_SIGN_IDENTITY[sdk=iphoneos*]': '',
+                }],
+              ],
+            },
+          }],
+        ],  # target_conditions
+      },  # target_defaults
+    }],  # OS=="ios"
+    ['OS=="win"', {
+      'target_defaults': {
+        'defines': [
+          '_WIN32_WINNT=0x0603',
+          'WINVER=0x0603',
+          'WIN32',
+          '_WINDOWS',
+          'NOMINMAX',
+          'PSAPI_VERSION=1',
+          '_CRT_RAND_S',
+          'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS',
+          'WIN32_LEAN_AND_MEAN',
+          '_ATL_NO_OPENGL',
+          '_SECURE_ATL',
+          # _HAS_EXCEPTIONS must match ExceptionHandling in msvs_settings.
+          '_HAS_EXCEPTIONS=0',
+          # Silence some warnings; we can't switch to the 'recommended'
+          # versions as they're not available on old OSs.
+          '_WINSOCK_DEPRECATED_NO_WARNINGS',
+        ],
+        'conditions': [
+          ['buildtype=="Official"', {
+              # In official builds, targets can self-select an optimization
+              # level by defining a variable named 'optimize', and setting it
+              # to one of
+              # - "size", optimizes for minimal code size - the default.
+              # - "speed", optimizes for speed over code size.
+              # - "max", whole program optimization and link-time code
+              #   generation. This is very expensive and should be used
+              #   sparingly.
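+              #
+              # Illustrative sketch only (not tied to any target in this
+              # file): a target would opt in with, e.g.,
+              #   'variables': { 'optimize': 'speed' },
+              # and the target_conditions below then apply the matching
+              # /O2 (Maximize Speed) and /Ot (Favor fast code) settings.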
+              'variables': {
+                'optimize%': 'size',
+              },
+              'msvs_settings': {
+                'VCLinkerTool': {
+                  # Set /LTCG for the official builds.
+                  'LinkTimeCodeGeneration': '1',
+                  'AdditionalOptions': [
+                    # Set the number of LTCG code-gen threads to eight.
+                    # The default is four. This gives a 5-10% link speedup.
+                    '/cgthreads:8',
+                  ],
+                },
+              },
+              'target_conditions': [
+                ['optimize=="size"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 1, optimizeMinSpace, Minimize Size (/O1)
+                        'Optimization': '1',
+                        # 2, favorSize - Favor small code (/Os)
+                        'FavorSizeOrSpeed': '2',
+                      },
+                    },
+                  },
+                ],
+                # This config is used to avoid a problem in ffmpeg, see
+                # http://crbug.com/264459.
+                ['optimize=="size_no_ltcg"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 1, optimizeMinSpace, Minimize Size (/O1)
+                        'Optimization': '1',
+                        # 2, favorSize - Favor small code (/Os)
+                        'FavorSizeOrSpeed': '2',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="speed"', {
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                      },
+                    },
+                  },
+                ],
+                ['optimize=="max"', {
+                    # Disable Warning 4702 ("Unreachable code") for the WPO/PGO
+                    # builds. Probably anything that this would catch that
+                    # wouldn't be caught in a normal build isn't going to
+                    # actually be a bug, so the incremental value of C4702 for
+                    # PGO builds is likely very small.
+                    'msvs_disabled_warnings': [
+                      4702
+                    ],
+                    'msvs_settings': {
+                      'VCCLCompilerTool': {
+                        # 2, optimizeMaxSpeed, Maximize Speed (/O2)
+                        'Optimization': '2',
+                        # 1, favorSpeed - Favor fast code (/Ot)
+                        'FavorSizeOrSpeed': '1',
+                        # This implies link time code generation.
+                        'WholeProgramOptimization': 'true',
+                      },
+                    },
+                  },
+                ],
+              ],
+            },
+          ],
+          ['msvs_xtree_patched!=1', {
+            # If xtree hasn't been patched, then we disable C4702. Otherwise,
+            # it's enabled. This will generally only be true for users with a
+            # system-level Express install.
+            'msvs_disabled_warnings': [
+              4702,
+            ],
+          }],
+        ],
+        'msvs_system_include_dirs': [
+          '<(windows_sdk_path)/Include/shared',
+          '<(windows_sdk_path)/Include/um',
+          '<(windows_sdk_path)/Include/winrt',
+          '$(VSInstallDir)/VC/atlmfc/include',
+        ],
+        'msvs_cygwin_shell': 0,
+        'msvs_disabled_warnings': [
+          # C4091: 'typedef ': ignored on left of 'X' when no variable is
+          #                    declared.
+          # This happens in a number of Windows headers. Dumb.
+          4091,
+
+          # C4127: conditional expression is constant
+          # This warning can in theory catch dead code and other problems, but
+          # triggers in far too many desirable cases where the conditional
+          # expression is either set by macros or corresponds to some legitimate
+          # compile-time constant expression (due to constant template args,
+          # conditionals comparing the sizes of different types, etc.).  Some of
+          # these can be worked around, but it's not worth it.
+          4127,
+
+          # C4351: new behavior: elements of array 'array' will be default
+          #        initialized
+          # This is a silly "warning" that basically just alerts you that the
+          # compiler is going to actually follow the language spec like it's
+          # supposed to, instead of not following it like old buggy versions
+          # did.  There's absolutely no reason to turn this on.
+          4351,
+
+          # C4355: 'this': used in base member initializer list
+          # It's commonly useful to pass |this| to objects in a class'
+          # initializer list.  While this warning can catch real bugs, most of
+          # the time the constructors in question don't attempt to call methods
+          # on the passed-in pointer (until later), and annotating every legit
+          # usage of this is simply more hassle than the warning is worth.
+          4355,
+
+          # C4503: 'identifier': decorated name length exceeded, name was
+          #        truncated
+          # This only means that some long error messages might have truncated
+          # identifiers in the presence of lots of templates.  It has no effect
+          # on program correctness and there's no real reason to waste time
+          # trying to prevent it.
+          4503,
+
+          # Warning C4589 says: "Constructor of abstract class ignores
+          # initializer for virtual base class." Disable this warning because it
+          # is flaky in VS 2015 RTM. It triggers on compiler generated
+          # copy-constructors in some cases.
+          4589,
+
+          # C4611: interaction between 'function' and C++ object destruction is
+          #        non-portable
+          # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+          # suggests using exceptions instead of setjmp/longjmp for C++, but
+          # Chromium code compiles without exception support.  We therefore have
+          # to use setjmp/longjmp for e.g. JPEG decode error handling, which
+          # means we have to turn off this warning (and be careful about how
+          # object destruction happens in such cases).
+          4611,
+
+          # TODO(maruel): These warnings are level 4. They will be slowly
+          # removed as code is fixed.
+          4100, # Unreferenced formal parameter
+          4121, # Alignment of a member was sensitive to packing
+          4244, # Conversion from 'type1' to 'type2', possible loss of data
+          4481, # Nonstandard extension used: override specifier 'keyword'
+          4505, # Unreferenced local function has been removed
+          4510, # Default constructor could not be generated
+          4512, # Assignment operator could not be generated
+          4610, # Object can never be instantiated
+          4838, # Narrowing conversion. Doesn't seem to be very useful.
+          4996, # 'X': was declared deprecated (for GetVersionEx).
+
+          # These are variable shadowing warnings that are new in VS2015. We
+          # should work through these at some point -- they may be removed from
+          # the RTM release in the /W4 set.
+          4456, 4457, 4458, 4459,
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'AdditionalOptions': ['/MP'],
+            'MinimalRebuild': 'false',
+            'BufferSecurityCheck': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'RuntimeTypeInfo': 'false',
+            'WarningLevel': '4',
+            'WarnAsError': 'true',
+            'DebugInformationFormat': '3',
+            # ExceptionHandling must match _HAS_EXCEPTIONS above.
+            'ExceptionHandling': '0',
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': ['/ignore:4221'],
+            'AdditionalLibraryDirectories': [
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+          },
+          'VCLinkerTool': {
+            'AdditionalDependencies': [
+              'wininet.lib',
+              'dnsapi.lib',
+              'version.lib',
+              'msimg32.lib',
+              'ws2_32.lib',
+              'usp10.lib',
+              'psapi.lib',
+              'dbghelp.lib',
+              'winmm.lib',
+              'shlwapi.lib',
+            ],
+            'AdditionalLibraryDirectories': [
+              '<(windows_sdk_path)/Lib/win8/um/x86',
+            ],
+            'GenerateDebugInformation': 'true',
+            'MapFileName': '$(OutDir)\\$(TargetName).map',
+            'ImportLibrary': '$(OutDir)\\lib\\$(TargetName).lib',
+            'FixedBaseAddress': '1',
+            # SubSystem values:
+            #   0 == not set
+            #   1 == /SUBSYSTEM:CONSOLE
+            #   2 == /SUBSYSTEM:WINDOWS
+            # Most of the executables we'll ever create are tests
+            # and utilities with console output.
+            'SubSystem': '1',
+          },
+          'VCMIDLTool': {
+            'GenerateStublessProxies': 'true',
+            'TypeLibraryName': '$(InputName).tlb',
+            'OutputDirectory': '$(IntDir)',
+            'HeaderFileName': '$(InputName).h',
+            'DLLDataFileName': '$(InputName).dlldata.c',
+            'InterfaceIdentifierFileName': '$(InputName)_i.c',
+            'ProxyFileName': '$(InputName)_p.c',
+          },
+          'VCResourceCompilerTool': {
+            'Culture' : '1033',
+            'AdditionalIncludeDirectories': [
+              '<(DEPTH)',
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'target_conditions': [
+            ['_type=="executable"', {
+              'VCManifestTool': {
+                'EmbedManifest': 'true',
+              },
+            }],
+            ['_type=="executable" and ">(win_exe_compatibility_manifest)"!=""', {
+              'VCManifestTool': {
+                'AdditionalManifestFiles': [
+                  '>(win_exe_compatibility_manifest)',
+                ],
+              },
+            }],
+          ],
+          'conditions': [
+            # Building with Clang on Windows is a work in progress and very
+            # experimental. See crbug.com/82385.
+            # Keep this in sync with the similar blocks in build/config/compiler/BUILD.gn
+            ['clang==1', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  # Many files use intrinsics without including this header.
+                  # TODO(hans): Fix those files, or move this to sub-GYPs.
+                  '/FIIntrin.h',
+
+                  # TODO(hans): Make this list shorter eventually, http://crbug.com/504657
+                  '-Qunused-arguments',  # http://crbug.com/504658
+                  '-Wno-microsoft',  # http://crbug.com/505296
+                  '-Wno-switch',  # http://crbug.com/505308
+                  '-Wno-unknown-pragmas',  # http://crbug.com/505314
+                  '-Wno-unused-function',  # http://crbug.com/505316
+                  '-Wno-unused-value',  # http://crbug.com/505318
+                  '-Wno-unused-local-typedef',  # http://crbug.com/411648
+                ],
+              },
+            }],
+            ['clang==1 and target_arch=="ia32"', {
+              'VCCLCompilerTool': {
+                'WarnAsError': 'false',
+                'AdditionalOptions': [
+                  '/fallback',
+                ],
+              },
+            }],
+            ['clang==1 and clang_use_chrome_plugins==1', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '<@(clang_chrome_plugins_flags)',
+                ],
+              },
+            }],
+            ['clang==1 and MSVS_VERSION == "2013"', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fmsc-version=1800',
+                ],
+              },
+            }],
+            ['clang==1 and MSVS_VERSION == "2015"', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fmsc-version=1900',
+                ],
+              },
+            }],
+            ['clang==1 and "<!(python <(DEPTH)/build/win/use_ansi_codes.py)"=="True"', {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  # cmd.exe doesn't understand ANSI escape codes by default,
+                  # so only enable them if something emulating them is around.
+                  '-fansi-escape-codes',
+                  # Also see http://crbug.com/110262
+                  '-fcolor-diagnostics',
+                ],
+              },
+            }],
+          ],
+        },
+      },
+    }],
+    ['disable_nacl==1', {
+      'target_defaults': {
+        'defines': [
+          'DISABLE_NACL',
+        ],
+      },
+    }],
+    ['OS=="win" and msvs_use_common_linker_extras', {
+      'target_defaults': {
+        'msvs_settings': {
+          'VCLinkerTool': {
+            'DelayLoadDLLs': [
+              'dbghelp.dll',
+              'dwmapi.dll',
+              'shell32.dll',
+              'uxtheme.dll',
+            ],
+          },
+        },
+        'configurations': {
+          'x86_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  '/safeseh',
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+              'conditions': [
+                ['syzyasan==0', {
+                  'VCLinkerTool': {
+                    'AdditionalOptions': ['/largeaddressaware'],
+                  },
+                }],
+                ['asan==1', {
+                  # TODO(asan/win): Move this down into the general
+                  # win-target_defaults section once the 64-bit asan runtime
+                  # exists.  See crbug.com/345874.
+                  'VCCLCompilerTool': {
+                    'AdditionalOptions': [
+                      '-fsanitize=address',
+                      '-fsanitize-blacklist=<(PRODUCT_DIR)/../../tools/memory/asan/blacklist_win.txt',
+                    ],
+                    'AdditionalIncludeDirectories': [
+                      # MSVC needs to be able to find the sanitizer headers when
+                      # invoked via /fallback. This is critical for using macros
+                      # like ASAN_UNPOISON_MEMORY_REGION in files where we fall
+                      # back.
+                      '<(DEPTH)/<(make_clang_dir)/lib/clang/<!(python <(DEPTH)/tools/clang/scripts/update.py --print-clang-version)/include_sanitizer',
+                    ],
+                  },
+                  'VCLinkerTool': {
+                    'AdditionalLibraryDirectories': [
+                      # TODO(hans): If make_clang_dir is absolute, this breaks.
+                      '<(DEPTH)/<(make_clang_dir)/lib/clang/<!(python <(DEPTH)/tools/clang/scripts/update.py --print-clang-version)/lib/windows',
+                    ],
+                  },
+                  'target_conditions': [
+                    ['component=="shared_library"', {
+                      'VCLinkerTool': {
+                        'AdditionalDependencies': [
+                           'clang_rt.asan_dynamic-i386.lib',
+                           'clang_rt.asan_dynamic_runtime_thunk-i386.lib',
+                        ],
+                      },
+                    }],
+                    ['_type=="executable" and component=="static_library"', {
+                      'VCLinkerTool': {
+                        'AdditionalDependencies': [
+                           'clang_rt.asan-i386.lib',
+                        ],
+                      },
+                    }],
+                    ['(_type=="shared_library" or _type=="loadable_module") and component=="static_library"', {
+                      'VCLinkerTool': {
+                        'AdditionalDependencies': [
+                           'clang_rt.asan_dll_thunk-i386.lib',
+                        ],
+                      },
+                    }],
+                  ],
+                }],
+                ['sanitizer_coverage!=0', {
+                  # TODO(asan/win): Move this down into the general
+                  # win-target_defaults section once the 64-bit asan runtime
+                  # exists.  See crbug.com/345874.
+                  'VCCLCompilerTool': {
+                    'AdditionalOptions': [
+                      '-fsanitize-coverage=<(sanitizer_coverage)',
+                    ],
+                  },
+                }],
+              ],
+            },
+            'conditions': [
+              ['sanitizer_coverage!=0', {
+                # TODO(asan/win): Move this down into the general
+                # win-target_defaults section once the 64-bit asan runtime
+                # exists.  See crbug.com/345874.
+                'defines': [
+                  'SANITIZER_COVERAGE',
+                ],
+              }],
+            ],
+          },
+          'x64_Base': {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                'AdditionalOptions': [
+                  # safeseh is not compatible with x64
+                  '/dynamicbase',
+                  '/ignore:4199',
+                  '/ignore:4221',
+                  '/nxcompat',
+                ],
+              },
+            },
+          },
+        },
+      },
+    }],
+    ['enable_new_npdevice_api==1', {
+      'target_defaults': {
+        'defines': [
+          'ENABLE_NEW_NPDEVICE_API',
+        ],
+      },
+    }],
+    # Don't warn about the "typedef 'foo' locally defined but not used"
+    # for gcc 4.8 and higher.
+    # TODO: remove this flag once all builds work. See crbug.com/227506
+    ['gcc_version>=48 and clang==0', {
+      'target_defaults': {
+        'cflags': [
+          '-Wno-unused-local-typedefs',
+        ],
+      },
+    }],
+    ['gcc_version>=48 and clang==0 and host_clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="host"', { 'cflags!': [ '-Wno-unused-local-typedefs' ]}],
+        ],
+      },
+    }],
+    ['clang==1 and ((OS!="mac" and OS!="ios") or clang_xcode==0) '
+        'and OS!="win"', {
+      'make_global_settings': [
+        ['CC', '<(make_clang_dir)/bin/clang'],
+        ['CXX', '<(make_clang_dir)/bin/clang++'],
+        ['CC.host', '$(CC)'],
+        ['CXX.host', '$(CXX)'],
+      ],
+    }],
+    ['clang==1 and OS=="win"', {
+      'make_global_settings': [
+        # On Windows, gyp's ninja generator only looks at CC.
+        ['CC', '<(make_clang_dir)/bin/clang-cl'],
+      ],
+    }],
+    ['use_lld==1 and OS=="win"', {
+      'make_global_settings': [
+        # Limited to Windows because -flavor link2 is the driver that is
+        # compatible with link.exe.
+        ['LD', '<(make_clang_dir)/bin/lld -flavor link2'],
+      ],
+    }],
+    ['OS=="android" and clang==0', {
+      # Hardcode the compiler names in the Makefile so that
+      # it won't depend on the environment at make time.
+      'make_global_settings': [
+        ['CC', '<!(/bin/echo -n <(android_toolchain)/*-gcc)'],
+        ['CXX', '<!(/bin/echo -n <(android_toolchain)/*-g++)'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+    ['OS=="linux" and target_arch=="mipsel" and clang==0', {
+      'make_global_settings': [
+        ['CC', '<(sysroot)/../bin/mipsel-linux-gnu-gcc'],
+        ['CXX', '<(sysroot)/../bin/mipsel-linux-gnu-g++'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+    ['OS=="linux" and target_arch=="arm" and host_arch!="arm" and chromeos==0 and clang==0', {
+      # Set default ARM cross compiling on linux.  These can be overridden
+      # using CC/CXX/etc environment variables.
+      'make_global_settings': [
+        ['CC', '<!(which arm-linux-gnueabihf-gcc)'],
+        ['CXX', '<!(which arm-linux-gnueabihf-g++)'],
+        ['CC.host', '<(host_cc)'],
+        ['CXX.host', '<(host_cxx)'],
+      ],
+    }],
+
+    # TODO(yyanagisawa): support GENERATOR==make.
+    #  The make generator doesn't support CC_wrapper without CC
+    #  in make_global_settings yet.
+    ['use_goma==1 and ("<(GENERATOR)"=="ninja" or clang==1)', {
+      'make_global_settings': [
+       ['CC_wrapper', '<(gomadir)/gomacc'],
+       ['CXX_wrapper', '<(gomadir)/gomacc'],
+       ['CC.host_wrapper', '<(gomadir)/gomacc'],
+       ['CXX.host_wrapper', '<(gomadir)/gomacc'],
+      ],
+    }],
+    ['use_lto==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags': [
+              '-flto',
+            ],
+            'xcode_settings': {
+              'LLVM_LTO': 'YES',
+            },
+          }],
+          # Work-around for http://openradar.appspot.com/20356002
+          ['_toolset=="target" and _type!="static_library"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-all_load',
+              ],
+            },
+          }],
+        ],
+      },
+    }],
+    ['use_lto==1 and clang==0', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags': [
+              '-ffat-lto-objects',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['use_lto==1 and clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'arflags': [
+              '--plugin', '../../<(make_clang_dir)/lib/LLVMgold.so',
+            ],
+          }],
+        ],
+      },
+    }],
+    # Apply a lower LTO optimization level in non-official builds.
+    ['use_lto==1 and clang==1 and buildtype!="Official"', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'ldflags': [
+              '-Wl,--plugin-opt,O1',
+            ],
+          }],
+          ['_toolset=="target" and _type!="static_library"', {
+            'xcode_settings':  {
+              'OTHER_LDFLAGS': [
+                '-Wl,-mllvm,-O1',
+              ],
+            },
+          }],
+        ],
+      },
+    }],
+    ['use_lto==1 and clang==1 and target_arch=="arm"', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            # Without this flag, LTO produces a .text section that is larger
+            # than the maximum call displacement, preventing the linker from
+            # relocating calls (http://llvm.org/PR22999).
+            'ldflags': [
+              '-Wl,-plugin-opt,-function-sections',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['(use_lto==1 or use_lto_o2==1) and clang==0', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'ldflags': [
+              '-flto=32',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['(use_lto==1 or use_lto_o2==1) and clang==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'ldflags': [
+              '-flto',
+            ],
+          }],
+        ],
+      },
+    }],
+    ['cfi_vptr==1', {
+      'target_defaults': {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags': [
+              '-fsanitize=cfi-vcall',
+              '-fsanitize=cfi-derived-cast',
+              '-fsanitize=cfi-unrelated-cast',
+              '-fsanitize-blacklist=<(cfi_blacklist)',
+            ],
+            'ldflags': [
+              '-fsanitize=cfi-vcall',
+              '-fsanitize=cfi-derived-cast',
+              '-fsanitize=cfi-unrelated-cast',
+            ],
+            'xcode_settings': {
+              'OTHER_CFLAGS': [
+                '-fsanitize=cfi-vcall',
+                '-fsanitize=cfi-derived-cast',
+                '-fsanitize=cfi-unrelated-cast',
+                '-fsanitize-blacklist=<(cfi_blacklist)',
+              ],
+            },
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  '-fsanitize=cfi-vcall',
+                  '-fsanitize=cfi-derived-cast',
+                  '-fsanitize=cfi-unrelated-cast',
+                  '-fsanitize-blacklist=<(cfi_blacklist)',
+                ],
+              },
+            },
+          }],
+          ['_toolset=="target" and _type!="static_library"', {
+            'xcode_settings':  {
+              'OTHER_LDFLAGS': [
+                '-fsanitize=cfi-vcall',
+                '-fsanitize=cfi-derived-cast',
+                '-fsanitize=cfi-unrelated-cast',
+              ],
+            },
+          }],
+        ],
+      },
+    }],
+  ],
+  'xcode_settings': {
+    # DON'T ADD ANYTHING NEW TO THIS BLOCK UNLESS YOU REALLY REALLY NEED IT!
+    # This block adds *project-wide* configuration settings to each project
+    # file.  It's almost always wrong to put things here.  Specify your
+    # custom xcode_settings in target_defaults to add them to targets instead.
+
+    'conditions': [
+      # In an Xcode Project Info window, the "Base SDK for All Configurations"
+      # setting sets the SDK on a project-wide basis. In order to get the
+      # configured SDK to show properly in the Xcode UI, SDKROOT must be set
+      # here at the project level.
+      ['OS=="mac"', {
+        'conditions': [
+          ['mac_sdk_path==""', {
+            'SDKROOT': 'macosx<(mac_sdk)',  # -isysroot
+          }, {
+            'SDKROOT': '<(mac_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        'conditions': [
+          ['ios_sdk_path==""', {
+            'conditions': [
+              # TODO(justincohen): Ninja only supports simulator for now.
+              ['"<(GENERATOR)"=="xcode"', {
+                'SDKROOT': 'iphoneos<(ios_sdk)',  # -isysroot
+              }, {
+                'SDKROOT': 'iphonesimulator<(ios_sdk)',  # -isysroot
+              }],
+            ],
+          }, {
+            'SDKROOT': '<(ios_sdk_path)',  # -isysroot
+          }],
+        ],
+      }],
+      ['OS=="ios"', {
+        # Target both iPhone and iPad.
+        'TARGETED_DEVICE_FAMILY': '1,2',
+      }, {  # OS!="ios"
+        'conditions': [
+          ['target_arch=="x64"', {
+            'ARCHS': [
+              'x86_64'
+            ],
+          }],
+          ['target_arch=="ia32"', {
+            'ARCHS': [
+              'i386'
+            ],
+          }],
+        ],
+      }],
+    ],
+
+    # The Xcode generator will look for an xcode_settings section at the root
+    # of each dict and use it to apply settings on a file-wide basis.  Most
+    # settings should not be here, they should be in target-specific
+    # xcode_settings sections, or better yet, should use non-Xcode-specific
+    # settings in target dicts.  SYMROOT is a special case, because many other
+    # Xcode variables depend on it, including variables such as
+    # PROJECT_DERIVED_FILE_DIR.  When a source group corresponding to something
+    # like PROJECT_DERIVED_FILE_DIR is added to a project, in order for the
+    # files to appear (when present) in the UI as actual files and not as
+    # red "missing file" proxies, the correct path to PROJECT_DERIVED_FILE_DIR,
+    # and therefore SYMROOT, needs to be set at the project level.
+    'SYMROOT': '<(DEPTH)/xcodebuild',
+  },
+}
diff --git a/build/common_untrusted.gypi b/build/common_untrusted.gypi
new file mode 100644
index 0000000..bcc3686
--- /dev/null
+++ b/build/common_untrusted.gypi
@@ -0,0 +1,40 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This GYP file should be included for every target in Chromium that is built
+# using the NaCl toolchain.
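+#
+# Typical usage from a target's .gyp file is simply to pull this file in via
+# 'includes' (the relative path shown is illustrative, not part of this patch):
+#   'includes': [ '../build/common_untrusted.gypi' ],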
+{
+  'includes': [
+    '../native_client/build/untrusted.gypi',
+  ],
+  'target_defaults': {
+    'conditions': [
+      # TODO(bradnelson): Drop this once the nacl side does the same.
+      ['target_arch=="x64"', {
+        'variables': {
+          'enable_x86_32': 0,
+        },
+      }],
+      ['target_arch=="ia32" and OS!="win"', {
+        'variables': {
+          'enable_x86_64': 0,
+        },
+      }],
+      ['target_arch=="arm"', {
+        'variables': {
+          'clang': 1,
+        },
+        'defines': [
+          # Needed by build/build_config.h processor architecture detection.
+          '__ARMEL__',
+          # Needed by base/third_party/nspr/prtime.cc.
+          '__arm__',
+          # Disable ValGrind. The assembly code it generates causes the build
+          # to fail.
+          'NVALGRIND',
+        ],
+      }],
+    ],
+  },
+}
diff --git a/build/compiled_action.gni b/build/compiled_action.gni
new file mode 100644
index 0000000..b6d0c4d
--- /dev/null
+++ b/build/compiled_action.gni
@@ -0,0 +1,173 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file introduces two related templates that act like action and
+# action_foreach, but instead of running a Python script they compile a
+# given tool in the host toolchain and run it (either once or over the list
+# of inputs, depending on the variant).
+#
+# Parameters
+#
+#   tool (required)
+#       [label] Label of the tool to run. This should be an executable, and
+#       this label should not include a toolchain (anything in parens). The
+#       host compile of this tool will be used.
+#
+#   outputs (required)
+#       [list of files] Like the outputs of action (if using "compiled_action",
+#       this would be just the list of outputs), or action_foreach (if using
+#       "compiled_action_foreach", this would contain source expansions mapping
+#       input to output files).
+#
+#   args (required)
+#       [list of strings] Same meaning as action/action_foreach.
+#
+#   inputs (optional)
+#       Files the binary takes as input. The step will be re-run whenever any
+#       of these change. If inputs is empty, the step will run only when the
+#       binary itself changes.
+#
+#   visibility
+#   deps  (both optional)
+#       Same meaning as action/action_foreach.
+#
+#
+# Example of usage:
+#
+#   compiled_action("run_my_tool") {
+#     tool = "//tools/something:mytool"
+#     outputs = [
+#       "$target_gen_dir/mysource.cc",
+#       "$target_gen_dir/mysource.h",
+#     ]
+#
+#     # The tool takes this input.
+#     inputs = [ "my_input_file.idl" ]
+#
+#     # In this case, the tool takes as arguments the input file and the output
+#     # build dir (both relative to the "cd" that the script will be run in)
+#     # and will produce the output files listed above.
+#     args = [
+#       rebase_path("my_input_file.idl", root_build_dir),
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }
+#
+# You would typically declare your tool like this:
+#   if (host_toolchain == current_toolchain) {
+#     executable("mytool") {
+#       ...
+#     }
+#   }
+# The if statement around the executable is optional. That says "I only care
+# about this target in the host toolchain". Usually this is what you want, and
+# saves unnecessarily compiling your tool for the target platform. But if you
+# need a target build of your tool as well, just leave off the if statement.
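+#
+# A corresponding sketch for the foreach variant (the tool label and file
+# names below are hypothetical):
+#
+#   compiled_action_foreach("run_my_tool_over_idl") {
+#     tool = "//tools/something:mytool"
+#     sources = [ "a.idl", "b.idl" ]
+#     outputs = [ "$target_gen_dir/{{source_name_part}}.cc" ]
+#     args = [
+#       "{{source}}",
+#       "--output-dir", rebase_path(target_gen_dir, root_build_dir),
+#     ]
+#   }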
+
+if (host_os == "win") {
+  _host_executable_suffix = ".exe"
+} else {
+  _host_executable_suffix = ""
+}
+
+template("compiled_action") {
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  assert(!defined(invoker.sources),
+         "compiled_action doesn't take a sources arg. Use inputs instead.")
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/gn_run_binary.py"
+
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    } else {
+      inputs = []
+    }
+    outputs = invoker.outputs
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps = [
+      host_tool,
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
+
+template("compiled_action_foreach") {
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+  assert(defined(invoker.tool), "tool must be defined for $target_name")
+  assert(defined(invoker.outputs), "outputs must be defined for $target_name")
+  assert(defined(invoker.args), "args must be defined for $target_name")
+
+  action_foreach(target_name) {
+    # Forward visibility to the underlying action if requested.
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/gn_run_binary.py"
+    sources = invoker.sources
+
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    } else {
+      inputs = []
+    }
+    outputs = invoker.outputs
+
+    # Construct the host toolchain version of the tool.
+    host_tool = invoker.tool + "($host_toolchain)"
+
+    # Get the path to the executable. Currently, this assumes that the tool
+    # does not specify output_name so that the target name is the name to use.
+    # If that's not the case, we'll need another argument to the script to
+    # specify this, since we can't know what the output name is (it might be in
+    # another file not processed yet).
+    host_executable =
+        get_label_info(host_tool, "root_out_dir") + "/" +
+        get_label_info(host_tool, "name") + _host_executable_suffix
+
+    # Add the executable itself as an input.
+    inputs += [ host_executable ]
+
+    deps = [
+      host_tool,
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    # The script takes as arguments the binary to run, and then the arguments
+    # to pass it.
+    args = [ rebase_path(host_executable, root_build_dir) ] + invoker.args
+  }
+}
diff --git a/build/compiler_version.py b/build/compiler_version.py
new file mode 100755
index 0000000..05faf54
--- /dev/null
+++ b/build/compiler_version.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+
+compiler_version_cache = {}  # Map from (compiler, tool) -> version.
+
+
+def Usage(program_name):
+  print '%s MODE TOOL' % os.path.basename(program_name)
+  print 'MODE: host or target.'
+  print 'TOOL: assembler or compiler or linker.'
+  return 1
+
+
+def ParseArgs(args):
+  if len(args) != 2:
+    raise Exception('Invalid number of arguments')
+  mode = args[0]
+  tool = args[1]
+  if mode not in ('host', 'target'):
+    raise Exception('Invalid mode: %s' % mode)
+  if tool not in ('assembler', 'compiler', 'linker'):
+    raise Exception('Invalid tool: %s' % tool)
+  return mode, tool
+
+
+def GetEnvironFallback(var_list, default):
+  """Look up an environment variable from a possible list of variable names."""
+  for var in var_list:
+    if var in os.environ:
+      return os.environ[var]
+  return default
+
+
+def GetVersion(compiler, tool):
+  tool_output = tool_error = None
+  cache_key = (compiler, tool)
+  cached_version = compiler_version_cache.get(cache_key)
+  if cached_version:
+    return cached_version
+  try:
+    # Note that compiler could be something tricky like "distcc g++".
+    if tool == "compiler":
+      compiler = compiler + " -dumpversion"
+      # 4.6
+      version_re = re.compile(r"(\d+)\.(\d+)")
+    elif tool == "assembler":
+      compiler = compiler + " -Xassembler --version -x assembler -c /dev/null"
+      # Unmodified: GNU assembler (GNU Binutils) 2.24
+      # Ubuntu: GNU assembler (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU assembler version 2.23.2
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    elif tool == "linker":
+      compiler = compiler + " -Xlinker --version"
+      # Using BFD linker
+      # Unmodified: GNU ld (GNU Binutils) 2.24
+      # Ubuntu: GNU ld (GNU Binutils for Ubuntu) 2.22
+      # Fedora: GNU ld version 2.23.2
+      # Using Gold linker
+      # Unmodified: GNU gold (GNU Binutils 2.24) 1.11
+      # Ubuntu: GNU gold (GNU Binutils for Ubuntu 2.22) 1.11
+      # Fedora: GNU gold (version 2.23.2) 1.11
+      version_re = re.compile(r"^GNU [^ ]+ .* (\d+).(\d+).*?$", re.M)
+    else:
+      raise Exception("Unknown tool %s" % tool)
+
+    # Force the locale to C otherwise the version string could be localized
+    # making regex matching fail.
+    env = os.environ.copy()
+    env["LC_ALL"] = "C"
+    pipe = subprocess.Popen(compiler, shell=True, env=env,
+                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    tool_output, tool_error = pipe.communicate()
+    if pipe.returncode:
+      raise subprocess.CalledProcessError(pipe.returncode, compiler)
+
+    parsed_output = version_re.match(tool_output)
+    result = parsed_output.group(1) + parsed_output.group(2)
+    compiler_version_cache[cache_key] = result
+    return result
+  except Exception, e:
+    if tool_error:
+      sys.stderr.write(tool_error)
+    print >> sys.stderr, "compiler_version.py failed to execute:", compiler
+    print >> sys.stderr, e
+    return ""
+
+
+def main(args):
+  try:
+    (mode, tool) = ParseArgs(args[1:])
+  except Exception, e:
+    sys.stderr.write(e.message + '\n\n')
+    return Usage(args[0])
+
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    print result
+  return ret_code
+
+
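+# Note: gyp normally reaches DoMain() below through its pymod_do_main
+# mechanism rather than by spawning this script; an illustrative gyp variable
+# (not part of this patch) would look like:
+#   'gcc_version%': '<!pymod_do_main(compiler_version target compiler)',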
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  (mode, tool) = ParseArgs(args)
+  ret_code, result = ExtractVersion(mode, tool)
+  if ret_code == 0:
+    return result
+  raise Exception("Failed to extract compiler version for args: %s" % args)
+
+
+def ExtractVersion(mode, tool):
+  # Check whether various CXX environment variables exist and use them if
+  # they do. The preference and fallback order is a close approximation of
+  # GenerateOutputForConfig() in GYP's ninja generator.
+  # The main difference is that GYP's make_global_settings is not supported.
+  environments = ['CXX_target', 'CXX']
+  if mode == 'host':
+    environments = ['CXX_host'] + environments
+  compiler = GetEnvironFallback(environments, 'c++')
+
+  if compiler:
+    compiler_version = GetVersion(compiler, tool)
+    if compiler_version != "":
+      return (0, compiler_version)
+  return (1, None)
+
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/config/BUILD.gn b/build/config/BUILD.gn
new file mode 100644
index 0000000..0af90cd
--- /dev/null
+++ b/build/config/BUILD.gn
@@ -0,0 +1,434 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/allocator.gni")
+import("//build/config/chrome_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/features.gni")
+import("//build/config/ui.gni")
+import("//build/module_args/v8.gni")
+
+declare_args() {
+  # When set, turns off the (normally-on) iterator debugging and related stuff
+  # that is normally turned on for Debug builds. These are generally useful for
+  # catching bugs but in some cases may cause conflicts or excessive slowness.
+  disable_iterator_debugging = false
+
+  # Set to true to not store any build metadata, e.g. ifdef out all __DATE__
+  # and __TIME__. Set to false to re-enable the use of these macros in the
+  # code base. See http://crbug.com/314403.
+  #
+  # Continue to embed build metadata in Official builds, basically the
+  # time it was built.
+  # TODO(maruel): This decision should be revisited because having an
+  # official deterministic build has high value too but MSVC toolset can't
+  # generate anything deterministic with WPO enabled AFAIK.
+  dont_embed_build_metadata = !is_official_build
+
+  # Set to true to enable dcheck in Release builds.
+  dcheck_always_on = false
+
+  # Set to true to compile with the OpenGL ES 2.0 conformance tests.
+  internal_gles2_conform_tests = false
+}
+
+# TODO(brettw) Most of these should be removed. Instead of global feature
+# flags, we should have more modular flags that apply only to a target and its
+# dependents. For example, depending on the "x11" meta-target should define
+# USE_X11 for all dependents so that everything that could use X11 gets the
+# define, but anything that doesn't depend on X11 doesn't see it.
+#
+# For now we define these globally to match the current GYP build.
+config("feature_flags") {
+  # TODO(brettw) this probably needs to be parameterized.
+  defines = [ "V8_DEPRECATION_WARNINGS" ]  # Don't use deprecated V8 APIs anywhere.
+
+  if (cld_version > 0) {
+    defines += [ "CLD_VERSION=$cld_version" ]
+  }
+  if (enable_mdns) {
+    defines += [ "ENABLE_MDNS=1" ]
+  }
+  if (enable_notifications) {
+    defines += [ "ENABLE_NOTIFICATIONS" ]
+  }
+  if (enable_pepper_cdms) {
+    # TODO(brettw) should probably be "=1"
+    defines += [ "ENABLE_PEPPER_CDMS" ]
+  }
+  if (enable_browser_cdms) {
+    # TODO(brettw) should probably be "=1"
+    defines += [ "ENABLE_BROWSER_CDMS" ]
+  }
+  if (enable_plugins) {
+    defines += [ "ENABLE_PLUGINS=1" ]
+  }
+  if (enable_basic_printing || enable_print_preview) {
+    # Convenience define for ENABLE_BASIC_PRINTING || ENABLE_PRINT_PREVIEW.
+    defines += [ "ENABLE_PRINTING=1" ]
+    if (enable_basic_printing) {
+      # Enable basic printing support and UI.
+      defines += [ "ENABLE_BASIC_PRINTING=1" ]
+    }
+    if (enable_print_preview) {
+      # Enable printing with print preview.
+      # Can be defined without ENABLE_BASIC_PRINTING.
+      defines += [ "ENABLE_PRINT_PREVIEW=1" ]
+    }
+  }
+  if (enable_spellcheck) {
+    defines += [ "ENABLE_SPELLCHECK=1" ]
+  }
+  if (use_platform_spellchecker) {
+    defines += [ "USE_PLATFORM_SPELLCHECKER=1" ]
+  }
+  if (dont_embed_build_metadata) {
+    defines += [ "DONT_EMBED_BUILD_METADATA" ]
+  }
+  if (dcheck_always_on) {
+    defines += [ "DCHECK_ALWAYS_ON=1" ]
+  }
+  if (use_udev) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "USE_UDEV" ]
+  }
+  if (ui_compositor_image_transport) {
+    # TODO(brettw) should probably be "=1".
+    defines += [ "UI_COMPOSITOR_IMAGE_TRANSPORT" ]
+  }
+  if (use_ash) {
+    defines += [ "USE_ASH=1" ]
+  }
+  if (use_aura) {
+    defines += [ "USE_AURA=1" ]
+  }
+  if (use_glfw) {
+    defines += [ "USE_GLFW=1" ]
+  }
+  if (use_pango) {
+    defines += [ "USE_PANGO=1" ]
+  }
+  if (use_cairo) {
+    defines += [ "USE_CAIRO=1" ]
+  }
+  if (use_clipboard_aurax11) {
+    defines += [ "USE_CLIPBOARD_AURAX11=1" ]
+  }
+  if (use_default_render_theme) {
+    defines += [ "USE_DEFAULT_RENDER_THEME=1" ]
+  }
+  if (use_openssl) {
+    defines += [ "USE_OPENSSL=1" ]
+  }
+  if (use_openssl_certs) {
+    defines += [ "USE_OPENSSL_CERTS=1" ]
+  }
+  if (use_nss_certs) {
+    defines += [ "USE_NSS_CERTS=1" ]
+  }
+  if (use_ozone) {
+    defines += [ "USE_OZONE=1" ]
+  }
+  if (use_x11) {
+    defines += [ "USE_X11=1" ]
+  }
+  if (use_allocator != "tcmalloc") {
+    defines += [ "NO_TCMALLOC" ]
+  }
+  if (is_asan || is_lsan || is_tsan || is_msan || is_ios) {
+    defines += [
+      "MEMORY_TOOL_REPLACES_ALLOCATOR",
+      "MEMORY_SANITIZER_INITIAL_SIZE",
+    ]
+  }
+  if (is_asan) {
+    defines += [ "ADDRESS_SANITIZER" ]
+  }
+  if (is_lsan) {
+    defines += [
+      "LEAK_SANITIZER",
+      "WTF_USE_LEAK_SANITIZER=1",
+    ]
+  }
+  if (is_tsan) {
+    defines += [
+      "THREAD_SANITIZER",
+      "DYNAMIC_ANNOTATIONS_EXTERNAL_IMPL=1",
+      "WTF_USE_DYNAMIC_ANNOTATIONS_NOIMPL=1",
+    ]
+  }
+  if (is_msan) {
+    defines += [ "MEMORY_SANITIZER" ]
+  }
+  if (enable_webrtc) {
+    defines += [ "ENABLE_WEBRTC=1" ]
+  }
+  if (disable_ftp_support) {
+    defines += [ "DISABLE_FTP_SUPPORT=1" ]
+  }
+  if (!enable_nacl) {
+    defines += [ "DISABLE_NACL" ]
+  }
+  if (enable_extensions) {
+    defines += [ "ENABLE_EXTENSIONS=1" ]
+  }
+  if (enable_configuration_policy) {
+    defines += [ "ENABLE_CONFIGURATION_POLICY" ]
+  }
+  if (enable_task_manager) {
+    defines += [ "ENABLE_TASK_MANAGER=1" ]
+  }
+  if (enable_themes) {
+    defines += [ "ENABLE_THEMES=1" ]
+  }
+  if (enable_captive_portal_detection) {
+    defines += [ "ENABLE_CAPTIVE_PORTAL_DETECTION=1" ]
+  }
+  if (enable_session_service) {
+    defines += [ "ENABLE_SESSION_SERVICE=1" ]
+  }
+  if (enable_rlz) {
+    defines += [ "ENABLE_RLZ" ]
+  }
+  if (enable_plugin_installation) {
+    defines += [ "ENABLE_PLUGIN_INSTALLATION=1" ]
+  }
+  if (enable_app_list) {
+    defines += [ "ENABLE_APP_LIST=1" ]
+  }
+  if (enable_settings_app) {
+    defines += [ "ENABLE_SETTINGS_APP=1" ]
+  }
+  if (enable_supervised_users) {
+    defines += [ "ENABLE_SUPERVISED_USERS=1" ]
+  }
+  if (enable_service_discovery) {
+    defines += [ "ENABLE_SERVICE_DISCOVERY=1" ]
+  }
+  if (enable_autofill_dialog) {
+    defines += [ "ENABLE_AUTOFILL_DIALOG=1" ]
+  }
+  if (enable_wifi_bootstrapping) {
+    defines += [ "ENABLE_WIFI_BOOTSTRAPPING=1" ]
+  }
+  if (enable_image_loader_extension) {
+    defines += [ "IMAGE_LOADER_EXTENSION=1" ]
+  }
+  if (enable_remoting) {
+    defines += [ "ENABLE_REMOTING=1" ]
+  }
+  if (enable_google_now) {
+    defines += [ "ENABLE_GOOGLE_NOW=1" ]
+  }
+  if (enable_one_click_signin) {
+    defines += [ "ENABLE_ONE_CLICK_SIGNIN" ]
+  }
+  if (enable_hidpi) {
+    defines += [ "ENABLE_HIDPI=1" ]
+  }
+  if (enable_topchrome_md) {
+    defines += [ "ENABLE_TOPCHROME_MD=1" ]
+  }
+  if (proprietary_codecs) {
+    defines += [ "USE_PROPRIETARY_CODECS" ]
+  }
+  if (enable_hangout_services_extension) {
+    defines += [ "ENABLE_HANGOUT_SERVICES_EXTENSION=1" ]
+  }
+  if (v8_use_external_startup_data) {
+    defines += [ "V8_USE_EXTERNAL_STARTUP_DATA" ]
+  }
+  if (enable_background) {
+    defines += [ "ENABLE_BACKGROUND=1" ]
+  }
+  if (enable_pre_sync_backup) {
+    defines += [ "ENABLE_PRE_SYNC_BACKUP" ]
+  }
+  if (enable_video_hole) {
+    defines += [ "VIDEO_HOLE=1" ]
+  }
+  if (safe_browsing_mode == 1) {
+    defines += [ "FULL_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_CSD" ]
+    defines += [ "SAFE_BROWSING_DB_LOCAL" ]
+    defines += [ "SAFE_BROWSING_SERVICE" ]
+  } else if (safe_browsing_mode == 2) {
+    defines += [ "MOBILE_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_SERVICE" ]
+  } else if (safe_browsing_mode == 3) {
+    defines += [ "MOBILE_SAFE_BROWSING" ]
+    defines += [ "SAFE_BROWSING_DB_REMOTE" ]
+    defines += [ "SAFE_BROWSING_SERVICE" ]
+  }
+  if (is_official_build) {
+    defines += [ "OFFICIAL_BUILD" ]
+  }
+  if (is_chrome_branded) {
+    defines += [ "GOOGLE_CHROME_BUILD" ]
+  } else {
+    defines += [ "CHROMIUM_BUILD" ]
+  }
+  if (enable_media_router) {
+    defines += [ "ENABLE_MEDIA_ROUTER=1" ]
+  }
+  if (enable_webvr) {
+    defines += [ "ENABLE_WEBVR" ]
+  }
+  if (is_fnl) {
+    defines += [ "HAVE_SYS_QUEUE_H_=0" ]
+  }
+}
+
+# Debug/release ----------------------------------------------------------------
+
+config("debug") {
+  defines = [
+    "_DEBUG",
+    "DYNAMIC_ANNOTATIONS_ENABLED=1",
+    "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+  ]
+
+  if (is_nacl) {
+    defines += [ "DYNAMIC_ANNOTATIONS_PREFIX=NACL_" ]
+  }
+
+  if (is_win) {
+    if (disable_iterator_debugging) {
+      # Iterator debugging is enabled by the compiler on debug builds, and we
+      # have to tell it to turn it off.
+      defines += [ "_HAS_ITERATOR_DEBUGGING=0" ]
+    }
+  } else if (is_linux && !is_android && current_cpu == "x64" &&
+             !disable_iterator_debugging) {
+    # Enable libstdc++ debugging facilities to help catch problems early, see
+    # http://crbug.com/65151 .
+    # TODO(phajdan.jr): Should we enable this for all of POSIX?
+    defines += [ "_GLIBCXX_DEBUG=1" ]
+  }
+}
+
+config("release") {
+  defines = [ "NDEBUG" ]
+
+  # Sanitizers.
+  # TODO(GYP) The GYP build has "release_valgrind_build == 0" for this
+  # condition. When Valgrind is set up, we need to do the same here.
+  if (is_tsan) {
+    defines += [
+      "DYNAMIC_ANNOTATIONS_ENABLED=1",
+      "WTF_USE_DYNAMIC_ANNOTATIONS=1",
+    ]
+  } else {
+    defines += [ "NVALGRIND" ]
+    if (!is_nacl) {
+      # NaCl always enables dynamic annotations. Currently this value is set to
+      # 1 for all .nexes.
+      defines += [ "DYNAMIC_ANNOTATIONS_ENABLED=0" ]
+    }
+  }
+}
+
+# Default libraries ------------------------------------------------------------
+
+# This config defines the default libraries applied to all targets.
+config("default_libs") {
+  if (is_win) {
+    # TODO(brettw) this list of defaults should probably be smaller, and
+    # instead the targets that use the less common ones (e.g. wininet or
+    # winspool) should include those explicitly.
+    libs = [
+      "advapi32.lib",
+      "comdlg32.lib",
+      "dbghelp.lib",
+      "delayimp.lib",
+      "dnsapi.lib",
+      "gdi32.lib",
+      "kernel32.lib",
+      "msimg32.lib",
+      "odbc32.lib",
+      "odbccp32.lib",
+      "ole32.lib",
+      "oleaut32.lib",
+      "psapi.lib",
+      "shell32.lib",
+      "shlwapi.lib",
+      "user32.lib",
+      "usp10.lib",
+      "uuid.lib",
+      "version.lib",
+      "wininet.lib",
+      "winmm.lib",
+      "winspool.lib",
+      "ws2_32.lib",
+
+      # Please don't add more stuff here. We should actually be making this
+      # list smaller, since all common things should be covered. If you need
+      # some extra libraries, please just add a libs = [ "foo.lib" ] to your
+      # target that needs it.
+    ]
+  } else if (is_android) {
+    # Android uses -nostdlib so we need to add even libc here.
+    libs = [
+      # TODO(brettw) write a version of this, hopefully we can express this
+      # without forking out to GCC just to get the library name. The android
+      # toolchain directory should probably be extracted into a .gni file that
+      # this file and the android toolchain .gn file can share.
+      #   # Manually link the libgcc.a that the cross compiler uses.
+      #   '<!(<(android_toolchain)/*-gcc -print-libgcc-file-name)',
+      "c",
+      "dl",
+      "m",
+    ]
+  } else if (is_mac) {
+    libs = [
+      "AppKit.framework",
+      "ApplicationServices.framework",
+      "Carbon.framework",
+      "CoreFoundation.framework",
+      "Foundation.framework",
+      "IOKit.framework",
+      "Security.framework",
+      "OpenGL.framework",
+    ]
+  } else if (is_ios) {
+    # The libraries listed here will be specified for both the target and the
+    # host. Only the common ones should be listed here.
+    libs = [
+      "CoreFoundation.framework",
+      "CoreGraphics.framework",
+      "CoreText.framework",
+      "Foundation.framework",
+    ]
+  } else if (is_linux) {
+    libs = [ "dl" ]
+  }
+}
+
+# Add this config to your target to enable precompiled headers.
+#
+# On Windows, precompiled headers are done on a per-target basis. If you have
+# just a couple of files, the time it takes to precompile (~2 seconds) can
+# actually be longer than the time saved. On a Z620, a 100 file target compiles
+# about 2 seconds faster with precompiled headers, with greater savings for
+# larger targets.
+#
+# Recommend precompiled headers for targets with more than 50 .cc files.
+config("precompiled_headers") {
+  # TODO(brettw) enable this when GN support in the binary has been rolled.
+  #if (is_win) {
+  if (false) {
+    # This is a string rather than a file GN knows about. It has to match
+    # exactly what's in the /FI flag below, and what might appear in the source
+    # code in quotes for an #include directive.
+    precompiled_header = "build/precompile.h"
+
+    # This is a file that GN will compile with the above header. It will be
+    # implicitly added to the sources (potentially multiple times, with one
+    # variant for each language used in the target).
+    precompiled_source = "//build/precompile.cc"
+
+    # Force include the header.
+    cflags = [ "/FI$precompiled_header" ]
+  }
+}
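+
+# A minimal usage sketch (target and file names are hypothetical):
+#   executable("my_tool") {
+#     sources = [ "main.cc", "helper.cc" ]
+#     configs += [ "//build/config:precompiled_headers" ]
+#   }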
diff --git a/build/config/BUILDCONFIG.gn b/build/config/BUILDCONFIG.gn
new file mode 100644
index 0000000..e22b29a
--- /dev/null
+++ b/build/config/BUILDCONFIG.gn
@@ -0,0 +1,827 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# =============================================================================
+# PLATFORM SELECTION
+# =============================================================================
+#
+# There are two main things to set: "os" and "cpu". The "toolchain" is the name
+# of the GN thing that encodes combinations of these things.
+#
+# Users typically only set the variables "target_os" and "target_cpu" in "gn
+# args", the rest are set up by our build and internal to GN.
+#
+# There are three different types of each of these things: The "host"
+# represents the computer doing the compile and never changes. The "target"
+# represents the main thing we're trying to build. The "current" represents
+# which configuration is currently being defined, which can be either the
+# host, the target, or something completely different (like nacl). GN will
+# run the same build file multiple times for the different required
+# configuration in the same build.
+#
+# This gives the following variables:
+#  - host_os, host_cpu, host_toolchain
+#  - target_os, target_cpu, default_toolchain
+#  - current_os, current_cpu, current_toolchain.
+#
+# Note the default_toolchain isn't symmetrical (you would expect
+# target_toolchain). This is because the "default" toolchain is a GN built-in
+# concept, and "target" is something our build sets up that's symmetrical with
+# its GYP counterpart. Potentially the built-in default_toolchain variable
+# could be renamed in the future.
+#
+# When writing build files, to do something only for the host:
+#   if (current_toolchain == host_toolchain) { ...
+
+if (target_os == "") {
+  target_os = host_os
+}
+
+if (target_cpu == "") {
+  if (target_os == "android") {
+    # If we're building for Android, we should assume that we want to
+    # build for ARM by default, not the host_cpu (which is likely x64).
+    # This allows us to not have to specify both target_os and target_cpu
+    # on the command line.
+    target_cpu = "arm"
+  } else {
+    target_cpu = host_cpu
+  }
+}
+
+if (current_cpu == "") {
+  current_cpu = target_cpu
+}
+if (current_os == "") {
+  current_os = target_os
+}
+
+# =============================================================================
+# BUILD FLAGS
+# =============================================================================
+#
+# This block lists input arguments to the build, along with their default
+# values.
+#
+# If a value is specified on the command line, it will overwrite the defaults
+# given in a declare_args block, otherwise the default will be used.
+#
+# YOU SHOULD ALMOST NEVER NEED TO ADD FLAGS TO THIS FILE. GN allows any file in
+# the build to declare build flags. If you need a flag for a single component,
+# you can just declare it in the corresponding BUILD.gn file. If you need a
+# flag in multiple components, there are a few options:
+#
+# - If your feature is a single target, say //components/foo, and the targets
+#   depending on foo need to have some define set if foo is enabled: (1) Write
+#   a declare_args block in foo's BUILD.gn file listing your enable_foo build
+#   flag. (2) Write a config in that file listing the define, and list that
+#   config in foo's public_configs. This will propagate that define to all the
+#   targets depending on foo. (3) When foo is not enabled, just make it expand
+#   to an empty group (or whatever's appropriate for the "off" state of your
+#   feature). A sketch of this pattern appears at the end of this comment
+#   block.
+#
+# - If a semi-random set of targets need to know about a define: (1) In the
+#   lowest level of the build that knows about this feature, add a declare_args
+#   block in the build file for your enable flag. (2) Write a config that adds
+#   a define conditionally based on that build flag. (3) Manually add that
+#   config to the "configs" applying to the targets that need the define.
+#
+# - If a semi-random set of targets need to know about the build flag (to do
+#   file inclusion or exclusion, more than just defines): (1) Write a .gni file
+#   in the lowest-level directory that knows about the feature. (2) Put the
+#   declare_args block with your build flag in that .gni file. (3) Import that
+#   .gni file from the BUILD.gn files that need the flag.
+#
+# Other advice:
+#
+# - Use boolean values when possible. If you need a default value that expands
+#   to some complex thing in the default case (like the location of the
+#   compiler which would be computed by a script), use a default value of -1 or
+#   the empty string. Outside of the declare_args block, conditionally expand
+#   the default value as necessary.
+#
+# - Use a name like "use_foo" or "is_foo" (whatever is more appropriate for
+#   your feature) rather than just "foo".
+#
+# - Write good comments directly above the declaration with no blank line.
+#   These comments will appear as documentation in "gn args --list".
+#
+# - Don't call exec_script inside declare_args. This will execute the script
+#   even if the value is overridden, which is wasteful. See first bullet.
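+#
+# A minimal sketch of the first option above (all names hypothetical):
+#
+#   # //components/foo/BUILD.gn
+#   declare_args() {
+#     enable_foo = true
+#   }
+#   config("foo_defines") {
+#     if (enable_foo) {
+#       defines = [ "ENABLE_FOO=1" ]
+#     }
+#   }
+#   # ...then list ":foo_defines" in foo's public_configs.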
+
+declare_args() {
+  # How many symbols to include in the build. This affects the performance of
+  # the build since the symbols are large and dealing with them is slow.
+  #   2 means regular build with symbols.
+  #   1 means minimal symbols, usually enough for backtraces only.
+  #   0 means no symbols.
+  #   -1 means auto-set (off in release, regular in debug).
+  symbol_level = -1
+
+  # Component build.
+  is_component_build = false
+
+  # Debug build.
+  is_debug = true
+
+  # Whether we're a traditional desktop unix.
+  is_desktop_linux = current_os == "linux" && current_os != "chromeos"
+
+  # Set to true when compiling with the Clang compiler. Typically this is used
+  # to configure warnings.
+  is_clang = current_os == "mac" || current_os == "ios" ||
+             current_os == "linux" || current_os == "chromeos"
+
+  # Compile for Address Sanitizer to find memory bugs.
+  is_asan = false
+
+  # Compile for Leak Sanitizer to find leaks.
+  is_lsan = false
+
+  # Compile for Memory Sanitizer to find uninitialized reads.
+  is_msan = false
+
+  # Compile for Thread Sanitizer to find threading bugs.
+  is_tsan = false
+
+  if (current_os == "chromeos") {
+    # Allows the target toolchain to be injected as arguments. This is needed
+    # to support the CrOS build system which supports per-build-configuration
+    # toolchains.
+    cros_use_custom_toolchain = false
+  }
+
+  # DON'T ADD MORE FLAGS HERE. Read the comment above.
+}
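+
+# The build arguments above are normally set at generation time, e.g.
+# (illustrative):
+#   gn gen out/Debug --args="is_debug=true is_clang=true symbol_level=1"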
+
+# =============================================================================
+# OS DEFINITIONS
+# =============================================================================
+#
+# We set these various is_FOO booleans for convenience in writing OS-based
+# conditions.
+#
+# - is_android, is_chromeos, is_ios, and is_win should be obvious.
+# - is_mac is set only for desktop Mac. It is not set on iOS.
+# - is_posix is true for mac and any Unix-like system (basically everything
+#   except Windows).
+# - is_linux is true for desktop Linux and ChromeOS, but not Android (which is
+#   generally too different despite being based on the Linux kernel).
+#
+# Do not add more is_* variants here for random lesser-used Unix systems like
+# aix or one of the BSDs. If you need to check these, just check the
+# current_os value directly.
+
+if (current_os == "win") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = false
+  is_win = true
+} else if (current_os == "mac") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = true
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "android") {
+  is_android = true
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "chromeos") {
+  is_android = false
+  is_chromeos = true
+  is_fnl = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "nacl") {
+  # current_os == "nacl" will be passed by the nacl toolchain definition.
+  # It is not set by default or on the command line. We treat it as a
+  # Posix variant.
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = false
+  is_mac = false
+  is_nacl = true
+  is_posix = true
+  is_win = false
+} else if (current_os == "ios") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = true
+  is_linux = false
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "linux") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = false
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+} else if (current_os == "fnl") {
+  is_android = false
+  is_chromeos = false
+  is_fnl = true
+  is_ios = false
+  is_linux = true
+  is_mac = false
+  is_nacl = false
+  is_posix = true
+  is_win = false
+}
+
+# =============================================================================
+# SOURCES FILTERS
+# =============================================================================
+#
+# These patterns filter out platform-specific files when assigning to the
+# sources variable. The magic variable |sources_assignment_filter| is applied
+# to each assignment or appending to the sources variable and matches are
+# automatically removed.
+#
+# Note that the patterns are NOT regular expressions. Only "*" and "\b" (path
+# boundary = end of string or slash) are supported, and the entire string
+# must match the pattern (so you need "*.cc" to match all .cc files, for
+# example).
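+#
+# For example, with the filters below in place, a non-Windows build that
+# assigns  sources = [ "foo.cc", "foo_win.cc" ]  will compile only "foo.cc";
+# the "*_win.cc" pattern silently drops "foo_win.cc" (file names hypothetical).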
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+sources_assignment_filter = []
+if (!is_posix) {
+  sources_assignment_filter += [
+    "*_posix.h",
+    "*_posix.cc",
+    "*_posix_unittest.h",
+    "*_posix_unittest.cc",
+    "*\bposix/*",
+  ]
+}
+if (!is_win) {
+  sources_assignment_filter += [
+    "*_win.cc",
+    "*_win.h",
+    "*_win_unittest.cc",
+    "*\bwin/*",
+    "*.def",
+    "*.rc",
+  ]
+}
+if (!is_mac) {
+  sources_assignment_filter += [
+    "*_mac.h",
+    "*_mac.cc",
+    "*_mac.mm",
+    "*_mac_unittest.h",
+    "*_mac_unittest.cc",
+    "*_mac_unittest.mm",
+    "*\bmac/*",
+    "*_cocoa.h",
+    "*_cocoa.cc",
+    "*_cocoa.mm",
+    "*_cocoa_unittest.h",
+    "*_cocoa_unittest.cc",
+    "*_cocoa_unittest.mm",
+    "*\bcocoa/*",
+  ]
+}
+if (!is_ios) {
+  sources_assignment_filter += [
+    "*_ios.h",
+    "*_ios.cc",
+    "*_ios.mm",
+    "*_ios_unittest.h",
+    "*_ios_unittest.cc",
+    "*_ios_unittest.mm",
+    "*\bios/*",
+  ]
+}
+if (!is_mac && !is_ios) {
+  sources_assignment_filter += [ "*.mm" ]
+}
+if (!is_linux) {
+  sources_assignment_filter += [
+    "*_linux.h",
+    "*_linux.cc",
+    "*_linux_unittest.h",
+    "*_linux_unittest.cc",
+    "*\blinux/*",
+  ]
+}
+if (!is_android) {
+  sources_assignment_filter += [
+    "*_android.h",
+    "*_android.cc",
+    "*_android_unittest.h",
+    "*_android_unittest.cc",
+    "*\bandroid/*",
+  ]
+}
+if (!is_chromeos) {
+  sources_assignment_filter += [
+    "*_chromeos.h",
+    "*_chromeos.cc",
+    "*_chromeos_unittest.h",
+    "*_chromeos_unittest.cc",
+    "*\bchromeos/*",
+  ]
+}
+
+# DO NOT ADD MORE PATTERNS TO THIS LIST, see set_sources_assignment_filter call
+# below.
+
+# Actually save this list.
+#
+# These patterns are executed for every file in the source tree of every run.
+# Therefore, adding more patterns slows down the build for everybody. We should
+# only add automatic patterns for configurations affecting hundreds of files
+# across many projects in the tree.
+#
+# Therefore, we only add rules to this list corresponding to platforms on the
+# Chromium waterfall.  This is not for non-officially-supported platforms
+# (FreeBSD, etc.), toolkits (X11, GTK, etc.), or features. For these cases,
+# write a conditional in the target to remove the file(s) from the list when
+# your platform/toolkit/feature doesn't apply.
+set_sources_assignment_filter(sources_assignment_filter)
+
+# =============================================================================
+# BUILD OPTIONS
+# =============================================================================
+
+# These Sanitizers all imply using the Clang compiler. On Windows they either
+# don't work or work differently.
+if (!is_clang && (is_asan || is_lsan || is_tsan || is_msan)) {
+  is_clang = true
+}
+
+# =============================================================================
+# TARGET DEFAULTS
+# =============================================================================
+#
+# Set up the default configuration for every build target of the given type.
+# The values configured here will be automatically set on the scope of the
+# corresponding target. Target definitions can add or remove to the settings
+# here as needed.
+
+# Holds all configs used for making native executables and libraries, to avoid
+# duplication in each target below.
+_native_compiler_configs = [
+  "//build/config:feature_flags",
+  "//build/config/compiler:compiler",
+  "//build/config/compiler:compiler_arm_fpu",
+  "//build/config/compiler:chromium_code",
+  "//build/config/compiler:default_include_dirs",
+  "//build/config/compiler:no_rtti",
+  "//build/config/compiler:runtime_library",
+]
+if (is_win) {
+  _native_compiler_configs += [
+    "//build/config/win:lean_and_mean",
+    "//build/config/win:nominmax",
+    "//build/config/win:sdk",
+    "//build/config/win:unicode",
+    "//build/config/win:winver",
+  ]
+}
+if (is_posix) {
+  _native_compiler_configs += [
+    "//build/config/gcc:no_exceptions",
+    "//build/config/gcc:symbol_visibility_hidden",
+  ]
+}
+
+if (is_fnl) {
+  _native_compiler_configs += [ "//build/config/fnl:sdk" ]
+} else if (is_linux) {
+  _native_compiler_configs += [ "//build/config/linux:sdk" ]
+} else if (is_mac) {
+  _native_compiler_configs += [ "//build/config/mac:sdk" ]
+} else if (is_ios) {
+  _native_compiler_configs += [ "//build/config/ios:sdk" ]
+} else if (is_android) {
+  _native_compiler_configs += [ "//build/config/android:sdk" ]
+}
+
+if (is_clang) {
+  _native_compiler_configs += [
+    "//build/config/clang:find_bad_constructs",
+    "//build/config/clang:extra_warnings",
+  ]
+}
+
+# Optimizations and debug checking.
+if (is_debug) {
+  _native_compiler_configs += [ "//build/config:debug" ]
+  _default_optimization_config = "//build/config/compiler:no_optimize"
+} else {
+  _native_compiler_configs += [ "//build/config:release" ]
+  _default_optimization_config = "//build/config/compiler:optimize"
+}
+_native_compiler_configs += [ _default_optimization_config ]
+
+# If it wasn't manually set, set to an appropriate default.
+if (symbol_level == -1) {
+  # Linux is slowed by having symbols as part of the target binary, whereas
+  # Mac and Windows have them separate, so in Release Linux, default them off.
+  if (is_debug || !is_linux) {
+    symbol_level = 2
+  } else if (is_asan || is_lsan || is_tsan || is_msan) {
+    # Sanitizers require symbols for filename suppressions to work.
+    symbol_level = 1
+  } else {
+    symbol_level = 0
+  }
+}
+
+# Symbol setup.
+if (symbol_level == 2) {
+  _default_symbols_config = "//build/config/compiler:symbols"
+} else if (symbol_level == 1) {
+  _default_symbols_config = "//build/config/compiler:minimal_symbols"
+} else if (symbol_level == 0) {
+  _default_symbols_config = "//build/config/compiler:no_symbols"
+} else {
+  assert(false, "Bad value for symbol_level.")
+}
+_native_compiler_configs += [ _default_symbols_config ]
+
+# Windows linker setup for EXEs and DLLs.
+if (is_win) {
+  _windows_linker_configs = [
+    "//build/config/win:default_incremental_linking",
+    "//build/config/win:sdk_link",
+    "//build/config/win:common_linker_setup",
+
+    # Default to console-mode apps. Most of our targets are tests and such
+    # that shouldn't use the windows subsystem.
+    "//build/config/win:console",
+  ]
+}
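+
+# Illustrative sketch only: a GUI executable would typically swap the console
+# config for a windowed one inside its own target definition (this assumes a
+# "windowed" config exists next to "console" in //build/config/win):
+#
+#   configs -= [ "//build/config/win:console" ]
+#   configs += [ "//build/config/win:windowed" ]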
+
+# Executable defaults.
+_executable_configs =
+    _native_compiler_configs + [ "//build/config:default_libs" ]
+if (is_win) {
+  _executable_configs += _windows_linker_configs
+} else if (is_mac) {
+  _executable_configs += [
+    "//build/config/mac:mac_dynamic_flags",
+    "//build/config/mac:mac_executable_flags",
+  ]
+} else if (is_linux || is_android) {
+  _executable_configs += [ "//build/config/gcc:executable_ldconfig" ]
+  if (is_android) {
+    _executable_configs += [ "//build/config/android:executable_config" ]
+  }
+}
+set_defaults("executable") {
+  configs = _executable_configs
+}
+
+# Static library defaults.
+set_defaults("static_library") {
+  configs = _native_compiler_configs
+}
+
+# Shared library defaults (also for components in component mode).
+_shared_library_configs =
+    _native_compiler_configs + [ "//build/config:default_libs" ]
+if (is_win) {
+  _shared_library_configs += _windows_linker_configs
+} else if (is_mac) {
+  _shared_library_configs += [ "//build/config/mac:mac_dynamic_flags" ]
+} else if (is_android) {
+  # Strip native JNI exports from shared libraries by default. Binaries that
+  # want this can remove this config.
+  _shared_library_configs +=
+      [ "//build/config/android:hide_native_jni_exports" ]
+}
+set_defaults("shared_library") {
+  configs = _shared_library_configs
+}
+if (is_component_build) {
+  set_defaults("component") {
+    configs = _shared_library_configs
+  }
+}
+
+# Source set defaults (also for components in non-component mode).
+set_defaults("source_set") {
+  configs = _native_compiler_configs
+}
+if (!is_component_build) {
+  set_defaults("component") {
+    configs = _native_compiler_configs
+  }
+}
+
+# Test defaults.
+set_defaults("test") {
+  if (is_android) {
+    configs = _shared_library_configs
+  } else {
+    configs = _executable_configs
+  }
+}
+
+# ==============================================================================
+# TOOLCHAIN SETUP
+# ==============================================================================
+#
+# Here we set the default toolchain, as well as the variable host_toolchain
+# which will identify the toolchain corresponding to the local system when
+# doing cross-compiles. When not cross-compiling, this will be the same as the
+# default toolchain.
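+#
+# A target can depend on the host-toolchain copy of another target by using a
+# toolchain-qualified label, e.g. (illustrative; the path is hypothetical):
+#
+#   deps = [ "//tools/code_generator($host_toolchain)" ]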
+
+if (is_win) {
+  # On Windows we use the same toolchain for host and target by default.
+  if (is_clang) {
+    host_toolchain = "//build/toolchain/win:clang_$current_cpu"
+  } else {
+    host_toolchain = "//build/toolchain/win:$current_cpu"
+  }
+  set_default_toolchain("$host_toolchain")
+} else if (is_android) {
+  if (host_os == "linux") {
+    # Use clang for the x86/64 Linux host builds.
+    if (host_cpu == "x86" || host_cpu == "x64") {
+      host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    } else {
+      host_toolchain = "//build/toolchain/linux:$host_cpu"
+    }
+  } else if (host_os == "mac") {
+    host_toolchain = "//build/toolchain/mac:clang_$host_cpu"
+  } else {
+    assert(false, "Unknown host for android cross compile")
+  }
+  if (is_clang) {
+    set_default_toolchain("//build/toolchain/android:clang_$current_cpu")
+  } else {
+    set_default_toolchain("//build/toolchain/android:$current_cpu")
+  }
+} else if (is_linux) {
+  if (is_clang) {
+    host_toolchain = "//build/toolchain/linux:clang_$host_cpu"
+    set_default_toolchain("//build/toolchain/linux:clang_$current_cpu")
+  } else {
+    host_toolchain = "//build/toolchain/linux:$host_cpu"
+    set_default_toolchain("//build/toolchain/linux:$current_cpu")
+  }
+  if (is_chromeos && cros_use_custom_toolchain) {
+    set_default_toolchain("//build/toolchain/cros:target")
+  }
+  if (is_fnl) {
+    set_default_toolchain("//build/toolchain/fnl:target")
+  }
+} else if (is_mac) {
+  host_toolchain = "//build/toolchain/mac:clang_x64"
+  set_default_toolchain(host_toolchain)
+} else if (is_ios) {
+  host_toolchain = "//build/toolchain/mac:clang_x64"
+  if (use_ios_simulator) {
+    set_default_toolchain("//build/toolchain/mac:ios_clang_x64")
+  } else {
+    set_default_toolchain("//build/toolchain/mac:ios_clang_arm")
+  }
+} else if (is_nacl) {
+  # TODO(GYP): This will need to change when we get NaCl working
+  # on multiple platforms, but this whole block of code (how we define
+  # host_toolchain) needs to be reworked regardless to key off of host_os
+  # and host_cpu rather than the is_* variables.
+  host_toolchain = "//build/toolchain/linux:clang_x64"
+}
+
+# ==============================================================================
+# COMPONENT SETUP
+# ==============================================================================
+
+# TODO(brettw) erase this once the built-in "component" function is removed.
+if (is_component_build) {
+  component_mode = "shared_library"
+} else {
+  component_mode = "source_set"
+}
+
+template("component") {
+  if (is_component_build) {
+    shared_library(target_name) {
+      # Configs will always be defined since we set_defaults for a component
+      # above. We want to use those rather than whatever came with the nested
+      # shared/static library inside the component.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # The sources assignment filter will have already been applied when the
+      # code was originally executed. We don't want to apply it again, since
+      # the original target may have overridden it for some assignments.
+      set_sources_assignment_filter([])
+
+      if (defined(invoker.all_dependent_configs)) {
+        all_dependent_configs = invoker.all_dependent_configs
+      }
+      if (defined(invoker.allow_circular_includes_from)) {
+        allow_circular_includes_from = invoker.allow_circular_includes_from
+      }
+      if (defined(invoker.cflags)) {
+        cflags = invoker.cflags
+      }
+      if (defined(invoker.cflags_c)) {
+        cflags_c = invoker.cflags_c
+      }
+      if (defined(invoker.cflags_cc)) {
+        cflags_cc = invoker.cflags_cc
+      }
+      if (defined(invoker.cflags_objc)) {
+        cflags_objc = invoker.cflags_objc
+      }
+      if (defined(invoker.cflags_objcc)) {
+        cflags_objcc = invoker.cflags_objcc
+      }
+      if (defined(invoker.check_includes)) {
+        check_includes = invoker.check_includes
+      }
+      if (defined(invoker.data)) {
+        data = invoker.data
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+      if (defined(invoker.datadeps)) {
+        datadeps = invoker.datadeps
+      }
+      if (defined(invoker.defines)) {
+        defines = invoker.defines
+      }
+
+      # All shared libraries must have the sanitizer deps to properly link in
+      # asan mode (this target will be empty in other cases).
+      if (defined(invoker.deps)) {
+        deps = invoker.deps + [ "//build/config/sanitizers:deps" ]
+      } else {
+        deps = [
+          "//build/config/sanitizers:deps",
+        ]
+      }
+      if (defined(invoker.direct_dependent_configs)) {
+        direct_dependent_configs = invoker.direct_dependent_configs
+      }
+      if (defined(invoker.forward_dependent_configs_from)) {
+        forward_dependent_configs_from = invoker.forward_dependent_configs_from
+      }
+      if (defined(invoker.include_dirs)) {
+        include_dirs = invoker.include_dirs
+      }
+      if (defined(invoker.ldflags)) {
+        ldflags = invoker.ldflags
+      }
+      if (defined(invoker.lib_dirs)) {
+        lib_dirs = invoker.lib_dirs
+      }
+      if (defined(invoker.libs)) {
+        libs = invoker.libs
+      }
+      if (defined(invoker.output_extension)) {
+        output_extension = invoker.output_extension
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+      if (defined(invoker.public)) {
+        public = invoker.public
+      }
+      if (defined(invoker.public_configs)) {
+        public_configs = invoker.public_configs
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.sources)) {
+        sources = invoker.sources
+      }
+      if (defined(invoker.testonly)) {
+        testonly = invoker.testonly
+      }
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+    }
+  } else {
+    source_set(target_name) {
+      # See above.
+      configs = []  # Prevent list overwriting warning.
+      configs = invoker.configs
+
+      # See above call.
+      set_sources_assignment_filter([])
+
+      if (defined(invoker.all_dependent_configs)) {
+        all_dependent_configs = invoker.all_dependent_configs
+      }
+      if (defined(invoker.allow_circular_includes_from)) {
+        allow_circular_includes_from = invoker.allow_circular_includes_from
+      }
+      if (defined(invoker.cflags)) {
+        cflags = invoker.cflags
+      }
+      if (defined(invoker.cflags_c)) {
+        cflags_c = invoker.cflags_c
+      }
+      if (defined(invoker.cflags_cc)) {
+        cflags_cc = invoker.cflags_cc
+      }
+      if (defined(invoker.cflags_objc)) {
+        cflags_objc = invoker.cflags_objc
+      }
+      if (defined(invoker.cflags_objcc)) {
+        cflags_objcc = invoker.cflags_objcc
+      }
+      if (defined(invoker.check_includes)) {
+        check_includes = invoker.check_includes
+      }
+      if (defined(invoker.data)) {
+        data = invoker.data
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+      if (defined(invoker.datadeps)) {
+        datadeps = invoker.datadeps
+      }
+      if (defined(invoker.defines)) {
+        defines = invoker.defines
+      }
+      if (defined(invoker.deps)) {
+        deps = invoker.deps
+      }
+      if (defined(invoker.direct_dependent_configs)) {
+        direct_dependent_configs = invoker.direct_dependent_configs
+      }
+      if (defined(invoker.forward_dependent_configs_from)) {
+        forward_dependent_configs_from = invoker.forward_dependent_configs_from
+      }
+      if (defined(invoker.include_dirs)) {
+        include_dirs = invoker.include_dirs
+      }
+      if (defined(invoker.ldflags)) {
+        ldflags = invoker.ldflags
+      }
+      if (defined(invoker.lib_dirs)) {
+        lib_dirs = invoker.lib_dirs
+      }
+      if (defined(invoker.libs)) {
+        libs = invoker.libs
+      }
+      if (defined(invoker.output_extension)) {
+        output_extension = invoker.output_extension
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+      if (defined(invoker.public)) {
+        public = invoker.public
+      }
+      if (defined(invoker.public_configs)) {
+        public_configs = invoker.public_configs
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.sources)) {
+        sources = invoker.sources
+      }
+      if (defined(invoker.testonly)) {
+        testonly = invoker.testonly
+      }
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+    }
+  }
+}
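+
+# Example usage of the component template (illustrative only; the target name
+# and files are hypothetical):
+#
+#   component("foo") {
+#     sources = [ "foo.cc", "foo.h" ]
+#     deps = [ "//base" ]
+#   }
+#
+# In a component build this expands to a shared_library; otherwise it expands
+# to a source_set with the same settings forwarded.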
diff --git a/build/config/OWNERS b/build/config/OWNERS
new file mode 100644
index 0000000..bd53091
--- /dev/null
+++ b/build/config/OWNERS
@@ -0,0 +1,6 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
+
+per-file BUILDCONFIG.gn=brettw@chromium.org
+per-file BUILDCONFIG.gn=set noparent
diff --git a/build/config/allocator.gni b/build/config/allocator.gni
new file mode 100644
index 0000000..71418a8
--- /dev/null
+++ b/build/config/allocator.gni
@@ -0,0 +1,16 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(GYP): Make tcmalloc work on win.
+if (is_android || current_cpu == "mipsel" || is_mac || is_ios || is_asan ||
+    is_lsan || is_tsan || is_msan || is_win) {
+  _default_allocator = "none"
+} else {
+  _default_allocator = "tcmalloc"
+}
+
+declare_args() {
+  # Memory allocator to use. Set to "none" to use default allocator.
+  use_allocator = _default_allocator
+}
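+
+# The default can be overridden from the command line, e.g. (illustrative
+# sketch, assuming the default output directory name):
+#
+#   gn gen out/Release --args='use_allocator="none"'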
diff --git a/build/config/android/BUILD.gn b/build/config/android/BUILD.gn
new file mode 100644
index 0000000..5492693
--- /dev/null
+++ b/build/config/android/BUILD.gn
@@ -0,0 +1,32 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/sysroot.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    sysroot_ld_path = rebase_path("//build/config/linux/sysroot_ld_path.py")
+    ldflags += [ exec_script(sysroot_ld_path,
+                             [
+                               rebase_path("//build/linux/sysroot_ld_path.sh"),
+                               sysroot,
+                             ],
+                             "value") ]
+  }
+}
+
+config("executable_config") {
+  cflags = [ "-fPIE" ]
+  ldflags = [ "-pie" ]
+}
+
+config("hide_native_jni_exports") {
+  ldflags = [ "-Wl,--version-script=" +
+              rebase_path("//build/android/android_no_jni_exports.lst") ]
+}
diff --git a/build/config/android/OWNERS b/build/config/android/OWNERS
new file mode 100644
index 0000000..3759e93
--- /dev/null
+++ b/build/config/android/OWNERS
@@ -0,0 +1 @@
+cjhopman@chromium.org
diff --git a/build/config/android/config.gni b/build/config/android/config.gni
new file mode 100644
index 0000000..5457b88
--- /dev/null
+++ b/build/config/android/config.gni
@@ -0,0 +1,206 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains common system config stuff for the Android build.
+
+if (is_android) {
+  has_chrome_android_internal =
+      exec_script("//build/dir_exists.py",
+                  [ rebase_path("//clank", root_build_dir) ],
+                  "string") == "True"
+
+  if (has_chrome_android_internal) {
+    import("//clank/config.gni")
+  }
+
+  if (!defined(default_android_sdk_root)) {
+    default_android_sdk_root = "//third_party/android_tools/sdk"
+    default_android_sdk_version = "22"
+    default_android_sdk_build_tools_version = "22.0.1"
+  }
+
+  if (!defined(google_play_services_library)) {
+    google_play_services_library =
+        "//third_party/android_tools:google_play_services_default_java"
+  }
+
+  declare_args() {
+    android_sdk_root = default_android_sdk_root
+    android_sdk_version = default_android_sdk_version
+    android_sdk_build_tools_version = default_android_sdk_build_tools_version
+
+    android_default_keystore_path =
+        "//build/android/ant/chromium-debug.keystore"
+    android_default_keystore_name = "chromiumdebugkey"
+    android_default_keystore_password = "chromium"
+
+    # This is a unique identifier for a given build. It's used for
+    # identifying various build artifacts corresponding to a particular build of
+    # chrome (e.g. where to find archived symbols).
+    android_chrome_build_id = "\"\""
+
+    # Set to true to run findbugs on JAR targets.
+    run_findbugs = false
+
+    # Set to true to enable the Errorprone compiler
+    use_errorprone_java_compiler = false
+  }
+
+  # Host stuff -----------------------------------------------------------------
+
+  # Defines the name the Android build gives to the current host CPU
+  # architecture, which is different from the names GN uses.
+  if (host_cpu == "x64") {
+    android_host_arch = "x86_64"
+  } else if (host_cpu == "x86") {
+    android_host_arch = "x86"
+  } else {
+    assert(false, "Need Android toolchain support for your build CPU arch.")
+  }
+
+  # Defines the name the Android build gives to the current host operating
+  # system, which is different from the names GN uses.
+  if (host_os == "linux") {
+    android_host_os = "linux"
+  } else if (host_os == "mac") {
+    android_host_os = "darwin"
+  } else {
+    assert(false, "Need Android toolchain support for your build OS.")
+  }
+
+  # Directories and files ------------------------------------------------------
+  #
+  # We define many of the directory strings here for each output architecture
+  # (rather than just the current one) since these are needed by the Android
+  # toolchain file to define toolchains for all possible targets in one pass.
+
+  # Path to the Android NDK and SDK.
+  android_ndk_root = "//third_party/android_tools/ndk"
+  android_ndk_include_dir = "$android_ndk_root/usr/include"
+
+  android_sdk = "${android_sdk_root}/platforms/android-${android_sdk_version}"
+
+  android_sdk_tools = "${android_sdk_root}/tools"
+  android_sdk_build_tools =
+      "${android_sdk_root}/build-tools/$android_sdk_build_tools_version"
+
+  # Path to the SDK's android.jar
+  android_sdk_jar = "$android_sdk/android.jar"
+
+  zipalign_path = "$android_sdk_build_tools/zipalign"
+
+  # Subdirectories inside android_ndk_root that contain the sysroot for the
+  # associated platform.
+  _android_api_level = 16
+  x86_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-x86"
+  arm_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-arm"
+  mips_android_sysroot_subdir =
+      "platforms/android-${_android_api_level}/arch-mips"
+  _android64_api_level = 21
+  x86_64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-x86_64"
+  arm64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-arm64"
+  mips64_android_sysroot_subdir =
+      "platforms/android-${_android64_api_level}/arch-mips64"
+
+  # Toolchain root directory for each build. The actual binaries are inside
+  # a "bin" directory inside of these.
+  _android_toolchain_version = "4.9"
+  x86_android_toolchain_root = "$android_ndk_root/toolchains/x86-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm_android_toolchain_root = "$android_ndk_root/toolchains/arm-linux-androideabi-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips_android_toolchain_root = "$android_ndk_root/toolchains/mipsel-linux-android-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  x86_64_android_toolchain_root = "$android_ndk_root/toolchains/x86_64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  arm64_android_toolchain_root = "$android_ndk_root/toolchains/aarch64-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+  mips64_android_toolchain_root = "$android_ndk_root/toolchains/mips64el-${_android_toolchain_version}/prebuilt/${android_host_os}-${android_host_arch}"
+
+  # Location of libgcc. This is only needed for the current GN toolchain, so we
+  # only need to define the current one, rather than one for every platform
+  # like the toolchain roots.
+  if (current_cpu == "x86") {
+    android_prebuilt_arch = "android-x86"
+    _binary_prefix = "i686-linux-android"
+    android_toolchain_root = "$x86_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/i686-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "arm") {
+    android_prebuilt_arch = "android-arm"
+    _binary_prefix = "arm-linux-androideabi"
+    android_toolchain_root = "$arm_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/arm-linux-androideabi/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "mipsel") {
+    android_prebuilt_arch = "android-mips"
+    _binary_prefix = "mipsel-linux-android"
+    android_toolchain_root = "$mips_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/mipsel-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "x64") {
+    android_prebuilt_arch = "android-x86_64"
+    _binary_prefix = "x86_64-linux-android"
+    android_toolchain_root = "$x86_64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/x86_64-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "arm64") {
+    android_prebuilt_arch = "android-arm64"
+    _binary_prefix = "aarch64-linux-android"
+    android_toolchain_root = "$arm64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/aarch64-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else if (current_cpu == "mips64el") {
+    android_prebuilt_arch = "android-mips64"
+    _binary_prefix = "mips64el-linux-android"
+    android_toolchain_root = "$mips64_android_toolchain_root"
+    android_libgcc_file = "$android_toolchain_root/lib/gcc/mips64el-linux-android/${_android_toolchain_version}/libgcc.a"
+  } else {
+    assert(false, "Need android libgcc support for your target arch.")
+  }
+
+  android_tool_prefix = "$android_toolchain_root/bin/$_binary_prefix-"
+  android_readelf = "${android_tool_prefix}readelf"
+  android_objcopy = "${android_tool_prefix}objcopy"
+  android_gdbserver =
+      "$android_ndk_root/prebuilt/$android_prebuilt_arch/gdbserver/gdbserver"
+
+  # Toolchain stuff ------------------------------------------------------------
+
+  android_libcpp_root = "$android_ndk_root/sources/cxx-stl/llvm-libc++"
+  if (component_mode == "shared_library") {
+    android_libcpp_library = "c++_shared"
+  } else {
+    android_libcpp_library = "c++_static"
+  }
+
+  if (component_mode == "shared_library") {
+    # By appending .cr, we prevent name collisions with libraries already
+    # loaded by the Android zygote.
+    android_product_extension = ".cr.so"
+  } else {
+    android_product_extension = ".so"
+  }
+
+  # ABI ------------------------------------------------------------------------
+
+  if (current_cpu == "x86") {
+    android_app_abi = "x86"
+  } else if (current_cpu == "arm") {
+    import("//build/config/arm.gni")
+    if (arm_version < 7) {
+      android_app_abi = "armeabi"
+    } else {
+      android_app_abi = "armeabi-v7a"
+    }
+  } else if (current_cpu == "mipsel") {
+    android_app_abi = "mips"
+  } else if (current_cpu == "x64") {
+    android_app_abi = "x86_64"
+  } else if (current_cpu == "arm64") {
+    android_app_abi = "arm64-v8a"
+  } else if (current_cpu == "mips64el") {
+    android_app_abi = "mips64"
+  } else {
+    assert(false, "Unknown Android ABI: " + current_cpu)
+  }
+
+  android_log_tag = "\"flutter\""
+}
diff --git a/build/config/android/internal_rules.gni b/build/config/android/internal_rules.gni
new file mode 100644
index 0000000..ab154b4
--- /dev/null
+++ b/build/config/android/internal_rules.gni
@@ -0,0 +1,1596 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+
+assert(is_android)
+
+rebased_android_sdk = rebase_path(android_sdk, root_build_dir)
+rebased_android_sdk_root = rebase_path(android_sdk_root, root_build_dir)
+rebased_android_sdk_build_tools =
+    rebase_path(android_sdk_build_tools, root_build_dir)
+
+android_sdk_jar = "$android_sdk/android.jar"
+rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+android_aapt_path = "$rebased_android_sdk_build_tools/aapt"
+
+template("android_lint") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  jar_path = invoker.jar_path
+  android_manifest = invoker.android_manifest
+  java_files = invoker.java_files
+  base_path = "$target_gen_dir/$target_name"
+
+  action(target_name) {
+    script = "//build/android/gyp/lint.py"
+    result_path = base_path + "/result.xml"
+    config_path = base_path + "/config.xml"
+    suppressions_file = "//build/android/lint/suppressions.xml"
+    inputs = [
+               suppressions_file,
+               android_manifest,
+               jar_path,
+             ] + java_files
+
+    outputs = [
+      config_path,
+      result_path,
+    ]
+
+    rebased_java_files = rebase_path(java_files, root_build_dir)
+
+    args = [
+      "--lint-path=$rebased_android_sdk_root/tools/lint",
+      "--config-path",
+      rebase_path(suppressions_file, root_build_dir),
+      "--manifest-path",
+      rebase_path(android_manifest, root_build_dir),
+      "--product-dir=.",
+      "--jar-path",
+      rebase_path(jar_path, root_build_dir),
+      "--processed-config-path",
+      rebase_path(config_path, root_build_dir),
+      "--result-path",
+      rebase_path(result_path, root_build_dir),
+      "--java-files=$rebased_java_files",
+      "--enable",
+    ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+  }
+}
+
+template("findbugs") {
+  jar_path = invoker.jar_path
+
+  build_config = invoker.build_config
+
+  action(target_name) {
+    script = "//build/android/findbugs_diff.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    result_path = "$target_gen_dir/$target_name/result.xml"
+    exclusions_file = "//build/android/findbugs_filter/findbugs_exclude.xml"
+
+    rebased_build_config = rebase_path(build_config, root_build_dir)
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+
+    inputs = [
+      "//build/android/pylib/utils/findbugs.py",
+      exclusions_file,
+      jar_path,
+    ]
+
+    outputs = [
+      depfile,
+      result_path,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--exclude",
+      rebase_path(exclusions_file, root_build_dir),
+      "--auxclasspath-gyp",
+      "@FileArg($rebased_build_config:javac:classpath)",
+      "--output-file",
+      rebase_path(result_path, root_build_dir),
+      rebase_path(jar_path, root_build_dir),
+    ]
+  }
+}
+
+template("dex") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.output))
+  action(target_name) {
+    script = "//build/android/gyp/dex.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    if (defined(invoker.sources)) {
+      sources = invoker.sources
+    }
+    outputs = [
+      depfile,
+      invoker.output,
+    ]
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    rebased_output = rebase_path(invoker.output, root_build_dir)
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--android-sdk-tools",
+      rebased_android_sdk_build_tools,
+      "--dex-path",
+      rebased_output,
+    ]
+
+    if (defined(invoker.no_locals) && invoker.no_locals) {
+      args += [ "--no-locals=1" ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+
+    if (defined(invoker.sources)) {
+      args += rebase_path(invoker.sources, root_build_dir)
+    }
+  }
+}
+
+# Creates a zip archive of the inputs.
+# If base_dir is provided, the archive paths will be relative to it.
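+#
+# Example (illustrative; the target name and paths are hypothetical):
+#
+#   zip("my_assets_zip") {
+#     inputs = [ "$target_gen_dir/a.json", "$target_gen_dir/b.json" ]
+#     output = "$target_gen_dir/assets.zip"
+#     base_dir = target_gen_dir
+#   }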
+template("zip") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.inputs))
+  assert(defined(invoker.output))
+
+  rebase_inputs = rebase_path(invoker.inputs, root_build_dir)
+  rebase_output = rebase_path(invoker.output, root_build_dir)
+  action(target_name) {
+    script = "//build/android/gn/zip.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = invoker.inputs
+    outputs = [
+      depfile,
+      invoker.output,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--inputs=$rebase_inputs",
+      "--output=$rebase_output",
+    ]
+    if (defined(invoker.base_dir)) {
+      args += [
+        "--base-dir",
+        rebase_path(invoker.base_dir, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
+
+# Write the target's .build_config file. This is a json file that contains a
+# dictionary of information about how to build this target (things that
+# require knowledge about this target's dependencies and cannot be calculated
+# at gn-time). There is a special syntax for passing a value from that
+# dictionary to an action/action_foreach's args:
+#   --python-arg=@FileArg($rebased_build_config_path:key0:key1)
+# At runtime, such an arg will be replaced by the value in the build_config.
+# See build/android/gyp/write_build_config.py and
+# build/android/gyp/util/build_utils.py:ExpandFileArgs
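+#
+# For example (taken from the javac action later in this file), an action can
+# pass:
+#
+#   args = [ "--classpath=@FileArg($_rebased_build_config:javac:classpath)" ]
+#
+# and the placeholder is expanded at build time to the javac classpath
+# recorded in the .build_config file.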
+template("write_build_config") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.type))
+  assert(defined(invoker.build_config))
+
+  type = invoker.type
+  build_config = invoker.build_config
+
+  assert(type == "android_apk" || type == "java_library" ||
+         type == "android_resources" || type == "deps_dex")
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/write_build_config.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = []
+
+    deps = []
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    possible_deps_configs = []
+    foreach(d, deps) {
+      dep_gen_dir = get_label_info(d, "target_gen_dir")
+      dep_name = get_label_info(d, "name")
+      possible_deps_configs += [ "$dep_gen_dir/$dep_name.build_config" ]
+    }
+    rebase_possible_deps_configs =
+        rebase_path(possible_deps_configs, root_build_dir)
+
+    outputs = [
+      depfile,
+      build_config,
+    ]
+
+    args = [
+      "--type",
+      type,
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--possible-deps-configs=$rebase_possible_deps_configs",
+      "--build-config",
+      rebase_path(build_config, root_build_dir),
+    ]
+
+    is_java_library = type == "java_library"
+    is_apk = type == "android_apk"
+    is_android_resources = type == "android_resources"
+    is_deps_dex = type == "deps_dex"
+
+    supports_android = is_apk || is_android_resources || is_deps_dex ||
+                       (is_java_library && defined(invoker.supports_android) &&
+                        invoker.supports_android)
+    requires_android = is_apk || is_android_resources || is_deps_dex ||
+                       (is_java_library && defined(invoker.requires_android) &&
+                        invoker.requires_android)
+
+    assert(!requires_android || supports_android,
+           "requires_android requires" + " supports_android")
+
+    # Mark these variables as used.
+    assert(is_java_library || true)
+    assert(is_apk || true)
+    assert(is_android_resources || true)
+    assert(is_deps_dex || true)
+
+    if (is_java_library || is_apk) {
+      args += [
+        "--jar-path",
+        rebase_path(invoker.jar_path, root_build_dir),
+      ]
+    }
+
+    if (is_apk || is_deps_dex || (is_java_library && supports_android)) {
+      args += [
+        "--dex-path",
+        rebase_path(invoker.dex_path, root_build_dir),
+      ]
+    }
+    if (supports_android) {
+      args += [ "--supports-android" ]
+    }
+    if (requires_android) {
+      args += [ "--requires-android" ]
+    }
+    if (defined(invoker.bypass_platform_checks) &&
+        invoker.bypass_platform_checks) {
+      args += [ "--bypass-platform-checks" ]
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      deps += [ invoker.apk_under_test ]
+      apk_under_test_gen_dir =
+          get_label_info(invoker.apk_under_test, "target_gen_dir")
+      apk_under_test_name = get_label_info(invoker.apk_under_test, "name")
+      apk_under_test_config =
+          "$apk_under_test_gen_dir/$apk_under_test_name.build_config"
+      args += [
+        "--tested-apk-config",
+        rebase_path(apk_under_test_config, root_build_dir),
+      ]
+    }
+
+    if (is_android_resources || is_apk) {
+      assert(defined(invoker.resources_zip))
+      args += [
+        "--resources-zip",
+        rebase_path(invoker.resources_zip, root_build_dir),
+      ]
+      if (defined(invoker.android_manifest)) {
+        inputs += [ invoker.android_manifest ]
+        args += [
+          "--android-manifest",
+          rebase_path(invoker.android_manifest, root_build_dir),
+        ]
+      } else {
+        assert(!is_apk, "apk build configs require an android_manifest")
+      }
+      if (defined(invoker.custom_package)) {
+        args += [
+          "--package-name",
+          invoker.custom_package,
+        ]
+      }
+      if (defined(invoker.r_text)) {
+        args += [
+          "--r-text",
+          rebase_path(invoker.r_text, root_build_dir),
+        ]
+      }
+    }
+
+    if (is_apk) {
+      if (defined(invoker.native_libs)) {
+        inputs += invoker.native_libs
+        rebased_native_libs = rebase_path(invoker.native_libs, root_build_dir)
+        rebased_android_readelf = rebase_path(android_readelf, root_build_dir)
+        args += [
+          "--native-libs=$rebased_native_libs",
+          "--readelf-path=$rebased_android_readelf",
+        ]
+      }
+    }
+
+    if (defined(invoker.srcjar)) {
+      args += [
+        "--srcjar",
+        rebase_path(invoker.srcjar, root_build_dir),
+      ]
+    }
+  }
+}
+
+template("process_java_prebuilt") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  _input_jar_path = invoker.input_jar_path
+  _output_jar_path = invoker.output_jar_path
+  _jar_toc_path = _output_jar_path + ".TOC"
+
+  assert(invoker.build_config != "")
+
+  if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+    _proguard_jar_path = "$android_sdk_root/tools/proguard/lib/proguard.jar"
+    _proguard_config_path = invoker.proguard_config
+    _build_config = invoker.build_config
+    _rebased_build_config = rebase_path(_build_config, root_build_dir)
+    _output_jar_target = "${target_name}__proguard_process"
+    action(_output_jar_target) {
+      script = "//build/android/gyp/proguard.py"
+      inputs = [
+        android_sdk_jar,
+        _proguard_jar_path,
+        _build_config,
+        _input_jar_path,
+        _proguard_config_path,
+      ]
+      depfile = "${target_gen_dir}/${target_name}.d"
+      outputs = [
+        depfile,
+        _output_jar_path,
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--proguard-path",
+        rebase_path(_proguard_jar_path, root_build_dir),
+        "--input-path",
+        rebase_path(_input_jar_path, root_build_dir),
+        "--output-path",
+        rebase_path(_output_jar_path, root_build_dir),
+        "--proguard-config",
+        rebase_path(_proguard_config_path, root_build_dir),
+        "--classpath",
+        rebased_android_sdk_jar,
+        "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+      ]
+
+      if (defined(invoker.deps)) {
+        deps = invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+    }
+  } else {
+    _output_jar_target = "${target_name}__copy_jar"
+    copy(_output_jar_target) {
+      sources = [
+        _input_jar_path,
+      ]
+      outputs = [
+        _output_jar_path,
+      ]
+
+      if (defined(invoker.deps)) {
+        deps = invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+    }
+  }
+
+  action("${target_name}__jar_toc") {
+    script = "//build/android/gyp/jar_toc.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      _jar_toc_path,
+      _jar_toc_path + ".md5.stamp",
+    ]
+    inputs = [
+      _output_jar_path,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--jar-path",
+      rebase_path(_output_jar_path, root_build_dir),
+      "--toc-path",
+      rebase_path(_jar_toc_path, root_build_dir),
+    ]
+    public_deps = [
+      ":$_output_jar_target",
+    ]
+  }
+
+  group(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    public_deps = [
+      ":${target_name}__jar_toc",
+      ":$_output_jar_target",
+    ]
+  }
+}
+
+template("finalize_apk") {
+  action(target_name) {
+    script = "//build/android/gyp/finalize_apk.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+
+    sources = [
+      invoker.input_apk_path,
+    ]
+    inputs = [
+      invoker.keystore_path,
+    ]
+    outputs = [
+      depfile,
+      invoker.output_apk_path,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--zipalign-path",
+      rebase_path(zipalign_path, root_build_dir),
+      "--unsigned-apk-path",
+      rebase_path(invoker.input_apk_path, root_build_dir),
+      "--final-apk-path",
+      rebase_path(invoker.output_apk_path, root_build_dir),
+      "--key-path",
+      rebase_path(invoker.keystore_path, root_build_dir),
+      "--key-name",
+      invoker.keystore_name,
+      "--key-passwd",
+      invoker.keystore_password,
+    ]
+    if (defined(invoker.rezip_apk) && invoker.rezip_apk) {
+      _rezip_jar_path = "$root_build_dir/lib.java/rezip_apk.jar"
+      inputs += [ _rezip_jar_path ]
+      args += [
+        "--load-library-from-zip=1",
+        "--rezip-apk-jar-path",
+        rebase_path(_rezip_jar_path, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+  }
+}
+
+# Packages resources, assets, dex, and native libraries into an apk. Signs and
+# zipaligns the apk.
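+#
+# Example (illustrative only; every value here is hypothetical):
+#
+#   create_apk("foo_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     base_path = "$target_gen_dir/foo"
+#     apk_path = "$root_build_dir/apks/Foo.apk"
+#     dex_path = "$target_gen_dir/foo/classes.dex"
+#     resources_zip = "$target_gen_dir/foo/foo.resources.zip"
+#     load_library_from_apk = false
+#     version_code = "1"
+#     version_name = "1.0"
+#     keystore_path = android_default_keystore_path
+#     keystore_name = android_default_keystore_name
+#     keystore_password = android_default_keystore_password
+#   }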
+template("create_apk") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  _android_manifest = invoker.android_manifest
+  _base_path = invoker.base_path
+  _final_apk_path = invoker.apk_path
+
+  if (defined(invoker.resources_zip)) {
+    _resources_zip = invoker.resources_zip
+  }
+  if (defined(invoker.dex_path)) {
+    _dex_path = invoker.dex_path
+  }
+  _load_library_from_apk = invoker.load_library_from_apk
+
+  _package_deps = []
+  if (defined(invoker.deps)) {
+    _package_deps = invoker.deps
+  }
+
+  _native_libs_dir = "//build/android/empty/res"
+  if (defined(invoker.native_libs_dir)) {
+    _native_libs_dir = invoker.native_libs_dir
+  }
+
+  if (defined(invoker.asset_location)) {
+    _asset_location = invoker.asset_location
+  }
+
+  _version_code = invoker.version_code
+  _version_name = invoker.version_name
+
+  _base_apk_path = _base_path + ".apk_intermediates"
+
+  _resource_packaged_apk_path = _base_apk_path + ".ap_"
+  _packaged_apk_path = _base_apk_path + ".unfinished.apk"
+  _shared_resources =
+      defined(invoker.shared_resources) && invoker.shared_resources
+
+  _configuration_name = "Release"
+  if (is_debug) {
+    _configuration_name = "Debug"
+  }
+
+  _keystore_path = invoker.keystore_path
+  _keystore_name = invoker.keystore_name
+  _keystore_password = invoker.keystore_password
+
+  _split_densities = []
+  if (defined(invoker.create_density_splits) && invoker.create_density_splits) {
+    _split_densities = [
+      "hdpi",
+      "xhdpi",
+      "xxhdpi",
+      "xxxhdpi",
+      "tvdpi",
+    ]
+  }
+
+  _split_languages = []
+  if (defined(invoker.language_splits)) {
+    _split_languages = invoker.language_splits
+  }
+
+  _package_resources_target_name = "${target_name}__package_resources"
+  action(_package_resources_target_name) {
+    deps = _package_deps
+
+    script = "//build/android/gyp/package_resources.py"
+    depfile = "${target_gen_dir}/${target_name}.d"
+    inputs = [
+      _android_manifest,
+    ]
+    if (defined(_resources_zip)) {
+      inputs += [ _resources_zip ]
+    }
+    outputs = [
+      depfile,
+      _resource_packaged_apk_path,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--android-sdk",
+      rebased_android_sdk,
+      "--aapt-path",
+      android_aapt_path,
+      "--configuration-name=$_configuration_name",
+      "--android-manifest",
+      rebase_path(_android_manifest, root_build_dir),
+      "--version-code",
+      _version_code,
+      "--version-name",
+      _version_name,
+      "--apk-path",
+      rebase_path(_resource_packaged_apk_path, root_build_dir),
+    ]
+
+    if (defined(_asset_location)) {
+      args += [
+        "--asset-dir",
+        rebase_path(_asset_location, root_build_dir),
+      ]
+    }
+    if (defined(_resources_zip)) {
+      args += [
+        "--resource-zips",
+        rebase_path(_resources_zip, root_build_dir),
+      ]
+    }
+    if (_shared_resources) {
+      args += [ "--shared-resources" ]
+    }
+    if (_split_densities != []) {
+      args += [ "--create-density-splits" ]
+      foreach(_density, _split_densities) {
+        outputs += [ "${_resource_packaged_apk_path}_${_density}" ]
+      }
+    }
+    if (_split_languages != []) {
+      args += [ "--language-splits=$_split_languages" ]
+      foreach(_language, _split_languages) {
+        outputs += [ "${_resource_packaged_apk_path}_${_language}" ]
+      }
+    }
+    if (defined(invoker.extensions_to_not_compress)) {
+      args += [
+        "--no-compress",
+        invoker.extensions_to_not_compress,
+      ]
+    }
+  }
+
+  package_target = "${target_name}__package"
+  action(package_target) {
+    script = "//build/android/gyp/ant.py"
+    _ant_script = "//build/android/ant/apk-package.xml"
+
+    deps = [
+      ":${_package_resources_target_name}",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    depfile = "$target_gen_dir/$target_name.d"
+
+    inputs = [
+      _resource_packaged_apk_path,
+      _ant_script,
+    ]
+    if (defined(_dex_path)) {
+      inputs += [ _dex_path ]
+    }
+
+    outputs = [
+      depfile,
+      _packaged_apk_path,
+    ]
+
+    _rebased_emma_jar = ""
+    _rebased_resource_packaged_apk_path =
+        rebase_path(_resource_packaged_apk_path, root_build_dir)
+    _rebased_packaged_apk_path = rebase_path(_packaged_apk_path, root_build_dir)
+    _rebased_native_libs_dir = rebase_path(_native_libs_dir, root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--",
+      "-quiet",
+      "-DANDROID_SDK_ROOT=$rebased_android_sdk_root",
+      "-DANDROID_SDK_TOOLS=$rebased_android_sdk_build_tools",
+      "-DRESOURCE_PACKAGED_APK_NAME=$_rebased_resource_packaged_apk_path",
+      "-DCONFIGURATION_NAME=$_configuration_name",
+      "-DNATIVE_LIBS_DIR=$_rebased_native_libs_dir",
+      "-DOUT_DIR=",
+      "-DUNSIGNED_APK_PATH=$_rebased_packaged_apk_path",
+      "-DEMMA_INSTRUMENT=0",
+      "-DEMMA_DEVICE_JAR=$_rebased_emma_jar",
+      "-Dbasedir=.",
+      "-buildfile",
+      rebase_path(_ant_script, root_build_dir),
+    ]
+    if (defined(_dex_path)) {
+      _rebased_dex_path = rebase_path(_dex_path, root_build_dir)
+      args += [
+        "-DDEX_FILE_PATH=$_rebased_dex_path",
+        "-DHAS_CODE=true",
+      ]
+    } else {
+      args += [ "-DHAS_CODE=false" ]
+    }
+  }
+
+  _finalize_apk_rule_name = "${target_name}__finalize"
+  finalize_apk(_finalize_apk_rule_name) {
+    input_apk_path = _packaged_apk_path
+    output_apk_path = _final_apk_path
+    keystore_path = _keystore_path
+    keystore_name = _keystore_name
+    keystore_password = _keystore_password
+    rezip_apk = _load_library_from_apk
+
+    public_deps = [
+      # Generator of the _packaged_apk_path this target takes as input.
+      ":$package_target",
+    ]
+  }
+
+  _final_deps = [ ":${_finalize_apk_rule_name}" ]
+
+  template("finalize_split") {
+    finalize_apk(target_name) {
+      _config = invoker.split_config
+      _type = invoker.split_type
+      input_apk_path = "${_resource_packaged_apk_path}_${_config}"
+      _output_paths = process_file_template(
+              [ _final_apk_path ],
+              "{{source_dir}}/{{source_name_part}}-${_type}-${_config}.apk")
+      output_apk_path = _output_paths[0]
+      keystore_path = _keystore_path
+      keystore_name = _keystore_name
+      keystore_password = _keystore_password
+      deps = [
+        ":${_package_resources_target_name}",
+      ]
+    }
+  }
+
+  foreach(_split, _split_densities) {
+    _split_rule = "${target_name}__finalize_${_split}_split"
+    finalize_split(_split_rule) {
+      split_type = "density"
+      split_config = _split
+    }
+    _final_deps += [ ":$_split_rule" ]
+  }
+  foreach(_split, _split_languages) {
+    _split_rule = "${target_name}__finalize_${_split}_split"
+    finalize_split(_split_rule) {
+      split_type = "lang"
+      split_config = _split
+    }
+    _final_deps += [ ":$_split_rule" ]
+  }
+
+  group(target_name) {
+    public_deps = _final_deps
+  }
+}
+
+template("java_prebuilt_impl") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+  _supports_android =
+      defined(invoker.supports_android) && invoker.supports_android
+
+  assert(defined(invoker.jar_path))
+  _base_path = "${target_gen_dir}/$target_name"
+  _jar_path = _base_path + ".jar"
+  _build_config = _base_path + ".build_config"
+
+  if (_supports_android) {
+    _dex_path = _base_path + ".dex.jar"
+  }
+  _deps = []
+  if (defined(invoker.deps)) {
+    _deps = invoker.deps
+  }
+  _jar_deps = []
+  if (defined(invoker.jar_dep)) {
+    _jar_deps = [ invoker.jar_dep ]
+  }
+
+  _template_name = target_name
+
+  build_config_target_name = "${_template_name}__build_config"
+  process_jar_target_name = "${_template_name}__process_jar"
+  if (_supports_android) {
+    dex_target_name = "${_template_name}__dex"
+  }
+
+  write_build_config(build_config_target_name) {
+    type = "java_library"
+    supports_android = _supports_android
+    requires_android =
+        defined(invoker.requires_android) && invoker.requires_android
+
+    deps = _deps
+    build_config = _build_config
+    jar_path = _jar_path
+    if (_supports_android) {
+      dex_path = _dex_path
+    }
+  }
+
+  process_java_prebuilt(process_jar_target_name) {
+    visibility = [ ":$_template_name" ]
+    if (_supports_android) {
+      visibility += [ ":$dex_target_name" ]
+    }
+
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = true
+      proguard_config = invoker.proguard_config
+    }
+
+    build_config = _build_config
+    input_jar_path = invoker.jar_path
+    output_jar_path = _jar_path
+
+    deps = [ ":$build_config_target_name" ] + _deps + _jar_deps
+  }
+
+  if (_supports_android) {
+    dex(dex_target_name) {
+      sources = [
+        _jar_path,
+      ]
+      output = _dex_path
+      deps = [ ":$process_jar_target_name" ] + _deps + _jar_deps
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":$process_jar_target_name",
+    ]
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (_supports_android) {
+      deps += [ ":$dex_target_name" ]
+    }
+  }
+}
+
+# Compiles and jars a set of java files.
+#
+# Outputs:
+#  $jar_path.jar
+#  $jar_path.jar.TOC
+#
+# Variables
+#   java_files: List of .java files to compile.
+#   java_deps: List of java dependencies. These should all have a .jar output
+#     at "${target_gen_dir}/${target_name}.jar.
+#   chromium_code: If true, enable extra warnings.
+#   srcjar_deps: List of srcjar dependencies. The .java files contained in the
+#     dependencies srcjar outputs will be compiled and added to the output jar.
+#   jar_path: Use this to explicitly set the output jar path. Defaults to
+#     "${target_gen_dir}/${target_name}.jar.
+template("compile_java") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.java_files))
+  assert(defined(invoker.build_config))
+  assert(defined(invoker.jar_path))
+
+  _java_files = invoker.java_files
+  _final_jar_path = invoker.jar_path
+  _intermediate_jar_path = "$target_gen_dir/$target_name.initial.jar"
+
+  _build_config = invoker.build_config
+
+  _jar_excluded_patterns = []
+  if (defined(invoker.jar_excluded_patterns)) {
+    _jar_excluded_patterns += invoker.jar_excluded_patterns
+  }
+
+  _chromium_code = false
+  if (defined(invoker.chromium_code)) {
+    _chromium_code = invoker.chromium_code
+  }
+
+  _supports_android = true
+  if (defined(invoker.supports_android)) {
+    _supports_android = invoker.supports_android
+  }
+
+  _enable_errorprone = use_errorprone_java_compiler
+  if (defined(invoker.enable_errorprone)) {
+    _enable_errorprone = invoker.enable_errorprone
+  }
+
+  _manifest_entries = []
+  if (defined(invoker.manifest_entries)) {
+    _manifest_entries = invoker.manifest_entries
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps += invoker.srcjar_deps
+  }
+
+  _java_srcjars = []
+  if (defined(invoker.srcjars)) {
+    _java_srcjars = invoker.srcjars
+  }
+  foreach(dep, _srcjar_deps) {
+    _dep_gen_dir = get_label_info(dep, "target_gen_dir")
+    _dep_name = get_label_info(dep, "name")
+    _java_srcjars += [ "$_dep_gen_dir/$_dep_name.srcjar" ]
+  }
+
+  # Mark srcjar_deps as used.
+  assert(_srcjar_deps == [] || true)
+
+  _system_jars = []
+  if (defined(invoker.android) && invoker.android) {
+    _system_jars += [ android_sdk_jar ]
+  }
+
+  _rebased_build_config = rebase_path(_build_config, root_build_dir)
+  _rebased_jar_path = rebase_path(_intermediate_jar_path, root_build_dir)
+
+  javac_target_name = "${target_name}__javac"
+  finish_target_name = "${target_name}__finish"
+  final_target_name = target_name
+
+  action(javac_target_name) {
+    script = "//build/android/gyp/javac.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    deps = _srcjar_deps
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    outputs = [
+      depfile,
+      _intermediate_jar_path,
+      _intermediate_jar_path + ".md5.stamp",
+    ]
+    sources = _java_files + _java_srcjars
+    inputs = _system_jars + [ _build_config ]
+
+    _rebased_system_jars = rebase_path(_system_jars, root_build_dir)
+    _rebased_java_srcjars = rebase_path(_java_srcjars, root_build_dir)
+    _rebased_depfile = rebase_path(depfile, root_build_dir)
+    args = [
+      "--depfile=$_rebased_depfile",
+      "--classpath=$_rebased_system_jars",
+      "--classpath=@FileArg($_rebased_build_config:javac:classpath)",
+      "--jar-path=$_rebased_jar_path",
+      "--java-srcjars=$_rebased_java_srcjars",
+      "--java-srcjars=@FileArg($_rebased_build_config:javac:srcjars)",
+      "--jar-excluded-classes=$_jar_excluded_patterns",
+    ]
+    if (_supports_android) {
+      _rebased_android_sdk_jar = rebase_path(android_sdk_jar, root_build_dir)
+      args += [ "--bootclasspath=$_rebased_android_sdk_jar" ]
+    }
+    foreach(e, _manifest_entries) {
+      args += [ "--manifest-entry=" + e ]
+    }
+    if (_chromium_code) {
+      args += [ "--chromium-code=1" ]
+    }
+    if (_enable_errorprone) {
+      deps += [ "//third_party/errorprone:chromium_errorprone" ]
+      args += [
+        "--use-errorprone-path",
+        "bin/chromium_errorprone",
+      ]
+    }
+    args += rebase_path(_java_files, root_build_dir)
+  }
+
+  process_java_prebuilt(finish_target_name) {
+    visibility = [ ":$final_target_name" ]
+
+    build_config = _build_config
+    input_jar_path = _intermediate_jar_path
+    output_jar_path = _final_jar_path
+    if (defined(invoker.proguard_preprocess) && invoker.proguard_preprocess) {
+      proguard_preprocess = invoker.proguard_preprocess
+      proguard_config = invoker.proguard_config
+    }
+    deps = [
+      ":$javac_target_name",
+    ]
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    public_deps = [
+      ":$finish_target_name",
+    ]
+  }
+}
+
+template("java_library_impl") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(
+      defined(invoker.java_files) || defined(invoker.DEPRECATED_java_in_dir) ||
+      defined(invoker.srcjars) || defined(invoker.srcjar_deps))
+  _base_path = "$target_gen_dir/$target_name"
+  _jar_path = _base_path + ".jar"
+  if (defined(invoker.jar_path)) {
+    _jar_path = invoker.jar_path
+  }
+  _template_name = target_name
+
+  _final_deps = []
+  _final_datadeps = []
+  if (defined(invoker.datadeps)) {
+    _final_datadeps = invoker.datadeps
+  }
+
+  _supports_android =
+      defined(invoker.supports_android) && invoker.supports_android
+  _requires_android =
+      defined(invoker.requires_android) && invoker.requires_android
+
+  if (_supports_android) {
+    _dex_path = _base_path + ".dex.jar"
+    if (defined(invoker.dex_path)) {
+      _dex_path = invoker.dex_path
+    }
+  }
+
+  # Define build_config_deps which will be a list of targets required to
+  # build the _build_config.
+  if (defined(invoker.override_build_config)) {
+    _build_config = invoker.override_build_config
+
+    # When a custom build config file is specified, we need to use the deps
+    # supplied by the invoker any time we reference the build config file.
+    assert(defined(invoker.deps),
+           "If you specify a build config file for " +
+               "java_library_impl($target_name), you should " +
+               "also specify the target that made it in the deps")
+    build_config_deps = invoker.deps
+  } else {
+    _build_config = _base_path + ".build_config"
+    build_config_target_name = "${_template_name}__build_config"
+    build_config_deps = [ ":$build_config_target_name" ]
+
+    write_build_config(build_config_target_name) {
+      type = "java_library"
+      supports_android = _supports_android
+      requires_android = _requires_android
+      bypass_platform_checks = defined(invoker.bypass_platform_checks) &&
+                               invoker.bypass_platform_checks
+
+      deps = []
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+
+      build_config = _build_config
+      jar_path = _jar_path
+      if (_supports_android) {
+        dex_path = _dex_path
+      }
+    }
+  }
+
+  _chromium_code = true
+  if (defined(invoker.chromium_code)) {
+    _chromium_code = invoker.chromium_code
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps = invoker.srcjar_deps
+  }
+
+  _srcjars = []
+  if (defined(invoker.srcjars)) {
+    _srcjars = invoker.srcjars
+  }
+
+  _java_files = []
+  if (defined(invoker.java_files)) {
+    _java_files = invoker.java_files
+  } else if (defined(invoker.DEPRECATED_java_in_dir)) {
+    _src_dir = invoker.DEPRECATED_java_in_dir + "/src"
+    _src_dir_exists = exec_script("//build/dir_exists.py",
+                                  [ rebase_path(_src_dir, root_build_dir) ],
+                                  "string")
+    assert(_src_dir_exists == "False",
+           "In GN, java_in_dir should be the fully specified java directory " +
+               "(i.e. including the trailing \"/src\")")
+
+    _java_files_build_rel = exec_script(
+            "//build/android/gyp/find.py",
+            [
+              "--pattern",
+              "*.java",
+              rebase_path(invoker.DEPRECATED_java_in_dir, root_build_dir),
+            ],
+            "list lines")
+    _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+  }
+  assert(_java_files != [] || _srcjar_deps != [] || _srcjars != [])
+
+  _compile_java_target = "${_template_name}__compile_java"
+  _final_deps += [ ":$_compile_java_target" ]
+  compile_java(_compile_java_target) {
+    jar_path = _jar_path
+    build_config = _build_config
+    java_files = _java_files
+    srcjar_deps = _srcjar_deps
+    srcjars = _srcjars
+    chromium_code = _chromium_code
+    android = _requires_android
+
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.dist_jar_path)) {
+      dist_jar_path = invoker.dist_jar_path
+    }
+    if (defined(invoker.manifest_entries)) {
+      manifest_entries = invoker.manifest_entries
+    }
+
+    supports_android = _supports_android
+    deps = build_config_deps
+  }
+
+  if (defined(invoker.main_class)) {
+    _final_deps += [ ":${_template_name}__binary_script" ]
+    action("${_template_name}__binary_script") {
+      script = "//build/android/gyp/create_java_binary_script.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      java_script = "$root_build_dir/bin/$_template_name"
+      inputs = [
+        _build_config,
+      ]
+      outputs = [
+        depfile,
+        java_script,
+      ]
+      _rebased_build_config = rebase_path(_build_config, root_build_dir)
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(java_script, root_build_dir),
+        "--classpath=@FileArg($_rebased_build_config:java:full_classpath)",
+        "--jar-path",
+        rebase_path(_jar_path, root_build_dir),
+        "--main-class",
+        invoker.main_class,
+      ]
+
+      deps = build_config_deps
+    }
+  }
+
+  if (_supports_android) {
+    if (defined(invoker.chromium_code) && invoker.chromium_code) {
+      _android_manifest = "//build/android/AndroidManifest.xml"
+      if (defined(invoker.android_manifest)) {
+        _android_manifest = invoker.android_manifest
+      }
+
+      _final_datadeps += [ ":${_template_name}__lint" ]
+      android_lint("${_template_name}__lint") {
+        android_manifest = _android_manifest
+        jar_path = _jar_path
+        java_files = _java_files
+        deps = [
+          ":$_compile_java_target",
+        ]
+        if (defined(invoker.deps)) {
+          deps += invoker.deps
+        }
+      }
+
+      if (run_findbugs) {
+        _final_datadeps += [ ":${_template_name}__findbugs" ]
+        findbugs("${_template_name}__findbugs") {
+          build_config = _build_config
+          jar_path = _jar_path
+          deps = build_config_deps
+        }
+      }
+    }
+
+    _final_deps += [ ":${_template_name}__dex" ]
+    dex("${_template_name}__dex") {
+      sources = [
+        _jar_path,
+      ]
+      output = _dex_path
+      deps = [
+        ":$_compile_java_target",
+      ]
+    }
+  }
+
+  group(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    public_deps = _final_deps
+    data_deps = _final_datadeps
+  }
+}
+
+# Runs process_resources.py
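+#
+# Processes the given resource_dirs with aapt and writes the invoker-specified
+# resources zip, R.java srcjar and R.txt for consumption by library/apk
+# targets.
+#
+# Example (an illustrative sketch; the "foo" names and paths are hypothetical)
+#   process_resources("foo__process_resources") {
+#     android_manifest = "AndroidManifest.xml"
+#     resource_dirs = [ "res" ]
+#     build_config = "$target_gen_dir/foo.build_config"
+#     zip_path = "$target_gen_dir/foo.resources.zip"
+#     srcjar_path = "$target_gen_dir/foo.srcjar"
+#     r_text_path = "$target_gen_dir/foo_R.txt"
+#   }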
+template("process_resources") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  zip_path = invoker.zip_path
+  srcjar_path = invoker.srcjar_path
+  r_text_path = invoker.r_text_path
+  build_config = invoker.build_config
+  resource_dirs = invoker.resource_dirs
+  android_manifest = invoker.android_manifest
+
+  non_constant_id = true
+  if (defined(invoker.generate_constant_ids) && invoker.generate_constant_ids) {
+    non_constant_id = false
+  }
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/process_resources.py"
+
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      zip_path,
+      srcjar_path,
+      r_text_path,
+    ]
+
+    sources_build_rel = exec_script("//build/android/gyp/find.py",
+                                    rebase_path(resource_dirs, root_build_dir),
+                                    "list lines")
+    sources = rebase_path(sources_build_rel, ".", root_build_dir)
+
+    inputs = [
+      build_config,
+      android_manifest,
+    ]
+
+    rebase_resource_dirs = rebase_path(resource_dirs, root_build_dir)
+    rebase_build_config = rebase_path(build_config, root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--android-sdk",
+      rebase_path(android_sdk, root_build_dir),
+      "--aapt-path",
+      android_aapt_path,
+      "--android-manifest",
+      rebase_path(android_manifest, root_build_dir),
+      "--resource-dirs=$rebase_resource_dirs",
+      "--srcjar-out",
+      rebase_path(srcjar_path, root_build_dir),
+      "--resource-zip-out",
+      rebase_path(zip_path, root_build_dir),
+      "--r-text-out",
+      rebase_path(r_text_path, root_build_dir),
+      "--dependencies-res-zips=@FileArg($rebase_build_config:resources:dependency_zips)",
+      "--extra-res-packages=@FileArg($rebase_build_config:resources:extra_package_names)",
+      "--extra-r-text-files=@FileArg($rebase_build_config:resources:extra_r_text_files)",
+    ]
+
+    if (non_constant_id) {
+      args += [ "--non-constant-id" ]
+    }
+
+    if (defined(invoker.custom_package)) {
+      args += [
+        "--custom-package",
+        invoker.custom_package,
+      ]
+    }
+
+    if (defined(invoker.v14_skip) && invoker.v14_skip) {
+      args += [ "--v14-skip" ]
+    }
+
+    if (defined(invoker.shared_resources) && invoker.shared_resources) {
+      args += [ "--shared-resources" ]
+    }
+
+    if (defined(invoker.include_all_resources) &&
+        invoker.include_all_resources) {
+      args += [ "--include-all-resources" ]
+    }
+
+    if (defined(invoker.all_resources_zip_path)) {
+      all_resources_zip = invoker.all_resources_zip_path
+      outputs += [ all_resources_zip ]
+      args += [
+        "--all-resources-zip-out",
+        rebase_path(all_resources_zip, root_build_dir),
+      ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
+
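+# Copies files into a destination directory via build/android/gyp/copy_ex.py.
+#
+# Variables
+#   dest: Destination directory for the copied files.
+#   sources: List of files to copy.
+#   clear_dir: If true, clear the destination directory before copying.
+#
+# Example (an illustrative sketch; the target and file names are hypothetical)
+#   copy_ex("copy_foo_data") {
+#     dest = "$root_build_dir/foo_data"
+#     sources = [ "data/foo.json" ]
+#     clear_dir = true
+#   }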
+template("copy_ex") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  action(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/copy_ex.py"
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+
+    sources = []
+    if (defined(invoker.sources)) {
+      sources += invoker.sources
+    }
+
+    inputs = []
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--dest",
+      rebase_path(invoker.dest, root_build_dir),
+    ]
+    rebased_sources = rebase_path(sources, root_build_dir)
+    args += [ "--files=$rebased_sources" ]
+
+    if (defined(invoker.clear_dir) && invoker.clear_dir) {
+      args += [ "--clear" ]
+    }
+
+    if (defined(invoker.args)) {
+      args += invoker.args
+    }
+  }
+}
+
+# Produces a single .dex.jar out of a set of Java dependencies.
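+#
+# Variables
+#   deps: Targets whose Android libraries are packaged into the resulting
+#     .dex.jar.
+#   dex_path: Path of the output .dex.jar.
+#   excluded_jars: (optional) Jars to exclude from the output.
+#
+# Example (an illustrative sketch; the "foo" names are hypothetical)
+#   deps_dex("foo_deps_dex") {
+#     deps = [ ":foo_java" ]
+#     dex_path = "$target_gen_dir/foo.dex.jar"
+#   }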
+template("deps_dex") {
+  set_sources_assignment_filter([])
+  build_config = "$target_gen_dir/${target_name}.build_config"
+  build_config_target_name = "${target_name}__build_config"
+
+  write_build_config(build_config_target_name) {
+    type = "deps_dex"
+    deps = invoker.deps
+
+    build_config = build_config
+    dex_path = invoker.dex_path
+  }
+
+  rebased_build_config = rebase_path(build_config, root_build_dir)
+  dex(target_name) {
+    inputs = [
+      build_config,
+    ]
+    output = invoker.dex_path
+    dex_arg_key = "${rebased_build_config}:final_dex:dependency_dex_files"
+    args = [ "--inputs=@FileArg($dex_arg_key)" ]
+    if (defined(invoker.excluded_jars)) {
+      excluded_jars = rebase_path(invoker.excluded_jars, root_build_dir)
+      args += [ "--excluded-paths=${excluded_jars}" ]
+    }
+    deps = [
+      ":$build_config_target_name",
+    ]
+  }
+}
+
+# Creates an AndroidManifest.xml for an APK split.
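+#
+# Variables
+#   main_manifest: The APK's main AndroidManifest.xml.
+#   out_manifest: Path of the generated split manifest.
+#   split_name: Name of the split.
+#   version_code, version_name, has_code: (optional) Forwarded to the
+#     generator script.
+#
+# Example (an illustrative sketch; the names and paths are hypothetical)
+#   generate_split_manifest("foo_split_manifest") {
+#     main_manifest = "AndroidManifest.xml"
+#     out_manifest = "$target_gen_dir/foo_split/AndroidManifest.xml"
+#     split_name = "foo"
+#   }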
+template("generate_split_manifest") {
+  assert(defined(invoker.main_manifest))
+  assert(defined(invoker.out_manifest))
+  assert(defined(invoker.split_name))
+
+  action(target_name) {
+    depfile = "$target_gen_dir/$target_name.d"
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    args = [
+      "--main-manifest",
+      rebase_path(invoker.main_manifest, root_build_dir),
+      "--out-manifest",
+      rebase_path(invoker.out_manifest, root_build_dir),
+      "--split",
+      invoker.split_name,
+    ]
+    if (defined(invoker.version_code)) {
+      args += [
+        "--version-code",
+        invoker.version_code,
+      ]
+    }
+    if (defined(invoker.version_name)) {
+      args += [
+        "--version-name",
+        invoker.version_name,
+      ]
+    }
+    if (defined(invoker.has_code)) {
+      args += [
+        "--has-code",
+        invoker.has_code,
+      ]
+    }
+    args += [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+    ]
+
+    script = "//build/android/gyp/generate_split_manifest.py"
+    outputs = [
+      depfile,
+      invoker.out_manifest,
+    ]
+    inputs = [
+      invoker.main_manifest,
+    ]
+  }
+}
+
+# Generates a script in the output bin directory which runs the test
+# target using the test runner script in build/android/test_runner.py.
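+#
+# Variables
+#   test_name: Name used for the generated bin/run_<test_name> script.
+#   test_type: Either "gtest" or "instrumentation".
+#   test_suite: (gtest only) Name of the test suite to run.
+#   test_apk: (instrumentation only) The test APK to run.
+#   isolate_file: (optional) Isolate file to pass to the test runner.
+#
+# Example (an illustrative sketch; the "foo" names are hypothetical)
+#   test_runner_script("foo_unittests__test_runner_script") {
+#     test_name = "foo_unittests"
+#     test_type = "gtest"
+#     test_suite = "foo_unittests"
+#   }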
+template("test_runner_script") {
+  testonly = true
+  _test_name = invoker.test_name
+  _test_type = invoker.test_type
+
+  action(target_name) {
+    script = "//build/android/gyp/create_test_runner_script.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    test_runner_args = [
+      _test_type,
+      "--output-directory",
+      rebase_path(root_build_dir, root_build_dir),
+    ]
+    if (_test_type == "gtest") {
+      assert(defined(invoker.test_suite))
+      test_runner_args += [
+        "--suite",
+        invoker.test_suite,
+      ]
+    } else if (_test_type == "instrumentation") {
+      assert(defined(invoker.test_apk))
+      test_runner_args += [
+        "--test-apk",
+        invoker.test_apk,
+      ]
+      if (defined(invoker.support_apk_path)) {
+        test_runner_args += [
+          "--support-apk",
+          rebase_path(invoker.support_apk_path, root_build_dir),
+        ]
+      }
+    } else {
+      assert(false, "Invalid test type: $_test_type.")
+    }
+
+    if (defined(invoker.isolate_file)) {
+      test_runner_args += [
+        "--isolate-file-path",
+        rebase_path(invoker.isolate_file, root_build_dir),
+      ]
+    }
+
+    generated_script = "$root_build_dir/bin/run_${_test_name}"
+    outputs = [
+      depfile,
+      generated_script,
+    ]
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--script-output-path",
+      rebase_path(generated_script, root_build_dir),
+    ]
+    args += test_runner_args
+  }
+}
diff --git a/build/config/android/rules.gni b/build/config/android/rules.gni
new file mode 100644
index 0000000..3c8640b
--- /dev/null
+++ b/build/config/android/rules.gni
@@ -0,0 +1,2238 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//base/android/linker/config.gni")
+import("//build/config/android/config.gni")
+import("//build/config/android/internal_rules.gni")
+import("//third_party/android_platform/config.gni")
+import("//tools/grit/grit_rule.gni")
+
+assert(is_android)
+
+# Declare a jni target
+#
+# This target generates the native jni bindings for a set of .java files.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+#   sources: list of .java files to generate jni for
+#   jni_package: subdirectory path for generated bindings
+#
+# Example
+#   generate_jni("foo_jni") {
+#     sources = [
+#       "android/java/src/org/chromium/foo/Foo.java",
+#       "android/java/src/org/chromium/foo/FooUtil.java",
+#     ]
+#     jni_package = "foo"
+#   }
+template("generate_jni") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.jni_package))
+  jni_package = invoker.jni_package
+  base_output_dir = "${target_gen_dir}/${target_name}"
+  package_output_dir = "${base_output_dir}/${jni_package}"
+  jni_output_dir = "${package_output_dir}/jni"
+
+  jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
+
+  foreach_target_name = "${target_name}__jni_gen"
+  action_foreach(foreach_target_name) {
+    script = "//base/android/jni_generator/jni_generator.py"
+    depfile = "$target_gen_dir/$target_name.{{source_name_part}}.d"
+    sources = invoker.sources
+    outputs = [
+      depfile,
+      "${jni_output_dir}/{{source_name_part}}_jni.h",
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--input_file={{source}}",
+      "--optimize_generation=1",
+      "--ptr_type=long",
+      "--output_dir",
+      rebase_path(jni_output_dir, root_build_dir),
+      "--includes",
+      rebase_path(jni_generator_include, jni_output_dir),
+      "--native_exports_optional",
+    ]
+    if (defined(invoker.jni_generator_jarjar_file)) {
+      args += [
+        "--jarjar",
+        rebase_path(jni_generator_jarjar_file, root_build_dir),
+      ]
+    }
+  }
+
+  config("jni_includes_${target_name}") {
+    # TODO(cjhopman): #includes should probably all be relative to
+    # base_output_dir. Remove that from this config once the includes are
+    # updated.
+    include_dirs = [
+      base_output_dir,
+      package_output_dir,
+    ]
+  }
+
+  group(target_name) {
+    deps = [
+      ":$foreach_target_name",
+    ]
+    public_configs = [ ":jni_includes_${target_name}" ]
+
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
+
+# Declare a jni target for a prebuilt jar
+#
+# This target generates the native jni bindings for a set of classes in a .jar.
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+#
+# Variables
+#   classes: list of .class files in the jar to generate jni for. These should
+#     include the full path to the .class file.
+#   jni_package: subdirectory path for generated bindings
+#   jar_file: the path to the .jar. If not provided, will default to the sdk's
+#     android.jar
+#
+#   deps, public_deps: As normal
+#
+# Example
+#   generate_jar_jni("foo_jni") {
+#     classes = [
+#       "android/view/Foo.class",
+#     ]
+#     jni_package = "foo"
+#   }
+template("generate_jar_jni") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.classes))
+  assert(defined(invoker.jni_package))
+
+  if (defined(invoker.jar_file)) {
+    jar_file = invoker.jar_file
+  } else {
+    jar_file = android_sdk_jar
+  }
+
+  jni_package = invoker.jni_package
+  base_output_dir = "${root_gen_dir}/${target_name}/${jni_package}"
+  jni_output_dir = "${base_output_dir}/jni"
+
+  jni_generator_include = "//base/android/jni_generator/jni_generator_helper.h"
+
+  # TODO(cjhopman): make jni_generator.py support generating jni for multiple
+  # .class files from a .jar.
+  jni_actions = []
+  foreach(class, invoker.classes) {
+    _classname_list = []
+    _classname_list = process_file_template([ class ], "{{source_name_part}}")
+    classname = _classname_list[0]
+    jni_target_name = "${target_name}__jni_${classname}"
+    jni_actions += [ ":$jni_target_name" ]
+    action(jni_target_name) {
+      # The sources aren't compiled so don't check their dependencies.
+      check_includes = false
+      depfile = "$target_gen_dir/$target_name.d"
+      script = "//base/android/jni_generator/jni_generator.py"
+      sources = [
+        jar_file,
+      ]
+      outputs = [
+        depfile,
+        "${jni_output_dir}/${classname}_jni.h",
+      ]
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--jar_file",
+        rebase_path(jar_file, root_build_dir),
+        "--input_file",
+        class,
+        "--optimize_generation=1",
+        "--ptr_type=long",
+        "--output_dir",
+        rebase_path(jni_output_dir, root_build_dir),
+        "--includes",
+        rebase_path(jni_generator_include, jni_output_dir),
+        "--native_exports_optional",
+      ]
+    }
+  }
+
+  config("jni_includes_${target_name}") {
+    include_dirs = [ base_output_dir ]
+  }
+
+  group(target_name) {
+    deps = jni_actions
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    public_configs = [ ":jni_includes_${target_name}" ]
+  }
+}
+
+# Declare a target for c-preprocessor-generated java files
+#
+# NOTE: For generating Java counterparts to enums prefer using the java_cpp_enum
+#       rule instead.
+#
+# This target generates java files using the host C pre-processor. Each file in
+# sources will be compiled using the C pre-processor. If include_path is
+# specified, it will be passed (as an include path) to the pre-processor.
+#
+# This target will create a single .srcjar. Adding this target to an
+# android_library target's srcjar_deps will cause the generated java files to
+# be included in that library's final outputs.
+#
+# Variables
+#   sources: list of files to be processed by the C pre-processor. For each
+#     file in sources, there will be one .java file in the final .srcjar. For a
+#     file named FooBar.template, a java file will be created with name
+#     FooBar.java.
+#   inputs: additional compile-time dependencies. Any files
+#     `#include`-ed in the templates should be listed here.
+#   package_name: this will be the subdirectory for each .java file in the
+#     .srcjar.
+#
+# Example
+#   java_cpp_template("foo_generated_enum") {
+#     sources = [
+#       "android/java/templates/Foo.template",
+#     ]
+#     inputs = [
+#       "android/java/templates/native_foo_header.h",
+#     ]
+#
+#     package_name = "org/chromium/base/library_loader"
+#     include_path = "android/java/templates"
+#   }
+template("java_cpp_template") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.sources))
+  package_name = invoker.package_name + ""
+
+  if (defined(invoker.include_path)) {
+    include_path = invoker.include_path + ""
+  } else {
+    include_path = "//"
+  }
+
+  apply_gcc_target_name = "${target_name}__apply_gcc"
+  zip_srcjar_target_name = "${target_name}__zip_srcjar"
+  final_target_name = target_name
+
+  action_foreach(apply_gcc_target_name) {
+    visibility = [ ":$zip_srcjar_target_name" ]
+    script = "//build/android/gyp/gcc_preprocess.py"
+    if (defined(invoker.inputs)) {
+      inputs = invoker.inputs + []
+    }
+    depfile = "${target_gen_dir}/${target_name}_{{source_name_part}}.d"
+
+    sources = invoker.sources
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.public_deps)) {
+      public_deps = invoker.public_deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+
+    gen_dir =
+        "${target_gen_dir}/${target_name}/java_cpp_template/${package_name}"
+    gcc_template_output_pattern = "${gen_dir}/{{source_name_part}}.java"
+
+    outputs = [
+      depfile,
+      gcc_template_output_pattern,
+    ]
+
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--include-path",
+      rebase_path(include_path, root_build_dir),
+      "--output",
+      rebase_path(gen_dir, root_build_dir) + "/{{source_name_part}}.java",
+      "--template={{source}}",
+    ]
+
+    if (defined(invoker.defines)) {
+      foreach(def, invoker.defines) {
+        args += [
+          "--defines",
+          def,
+        ]
+      }
+    }
+  }
+
+  apply_gcc_outputs = get_target_outputs(":$apply_gcc_target_name")
+  base_gen_dir = get_label_info(":$apply_gcc_target_name", "target_gen_dir")
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  zip(zip_srcjar_target_name) {
+    visibility = [ ":$final_target_name" ]
+    inputs = apply_gcc_outputs
+    output = srcjar_path
+    base_dir = base_gen_dir
+    deps = [
+      ":$apply_gcc_target_name",
+    ]
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$zip_srcjar_target_name",
+    ]
+  }
+}
+
+# Declare a target for generating Java classes from C++ enums.
+#
+# This target generates Java files from C++ enums using a script.
+#
+# This target will create a single .srcjar. Adding this target to an
+# android_library target's srcjar_deps will cause the generated java files to
+# be included in that library's final outputs.
+#
+# Variables
+#   sources: list of files to be processed by the script. For each annotated
+#     enum contained in the source files, the script will generate a .java
+#     file with the same name as the enum.
+#
+#   outputs: list of outputs, relative to the output_dir. These paths are
+#     verified at build time by the script. To get the list programmatically run:
+#       python build/android/gyp/java_cpp_enum.py \
+#         --print_output_only . path/to/header/file.h
+#
+# Example
+#   java_cpp_enum("foo_generated_enum") {
+#     sources = [
+#       "src/native_foo_header.h",
+#     ]
+#     outputs = [
+#       "org/chromium/FooEnum.java",
+#     ]
+#   }
+template("java_cpp_enum") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.sources))
+  assert(defined(invoker.outputs))
+
+  generate_enum_target_name = "${target_name}__generate_enum"
+  zip_srcjar_target_name = "${target_name}__zip_srcjar"
+  final_target_name = target_name
+
+  action(generate_enum_target_name) {
+    visibility = [ ":$zip_srcjar_target_name" ]
+
+    # The sources aren't compiled so don't check their dependencies.
+    check_includes = false
+
+    sources = invoker.sources
+    script = "//build/android/gyp/java_cpp_enum.py"
+    gen_dir = "${target_gen_dir}/${target_name}/enums"
+    outputs =
+        get_path_info(rebase_path(invoker.outputs, ".", gen_dir), "abspath")
+
+    args = []
+    foreach(output, rebase_path(outputs, root_build_dir)) {
+      args += [
+        "--assert_file",
+        output,
+      ]
+    }
+    args += [ rebase_path(gen_dir, root_build_dir) ]
+    args += rebase_path(invoker.sources, root_build_dir)
+  }
+
+  generate_enum_outputs = get_target_outputs(":$generate_enum_target_name")
+  base_gen_dir = get_label_info(":$generate_enum_target_name", "target_gen_dir")
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  zip(zip_srcjar_target_name) {
+    visibility = [ ":$final_target_name" ]
+    inputs = generate_enum_outputs
+    output = srcjar_path
+    base_dir = base_gen_dir
+    deps = [
+      ":$generate_enum_target_name",
+    ]
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$zip_srcjar_target_name",
+    ]
+  }
+}
+
+# Declare a target for processing Android resources as Jinja templates.
+#
+# This takes an Android resource directory where each resource is a Jinja
+# template, processes each template, then packages the results in a zip file
+# which can be consumed by an android resources, library, or apk target.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables
+#   resources: The list of resources files to process.
+#   res_dir: The resource directory containing the resources.
+#   variables: (Optional) A list of variables to make available to the template
+#     processing environment, e.g. ["name=foo", "color=red"].
+#
+# Example
+#   jinja_template_resources("chrome_shell_template_resources") {
+#     res_dir = "shell/res_template"
+#     resources = ["shell/res_template/xml/syncable.xml"]
+#     variables = ["color=red"]
+#   }
+template("jinja_template_resources") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.resources))
+  assert(defined(invoker.res_dir))
+
+  _base_path = "$target_gen_dir/$target_name"
+  _resources_zip = _base_path + ".resources.zip"
+  _build_config = _base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    build_config = _build_config
+    resources_zip = _resources_zip
+    type = "android_resources"
+  }
+
+  action("${target_name}__template") {
+    sources = invoker.resources
+    script = "//build/android/gyp/jinja_template.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    outputs = [
+      depfile,
+      _resources_zip,
+    ]
+
+    rebased_resources = rebase_path(invoker.resources, root_build_dir)
+    args = [
+      "--inputs=${rebased_resources}",
+      "--inputs-base-dir",
+      rebase_path(invoker.res_dir, root_build_dir),
+      "--outputs-zip",
+      rebase_path(_resources_zip, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+    ]
+    if (defined(invoker.variables)) {
+      variables = invoker.variables
+      args += [ "--variables=${variables}" ]
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__template",
+    ]
+  }
+}
+
+# Creates a resources.zip with locale.pak files placed into appropriate
+# resource configs (e.g. en-GB.pak -> res/raw-en/en_gb.pak). Also generates
+# a locale_paks TypedArray so that resource files can be enumerated at runtime.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the resources will be included with that target.
+#
+# Variables:
+#   sources: List of .pak files. Names must be of the form "en.pak" or
+#       "en-US.pak".
+#   deps: (optional) List of dependencies that might be needed to generate
+#       the .pak files.
+#
+# Example
+#   locale_pak_resources("locale_paks") {
+#     sources = [ "path/en-US.pak", "path/fr.pak", ... ]
+#   }
+template("locale_pak_resources") {
+  set_sources_assignment_filter([])
+  assert(defined(invoker.sources))
+
+  _base_path = "$target_gen_dir/$target_name"
+  _resources_zip = _base_path + ".resources.zip"
+  _build_config = _base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    build_config = _build_config
+    resources_zip = _resources_zip
+    type = "android_resources"
+  }
+
+  action("${target_name}__create_resources_zip") {
+    sources = invoker.sources
+    script = "//build/android/gyp/locale_pak_resources.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    outputs = [
+      depfile,
+      _resources_zip,
+    ]
+
+    _rebased_sources = rebase_path(invoker.sources, root_build_dir)
+    args = [
+      "--locale-paks=${_rebased_sources}",
+      "--resources-zip",
+      rebase_path(_resources_zip, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+    ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__create_resources_zip",
+    ]
+  }
+}
+
+# Declare an Android resources target
+#
+# This creates a resources zip file that will be used when building an Android
+# library or apk and included into a final apk.
+#
+# To include these resources in a library/apk, this target should be listed in
+# the library's deps. A library/apk will also include any resources used by its
+# own dependencies.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Any Android resources
+#     listed in deps will be included by libraries/apks that depend on this
+#     target.
+#   resource_dirs: List of directories containing resources for this target.
+#   android_manifest: AndroidManifest.xml for this target. Defaults to
+#     //build/android/AndroidManifest.xml.
+#   custom_package: java package for generated .java files.
+#   v14_skip: If true, don't run v14 resource generator on this. Defaults to
+#     false. (see build/android/gyp/generate_v14_compatible_resources.py)
+#
+#   shared_resources: If true, make a resource package that can be loaded by a
+#     different application at runtime to access the package's resources.
+#
+# Example
+#   android_resources("foo_resources") {
+#     deps = [":foo_strings_grd"]
+#     resource_dirs = ["res"]
+#     custom_package = "org.chromium.foo"
+#   }
+template("android_resources") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.resource_dirs))
+  assert(defined(invoker.android_manifest) || defined(invoker.custom_package))
+
+  base_path = "$target_gen_dir/$target_name"
+  zip_path = base_path + ".resources.zip"
+  srcjar_path = base_path + ".srcjar"
+  r_text_path = base_path + "_R.txt"
+  build_config = base_path + ".build_config"
+
+  build_config_target_name = "${target_name}__build_config"
+  process_resources_target_name = "${target_name}__process_resources"
+  final_target_name = target_name
+
+  write_build_config(build_config_target_name) {
+    visibility = [ ":$process_resources_target_name" ]
+
+    type = "android_resources"
+    resources_zip = zip_path
+    srcjar = srcjar_path
+    r_text = r_text_path
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.android_manifest)) {
+      android_manifest = invoker.android_manifest
+    }
+    if (defined(invoker.custom_package)) {
+      custom_package = invoker.custom_package
+    }
+  }
+
+  android_manifest = "//build/android/AndroidManifest.xml"
+  if (defined(invoker.android_manifest)) {
+    android_manifest = invoker.android_manifest
+  }
+
+  process_resources(process_resources_target_name) {
+    visibility = [ ":$final_target_name" ]
+
+    resource_dirs = invoker.resource_dirs
+    if (defined(invoker.custom_package)) {
+      custom_package = invoker.custom_package
+    }
+
+    if (defined(invoker.v14_skip)) {
+      v14_skip = invoker.v14_skip
+    }
+
+    if (defined(invoker.shared_resources)) {
+      shared_resources = invoker.shared_resources
+    }
+
+    deps = [
+      ":$build_config_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      # Invoker may have added deps that generate the input resources.
+      deps += invoker.deps
+    }
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":${target_name}__process_resources",
+    ]
+  }
+}
+
+# Declare a target that generates localized strings.xml from a .grd file.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the strings.xml will be included with that target.
+#
+# Variables
+#   deps: Specifies the dependencies of this target.
+#   grd_file: Path to the .grd file to generate strings.xml from.
+#   outputs: Expected grit outputs (see grit rule).
+#
+# Example
+#  java_strings_grd("foo_strings_grd") {
+#    grd_file = "foo_strings.grd"
+#  }
+template("java_strings_grd") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  base_path = "$target_gen_dir/$target_name"
+  resources_zip = base_path + ".resources.zip"
+  build_config = base_path + ".build_config"
+
+  write_build_config("${target_name}__build_config") {
+    type = "android_resources"
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  # Put grit files into this subdirectory of target_gen_dir.
+  extra_output_path = target_name + "_grit_output"
+
+  grit_target_name = "${target_name}__grit"
+  grit_output_dir = "$target_gen_dir/$extra_output_path"
+  grit(grit_target_name) {
+    grit_flags = [
+      "-E",
+      "ANDROID_JAVA_TAGGED_ONLY=false",
+    ]
+    output_dir = grit_output_dir
+    resource_ids = ""
+    source = invoker.grd_file
+    outputs = invoker.outputs
+  }
+
+  # This needs to get outputs from grit's internal target, not the final
+  # source_set.
+  generate_strings_outputs = get_target_outputs(":${grit_target_name}_grit")
+
+  zip("${target_name}__zip") {
+    base_dir = grit_output_dir
+    inputs = generate_strings_outputs
+    output = resources_zip
+    deps = [
+      ":$grit_target_name",
+    ]
+  }
+
+  group(target_name) {
+    deps = [
+      ":${target_name}__build_config",
+      ":${target_name}__zip",
+    ]
+  }
+}
+
+# Declare a target that packages strings.xml generated from a grd file.
+#
+# If this target is included in the deps of an android resources/library/apk,
+# the strings.xml will be included with that target.
+#
+# Variables
+#  grit_output_dir: directory containing grit-generated files.
+#  generated_files: list of android resource files to package.
+#
+# Example
+#  java_strings_grd_prebuilt("foo_strings_grd") {
+#    grit_output_dir = "$root_gen_dir/foo/grit"
+#    generated_files = [
+#      "values/strings.xml"
+#    ]
+#  }
+template("java_strings_grd_prebuilt") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  base_path = "$target_gen_dir/$target_name"
+  resources_zip = base_path + ".resources.zip"
+  build_config = base_path + ".build_config"
+
+  build_config_target_name = "${target_name}__build_config"
+  zip_target_name = "${target_name}__zip"
+  final_target_name = target_name
+
+  write_build_config(build_config_target_name) {
+    visibility = [ ":$zip_target_name" ]
+    type = "android_resources"
+  }
+
+  zip(zip_target_name) {
+    visibility = [ ":$final_target_name" ]
+
+    base_dir = invoker.grit_output_dir
+    inputs = rebase_path(invoker.generated_files, ".", base_dir)
+    output = resources_zip
+    deps = [
+      ":$build_config_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  group(final_target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$zip_target_name",
+    ]
+  }
+}
+
+# Declare a Java executable target
+#
+# This target creates an executable from java code and libraries. The executable
+# will be in the output folder's /bin/ directory.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be included in the executable (and the javac classpath).
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#
+#   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+#     dependencies for this target. This will allow depending on an
+#     android_library target, for example.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   enable_errorprone: If true, enables the errorprone compiler.
+#
+#   data_deps, testonly
+#
+# Example
+#   java_binary("foo") {
+#     java_files = [ "org/chromium/foo/FooMain.java" ]
+#     deps = [ ":bar_java" ]
+#     main_class = "org.chromium.foo.FooMain"
+#   }
+template("java_binary") {
+  set_sources_assignment_filter([])
+
+  # TODO(cjhopman): This should not act like a java_library for dependents (i.e.
+  # dependents shouldn't get the jar in their classpath, etc.).
+  java_library_impl(target_name) {
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    if (defined(invoker.data_deps)) {
+      deps = invoker.data_deps
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+    if (defined(invoker.bypass_platform_checks)) {
+      bypass_platform_checks = invoker.bypass_platform_checks
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+
+    supports_android = false
+    main_class = invoker.main_class
+  }
+}
+
+# Declare a Junit executable target
+#
+# This target creates an executable from java code for running as a junit test
+# suite. The executable will be in the output folder's /bin/ directory.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be included in the executable (and the javac classpath).
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#
+# Example
+#   junit_binary("foo") {
+#     java_files = [ "org/chromium/foo/FooTest.java" ]
+#     deps = [ ":bar_java" ]
+#   }
+template("junit_binary") {
+  set_sources_assignment_filter([])
+
+  java_binary(target_name) {
+    bypass_platform_checks = true
+    main_class = "org.chromium.testing.local.JunitTestMain"
+    testonly = true
+
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    deps = [
+      "//testing/android/junit:junit_test_support",
+      "//third_party/junit",
+      "//third_party/mockito:mockito_java",
+      "//third_party/robolectric:robolectric_java",
+      "//third_party/robolectric:android-all-4.3_r2-robolectric-0",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+  }
+}
+
+# Declare a java library target
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath.
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   enable_errorprone: If true, enables the errorprone compiler.
+#
+#   jar_excluded_patterns: List of patterns of .class files to exclude from the
+#     final jar.
+#
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+#   supports_android: If true, Android targets (android_library, android_apk)
+#     may depend on this target. Note: if true, this target must only use the
+#     subset of Java available on Android.
+#   bypass_platform_checks: Disables checks about cross-platform (Java/Android)
+#     dependencies for this target. This will allow depending on an
+#     android_library target, for example.
+#
+#   data_deps, testonly
+#
+# Example
+#   java_library("foo_java") {
+#     java_files = [
+#       "org/chromium/foo/Foo.java",
+#       "org/chromium/foo/FooInterface.java",
+#       "org/chromium/foo/FooService.java",
+#     ]
+#     deps = [
+#       ":bar_java"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     jar_excluded_patterns = [
+#       "*/FooService.class", "*/FooService##*.class"
+#     ]
+#   }
+template("java_library") {
+  set_sources_assignment_filter([])
+  java_library_impl(target_name) {
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    if (defined(invoker.data_deps)) {
+      deps = invoker.data_deps
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+    if (defined(invoker.bypass_platform_checks)) {
+      bypass_platform_checks = invoker.bypass_platform_checks
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.jar_path)) {
+      jar_path = invoker.jar_path
+    }
+
+    if (defined(invoker.supports_android) && invoker.supports_android) {
+      supports_android = true
+    }
+  }
+}
+
+# Declare a java library target for a prebuilt jar
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath.
+#   jar_path: Path to the prebuilt jar.
+#   jar_dep: Target that builds jar_path (optional).
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+# Example
+#   java_prebuilt("foo_java") {
+#     jar_path = "foo.jar"
+#     deps = [
+#       ":foo_resources",
+#       ":bar_java"
+#     ]
+#   }
+template("java_prebuilt") {
+  set_sources_assignment_filter([])
+  java_prebuilt_impl(target_name) {
+    jar_path = invoker.jar_path
+    if (defined(invoker.jar_dep)) {
+      jar_dep = invoker.jar_dep
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+  }
+}
+
+# Declare an Android library target
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath. Android resources in dependencies
+#     will be used when building this library.
+#
+#   java_files: List of .java files included in this library.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#     will be added to java_files and be included in this library.
+#   srcjars: List of srcjars to be included in this library, together with the
+#     ones obtained from srcjar_deps.
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+#   chromium_code: If true, extra analysis warning/errors will be enabled.
+#   enable_errorprone: If true, enables the errorprone compiler.
+#
+#   jar_excluded_patterns: List of patterns of .class files to exclude from the
+#     final jar.
+#
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+#   dex_path: If set, the resulting .dex.jar file will be placed under this
+#     path.
+#
+#
+# Example
+#   android_library("foo_java") {
+#     java_files = [
+#       "android/org/chromium/foo/Foo.java",
+#       "android/org/chromium/foo/FooInterface.java",
+#       "android/org/chromium/foo/FooService.java",
+#     ]
+#     deps = [
+#       ":bar_java"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     jar_excluded_patterns = [
+#       "*/FooService.class", "*/FooService##*.class"
+#     ]
+#   }
+template("android_library") {
+  set_sources_assignment_filter([])
+  assert(!defined(invoker.jar_path),
+         "android_library does not support a custom jar path")
+  java_library_impl(target_name) {
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+    if (defined(invoker.chromium_code)) {
+      chromium_code = invoker.chromium_code
+    }
+    if (defined(invoker.data_deps)) {
+      deps = invoker.data_deps
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.enable_errorprone)) {
+      enable_errorprone = invoker.enable_errorprone
+    }
+    if (defined(invoker.jar_excluded_patterns)) {
+      jar_excluded_patterns = invoker.jar_excluded_patterns
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.srcjars)) {
+      srcjars = invoker.srcjars
+    }
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    if (defined(invoker.dex_path)) {
+      dex_path = invoker.dex_path
+    }
+    if (defined(invoker.manifest_entries)) {
+      manifest_entries = invoker.manifest_entries
+    }
+
+    supports_android = true
+    requires_android = true
+
+    if (!defined(jar_excluded_patterns)) {
+      jar_excluded_patterns = []
+    }
+    jar_excluded_patterns += [
+      "*/R.class",
+      "*/R##*.class",
+      "*/Manifest.class",
+      "*/Manifest##*.class",
+    ]
+  }
+}
+
+# Declare a target that packages a set of Java dependencies into a standalone
+# .dex.jar.
+#
+# Variables
+#   deps: specifies the dependencies of this target. Android libraries in deps
+#     will be packaged into the resulting .dex.jar file.
+#   dex_path: location at which the output file will be put
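+#
+# Example (an illustrative sketch; the "foo" names are hypothetical)
+#   android_standalone_library("foo_standalone_dex") {
+#     deps = [ ":foo_java" ]
+#     dex_path = "$root_build_dir/foo.dex.jar"
+#   }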
+template("android_standalone_library") {
+  set_sources_assignment_filter([])
+  deps_dex(target_name) {
+    deps = invoker.deps
+    dex_path = invoker.dex_path
+    if (defined(invoker.excluded_jars)) {
+      excluded_jars = invoker.excluded_jars
+    }
+  }
+}
+
+# Declare an Android library target for a prebuilt jar
+#
+# This target creates an Android library containing java code and Android
+# resources.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. Java targets in this list
+#     will be added to the javac classpath. Android resources in dependencies
+#     will be used when building this library.
+#   jar_path: Path to the prebuilt jar.
+#   proguard_preprocess: If true, proguard preprocessing will be run. This can
+#     be used to remove unwanted parts of the library.
+#   proguard_config: Path to the proguard config for preprocessing.
+#
+# Example
+#   android_java_prebuilt("foo_java") {
+#     jar_path = "foo.jar"
+#     deps = [
+#       ":foo_resources",
+#       ":bar_java"
+#     ]
+#   }
+template("android_java_prebuilt") {
+  set_sources_assignment_filter([])
+  java_prebuilt_impl(target_name) {
+    jar_path = invoker.jar_path
+    supports_android = true
+    requires_android = true
+    if (defined(invoker.testonly)) {
+      testonly = invoker.testonly
+    }
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+    if (defined(invoker.proguard_config)) {
+      proguard_config = invoker.proguard_config
+    }
+    if (defined(invoker.proguard_preprocess)) {
+      proguard_preprocess = invoker.proguard_preprocess
+    }
+  }
+}
+
+# Declare an Android apk target
+#
+# This target creates an Android APK containing java code, resources, assets,
+# and (possibly) native libraries.
+#
+# Variables
+#   android_manifest: Path to AndroidManifest.xml.
+#   android_manifest_dep: Target that generates AndroidManifest (if applicable)
+#   data_deps: List of dependencies needed at runtime. These will be built but
+#     won't change the generated .apk in any way (in fact they may be built
+#     after the .apk is).
+#   deps: List of dependencies. All Android java resources and libraries in the
+#     "transitive closure" of these dependencies will be included in the apk.
+#     Note: this "transitive closure" actually only includes such targets if
+#     they are depended on through android_library or android_resources targets
+#     (and so not through builtin targets like 'action', 'group', etc).
+#   java_files: List of .java files to include in the apk.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#      will be added to java_files and be included in this apk.
+#   apk_name: Name for final apk.
+#   final_apk_path: Path to final built apk. Default is
+#     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+#   native_libs: List of paths to native libraries to include in this apk. If
+#     these libraries depend on other shared_library targets, those
+#     dependencies will also be included in the apk.
+#   apk_under_test: For an instrumentation test apk, this is the target of the
+#     tested apk.
+#   include_all_resources: If true, include all resource IDs in all generated
+#     R.java files.
+#   testonly: Marks this target as "test-only".
+#
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+# Example
+#   android_apk("foo_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     java_files = [
+#       "android/org/chromium/foo/FooApplication.java",
+#       "android/org/chromium/foo/FooActivity.java",
+#     ]
+#     deps = [
+#       ":foo_support_java"
+#       ":foo_resources"
+#     ]
+#     srcjar_deps = [
+#       ":foo_generated_enum"
+#     ]
+#     native_libs = [
+#       native_lib_path
+#     ]
+#   }
+template("android_apk") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.final_apk_path) || defined(invoker.apk_name))
+  assert(defined(invoker.android_manifest))
+  gen_dir = "$target_gen_dir/$target_name"
+  base_path = "$gen_dir/$target_name"
+  _build_config = "$target_gen_dir/$target_name.build_config"
+  resources_zip_path = "$base_path.resources.zip"
+  _all_resources_zip_path = "$base_path.resources.all.zip"
+  jar_path = "$base_path.jar"
+  _template_name = target_name
+
+  final_dex_path = "$gen_dir/classes.dex"
+  final_dex_target_name = "${_template_name}__final_dex"
+
+  _final_apk_path = ""
+  if (defined(invoker.final_apk_path)) {
+    _final_apk_path = invoker.final_apk_path
+  } else if (defined(invoker.apk_name)) {
+    _final_apk_path = "$root_build_dir/apks/" + invoker.apk_name + ".apk"
+  }
+  _dist_jar_path_list =
+      process_file_template(
+          [ _final_apk_path ],
+          "$root_build_dir/test.lib.java/{{source_name_part}}.jar")
+  _dist_jar_path = _dist_jar_path_list[0]
+
+  _native_libs = []
+
+  _version_code = "1"
+  if (defined(invoker.version_code)) {
+    _version_code = invoker.version_code
+  }
+
+  _version_name = "Developer Build"
+  if (defined(invoker.version_name)) {
+    _version_name = invoker.version_name
+  }
+  _keystore_path = android_default_keystore_path
+  _keystore_name = android_default_keystore_name
+  _keystore_password = android_default_keystore_password
+
+  if (defined(invoker.keystore_path)) {
+    _keystore_path = invoker.keystore_path
+    _keystore_name = invoker.keystore_name
+    _keystore_password = invoker.keystore_password
+  }
+
+  _srcjar_deps = []
+  if (defined(invoker.srcjar_deps)) {
+    _srcjar_deps += invoker.srcjar_deps
+  }
+
+  _load_library_from_apk = false
+
+  # The dependency that makes the chromium linker, if any is needed.
+  _chromium_linker_dep = []
+
+  if (defined(invoker.native_libs)) {
+    _use_chromium_linker = false
+    if (defined(invoker.use_chromium_linker)) {
+      _use_chromium_linker =
+          invoker.use_chromium_linker && chromium_linker_supported
+      _chromium_linker_dep = [ "//base/android/linker:chromium_android_linker" ]
+    }
+
+    if (defined(invoker.load_library_from_apk) &&
+        invoker.load_library_from_apk) {
+      _load_library_from_apk = true
+      assert(_use_chromium_linker,
+             "Loading library from the apk requires use" +
+                 " of the Chromium linker.")
+    }
+
+    _enable_relocation_packing = false
+    if (defined(invoker.enable_relocation_packing) &&
+        invoker.enable_relocation_packing) {
+      _enable_relocation_packing = relocation_packing_supported
+      assert(_use_chromium_linker,
+             "Relocation packing requires use of the" + " Chromium linker.")
+    }
+
+    if (is_component_build) {
+      _native_libs += [ "$root_out_dir/lib.stripped/libc++_shared.so" ]
+      _chromium_linker_dep += [ "//build/android:cpplib_stripped" ]
+    }
+
+    # Allow native_libs to be in the form "foo.so" or "foo.cr.so"
+    _first_ext_removed =
+        process_file_template(invoker.native_libs, "{{source_name_part}}")
+    _native_libs += process_file_template(
+            _first_ext_removed,
+            "$root_build_dir/lib.stripped/{{source_name_part}}$android_product_extension")
+
+    _native_libs_dir = base_path + "/libs"
+
+    if (_use_chromium_linker) {
+      _native_libs += [ "$root_build_dir/lib.stripped/libchromium_android_linker$android_product_extension" ]
+    }
+
+    _enable_relocation_packing = false
+    if (_use_chromium_linker && defined(invoker.enable_relocation_packing) &&
+        invoker.enable_relocation_packing) {
+      _enable_relocation_packing = true
+    }
+
+    _native_lib_version_rule = ""
+    if (defined(invoker.native_lib_version_rule)) {
+      _native_lib_version_rule = invoker.native_lib_version_rule
+    }
+    _native_lib_version_arg = "\"\""
+    if (defined(invoker.native_lib_version_arg)) {
+      _native_lib_version_arg = invoker.native_lib_version_arg
+    }
+  }
+
+  _android_manifest_deps = []
+  if (defined(invoker.android_manifest_dep)) {
+    _android_manifest_deps = [ invoker.android_manifest_dep ]
+  }
+  _android_manifest = invoker.android_manifest
+
+  _rebased_build_config = rebase_path(_build_config, root_build_dir)
+  _create_abi_split =
+      defined(invoker.create_abi_split) && invoker.create_abi_split
+  _create_density_splits =
+      defined(invoker.create_density_splits) && invoker.create_density_splits
+
+  # Help GN understand that _create_abi_split is not unused (bug in GN).
+  assert(_create_abi_split || true)
+
+  build_config_target = "${_template_name}__build_config"
+  write_build_config(build_config_target) {
+    type = "android_apk"
+    dex_path = final_dex_path
+    resources_zip = resources_zip_path
+    build_config = _build_config
+    android_manifest = _android_manifest
+
+    deps = _chromium_linker_dep + _android_manifest_deps
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    if (defined(invoker.apk_under_test)) {
+      apk_under_test = invoker.apk_under_test
+    }
+
+    native_libs = _native_libs
+  }
+
+  final_deps = []
+
+  process_resources_target = "${_template_name}__process_resources"
+  final_deps += [ ":$process_resources_target" ]
+  process_resources(process_resources_target) {
+    srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+    r_text_path = "${target_gen_dir}/${target_name}_R.txt"
+    android_manifest = _android_manifest
+    resource_dirs = [ "//build/android/ant/empty/res" ]
+    zip_path = resources_zip_path
+    all_resources_zip_path = _all_resources_zip_path
+    generate_constant_ids = true
+
+    if (defined(invoker.include_all_resources)) {
+      include_all_resources = invoker.include_all_resources
+    }
+
+    build_config = _build_config
+    deps = _android_manifest_deps + [ ":$build_config_target" ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+  _srcjar_deps += [ ":$process_resources_target" ]
+
+  if (_native_libs != []) {
+    _enable_chromium_linker_tests = false
+    if (defined(invoker.enable_chromium_linker_tests)) {
+      _enable_chromium_linker_tests = invoker.enable_chromium_linker_tests
+    }
+
+    java_cpp_template("${_template_name}__native_libraries_java") {
+      package_name = "org/chromium/base/library_loader"
+      sources = [
+        "//base/android/java/templates/NativeLibraries.template",
+      ]
+      inputs = [
+        _build_config,
+      ]
+      deps = [
+        ":$build_config_target",
+      ]
+      if (_native_lib_version_rule != "") {
+        deps += [ _native_lib_version_rule ]
+      }
+
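+      # @FileArg(path:key1:key2) values are expanded at build time by the
+      # wrapper scripts, which read the named entry out of the generated
+      # .build_config file, so the library list does not need to be known here.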
+      defines = [
+        "NATIVE_LIBRARIES_LIST=" +
+            "@FileArg($_rebased_build_config:native:java_libraries_list)",
+        "NATIVE_LIBRARIES_VERSION_NUMBER=$_native_lib_version_arg",
+      ]
+      if (_use_chromium_linker) {
+        defines += [ "ENABLE_CHROMIUM_LINKER" ]
+      }
+      if (_load_library_from_apk) {
+        defines += [ "ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE" ]
+      }
+      if (_enable_chromium_linker_tests) {
+        defines += [ "ENABLE_CHROMIUM_LINKER_TESTS" ]
+      }
+    }
+    _srcjar_deps += [ ":${_template_name}__native_libraries_java" ]
+  }
+
+  java_target = "${_template_name}__java"
+  final_deps += [ ":$java_target" ]
+  java_library_impl(java_target) {
+    supports_android = true
+    requires_android = true
+    override_build_config = _build_config
+    deps = _android_manifest_deps + [ ":$build_config_target" ]
+
+    android_manifest = _android_manifest
+    chromium_code = true
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    } else if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    } else {
+      java_files = []
+    }
+    srcjar_deps = _srcjar_deps
+    dex_path = base_path + ".dex.jar"
+
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  if (_dist_jar_path != "") {
+    create_dist_target = "${_template_name}__create_dist_jar"
+    final_deps += [ ":$create_dist_target" ]
+
+    # TODO(cjhopman): This is only ever needed to calculate the list of tests to
+    # run. See build/android/pylib/instrumentation/test_jar.py. We should be
+    # able to just do that calculation at build time instead.
+    action(create_dist_target) {
+      script = "//build/android/gyp/create_dist_jar.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [
+        _build_config,
+      ]
+      outputs = [
+        depfile,
+        _dist_jar_path,
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(_dist_jar_path, root_build_dir),
+        "--inputs=@FileArg($_rebased_build_config:dist_jar:dependency_jars)",
+      ]
+      inputs += [ jar_path ]
+      _rebased_jar_path = rebase_path([ jar_path ], root_build_dir)
+      args += [ "--inputs=$_rebased_jar_path" ]
+      deps = [
+        ":$build_config_target",  # Generates the build config file.
+        ":$java_target",  # Generates the jar file.
+      ]
+    }
+  }
+
+  final_deps += [ ":$final_dex_target_name" ]
+  dex("${final_dex_target_name}_jar") {
+    deps = [
+      ":$build_config_target",
+      ":$java_target",
+    ]
+    sources = [
+      jar_path,
+    ]
+    inputs = [
+      _build_config,
+    ]
+    output = "${final_dex_path}.jar"
+    dex_arg_key = "${_rebased_build_config}:final_dex:dependency_dex_files"
+    args = [ "--inputs=@FileArg($dex_arg_key)" ]
+  }
+
+  dex("$final_dex_target_name") {
+    deps = [
+      ":${final_dex_target_name}_jar",
+    ]
+    sources = [
+      "${final_dex_path}.jar",
+    ]
+    output = final_dex_path
+  }
+
+  if (_native_libs != []) {
+    action("${_template_name}__prepare_native") {
+      script = "//build/android/gyp/pack_relocations.py"
+      packed_libraries_dir = "$_native_libs_dir/$android_app_abi"
+      depfile = "$target_gen_dir/$target_name.d"
+      outputs = [
+        depfile,
+      ]
+
+      inputs = _native_libs
+      deps = _chromium_linker_dep
+
+      inputs += [ _build_config ]
+      deps += [ ":$build_config_target" ]
+
+      skip_packing_list = [
+        "gdbserver",
+        "libchromium_android_linker$android_product_extension",
+      ]
+
+      enable_packing_arg = 0
+      if (_enable_relocation_packing) {
+        enable_packing_arg = 1
+        deps += [ relocation_packer_target ]
+      }
+
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--enable-packing=$enable_packing_arg",
+        "--exclude-packing-list=$skip_packing_list",
+        "--android-pack-relocations",
+        rebase_path(relocation_packer_exe, root_build_dir),
+        "--stripped-libraries-dir",
+        rebase_path(root_build_dir, root_build_dir),
+        "--packed-libraries-dir",
+        rebase_path(packed_libraries_dir, root_build_dir),
+        "--libraries=@FileArg(${_rebased_build_config}:native:libraries)",
+        "--clear-dir",
+      ]
+
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+      if (defined(invoker.public_deps)) {
+        public_deps = invoker.public_deps
+      }
+      if (defined(invoker.data_deps)) {
+        data_deps = invoker.data_deps
+      }
+
+      if (is_debug) {
+        rebased_gdbserver = rebase_path([ android_gdbserver ], root_build_dir)
+        inputs += [ android_gdbserver ]
+        args += [ "--libraries=$rebased_gdbserver" ]
+      }
+    }
+  }
+
+  final_deps += [ ":${_template_name}__create" ]
+  create_apk("${_template_name}__create") {
+    apk_path = _final_apk_path
+    android_manifest = _android_manifest
+    resources_zip = _all_resources_zip_path
+    dex_path = final_dex_path
+    load_library_from_apk = _load_library_from_apk
+    create_density_splits = _create_density_splits
+    if (defined(invoker.language_splits)) {
+      language_splits = invoker.language_splits
+    }
+    if (defined(invoker.extensions_to_not_compress)) {
+      extensions_to_not_compress = invoker.extensions_to_not_compress
+    } else {
+      # Allow icu data, v8 snapshots, and pak files to be loaded directly from
+      # the .apk.
+      # Note: These are actually suffix matches, not necessarily extensions.
+      extensions_to_not_compress = ".dat,.bin,.pak"
+    }
+
+    version_code = _version_code
+    version_name = _version_name
+
+    keystore_name = _keystore_name
+    keystore_path = _keystore_path
+    keystore_password = _keystore_password
+
+    # This target generates the input file _all_resources_zip_path.
+    deps = _android_manifest_deps + [
+             ":$process_resources_target",
+             ":$final_dex_target_name",
+           ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+
+    if (defined(invoker.asset_location)) {
+      asset_location = invoker.asset_location
+
+      # We don't know the exact dependencies that create the assets in
+      # |asset_location|; we depend on all caller deps until a better solution
+      # is figured out (http://crbug.com/433330).
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+
+    if (_native_libs != [] && !_create_abi_split) {
+      native_libs_dir = _native_libs_dir
+      deps += [ ":${_template_name}__prepare_native" ]
+    }
+  }
+
+  if (_native_libs != [] && _create_abi_split) {
+    _manifest_rule = "${_template_name}__split_manifest_abi_${android_app_abi}"
+    generate_split_manifest(_manifest_rule) {
+      main_manifest = _android_manifest
+      out_manifest =
+          "$gen_dir/split-manifests/${android_app_abi}/AndroidManifest.xml"
+      split_name = "abi_${android_app_abi}"
+      deps = _android_manifest_deps
+    }
+
+    _apk_rule = "${_template_name}__split_apk_abi_${android_app_abi}"
+    final_deps += [ ":$_apk_rule" ]
+    create_apk(_apk_rule) {
+      _split_paths = process_file_template(
+              [ _final_apk_path ],
+              "{{source_dir}}/{{source_name_part}}-abi-${android_app_abi}.apk")
+      apk_path = _split_paths[0]
+      base_path = "$gen_dir/$_apk_rule"
+
+      manifest_outputs = get_target_outputs(":${_manifest_rule}")
+      android_manifest = manifest_outputs[1]
+      load_library_from_apk = _load_library_from_apk
+
+      version_code = _version_code
+      version_name = _version_name
+
+      keystore_name = _keystore_name
+      keystore_path = _keystore_path
+      keystore_password = _keystore_password
+
+      native_libs_dir = _native_libs_dir
+      deps = [
+        ":${_template_name}__prepare_native",
+        ":${_manifest_rule}",
+      ]
+    }
+  }
+
+  if (defined(invoker.flutter_dist_jar)) {
+    flutter_jar_target = "${_template_name}__create_flutter_jar"
+    final_deps += [ ":$flutter_jar_target" ]
+
+    action(flutter_jar_target) {
+      script = "//build/android/gyp/create_flutter_jar.py"
+      depfile = "$target_gen_dir/$target_name.d"
+      inputs = [
+        _dist_jar_path,
+      ]
+      outputs = [
+        invoker.flutter_dist_jar,
+      ]
+      args = [
+        "--depfile",
+        rebase_path(depfile, root_build_dir),
+        "--output",
+        rebase_path(invoker.flutter_dist_jar, root_build_dir),
+        "--dist_jar",
+        rebase_path(_dist_jar_path, root_build_dir),
+        "--android_abi",
+        "$android_app_abi",
+        "--asset_dir",
+        rebase_path(invoker.asset_location, root_build_dir),
+      ]
+      foreach(native_lib, rebase_path(_native_libs, root_build_dir)) {
+        args += [
+          "--native_lib",
+          native_lib,
+        ]
+      }
+      deps = [
+        ":$create_dist_target",
+        ":${_template_name}__prepare_native"
+      ]
+      if (defined(invoker.deps)) {
+        deps += invoker.deps
+      }
+    }
+  }
+
+  group(target_name) {
+    deps = final_deps
+    if (defined(invoker.data_deps)) {
+      data_deps = invoker.data_deps
+    }
+  }
+}
+
+# Declare an Android instrumentation test apk
+#
+# This target creates an Android instrumentation test apk.
+#
+# Variables
+#   android_manifest: Path to AndroidManifest.xml.
+#   data_deps: List of dependencies needed at runtime. These will be built but
+#     won't change the generated .apk in any way (in fact they may be built
+#     after the .apk is).
+#   deps: List of dependencies. All Android java resources and libraries in the
+#     "transitive closure" of these dependencies will be included in the apk.
+#     Note: this "transitive closure" actually only includes such targets if
+#     they are depended on through android_library or android_resources targets
+#     (and so not through builtin targets like 'action', 'group', etc).
+#   java_files: List of .java files to include in the apk.
+#   srcjar_deps: List of srcjar dependencies. The .java files in the srcjars
+#      will be added to java_files and be included in this apk.
+#   apk_name: Name for final apk.
+#   support_apk_path: Path to a support apk. If present, the test runner script
+#      will install it on the device before running the instrumentation tests.
+#      Should be a path relative to the src root.
+#   final_apk_path: Path to final built apk. Default is
+#     $root_out_dir/apks/$apk_name.apk. Setting this will override apk_name.
+#   native_libs: List of paths to native libraries to include in this apk. If these
+#     libraries depend on other shared_library targets, those dependencies will
+#     also be included in the apk.
+#   apk_under_test: The apk being tested.
+#   isolate_file: Isolate file containing the list of test data dependencies.
+#
+#   DEPRECATED_java_in_dir: Directory containing java files. All .java files in
+#     this directory will be included in the library. This is only supported to
+#     ease the gyp->gn conversion and will be removed in the future.
+#
+# Example
+#   instrumentation_test_apk("foo_test_apk") {
+#     android_manifest = "AndroidManifest.xml"
+#     apk_name = "FooTest"
+#     apk_under_test = "Foo"
+#     java_files = [
+#       "android/org/chromium/foo/FooTestCase.java",
+#       "android/org/chromium/foo/FooExampleTest.java",
+#     ]
+#     deps = [
+#       ":foo_test_support_java"
+#     ]
+#   }
+template("instrumentation_test_apk") {
+  set_sources_assignment_filter([])
+  testonly = true
+  _template_name = target_name
+
+  if (defined(invoker.apk_name)) {
+    test_runner_data_dep = [ ":${_template_name}__test_runner_script" ]
+    test_runner_script("${_template_name}__test_runner_script") {
+      test_name = invoker.target_name
+      test_type = "instrumentation"
+      test_apk = invoker.apk_name
+      if (defined(invoker.isolate_file)) {
+        isolate_file = invoker.isolate_file
+      }
+      if (defined(invoker.support_apk_path)) {
+        support_apk_path = invoker.support_apk_path
+      }
+    }
+  }
+
+  android_apk(target_name) {
+    if (defined(invoker.android_manifest)) {
+      android_manifest = invoker.android_manifest
+    }
+    data_deps = [
+      "//testing/android/driver:driver_apk",
+      "//tools/android/forwarder2",
+      "//tools/android/md5sum",
+    ]
+    if (defined(test_runner_data_dep)) {
+      data_deps += test_runner_data_dep
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+    deps = [
+      "//testing/android/broker:broker_java",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.java_files)) {
+      java_files = invoker.java_files
+    }
+    if (defined(invoker.srcjar_deps)) {
+      srcjar_deps = invoker.srcjar_deps
+    }
+    if (defined(invoker.apk_name)) {
+      apk_name = invoker.apk_name
+    }
+    if (defined(invoker.final_apk_path)) {
+      final_apk_path = invoker.final_apk_path
+    }
+    if (defined(invoker.native_libs)) {
+      native_libs = invoker.native_libs
+    }
+    if (defined(invoker.apk_under_test)) {
+      apk_under_test = invoker.apk_under_test
+    }
+    if (defined(invoker.DEPRECATED_java_in_dir)) {
+      DEPRECATED_java_in_dir = invoker.DEPRECATED_java_in_dir
+    }
+  }
+}
+
+# Declare an Android gtest apk
+#
+# This target creates an Android apk for running gtest-based unittests.
+#
+# Variables
+#   deps: Specifies the dependencies of this target. These will be passed to
+#     the underlying android_apk invocation and should include the java and
+#     resource dependencies of the apk.
+#   unittests_dep: This should be the label of the gtest native target. This
+#     target must be defined previously in the same file.
+#   unittests_binary: The basename of the library produced by the unittests_dep
+#     target. If unspecified, it assumes the name of the unittests_dep target
+#     (which will be correct unless that target specifies an "output_name").
+#     TODO(brettw) make this automatic by allowing get_target_outputs to
+#     support executables.
+#   apk_name: The name of the produced apk. If unspecified, it uses the name
+#             of the unittests_dep target postfixed with "_apk"
+#
+# Example
+#   unittest_apk("foo_unittests_apk") {
+#     deps = [ ":foo_java", ":foo_resources" ]
+#     unittests_dep = ":foo_unittests"
+#   }
+template("unittest_apk") {
+  set_sources_assignment_filter([])
+  testonly = true
+
+  assert(defined(invoker.unittests_dep), "Need unittests_dep for $target_name")
+
+  test_suite_name = get_label_info(invoker.unittests_dep, "name")
+
+  # This trivial assert is needed in case both unittests_binary and apk_name
+  # are defined, as otherwise test_suite_name would not be used.
+  assert(test_suite_name != "")
+
+  if (defined(invoker.unittests_binary)) {
+    unittests_binary = invoker.unittests_binary
+  } else {
+    unittests_binary = "lib${test_suite_name}${android_product_extension}"
+  }
+
+  if (defined(invoker.apk_name)) {
+    apk_name = invoker.apk_name
+  } else {
+    apk_name = test_suite_name
+  }
+
+  android_apk(target_name) {
+    final_apk_path = "$root_build_dir/${apk_name}_apk/${apk_name}-debug.apk"
+    java_files = [
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeBrowserTestActivity.java",
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeTestActivity.java",
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeUnitTestActivity.java",
+      "//testing/android/native_test/java/src/org/chromium/native_test/NativeTestInstrumentationTestRunner.java",
+    ]
+    android_manifest = "//testing/android/native_test/java/AndroidManifest.xml"
+    native_libs = [ unittests_binary ]
+    if (defined(invoker.asset_location)) {
+      asset_location = invoker.asset_location
+    }
+    deps = [
+      "//base:base_java",
+      "//build/android/pylib/remote/device/dummy:remote_device_dummy_apk",
+      "//testing/android/appurify_support:appurify_support_java",
+      "//testing/android/reporter:reporter_java",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    data_deps = [ "//tools/android/md5sum" ]
+    if (host_os == "linux") {
+      data_deps += [ "//tools/android/forwarder2" ]
+    }
+    if (defined(invoker.data_deps)) {
+      data_deps += invoker.data_deps
+    }
+  }
+}
+
+# Generate .java files from .aidl files.
+#
+# This target will store the .java files in a srcjar and should be included in
+# an android_library or android_apk's srcjar_deps.
+#
+# Variables
+#   sources: Paths to .aidl files to compile.
+#   import_include: Path to directory containing .java files imported by the
+#     .aidl files.
+#   interface_file: Preprocessed aidl file to import.
+#
+# Example
+#   android_aidl("foo_aidl") {
+#     import_include = "java/src"
+#     sources = [
+#       "java/src/com/foo/bar/FooBarService.aidl",
+#       "java/src/com/foo/bar/FooBarServiceCallback.aidl",
+#     ]
+#   }
+template("android_aidl") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  srcjar_path = "${target_gen_dir}/${target_name}.srcjar"
+  aidl_path = "${android_sdk_build_tools}/aidl"
+  framework_aidl = "$android_sdk/framework.aidl"
+
+  action(target_name) {
+    script = "//build/android/gyp/aidl.py"
+    sources = invoker.sources
+
+    imports = [ framework_aidl ]
+    if (defined(invoker.interface_file)) {
+      assert(invoker.interface_file != "")
+      imports += [ invoker.interface_file ]
+    }
+
+    inputs = [ aidl_path ] + imports
+
+    depfile = "${target_gen_dir}/${target_name}.d"
+    outputs = [
+      depfile,
+      srcjar_path,
+    ]
+    rebased_imports = rebase_path(imports, root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--aidl-path",
+      rebase_path(aidl_path, root_build_dir),
+      "--imports=$rebased_imports",
+      "--srcjar",
+      rebase_path(srcjar_path, root_build_dir),
+    ]
+    if (defined(invoker.import_include) && invoker.import_include != "") {
+      # TODO(cjhopman): aidl supports creating a depfile. We should be able to
+      # switch to constructing a depfile for the overall action from that
+      # instead of having all the .java files in the include paths as inputs.
+      rebased_import_includes =
+          rebase_path([ invoker.import_include ], root_build_dir)
+      args += [ "--includes=$rebased_import_includes" ]
+
+      _java_files_build_rel =
+          exec_script("//build/android/gyp/find.py",
+                      rebase_path([ invoker.import_include ], root_build_dir),
+                      "list lines")
+      _java_files = rebase_path(_java_files_build_rel, ".", root_build_dir)
+      inputs += _java_files
+    }
+    args += rebase_path(sources, root_build_dir)
+  }
+}
+
+# Creates a dist directory for a native executable.
+#
+# Running a native executable on a device requires all the shared library
+# dependencies of that executable. To make it easier to install and run such an
+# executable, this will create a directory containing the native exe and all
+# its library dependencies.
+#
+# Note: It's usually better to package things as an APK than as a native
+# executable.
+#
+# Variables
+#   dist_dir: Directory for the exe and libraries. Everything in this directory
+#     will be deleted before copying in the exe and libraries.
+#   binary: Path to (stripped) executable.
+#
+# Example
+#   create_native_executable_dist("foo_dist") {
+#     dist_dir = "$root_build_dir/foo_dist"
+#     binary = "$root_build_dir/exe.stripped/foo"
+#     deps = [ ":the_thing_that_makes_foo" ]
+#   }
+template("create_native_executable_dist") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  dist_dir = invoker.dist_dir
+  binary = invoker.binary
+  template_name = target_name
+
+  libraries_list =
+      "${target_gen_dir}/${template_name}_library_dependencies.list"
+
+  find_deps_target_name = "${template_name}__find_library_dependencies"
+  copy_target_name = "${template_name}__copy_libraries_and_exe"
+
+  stripped_libraries_dir = "$root_build_dir/lib.stripped"
+  action(find_deps_target_name) {
+    visibility = [ ":$copy_target_name" ]
+
+    script = "//build/android/gyp/write_ordered_libraries.py"
+    depfile = "$target_gen_dir/$target_name.d"
+    inputs = [
+      binary,
+      android_readelf,
+    ]
+    outputs = [
+      depfile,
+      libraries_list,
+    ]
+    rebased_binaries = rebase_path([ binary ], root_build_dir)
+    args = [
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--input-libraries=$rebased_binaries",
+      "--libraries-dir",
+      rebase_path(stripped_libraries_dir, root_build_dir),
+      "--output",
+      rebase_path(libraries_list, root_build_dir),
+      "--readelf",
+      rebase_path(android_readelf, root_build_dir),
+    ]
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  copy_ex(copy_target_name) {
+    visibility = [ ":$template_name" ]
+
+    clear_dir = true
+    inputs = [
+      binary,
+      libraries_list,
+    ]
+    dest = dist_dir
+    rebased_binaries_list = rebase_path([ binary ], root_build_dir)
+    rebased_libraries_list = rebase_path(libraries_list, root_build_dir)
+    args = [
+      "--files=$rebased_binaries_list",
+      "--files=@FileArg($rebased_libraries_list:lib_paths)",
+    ]
+
+    deps = [
+      ":$find_deps_target_name",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+  }
+
+  group(template_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    deps = [
+      ":$copy_target_name",
+    ]
+  }
+}
+
+# Compile a protocol buffer to java.
+#
+# This generates java files from protocol buffers and creates an Android library
+# containing the classes.
+#
+# Variables
+#   sources: Paths to .proto files to compile.
+#   proto_path: Root directory of .proto files.
+#
+# Example:
+#  proto_java_library("foo_proto_java") {
+#    proto_path = [ "src/foo" ]
+#    sources = [ "$proto_path/foo.proto" ]
+#  }
+template("proto_java_library") {
+  set_sources_assignment_filter([])
+  _protoc_dep = "//third_party/android_protobuf:android_protoc($host_toolchain)"
+  _protoc_out_dir = get_label_info(_protoc_dep, "root_out_dir")
+  _protoc_bin = "$_protoc_out_dir/android_protoc"
+  _proto_path = invoker.proto_path
+
+  _template_name = target_name
+
+  action("${_template_name}__protoc_java") {
+    srcjar_path = "$target_gen_dir/$target_name.srcjar"
+    script = "//build/protoc_java.py"
+    deps = [
+      _protoc_dep,
+    ]
+    sources = invoker.sources
+    depfile = "$target_gen_dir/$target_name.d"
+    outputs = [
+      depfile,
+      srcjar_path,
+    ]
+    args = [
+             "--depfile",
+             rebase_path(depfile, root_build_dir),
+             "--protoc",
+             rebase_path(_protoc_bin, root_build_dir),
+             "--proto-path",
+             rebase_path(_proto_path, root_build_dir),
+             "--srcjar",
+             rebase_path(srcjar_path, root_build_dir),
+           ] + rebase_path(sources, root_build_dir)
+  }
+
+  android_library(target_name) {
+    java_files = []
+    srcjar_deps = [ ":${_template_name}__protoc_java" ]
+    deps = [
+      "//third_party/android_protobuf:protobuf_nano_javalib",
+    ]
+  }
+}
+
+# TODO(GYP): implement this.
+template("uiautomator_test") {
+  set_sources_assignment_filter([])
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+  assert(target_name != "")
+  assert(invoker.deps != [] || true)
+  group(target_name) {
+  }
+}
diff --git a/build/config/arm.gni b/build/config/arm.gni
new file mode 100644
index 0000000..429a250
--- /dev/null
+++ b/build/config/arm.gni
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_cpu == "arm" || current_cpu == "arm64") {
+  declare_args() {
+    # Version of the ARM processor when compiling on ARM. Ignored on non-ARM
+    # platforms.
+    if (current_cpu == "arm") {
+      arm_version = 7
+    } else if (current_cpu == "arm64") {
+      arm_version = 8
+    } else {
+      assert(false, "Unconfigured arm version")
+    }
+
+    # The ARM floating point mode. This is either the string "hard", "soft", or
+    # "softfp". An empty string means to use the default one for the
+    # arm_version.
+    arm_float_abi = ""
+
+    # The ARM variant-specific tuning mode. This will be a string like "armv6"
+    # or "cortex-a15". An empty string means to use the default for the
+    # arm_version.
+    arm_tune = ""
+
+    # Whether to use the neon FPU instruction set or not.
+    arm_use_neon = true
+
+    # Whether to enable optional NEON code paths.
+    arm_optionally_use_neon = false
+
+    if (is_android) {
+      arm_use_neon = false
+      arm_optionally_use_neon = true
+    }
+
+    if (is_ios) {
+      arm_use_neon = false
+      arm_optionally_use_neon = false
+    }
+  }
+
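+  # Example (illustrative; assumes the usual `gn gen --args` workflow): an
+  # ARMv6 hard-float configuration could be selected with something like
+  #   gn gen out/arm --args='target_cpu="arm" arm_version=6 arm_float_abi="hard"'
+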
+  assert(arm_float_abi == "" || arm_float_abi == "hard" ||
+         arm_float_abi == "soft" || arm_float_abi == "softfp")
+
+  if (arm_version == 6) {
+    arm_arch = "armv6"
+    if (arm_tune != "") {
+      arm_tune = ""
+    }
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+    arm_fpu = "vfp"
+
+    # Thumb is a reduced instruction set available on some ARM processors that
+    # has increased code density.
+    arm_use_thumb = false
+  } else if (arm_version == 7) {
+    arm_arch = "armv7-a"
+    if (arm_tune == "") {
+      arm_tune = "generic-armv7-a"
+    }
+
+    if (arm_float_abi == "") {
+      arm_float_abi = "softfp"
+    }
+
+    arm_use_thumb = true
+
+    if (arm_use_neon) {
+      arm_fpu = "neon"
+    } else {
+      arm_fpu = "vfpv3-d16"
+    }
+  }
+}
diff --git a/build/config/chrome_build.gni b/build/config/chrome_build.gni
new file mode 100644
index 0000000..c2132c4
--- /dev/null
+++ b/build/config/chrome_build.gni
@@ -0,0 +1,19 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Selects the desired build flavor. Official builds get additional
+  # processing to prepare for release. Normally you will want to develop and
+  # test with this flag off.
+  is_official_build = false
+
+  # Select the desired branding flavor. False means normal Chromium branding,
+  # true means official Google Chrome branding (requires extra Google-internal
+  # resources).
+  is_chrome_branded = false
+
+  # Break chrome.dll into multiple pieces based on process type. Only available
+  # on Windows.
+  is_multi_dll_chrome = is_win && !is_component_build
+}
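+
+# Example (illustrative; assumes the usual `gn gen --args` workflow): a branded
+# official build would typically set both flags, e.g.
+#   gn gen out/Official --args='is_official_build=true is_chrome_branded=true'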
diff --git a/build/config/clang/BUILD.gn b/build/config/clang/BUILD.gn
new file mode 100644
index 0000000..6dff486
--- /dev/null
+++ b/build/config/clang/BUILD.gn
@@ -0,0 +1,62 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("clang.gni")
+
+config("find_bad_constructs") {
+  if (clang_use_chrome_plugins) {
+    cflags = []
+
+    # On Windows, the plugin is built directly into clang, so there's
+    # no need to load it dynamically.
+
+    if (is_mac || is_ios) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path(
+            "//third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.dylib",
+            root_build_dir),
+      ]
+    } else if (is_linux) {
+      cflags += [
+        "-Xclang",
+        "-load",
+        "-Xclang",
+        rebase_path(
+            "//third_party/llvm-build/Release+Asserts/lib/libFindBadConstructs.so",
+            root_build_dir),
+      ]
+    }
+
+    # https://crbug.com/441916
+    if (is_android || is_linux || is_mac) {
+      cflags += [
+        "-Xclang",
+        "-plugin-arg-find-bad-constructs",
+        "-Xclang",
+        "check-templates",
+      ]
+    }
+
+    cflags += [
+      "-Xclang",
+      "-add-plugin",
+      "-Xclang",
+      "find-bad-constructs",
+    ]
+  }
+}
+
+# Enables some extra Clang-specific warnings. Some third-party code won't
+# compile with these, so such code may want to remove this config.
+config("extra_warnings") {
+  cflags = [
+    "-Wheader-hygiene",
+
+    # Warns when a const char[] is converted to bool.
+    "-Wstring-conversion",
+  ]
+}
diff --git a/build/config/clang/clang.gni b/build/config/clang/clang.gni
new file mode 100644
index 0000000..cb84879
--- /dev/null
+++ b/build/config/clang/clang.gni
@@ -0,0 +1,9 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Indicates if the build should use the Chrome-specific plugins for enforcing
+  # coding guidelines, etc. Only used when compiling with Clang.
+  clang_use_chrome_plugins = is_clang && !is_nacl
+}
diff --git a/build/config/compiler/BUILD.gn b/build/config/compiler/BUILD.gn
new file mode 100644
index 0000000..04d4bd9
--- /dev/null
+++ b/build/config/compiler/BUILD.gn
@@ -0,0 +1,1243 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/config.gni")
+import("//build/config/chrome_build.gni")
+if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+}
+if (current_cpu == "mipsel" || current_cpu == "mips64el") {
+  import("//build/config/mips.gni")
+}
+if (is_posix) {
+  import("//build/config/gcc/gcc_version.gni")
+}
+if (is_win) {
+  import("//build/config/win/visual_studio_version.gni")
+}
+
+import("//build/toolchain/ccache.gni")
+import("//build/config/sanitizers/sanitizers.gni")
+
+declare_args() {
+  # Normally, Android builds are lightly optimized, even for debug builds, to
+  # keep binary size down. Setting this flag to true disables such optimization.
+  android_full_debug = false
+
+  # Whether to use the binary binutils checked into third_party/binutils.
+  # These are not multi-arch so cannot be used except on x86 and x86-64 (the
+  # only two architectures that are currently checked in). Turn this off when
+  # you are using a custom toolchain and need to control -B in cflags.
+  linux_use_bundled_binutils = is_linux && current_cpu == "x64"
+
+  # Compile in such a way as to enable profiling of the generated code. For
+  # example, don't omit the frame pointer and leave in symbols.
+  enable_profiling = false
+
+  # Compile in such a way as to make it possible for the profiler to unwind full
+  # stack frames. Setting this flag has a larger effect on the performance of
+  # the generated code than just setting profiling, but gives the profiler more
+  # information to analyze.
+  # Requires profiling to be set to true.
+  enable_full_stack_frames_for_profiling = false
+
+  # Use gold for linking on 64-bit Linux only (on 32-bit it runs out of
+  # address space, and it doesn't support cross-compiling).
+  use_gold = is_linux && current_cpu == "x64"
+
+  # use_debug_fission: whether to use split DWARF debug info
+  # files. This can reduce link time significantly, but is incompatible
+  # with some utilities such as icecc and ccache. Requires gold and
+  # gcc >= 4.8 or clang.
+  # http://gcc.gnu.org/wiki/DebugFission
+  use_debug_fission = is_debug && !is_win && use_gold &&
+                      linux_use_bundled_binutils && !use_ccache
+
+  if (is_win) {
+    # Whether the VS xtree header has been patched to disable warning 4702. If
+    # it has, then we don't need to disable 4702 (unreachable code warning).
+    # The patch is preapplied to the internal toolchain and hence all bots.
+    msvs_xtree_patched = false
+  }
+}
+
+# default_include_dirs ---------------------------------------------------------
+#
+# This is a separate config so that third_party code (which would not use the
+# source root and might have conflicting versions of some headers) can remove
+# this and specify their own include paths.
+config("default_include_dirs") {
+  include_dirs = [
+    "//",
+    root_gen_dir,
+  ]
+}
+
+# TODO(GYP): is_ubsan, is_ubsan_vptr
+if (!is_win) {
+  using_sanitizer = is_asan || is_lsan || is_tsan || is_msan
+}
+
+# compiler ---------------------------------------------------------------------
+#
+# Base compiler configuration.
+#
+# See also "runtime_library" below for related stuff and a discussion about
+# where stuff should go. Put warning related stuff in the "warnings" config.
+
+config("compiler") {
+  cflags = []
+  cflags_c = []
+  cflags_cc = []
+  cflags_objcc = []
+  ldflags = []
+  defines = []
+
+  # In general, Windows is totally different, but all the other builds share
+  # some common GCC configuration. This section sets up Windows and the common
+  # GCC flags, and then we handle the other non-Windows platforms specifically
+  # below.
+  if (is_win) {
+    # Windows compiler flags setup.
+    # -----------------------------
+    cflags += [
+      "/Gy",  # Enable function-level linking.
+      "/GS",  # Enable buffer security checking.
+      "/FS",  # Preserve previous PDB behavior.
+    ]
+
+    # Building with Clang on Windows is a work in progress and very
+    # experimental. See crbug.com/82385.
+    # Keep this in sync with the similar block in build/common.gypi
+    if (is_clang) {
+      cflags += [
+        # Many files use intrinsics without including this header.
+        # TODO(hans): Fix those files, or move this to sub-GYPs.
+        "/FIIntrin.h",
+      ]
+
+      if (visual_studio_version == "2013") {
+        cflags += [ "-fmsc-version=1800" ]
+      } else if (visual_studio_version == "2015") {
+        cflags += [ "-fmsc-version=1900" ]
+      }
+
+      if (current_cpu == "x86") {
+        cflags += [
+          "/fallback",
+          "-m32",
+        ]
+      } else {
+        cflags += [ "-m64" ]
+      }
+      if (exec_script("//build/win/use_ansi_codes.py", [], "trim string") ==
+          "True") {
+        cflags += [
+          # cmd.exe doesn't understand ANSI escape codes by default,
+          # so only enable them if something emulating them is around.
+          "-fansi-escape-codes",
+        ]
+      }
+    }
+  } else {
+    # Common GCC compiler flags setup.
+    # --------------------------------
+    cflags += [ "-fno-strict-aliasing" ]  # See http://crbug.com/32204
+    common_flags = [
+      # Not exporting C++ inline functions can generally be applied anywhere
+      # so we do so here. Normal function visibility is controlled by
+      # //build/config/gcc:symbol_visibility_hidden.
+      "-fvisibility-inlines-hidden",
+    ]
+    cflags_cc += common_flags
+    cflags_objcc += common_flags
+
+    # Stack protection.
+    if (is_mac) {
+      cflags += [ "-fstack-protector-all" ]
+    } else if (is_linux) {
+      cflags += [
+        "-fstack-protector",
+        "--param=ssp-buffer-size=4",
+      ]
+    }
+
+    # Linker warnings.
+    if (!(is_chromeos && current_cpu == "arm") && !is_mac && !is_ios) {
+      # TODO(jochen): Enable this on chromeos on arm. http://crbug.com/356580
+      ldflags += [ "-Wl,--fatal-warnings" ]
+    }
+
+    # Common options for AddressSanitizer, LeakSanitizer, ThreadSanitizer and
+    # MemorySanitizer
+    if (using_sanitizer) {
+      cflags += [
+        "-fno-omit-frame-pointer",
+        "-gline-tables-only",
+      ]
+    }
+    if (is_asan) {
+      asan_blacklist_path =
+          rebase_path("//tools/memory/asan/blacklist.txt", root_build_dir)
+      cflags += [
+        "-fsanitize=address",
+        "-fsanitize-blacklist=$asan_blacklist_path",
+      ]
+      if (is_mac) {
+        cflags += [ "-mllvm -asan-globals=0" ]  # http://crbug.com/352073
+        # TODO(GYP): deal with mac_bundles.
+      }
+    }
+    if (is_lsan) {
+      cflags += [ "-fsanitize=leak" ]
+    }
+    if (is_tsan) {
+      tsan_blacklist_path =
+          rebase_path("//tools/memory/tsan_v2/ignores.txt", root_build_dir)
+      cflags += [
+        "-fsanitize=thread",
+        "-fsanitize-blacklist=$tsan_blacklist_path",
+      ]
+    }
+    if (is_msan) {
+      msan_blacklist_path =
+          rebase_path("//tools/msan/blacklist.txt", root_build_dir)
+      cflags += [
+        "-fsanitize=memory",
+        "-fsanitize-memory-track-origins=$msan_track_origins",
+        "-fsanitize-blacklist=$msan_blacklist_path",
+      ]
+    }
+
+    if (use_custom_libcxx) {
+      cflags_cc += [ "-nostdinc++" ]
+      include_dirs = [
+        "//buildtools/third_party/libc++/trunk/include",
+        "//buildtools/third_party/libc++abi/trunk/include",
+      ]
+    }
+
+    if (is_fnl) {
+      # TODO(kulakowski) remove when fnl no longer uses gcc
+      cflags += [ "-Wno-maybe-uninitialized" ]
+    }
+  }
+
+  if (is_clang && is_debug) {
+    # Allow comparing the address of references and 'this' against 0
+    # in debug builds. Technically, these can never be null in
+    # well-defined C/C++ and Clang can optimize such checks away in
+    # release builds, but they may be used in asserts in debug builds.
+    extra_flags = [
+      "-Wno-undefined-bool-conversion",
+      "-Wno-tautological-undefined-compare",
+    ]
+    cflags_cc += extra_flags
+    cflags_objcc += extra_flags
+  }
+
+  if (is_clang && !is_nacl) {
+    # This is here so that all files get recompiled after a clang roll and
+    # when turning clang on or off. (defines are passed via the command line,
+    # and build system rebuild things when their commandline changes). Nothing
+    # should ever read this define.
+    defines +=
+        [ "CR_CLANG_REVISION=" + exec_script("//tools/clang/scripts/update.py",
+                                             [ "--print-revision" ],
+                                             "trim string") ]
+  }
+
+  # Mac-specific compiler flags setup.
+  # ----------------------------------
+  if (is_mac || is_ios) {
+    # These flags are shared between the C compiler and linker.
+    common_mac_flags = []
+
+    # CPU architecture.
+    if (current_cpu == "x64") {
+      common_mac_flags += [
+        "-arch",
+        "x86_64",
+      ]
+    } else if (current_cpu == "x86") {
+      common_mac_flags += [
+        "-arch",
+        "i386",
+      ]
+    } else if (current_cpu == "arm") {
+      common_mac_flags += [
+        "-arch",
+        "armv7",
+      ]
+    } else if (current_cpu == "arm64") {
+      common_mac_flags += [
+        "-arch",
+        "arm64",
+      ]
+    }
+
+    cflags += common_mac_flags
+
+    # Without this, the constructors and destructors of a C++ object inside
+    # an Objective C struct won't be called, which is very bad.
+    cflags_objcc += [ "-fobjc-call-cxx-cdtors" ]
+
+    cflags_c += [ "-std=c99" ]
+
+    ldflags += common_mac_flags
+  } else if (is_posix) {
+    # CPU architecture. We may or may not be doing a cross compile now, so for
+    # simplicity we always explicitly set the architecture.
+    if (current_cpu == "x64") {
+      cflags += [
+        "-m64",
+        "-march=x86-64",
+      ]
+      ldflags += [ "-m64" ]
+    } else if (current_cpu == "x86") {
+      cflags += [ "-m32" ]
+      ldflags += [ "-m32" ]
+      if (is_clang) {
+        cflags += [
+          # Otherwise building libyuv hits Clang register allocator issues,
+          # see llvm.org/PR15798 / crbug.com/233709
+          "-momit-leaf-frame-pointer",
+
+          # Align the stack on 16-byte boundaries, http://crbug.com/418554.
+          "-mstack-alignment=16",
+          "-mstackrealign",
+        ]
+      }
+    } else if (current_cpu == "arm") {
+      cflags += [
+        "-march=$arm_arch",
+        "-mfloat-abi=$arm_float_abi",
+      ]
+      if (arm_tune != "") {
+        cflags += [ "-mtune=$arm_tune" ]
+      }
+      if (arm_use_thumb) {
+        cflags += [ "-mthumb" ]
+        if (is_android && !is_clang) {  # Clang doesn't support this option.
+          cflags += [ "-mthumb-interwork" ]
+        }
+      }
+      if (!is_clang) {
+        # Clang doesn't support these flags.
+        cflags += [
+          # The tree-sra optimization (scalar replacement for
+          # aggregates enabling subsequent optimizations) leads to
+          # invalid code generation when using the Android NDK's
+          # compiler (r5-r7). This can be verified using
+          # webkit_unit_tests' WTF.Checked_int8_t test.
+          "-fno-tree-sra",
+
+          # The following option is disabled to improve binary
+          # size and performance in gcc 4.9.
+          "-fno-caller-saves",
+        ]
+      }
+    } else if (current_cpu == "mipsel") {
+      if (mips_arch_variant == "r6") {
+        cflags += [
+          "-mips32r6",
+          "-Wa,-mips32r6",
+        ]
+        if (is_android) {
+          ldflags += [
+            "-mips32r6",
+            "-Wl,-melf32ltsmip",
+          ]
+        }
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips32r2",
+          "-Wa,-mips32r2",
+        ]
+        if (mips_float_abi == "hard" && mips_fpu_mode != "") {
+          cflags += [ "-m$mips_fpu_mode" ]
+        }
+      } else if (mips_arch_variant == "r1") {
+        cflags += [
+          "-mips32",
+          "-Wa,-mips32",
+        ]
+      }
+
+      if (mips_dsp_rev == 1) {
+        cflags += [ "-mdsp" ]
+      } else if (mips_dsp_rev == 2) {
+        cflags += [ "-mdspr2" ]
+      }
+
+      cflags += [ "-m${mips_float_abi}-float" ]
+    } else if (current_cpu == "mips64el") {
+      if (mips_arch_variant == "r6") {
+        cflags += [
+          "-mips64r6",
+          "-Wa,-mips64r6",
+        ]
+        ldflags += [ "-mips64r6" ]
+      } else if (mips_arch_variant == "r2") {
+        cflags += [
+          "-mips64r2",
+          "-Wa,-mips64r2",
+        ]
+        ldflags += [ "-mips64r2" ]
+      }
+    }
+
+    defines += [ "_FILE_OFFSET_BITS=64" ]
+
+    if (!is_android) {
+      defines += [
+        "_LARGEFILE_SOURCE",
+        "_LARGEFILE64_SOURCE",
+      ]
+    }
+
+    # Omit unwind support in official builds to save space. We can use breakpad
+    # for these builds.
+    if (is_chrome_branded && is_official_build) {
+      cflags += [
+        "-fno-unwind-tables",
+        "-fno-asynchronous-unwind-tables",
+      ]
+      defines += [ "NO_UNWIND_TABLES" ]
+    } else {
+      cflags += [ "-funwind-tables" ]
+    }
+  }
+
+  if (enable_profiling && !is_debug) {
+    # The GYP build spams this define into every compilation unit, as we do
+    # here, but it only appears to be used in base and a couple other places.
+    # TODO(abarth): Should we move this define closer to where it's used?
+    defines += [ "ENABLE_PROFILING" ]
+
+    cflags += [
+      "-fno-omit-frame-pointer",
+      "-g",
+    ]
+
+    if (enable_full_stack_frames_for_profiling) {
+      cflags += [
+        "-fno-inline",
+        "-fno-optimize-sibling-calls",
+      ]
+    }
+  }
+
+  # Linux/Android common flags setup.
+  # ---------------------------------
+  if (is_linux || is_android) {
+    cflags += [
+      "-fPIC",
+      "-pipe",  # Use pipes for communicating between sub-processes. Faster.
+    ]
+
+    ldflags += [
+      "-fPIC",
+      "-Wl,-z,noexecstack",
+      "-Wl,-z,now",
+      "-Wl,-z,relro",
+    ]
+    if (!using_sanitizer) {
+      ldflags += [ "-Wl,-z,defs" ]
+    }
+  }
+
+  # Linux-specific compiler flags setup.
+  # ------------------------------------
+  if (is_linux) {
+    cflags += [ "-pthread" ]
+    ldflags += [ "-pthread" ]
+  }
+  if (use_gold) {
+    gold_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                            root_build_dir)
+    ldflags += [
+      "-B$gold_path",
+
+      # Newer gccs and clangs support -fuse-ld, use the flag to force gold
+      # selection.
+      # gcc -- http://gcc.gnu.org/onlinedocs/gcc-4.8.0/gcc/Optimize-Options.html
+      "-fuse-ld=gold",
+
+      # Experimentation found that using four linking threads
+      # saved ~20% of link time.
+      # https://groups.google.com/a/chromium.org/group/chromium-dev/browse_thread/thread/281527606915bb36
+      # Only apply this to the target linker, since the host
+      # linker might not be gold, but isn't used much anyway.
+      # TODO(raymes): Disable threading because gold is frequently
+      # crashing on the bots: crbug.com/161942.
+      #"-Wl,--threads",
+      #"-Wl,--thread-count=4",
+    ]
+
+    if (!is_asan && !is_msan && !is_lsan && !is_tsan) {
+      # TODO(brettw) common.gypi has this only for target toolset.
+      ldflags += [ "-Wl,--icf=all" ]
+    }
+
+    # TODO(thestig): Make this flag work with GN.
+    #if (!is_official_build && !is_chromeos && !(is_asan || is_lsan || is_tsan || is_msan)) {
+    #  ldflags += [
+    #    "-Wl,--detect-odr-violations",
+    #  ]
+    #}
+  }
+
+  if (linux_use_bundled_binutils) {
+    binutils_path = rebase_path("//third_party/binutils/Linux_x64/Release/bin",
+                                root_build_dir)
+    cflags += [ "-B$binutils_path" ]
+  }
+
+  # Clang-specific compiler flags setup.
+  # ------------------------------------
+  if (is_clang) {
+    cflags += [ "-fcolor-diagnostics" ]
+  }
+
+  # C++11 compiler flags setup.
+  # ---------------------------
+  if (is_linux || is_android || is_nacl) {
+    # gnu++11 instead of c++11 is needed because some code uses typeof() (a
+    # GNU extension).
+    # TODO(thakis): Eventually switch this to c++11 instead,
+    # http://crbug.com/427584
+    cflags_cc += [ "-std=gnu++11" ]
+  } else if (!is_win) {
+    cc_std = [ "-std=c++11" ]
+    cflags_cc += cc_std
+    cflags_objcc += cc_std
+  }
+
+  # Android-specific flags setup.
+  # -----------------------------
+  if (is_android) {
+    cflags += [
+      "-ffunction-sections",
+      "-funwind-tables",
+      "-fno-short-enums",
+    ]
+    if (!is_clang) {
+      # Clang doesn't support these flags.
+      cflags += [ "-finline-limit=64" ]
+    }
+    if (is_asan) {
+      # Android build relies on -Wl,--gc-sections removing unreachable code.
+      # ASan instrumentation for globals inhibits this and results in a library
+      # with unresolvable relocations.
+      # TODO(eugenis): find a way to reenable this.
+      cflags += [ "-mllvm -asan-globals=0" ]
+    }
+
+    defines += [ "ANDROID" ]
+
+    # The NDK has these things, but doesn't define the constants
+    # to say that it does. Define them here instead.
+    defines += [ "HAVE_SYS_UIO_H" ]
+
+    # Use gold for Android for most CPU architectures.
+    if (current_cpu == "x86" || current_cpu == "x64" || current_cpu == "arm") {
+      ldflags += [ "-fuse-ld=gold" ]
+      if (is_clang) {
+        # Let clang find the ld.gold in the NDK.
+        ldflags += [ "--gcc-toolchain=" +
+                     rebase_path(android_toolchain_root, root_build_dir) ]
+      }
+    }
+
+    ldflags += [
+      "-Wl,--no-undefined",
+
+      # Don't allow visible symbols from libgcc or libc++ to be
+      # re-exported.
+      "-Wl,--exclude-libs=libgcc.a",
+      "-Wl,--exclude-libs=libc++_static.a",
+
+      # Don't allow visible symbols from libraries that contain
+      # assembly code with symbols that aren't hidden properly.
+      # http://crbug.com/448386
+      "-Wl,--exclude-libs=libvpx_assembly_arm.a",
+    ]
+    if (current_cpu == "arm") {
+      ldflags += [
+        # Enable identical code folding to reduce size.
+        "-Wl,--icf=safe",
+      ]
+    }
+
+    if (is_clang) {
+      if (current_cpu == "arm") {
+        cflags += [ "--target=arm-linux-androideabi" ]
+        ldflags += [ "--target=arm-linux-androideabi" ]
+      } else if (current_cpu == "x86") {
+        cflags += [ "--target=x86-linux-androideabi" ]
+        ldflags += [ "--target=x86-linux-androideabi" ]
+      }
+    }
+  }
+}
+
+config("compiler_arm_fpu") {
+  if (current_cpu == "arm" && !is_ios) {
+    cflags = [ "-mfpu=$arm_fpu" ]
+  }
+}
+
+# runtime_library -------------------------------------------------------------
+#
+# Sets the runtime library and associated options.
+#
+# How do you determine what should go in here vs. "compiler" above? Consider if
+# a target might choose to use a different runtime library (ignore for a moment
+# if this is possible or reasonable on your system). If such a target would want
+# to change or remove your option, put it in the runtime_library config. If a
+# target wants the option regardless, put it in the compiler config.
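+#
+# For example (a sketch only; the target and config names below are
+# illustrative, not ones defined in this build), a target supplying its own
+# runtime support could swap this config out:
+#   executable("custom_runtime_tool") {
+#     configs -= [ "//build/config/compiler:runtime_library" ]
+#     configs += [ "//custom/runtime:runtime_library" ]
+#   }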
+
+config("runtime_library") {
+  cflags = []
+  defines = []
+  ldflags = []
+  lib_dirs = []
+  libs = []
+
+  if (is_component_build) {
+    # Component mode: dynamic CRT.
+    defines += [ "COMPONENT_BUILD" ]
+    if (is_win) {
+      # Since the library is shared, it requires exceptions or will give errors
+      # about things not matching, so keep exceptions on.
+      if (is_debug) {
+        cflags += [ "/MDd" ]
+      } else {
+        cflags += [ "/MD" ]
+      }
+    }
+  } else {
+    # Static CRT.
+    if (is_win) {
+      if (is_debug) {
+        cflags += [ "/MTd" ]
+      } else {
+        cflags += [ "/MT" ]
+      }
+    }
+  }
+
+  if (is_win) {
+    defines += [
+      "__STD_C",
+      "_CRT_RAND_S",
+      "_CRT_SECURE_NO_DEPRECATE",
+      "_HAS_EXCEPTIONS=0",
+      "_SCL_SECURE_NO_DEPRECATE",
+    ]
+  }
+
+  # Android standard library setup.
+  if (is_android) {
+    if (is_clang) {
+      # Work around incompatibilities between bionic and clang headers.
+      defines += [
+        "__compiler_offsetof=__builtin_offsetof",
+        "nan=__builtin_nan",
+      ]
+    }
+
+    defines += [ "__GNU_SOURCE=1" ]  # Necessary for clone().
+
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      ldflags += [ "-Wl,--warn-shared-textrel" ]
+    }
+    ldflags += [ "-nostdlib" ]
+
+    # NOTE: The libc++ header include paths below are specified in cflags
+    # rather than include_dirs because they need to come after include_dirs.
+    # Think of them like system headers, but don't use '-isystem' because the
+    # arm-linux-androideabi-4.4.3 toolchain (circa Gingerbread) will exhibit
+    # strange errors. The include ordering here is important; change with
+    # caution.
+    cflags += [
+      "-isystem" +
+          rebase_path("$android_libcpp_root/libcxx/include", root_build_dir),
+      "-isystem" + rebase_path(
+              "$android_ndk_root/sources/cxx-stl/llvm-libc++abi/libcxxabi/include",
+              root_build_dir),
+      "-isystem" +
+          rebase_path("$android_ndk_root/sources/android/support/include",
+                      root_build_dir),
+    ]
+
+    lib_dirs += [ "$android_libcpp_root/libs/$android_app_abi" ]
+    libs += [ "$android_libcpp_library" ]
+
+    if (current_cpu == "mipsel") {
+      libs += [
+        # The ld linker is used for mips Android, and ld does not accept an
+        # absolute library path prefixed by "-l"; since libgcc does not exist
+        # in the mips sysroot, the proper library will be linked.
+        # TODO(gordanac): Remove once gold linker is used for mips Android.
+        "gcc",
+      ]
+    } else {
+      libs += [
+        # Manually link the libgcc.a that the cross compiler uses. This is
+        # absolute because the linker will look inside the sysroot if it's not.
+        rebase_path(android_libgcc_file),
+      ]
+    }
+
+    libs += [
+      "c",
+      "dl",
+      "m",
+    ]
+
+    # Clang with libc++ does not require an explicit atomic library reference.
+    if (!is_clang) {
+      libs += [ "atomic" ]
+    }
+  }
+}
+
+# default_warning_flags collects all warning flags that are used by default.
+# This is in a variable instead of a config so that it can be used in
+# both chromium_code and no_chromium_code.  This way these flags are guaranteed
+# to appear on the compile command line after -Wall.
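+#
+# A sketch of how a shared warning-flag variable like this is typically
+# consumed (illustrative, not the exact configs defined later in this file):
+#   config("chromium_code") {
+#     cflags = [ "-Wall" ] + default_warning_flags
+#     cflags_cc = default_warning_flags_cc
+#   }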
+
+default_warning_flags = []
+default_warning_flags_cc = []
+if (is_win) {
+  if (!is_clang || current_cpu != "x86") {
+    default_warning_flags += [ "/WX" ]  # Treat warnings as errors.
+  }
+
+  default_warning_flags += [
+    # Warnings permanently disabled:
+
+    # TODO(GYP) The GYP build doesn't have this globally enabled but disabled
+    # for a bunch of individual targets. Re-enable this globally when those
+    # targets are fixed.
+    "/wd4018",  # Comparing signed and unsigned values.
+
+    # C4127: conditional expression is constant
+    # This warning can in theory catch dead code and other problems, but
+    # triggers in far too many desirable cases where the conditional
+    # expression is either set by macros or corresponds to some legitimate
+    # compile-time constant expression (due to constant template args,
+    # conditionals comparing the sizes of different types, etc.).  Some of
+    # these can be worked around, but it's not worth it.
+    "/wd4127",
+
+    # C4251: 'identifier' : class 'type' needs to have dll-interface to be
+    #        used by clients of class 'type2'
+    # This is necessary for the shared library build.
+    "/wd4251",
+
+    # C4351: new behavior: elements of array 'array' will be default
+    #        initialized
+    # This is a silly "warning" that basically just alerts you that the
+    # compiler is going to actually follow the language spec like it's
+    # supposed to, instead of not following it like old buggy versions did.
+    # There's absolutely no reason to turn this on.
+    "/wd4351",
+
+    # C4355: 'this': used in base member initializer list
+    # It's commonly useful to pass |this| to objects in a class' initializer
+    # list.  While this warning can catch real bugs, most of the time the
+    # constructors in question don't attempt to call methods on the passed-in
+    # pointer (until later), and annotating every legit usage of this is
+    # simply more hassle than the warning is worth.
+    "/wd4355",
+
+    # C4503: 'identifier': decorated name length exceeded, name was
+    #        truncated
+    # This only means that some long error messages might have truncated
+    # identifiers in the presence of lots of templates.  It has no effect on
+    # program correctness and there's no real reason to waste time trying to
+    # prevent it.
+    "/wd4503",
+
+    # Warning C4589 says: "Constructor of abstract class ignores
+    # initializer for virtual base class." Disable this warning because it
+    # is flaky in VS 2015 RTM. It triggers on compiler generated
+    # copy-constructors in some cases.
+    "/wd4589",
+
+    # C4611: interaction between 'function' and C++ object destruction is
+    #        non-portable
+    # This warning is unavoidable when using e.g. setjmp/longjmp.  MSDN
+    # suggests using exceptions instead of setjmp/longjmp for C++, but
+    # Chromium code compiles without exception support.  We therefore have to
+    # use setjmp/longjmp for e.g. JPEG decode error handling, which means we
+    # have to turn off this warning (and be careful about how object
+    # destruction happens in such cases).
+    "/wd4611",
+
+    # Warnings to evaluate and possibly fix/reenable later:
+
+    "/wd4100",  # Unreferenced formal function parameter.
+    "/wd4121",  # Alignment of a member was sensitive to packing.
+    "/wd4244",  # Conversion: possible loss of data.
+    "/wd4481",  # Nonstandard extension: override specifier.
+    "/wd4505",  # Unreferenced local function has been removed.
+    "/wd4510",  # Default constructor could not be generated.
+    "/wd4512",  # Assignment operator could not be generated.
+    "/wd4610",  # Class can never be instantiated, constructor required.
+    "/wd4996",  # Deprecated function warning.
+  ]
+
+  # VS xtree header file needs to be patched or 4702 (unreachable code
+  # warning) is reported if _HAS_EXCEPTIONS=0. Disable the warning if xtree is
+  # not patched.
+  if (!msvs_xtree_patched &&
+      exec_script("../../win_is_xtree_patched.py", [], "value") == 0) {
+    default_warning_flags += [ "/wd4702" ]  # Unreachable code.
+  }
+
+  # Building with Clang on Windows is a work in progress and very
+  # experimental. See crbug.com/82385.
+  # Keep this in sync with the similar block in build/common.gypi
+  if (is_clang) {
+    default_warning_flags += [
+      # TODO(hans): Make this list shorter eventually, http://crbug.com/504657
+      "-Qunused-arguments",  # http://crbug.com/504658
+      "-Wno-microsoft",  # http://crbug.com/505296
+      "-Wno-switch",  # http://crbug.com/505308
+      "-Wno-unknown-pragmas",  # http://crbug.com/505314
+      "-Wno-unused-function",  # http://crbug.com/505316
+      "-Wno-unused-value",  # http://crbug.com/505318
+      "-Wno-unused-local-typedef",  # http://crbug.com/411648
+    ]
+  }
+} else {
+  # Common GCC warning setup.
+  default_warning_flags += [
+    # Enables.
+    "-Wendif-labels",  # Weird old-style text after an #endif.
+    "-Werror",  # Warnings as errors.
+
+    # Disables.
+    "-Wno-missing-field-initializers",  # "struct foo f = {0};"
+    "-Wno-unused-parameter",  # Unused function parameters.
+  ]
+
+  if (is_mac) {
+    # TODO(abarth): Re-enable once https://github.com/domokit/mojo/issues/728
+    #               is fixed.
+    # default_warning_flags += [ "-Wnewline-eof" ]
+    if (!is_nacl) {
+      # When compiling Objective-C, warns if a method is used whose
+      # availability is newer than the deployment target. This is not
+      # required when compiling Chrome for iOS.
+      default_warning_flags += [ "-Wpartial-availability" ]
+    }
+  }
+
+  if (gcc_version >= 48) {
+    default_warning_flags_cc += [
+      # See comment for -Wno-c++11-narrowing.
+      "-Wno-narrowing",
+    ]
+  }
+
+  # Suppress warnings about ABI changes on ARM (Clang doesn't give this
+  # warning).
+  if (current_cpu == "arm" && !is_clang) {
+    default_warning_flags += [ "-Wno-psabi" ]
+  }
+
+  if (is_android) {
+    # Disable any additional warnings enabled by the Android build system but
+    # which chromium does not build cleanly with (when treating warnings as
+    # errors).
+    default_warning_flags += [
+      "-Wno-extra",
+      "-Wno-ignored-qualifiers",
+      "-Wno-type-limits",
+    ]
+    default_warning_flags_cc += [
+      # Disabling c++0x-compat should be handled in WebKit, but
+      # this currently doesn't work because gcc_version is not set
+      # correctly when building with the Android build system.
+      # TODO(torne): Fix this in WebKit.
+      "-Wno-error=c++0x-compat",
+
+      # Other things unrelated to -Wextra:
+      "-Wno-non-virtual-dtor",
+      "-Wno-sign-promo",
+    ]
+  }
+
+  if (gcc_version >= 48) {
+    # Don't warn about the "typedef 'foo' locally defined but not used"
+    # for gcc 4.8.
+    # TODO: remove this flag once all builds work. See crbug.com/227506
+    default_warning_flags += [ "-Wno-unused-local-typedefs" ]
+  }
+}
+if (is_clang) {
+  default_warning_flags += [
+    # This warns on using ints as initializers for floats in
+    # initializer lists (e.g. |int a = f(); CGSize s = { a, a };|),
+    # which happens in several places in chrome code. Not sure if
+    # this is worth fixing.
+    "-Wno-c++11-narrowing",
+
+    # Don't die on dtoa code that uses a char as an array index.
+    # This is required solely for base/third_party/dmg_fp/dtoa.cc.
+    # TODO(brettw) move this to that project then!
+    "-Wno-char-subscripts",
+
+    # Warns on switches on enums that cover all enum values but
+    # also contain a default: branch. Chrome is full of that.
+    "-Wno-covered-switch-default",
+
+    # Clang considers the `register` keyword as deprecated, but e.g.
+    # code generated by flex (used in angle) contains that keyword.
+    # http://crbug.com/255186
+    "-Wno-deprecated-register",
+  ]
+
+  # NaCl's Clang compiler and Chrome's hermetic Clang compiler will almost
+  # always have different versions. Certain flags may not be recognized by
+  # one version or the other.
+  if (!is_nacl) {
+    # Flags NaCl does not recognize.
+    default_warning_flags += [
+      # TODO(hans): Get this cleaned up, http://crbug.com/428099
+      "-Wno-inconsistent-missing-override",
+
+      # TODO(thakis): Enable this, crbug.com/507717
+      "-Wno-shift-negative-value",
+    ]
+  }
+}
+
+# chromium_code ---------------------------------------------------------------
+#
+# Toggles between higher and lower warnings for code that is (or isn't)
+# part of Chromium.
+
+config("chromium_code") {
+  if (is_win) {
+    cflags = [ "/W4" ]  # Warning level 4.
+  } else {
+    cflags = [
+      "-Wall",
+      "-Wextra",
+    ]
+
+    # In Chromium code, we define __STDC_foo_MACROS in order to get the
+    # C99 macros on Mac and Linux.
+    defines = [
+      "__STDC_CONSTANT_MACROS",
+      "__STDC_FORMAT_MACROS",
+    ]
+
+    if (!using_sanitizer && (!is_linux || !is_clang || is_official_build)) {
+      # _FORTIFY_SOURCE isn't really supported by Clang now, see
+      # http://llvm.org/bugs/show_bug.cgi?id=16821.
+      # It seems to work fine with Ubuntu 12 headers though, so use it in
+      # official builds.
+      #
+      # Non-chromium code is not guaranteed to compile cleanly with
+      # _FORTIFY_SOURCE. Also, fortified build may fail when optimizations are
+      # disabled, so only do that for Release build.
+      defines += [ "_FORTIFY_SOURCE=2" ]
+    }
+  }
+  cflags += default_warning_flags
+  cflags_cc = default_warning_flags_cc
+}
+config("no_chromium_code") {
+  cflags = []
+  cflags_cc = []
+  defines = []
+
+  if (is_win) {
+    cflags += [
+      "/W3",  # Warning level 3.
+      "/wd4800",  # Disable warning when forcing value to bool.
+      "/wd4267",  # TODO(jschuh): size_t to int.
+      "/wd4996",  # Deprecated function warning.
+    ]
+    defines += [
+      "_CRT_NONSTDC_NO_WARNINGS",
+      "_CRT_NONSTDC_NO_DEPRECATE",
+    ]
+  }
+
+  if (is_linux) {
+    # Don't warn about ignoring the return value from e.g. close(). This is
+    # off by default in some gccs but on by default in others. BSD systems do
+    # not support this option, since they are usually using gcc 4.2.1, which
+    # does not have this flag yet.
+    cflags += [ "-Wno-unused-result" ]
+  }
+
+  if (is_clang) {
+    cflags += [
+      # TODO(mgiuca): Move this suppression into individual third-party
+      # libraries as required. http://crbug.com/505301.
+      "-Wno-overloaded-virtual",
+
+      # Lots of third-party libraries have unused variables. Instead of
+      # suppressing them individually, we just blanket suppress them here.
+      "-Wno-unused-variable",
+    ]
+  }
+
+  if (is_linux || is_android) {
+    cflags += [
+      # Don't warn about printf format problems. This is off by default in gcc
+      # but on in Ubuntu's gcc(!).
+      "-Wno-format",
+    ]
+    cflags_cc += [
+      # Don't warn about hash_map in third-party code.
+      "-Wno-deprecated",
+    ]
+  }
+  cflags += default_warning_flags
+  cflags_cc += default_warning_flags_cc
+}
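+
+# As a rough illustration (the target name and label path below are
+# hypothetical), a target wrapping third-party code would typically swap
+# the stricter config for the relaxed one:
+#
+#   source_set("third_party_foo") {
+#     configs -= [ "//build/config/compiler:chromium_code" ]
+#     configs += [ "//build/config/compiler:no_chromium_code" ]
+#   }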
+
+# rtti ------------------------------------------------------------------------
+#
+# Allows turning Run-Time Type Identification on or off.
+
+config("rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR" ]
+  }
+}
+config("no_rtti") {
+  if (is_win) {
+    cflags_cc = [ "/GR-" ]
+  } else {
+    rtti_flags = [ "-fno-rtti" ]
+    cflags_cc = rtti_flags
+    cflags_objcc = rtti_flags
+  }
+}
+
+# Warnings ---------------------------------------------------------------------
+
+# This will generate warnings when using Clang if code generates exit-time
+# destructors, which will slow down closing the program.
+# TODO(thakis): Make this a blacklist instead, http://crbug.com/101600
+config("wexit_time_destructors") {
+  # TODO: Enable on Windows too, http://crbug.com/404525
+  if (is_clang && !is_win) {
+    cflags = [ "-Wexit-time-destructors" ]
+  }
+}
+
+# On Windows compiling on x64, VC will issue a warning when converting
+# size_t to int because it will truncate the value. Our code should not have
+# these warnings and one should use a static_cast or a checked_cast for the
+# conversion depending on the case. However, a lot of code still needs to be
+# fixed. Apply this config to such targets to disable the warning.
+#
+# Note that this can be applied regardless of platform and architecture to
+# clean up the call sites. This will only apply the flag when necessary.
+#
+# TODO(jschuh): crbug.com/167187 fix this and delete this config.
+config("no_size_t_to_int_warning") {
+  if (is_win && current_cpu == "x64") {
+    cflags = [ "/wd4267" ]
+  }
+}
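+
+# For example (hypothetical target), the config would be added alongside the
+# target's other configs:
+#
+#   source_set("legacy_code") {
+#     configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
+#   }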
+
+# Optimization -----------------------------------------------------------------
+#
+# Note that BUILDCONFIG.gn sets up a variable "default_optimization_config"
+# which it will assign to the config it implicitly applies to every target. If
+# you want to override the optimization level for your target, remove this
+# config (which will expand differently for debug or release builds), and then
+# add back the one you want to override it with:
+#
+#   configs -= default_optimization_config
+#   configs += [ "//build/config/compiler:optimize_max" ]
+
+# Shared settings for both "optimize" and "optimize_max" configs.
+# IMPORTANT: On Windows "/O1" and "/O2" must go before the common flags.
+if (is_win) {
+  common_optimize_on_cflags = [
+    "/Ob2",  # Both explicit and auto inlining.
+    "/Oy-",  # Disable omitting frame pointers, must be after /O2.
+  ]
+  if (!is_asan) {
+    common_optimize_on_cflags += [
+      # Put data in separate COMDATs. This allows the linker
+      # to put bit-identical constants at the same address even if
+      # they're unrelated constants, which saves binary size.
+      # This optimization can't be used when ASan is enabled because
+      # it is not compatible with the ASan ODR checker.
+      "/Gw",
+    ]
+  }
+  common_optimize_on_ldflags = [ "/OPT:REF" ]
+} else {
+  common_optimize_on_cflags = [
+    # Don't emit the GCC version ident directives, they just end up in the
+    # .comment section taking up binary size.
+    "-fno-ident",
+
+    # Put data and code in their own sections, so that unused symbols
+    # can be removed at link time with --gc-sections.
+    "-fdata-sections",
+    "-ffunction-sections",
+  ]
+  common_optimize_on_ldflags = []
+
+  if (is_android) {
+    if (!using_sanitizer) {
+      common_optimize_on_cflags += [ "-fomit-frame-pointer" ]
+    }
+
+    # TODO(jdduke) Re-enable on mips after resolving linking
+    # issues with libc++ (crbug.com/456380).
+    if (current_cpu != "mipsel" && current_cpu != "mips64el") {
+      common_optimize_on_ldflags += [
+        # Warn in case of text relocations.
+        "-Wl,--warn-shared-textrel",
+      ]
+    }
+  }
+
+  if (is_mac || is_ios) {
+    if (symbol_level == 2) {
+      # Mac dead code stripping requires symbols.
+      common_optimize_on_ldflags += [ "-Wl,-dead_strip" ]
+    }
+  } else {
+    # Non-Mac Posix linker flags.
+    common_optimize_on_ldflags += [
+      # Specifically tell the linker to perform optimizations.
+      # See http://lwn.net/Articles/192624/ .
+      "-Wl,-O1",
+      "-Wl,--gc-sections",
+    ]
+
+    if (!using_sanitizer) {
+      # Functions interposed by the sanitizers can make ld think
+      # that some libraries aren't needed when they actually are,
+      # http://crbug.com/234010. As a workaround, --as-needed is only passed
+      # when sanitizers are not in use.
+      common_optimize_on_ldflags += [ "-Wl,--as-needed" ]
+    }
+  }
+}
+
+# Default "optimization on" config. On Windows, this favors size over speed.
+config("optimize") {
+  if (is_win) {
+    # Favor size over speed, /O1 must be before the common flags. The GYP
+    # build also specifies /Os and /GF but these are implied by /O1.
+    cflags = [ "/O1" ] + common_optimize_on_cflags + [ "/Oi" ]
+  } else if (is_android || is_ios) {
+    cflags = [ "-Os" ] + common_optimize_on_cflags  # Favor size over speed.
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+  ldflags = common_optimize_on_ldflags
+}
+
+# Turn off optimizations.
+config("no_optimize") {
+  if (is_win) {
+    cflags = [
+      "/Od",  # Disable optimization.
+      "/Ob0",  # Disable all inlining (on by default).
+      "/RTC1",  # Runtime checks for stack frame and uninitialized variables.
+    ]
+  } else if (is_android && !android_full_debug) {
+    # On Android, even when optimization is disabled, apply a few
+    # optimizations that don't noticeably affect debugging in order to keep
+    # the binary size down.
+    cflags = [
+      "-Os",
+      "-fdata-sections",
+      "-ffunction-sections",
+    ]
+    if (!using_sanitizer) {
+      cflags += [ "-fomit-frame-pointer" ]
+    }
+    ldflags = common_optimize_on_ldflags
+  } else {
+    cflags = [ "-O0" ]
+  }
+}
+
+# Turns up the optimization level. On Windows, this implies whole program
+# optimization and link-time code generation which is very expensive and should
+# be used sparingly.
+config("optimize_max") {
+  ldflags = common_optimize_on_ldflags
+  if (is_win) {
+    # Favor speed over size, /O2 must be before the common flags. The GYP
+    # build also specifies /Ot, /Oi, and /GF, but these are implied by /O2.
+    cflags = [ "/O2" ] + common_optimize_on_cflags
+    if (is_official_build) {
+      # TODO(GYP): TODO(dpranke): Should these only be on in an official
+      # build, or on all the time? For now we'll require official build so
+      # that the compile is clean.
+      cflags += [
+        "/GL",  # Whole program optimization.
+
+        # Disable Warning 4702 ("Unreachable code") for the WPO/PGO builds.
+        # Probably anything that this would catch that wouldn't be caught in a
+        # normal build isn't going to actually be a bug, so the incremental
+        # value of C4702 for PGO builds is likely very small.
+        "/wd4702",
+      ]
+      ldflags += [ "/LTCG" ]
+    }
+  } else {
+    cflags = [ "-O2" ] + common_optimize_on_cflags
+  }
+}
+
+# Symbols ----------------------------------------------------------------------
+
+config("symbols") {
+  if (is_win) {
+    import("//build/toolchain/goma.gni")
+    if (use_goma) {
+      cflags = [ "/Z7" ]  # No PDB file
+    } else {
+      cflags = [ "/Zi" ]  # Produce PDB file, no edit and continue.
+    }
+    ldflags = [ "/DEBUG" ]
+  } else {
+    cflags = [ "-g2" ]
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+  }
+}
+
+config("minimal_symbols") {
+  if (is_win) {
+    # Linker symbols for backtraces only.
+    ldflags = [ "/DEBUG" ]
+  } else {
+    cflags = [ "-g1" ]
+    if (use_debug_fission) {
+      cflags += [ "-gsplit-dwarf" ]
+    }
+  }
+}
+
+config("no_symbols") {
+  if (!is_win) {
+    cflags = [ "-g0" ]
+  }
+}
diff --git a/build/config/crypto.gni b/build/config/crypto.gni
new file mode 100644
index 0000000..2cd72d3
--- /dev/null
+++ b/build/config/crypto.gni
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file declares build flags for the SSL library configuration.
+#
+# TODO(brettw) this should probably be moved to src/crypto or somewhere, and
+# the global build dependency on it should be removed.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+declare_args() {
+  # Use OpenSSL instead of NSS. This is used for all platforms but iOS. (See
+  # http://crbug.com/338886).
+  use_openssl = !is_ios
+}
+
+# True when we're using OpenSSL for representing certificates. When targeting
+# Android, the platform certificate library is used for certificate
+# verification. On other targets, this flag also enables OpenSSL for certificate
+# verification, but this configuration is unsupported.
+use_openssl_certs = is_android
+
+# True if NSS is used for certificate verification. Note that this is
+# independent from use_openssl. It is possible to use OpenSSL for the crypto
+# library, but NSS for the platform certificate library.
+use_nss_certs = false
diff --git a/build/config/features.gni b/build/config/features.gni
new file mode 100644
index 0000000..93b19dd
--- /dev/null
+++ b/build/config/features.gni
@@ -0,0 +1,204 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains Chrome-feature-related build flags (see ui.gni for
+# UI-related ones). These should theoretically be moved to the build files of
+# the features themselves.
+#
+# However, today we have many "bad" dependencies on some of these flags from,
+# e.g. base, so they need to be global to match the GYP configuration. Also,
+# anything that needs a grit define must be in either this file or ui.gni.
+#
+# PLEASE TRY TO AVOID ADDING FLAGS TO THIS FILE in cases where grit isn't
+# required. See the declare_args block of BUILDCONFIG.gn for advice on how
+# to set up feature flags.
+
+import("//build/config/chrome_build.gni")
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+declare_args() {
+  # Multicast DNS.
+  enable_mdns = is_win || is_linux
+
+  enable_plugins = !is_android && !is_ios
+
+  # Enables Native Client support.
+  # TODO(GYP): Get NaCl linking on other platforms.
+  # Also, see if we can always get rid of enable_nacl_untrusted and
+  # enable_pnacl and always build them if enable_nacl is true.
+  # The "is_nacl" part of the condition is needed to ensure that
+  # the untrusted code is built properly; arguably it should be
+  # guarded by "is_nacl" directly rather than enable_nacl_untrusted, but
+  # this will go away when Mac and Win are working and we can just use
+  # the commented out logic.
+  # Eventually we want this to be:
+  #   enable_nacl = !is_ios && !is_android
+  enable_nacl = (is_linux && !is_chromeos && current_cpu == "x64") || is_nacl
+  enable_nacl_untrusted = enable_nacl
+  enable_pnacl = enable_nacl_untrusted
+
+  # If debug_devtools is set to true, JavaScript files for DevTools are stored
+  # as is and loaded from disk. Otherwise, a concatenated file is stored in
+  # resources.pak. It is still possible to load JS files from disk by passing
+  # --debug-devtools cmdline switch.
+  debug_devtools = false
+
+  # Enables WebRTC.
+  # TODO(GYP) make mac and android work.
+  enable_webrtc = !is_ios && !is_mac && !is_android
+
+  # Enables the Media Router.
+  enable_media_router = !is_ios && !is_android
+
+  # Enables proprietary codecs and demuxers; e.g. H264, MOV, AAC, and MP3.
+  # Android OS includes support for proprietary codecs regardless of building
+  # Chromium or Google Chrome. We also ship Google Chrome and Chromecast with
+  # proprietary codecs.
+  # TODO(GYP) The GYP build has || chromecast==1 for this:
+  proprietary_codecs = is_android || is_chrome_branded
+
+  enable_configuration_policy = true
+
+  # Enables support for background apps.
+  enable_background = !is_ios && !is_android
+
+  enable_captive_portal_detection = !is_android && !is_ios
+
+  # Enables use of the session service, which is enabled by default.
+  # Android stores them separately on the Java side.
+  enable_session_service = !is_android && !is_ios
+
+  enable_plugin_installation = is_win || is_mac
+
+  enable_app_list = !is_ios && !is_android
+
+  enable_supervised_users = !is_ios
+
+  enable_autofill_dialog = !is_ios
+
+  enable_google_now = !is_ios && !is_android
+
+  enable_one_click_signin = is_win || is_mac || (is_linux && !is_chromeos)
+
+  enable_remoting = !is_ios && !is_android
+
+  # Enable hole punching for the protected video.
+  enable_video_hole = is_android
+
+  # Enables browser side Content Decryption Modules. Required for embedders
+  # (e.g. Android and ChromeCast) that use a browser side CDM.
+  enable_browser_cdms = is_android
+
+  # The safe_browsing_mode variable controls the build-time configuration of
+  # the Safe Browsing feature. It can be compiled at 4 different levels:
+  # 0 disables it, 1 enables it fully, 2 enables only the UI and reporting
+  # features for use with Data Saver on Mobile, and 3 enables extended mobile
+  # protection via an external API. When 3 is fully deployed, it will
+  # replace 2.
+  if (is_android) {
+    safe_browsing_mode = 2
+  } else if (is_ios) {
+    safe_browsing_mode = 0
+  } else {
+    safe_browsing_mode = 1
+  }
+}
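+
+# Any of the arguments declared above can be overridden per build on the gn
+# command line, for example (flag values shown are only illustrative):
+#
+#   gn gen out/Release --args="enable_webrtc=false safe_browsing_mode=0"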
+
+# Additional dependent variables -----------------------------------------------
+
+# Set the version of CLD.
+#   0: Don't specify the version. This option is for the Finch testing.
+#   1: Use only CLD1.
+#   2: Use only CLD2.
+if (is_android) {
+  cld_version = 1
+} else {
+  cld_version = 2
+}
+
+# libudev usage. This currently only affects the content layer.
+use_udev = is_linux
+
+# Enable the spell checker.
+enable_spellcheck = !is_android
+
+# Use the operating system's spellchecker rather than hunspell.
+use_platform_spellchecker = is_android || is_mac
+
+enable_pepper_cdms = enable_plugins && (is_linux || is_mac || is_win)
+
+# Enable basic printing support and UI.
+enable_basic_printing = !is_chromeos
+
+# Enable printing with print preview. It does not imply
+# enable_basic_printing. It's possible to build Chrome with preview only.
+enable_print_preview = !is_android
+
+# The seccomp-bpf sandbox is only supported on three architectures
+# currently.
+# Do not disable seccomp_bpf anywhere without talking to
+# security@chromium.org!
+use_seccomp_bpf = (is_linux || is_android) &&
+                  (current_cpu == "x86" || current_cpu == "x64" ||
+                   current_cpu == "arm" || current_cpu == "mipsel")
+
+# Enable notifications everywhere except iOS.
+enable_notifications = !is_ios
+
+# TODO(brettw) this should be moved to net and only dependents get this define.
+disable_ftp_support = is_ios
+
+enable_web_speech = !is_android && !is_ios
+
+use_dbus = is_linux
+
+enable_extensions = !is_android && !is_ios
+
+enable_task_manager = !is_ios && !is_android
+
+use_cups = is_desktop_linux || is_mac
+
+enable_themes = !is_android && !is_ios
+
+# TODO(scottmg) remove this when we've fixed printing.
+win_pdf_metafile_for_printing = true
+
+# Whether we are using the rlz library or not.  Platforms like Android send
+# rlz codes for searches but do not use the library.
+enable_rlz_support = is_win || is_mac || is_ios || is_chromeos
+enable_rlz = is_chrome_branded && enable_rlz_support
+
+enable_settings_app = enable_app_list && !is_chromeos
+
+enable_service_discovery = enable_mdns || is_mac
+
+enable_wifi_bootstrapping = is_win || is_mac
+
+# Image loader extension is enabled on ChromeOS only.
+enable_image_loader_extension = is_chromeos
+
+# Chrome OS: whether to also build the upcoming version of
+# ChromeVox, which can then be enabled via a command-line switch.
+enable_chromevox_next = false
+
+# Use brlapi from brltty for braille display support.
+use_brlapi = is_chromeos
+
+# Option controlling the use of GConf (the classic GNOME configuration
+# system).
+# TODO(GYP) also require !embedded to enable.
+use_gconf = is_linux && !is_chromeos
+
+# Hangout services is an extension that adds extra features to Hangouts.
+# For official GYP builds, this flag is set, it will likely need to be
+# parameterized in the future for a similar use.
+enable_hangout_services_extension = false
+
+# Whether to back up data before sync.
+enable_pre_sync_backup = is_win || is_mac || (is_linux && !is_chromeos)
+
+# WebVR support disabled until platform implementations have been added
+enable_webvr = false
diff --git a/build/config/fnl/BUILD.gn b/build/config/fnl/BUILD.gn
new file mode 100644
index 0000000..0eb35e3
--- /dev/null
+++ b/build/config/fnl/BUILD.gn
@@ -0,0 +1,48 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/features.gni")
+import("//build/config/sysroot.gni")
+import("//build/config/ui.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    ldflags += [ exec_script("../linux/sysroot_ld_path.py",
+                             [
+                               rebase_path("//build/linux/sysroot_ld_path.sh",
+                                           root_build_dir),
+                               sysroot,
+                             ],
+                             "value") ]
+  }
+
+  defines = [ "FNL_MUSL" ]
+
+  if (use_ozone) {
+    defines += [ "MESA_EGL_NO_X11_HEADERS" ]
+  }
+}
+
+config("fontconfig") {
+  libs = [ "fontconfig" ]
+}
+
+pkg_config("freetype2") {
+  packages = [ "freetype2" ]
+}
+
+if (use_evdev_gestures) {
+  pkg_config("libevdev-cros") {
+    packages = [ "libevdev-cros" ]
+  }
+
+  pkg_config("libgestures") {
+    packages = [ "libgestures" ]
+  }
+}
diff --git a/build/config/gcc/BUILD.gn b/build/config/gcc/BUILD.gn
new file mode 100644
index 0000000..47bcc0b
--- /dev/null
+++ b/build/config/gcc/BUILD.gn
@@ -0,0 +1,46 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This config causes functions not to be automatically exported from shared
+# libraries. By default, all symbols are exported but this means there are
+# lots of exports that slow everything down. In general we explicitly mark
+# which functions we want to export from components.
+#
+# Some third_party code assumes all functions are exported so this is separated
+# into its own config so such libraries can remove this config to make symbols
+# public again.
+#
+# See http://gcc.gnu.org/wiki/Visibility
+config("symbol_visibility_hidden") {
+  # Note that -fvisibility-inlines-hidden is set globally in the compiler
+  # config since that can almost always be applied.
+  cflags = [ "-fvisibility=hidden" ]
+}
+
+# Settings for executables and shared libraries.
+config("executable_ldconfig") {
+  if (is_android) {
+    ldflags = [
+      "-Bdynamic",
+      "-Wl,-z,nocopyreloc",
+    ]
+  } else {
+    # Android doesn't support rpath.
+    ldflags = [
+      # Want to pass "\$". GN will re-escape as required for ninja.
+      "-Wl,-rpath=\$ORIGIN/",
+      "-Wl,-rpath-link=",
+
+      # Newer binutils don't set DT_RPATH unless you disable "new" dtags
+      # and the new DT_RUNPATH doesn't work without --no-as-needed flag.
+      "-Wl,--disable-new-dtags",
+    ]
+  }
+}
+
+config("no_exceptions") {
+  no_exceptions_flags = [ "-fno-exceptions" ]
+  cflags_cc = no_exceptions_flags
+  cflags_objcc = no_exceptions_flags
+}
diff --git a/build/config/gcc/gcc_version.gni b/build/config/gcc/gcc_version.gni
new file mode 100644
index 0000000..6741e45
--- /dev/null
+++ b/build/config/gcc/gcc_version.gni
@@ -0,0 +1,26 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_android) {
+  gcc_version = 49
+} else if (current_toolchain == "//build/toolchain/cros:target" ||
+           current_toolchain == "//build/toolchain/linux:mipsel") {
+  gcc_version = exec_script("../../compiler_version.py",
+                            [
+                              "target",
+                              "compiler",
+                            ],
+                            "value")
+} else if (current_toolchain == "//build/toolchain/linux:x64" ||
+           current_toolchain == "//build/toolchain/linux:x86") {
+  # These are both the same and just use the default gcc on the system.
+  gcc_version = exec_script("../../compiler_version.py",
+                            [
+                              "host",
+                              "compiler",
+                            ],
+                            "value")
+} else {
+  gcc_version = 0
+}
diff --git a/build/config/ios/BUILD.gn b/build/config/ios/BUILD.gn
new file mode 100644
index 0000000..0292315
--- /dev/null
+++ b/build/config/ios/BUILD.gn
@@ -0,0 +1,15 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/config/ios/ios_sdk.gni")
+
+config("sdk") {
+  common_flags = [
+    "-stdlib=libc++",
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+}
diff --git a/build/config/ios/XcodeHarness/.gitignore b/build/config/ios/XcodeHarness/.gitignore
new file mode 100644
index 0000000..89c499e
--- /dev/null
+++ b/build/config/ios/XcodeHarness/.gitignore
@@ -0,0 +1,18 @@
+# Xcode
+.DS_Store
+build/
+*.pbxuser
+!default.pbxuser
+*.mode1v3
+!default.mode1v3
+*.mode2v3
+!default.mode2v3
+*.perspectivev3
+!default.perspectivev3
+*.xcworkspace
+!default.xcworkspace
+xcuserdata
+profile
+*.moved-aside
+DerivedData
+.idea/
diff --git a/build/config/ios/XcodeHarness/FakeMain.m b/build/config/ios/XcodeHarness/FakeMain.m
new file mode 100644
index 0000000..5ddbd34
--- /dev/null
+++ b/build/config/ios/XcodeHarness/FakeMain.m
@@ -0,0 +1,10 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#import <Foundation/Foundation.h>
+
+int main(int argc, char* argv[]) {
+  NSCAssert(NO, @"Placeholder for Xcode. Should never be run");
+  return EXIT_FAILURE;
+}
diff --git a/build/config/ios/find_signing_identity.py b/build/config/ios/find_signing_identity.py
new file mode 100644
index 0000000..2fe67f9
--- /dev/null
+++ b/build/config/ios/find_signing_identity.py
@@ -0,0 +1,36 @@
+#!/usr/bin/python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+import re
+
+def ListIdentities():
+  return subprocess.check_output([
+    '/usr/bin/env',
+    'xcrun',
+    'security',
+    'find-identity',
+    '-v',
+    '-p',
+    'codesigning',
+  ]).strip()
+
+
+def FindValidIdentity():
+  lines = ListIdentities().splitlines()
+  # Look for something like "2) XYZ "iPhone Developer: Name (ABC)""
+  exp = re.compile(r'.*\) ([A-F0-9]*)(.*)')
+  for line in lines:
+    res = exp.match(line)
+    if res is None:
+      continue
+    if "iPhone Developer: Google Development" in res.group(2):
+      return res.group(1)
+  return ""
+
+
+if __name__ == '__main__':
+  print FindValidIdentity()
diff --git a/build/config/ios/ios_app.py b/build/config/ios/ios_app.py
new file mode 100644
index 0000000..b8bf544
--- /dev/null
+++ b/build/config/ios/ios_app.py
@@ -0,0 +1,137 @@
+#!/usr/bin/python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
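+# Example invocations (paths and names are illustrative only; the sub-commands
+# correspond to the argparse sub-parsers registered in Main below):
+#
+#   ios_app.py structure -d out/Debug -n MyApp
+#   ios_app.py plist -i MyApp/Info.plist -o out/Debug/plist/MyApp
+#   ios_app.py codesign -p out/Debug/MyApp.app -i <identity> -e MyApp.xcent
+#   ios_app.py dsym -e out/Debug/MyApp.app/MyApp -o out/Debug/MyApp.app.dSYM
+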
+import argparse
+import os
+import errno
+import subprocess
+import sys
+
+PLUTIL = [
+  '/usr/bin/env',
+  'xcrun',
+  'plutil'
+]
+
+def MakeDirectories(path):
+  try:
+    os.makedirs(path)
+  except OSError as exc:
+    if exc.errno == errno.EEXIST and os.path.isdir(path):
+      return 0
+    else:
+      return -1
+
+  return 0
+
+
+def ProcessInfoPlist(args):
+  output_plist_file = os.path.abspath(os.path.join(args.output, 'Info.plist'))
+
+  if MakeDirectories(os.path.dirname(output_plist_file)) == -1:
+    return -1
+
+  return subprocess.check_call(PLUTIL + [
+    '-convert',
+    'binary1',
+    '-o',
+    output_plist_file,
+    '--',
+    args.input,
+  ])
+
+
+def PerformCodeSigning(args):
+  return subprocess.check_call([
+    '/usr/bin/env',
+    'xcrun',
+    'codesign',
+    '--entitlements',
+    args.entitlements_path,
+    '--sign',
+    args.identity,
+    '-f',
+    args.application_path,
+  ])
+
+def GenerateDSYM(args):
+  return subprocess.check_call([
+    '/usr/bin/env',
+    'xcrun',
+    'dsymutil',
+    '-o',
+    args.output,
+    args.executable_path
+  ])
+
+
+def GenerateProjectStructure(args):
+  application_path = os.path.join(args.dir, args.name + ".app")
+  return MakeDirectories(application_path)
+
+
+def Main():
+  parser = argparse.ArgumentParser(description='A script that aids in '
+                                   'the creation of an iOS application')
+
+  subparsers = parser.add_subparsers()
+
+  # Plist Parser
+
+  plist_parser = subparsers.add_parser('plist',
+                                       help='Process the Info.plist')
+  plist_parser.set_defaults(func=ProcessInfoPlist)
+  
+  plist_parser.add_argument('-i', dest='input', help='The input plist path')
+  plist_parser.add_argument('-o', dest='output', help='The output plist dir')
+
+  # Directory Structure Parser
+
+  dir_struct_parser = subparsers.add_parser('structure',
+                      help='Creates the directory of an iOS application')
+
+  dir_struct_parser.set_defaults(func=GenerateProjectStructure)
+
+  dir_struct_parser.add_argument('-d', dest='dir', help='Out directory')
+  dir_struct_parser.add_argument('-n', dest='name', help='App name')
+
+  # Code Signing
+
+  code_signing_parser = subparsers.add_parser('codesign',
+                        help='Code sign the specified application')
+
+  code_signing_parser.set_defaults(func=PerformCodeSigning)
+
+  code_signing_parser.add_argument('-p', dest='application_path', required=True,
+                                   help='The application path')
+  code_signing_parser.add_argument('-i', dest='identity', required=True,
+                                   help='The code signing identity to use')
+  code_signing_parser.add_argument('-e', dest='entitlements_path',
+                                   required=True,
+                                   help='The path to the entitlements .xcent')
+
+  # dSYM Generation
+
+  dsym_generation_parser = subparsers.add_parser('dsym',
+                        help='Generate a .dSYM file for an executable')
+
+  dsym_generation_parser.set_defaults(func=GenerateDSYM)
+
+  dsym_generation_parser.add_argument('-e', dest='executable_path',
+                                      required=True,
+                                      help='The executable path')
+  dsym_generation_parser.add_argument('-o', dest='output',
+                                      required=True,
+                                      help='The output file name')
+
+  # Engage!
+
+  args = parser.parse_args()
+  
+  return args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/build/config/ios/ios_sdk.gni b/build/config/ios/ios_sdk.gni
new file mode 100644
index 0000000..65654a5
--- /dev/null
+++ b/build/config/ios/ios_sdk.gni
@@ -0,0 +1,58 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # SDK path to use. When empty this will use the default SDK based on the
+  # value of use_ios_simulator.
+  ios_sdk_path = ""
+
+  # Set to true when targeting a simulator build on iOS. False means that the
+  # target is for running on the device. The default value is to use the
+  # Simulator except when targeting GYP's Xcode builds (for compat with the
+  # existing GYP build).
+  use_ios_simulator = true
+
+  # Version of iOS that we're targeting.
+  ios_deployment_target = "6.0"
+
+  # The iOS Code signing identity to use
+  ios_code_signing_identity = ""
+
+  # The path to the iOS device SDK.
+  ios_device_sdk_path = ""
+
+  # The path to the iOS simulator SDK.
+  ios_simulator_sdk_path = ""
+}
+
+if (ios_device_sdk_path == "") {
+  _ios_device_sdk_result =
+      exec_script("ios_sdk.py", [ "iphoneos" ], "list lines")
+  ios_device_sdk_path = _ios_device_sdk_result[0]
+}
+
+if (ios_simulator_sdk_path == "") {
+  _ios_sim_sdk_result =
+      exec_script("ios_sdk.py", [ "iphonesimulator" ], "list lines")
+  ios_simulator_sdk_path = _ios_sim_sdk_result[0]
+}
+
+# Compute default target.
+if (use_ios_simulator) {
+  ios_sdk_path = ios_simulator_sdk_path
+} else {
+  ios_sdk_path = ios_device_sdk_path
+}
+
+if (use_ios_simulator) {
+  # Always disable code signing on the simulator
+  ios_code_signing_identity = ""
+} else {
+  # If an identity is not provided, look for one on the host
+  if (ios_code_signing_identity == "") {
+    _ios_identities = exec_script("find_signing_identity.py", 
+                                  [], "list lines")
+    ios_code_signing_identity = _ios_identities[0]
+  }
+}
diff --git a/build/config/ios/ios_sdk.py b/build/config/ios/ios_sdk.py
new file mode 100644
index 0000000..dfec4db
--- /dev/null
+++ b/build/config/ios/ios_sdk.py
@@ -0,0 +1,19 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import subprocess
+import sys
+
+# This script returns the path to the SDK of the given type. Pass the type of
+# SDK you want, which is typically "iphoneos" or "iphonesimulator".
+#
+# In the GYP build, this is done inside GYP itself based on the SDKROOT
+# variable.
+
+if len(sys.argv) != 2:
+  print "Takes one arg (SDK to find)"
+  sys.exit(1)
+
+print subprocess.check_output(['xcodebuild', '-version', '-sdk',
+                               sys.argv[1], 'Path']).strip()
diff --git a/build/config/ios/lldb_start_commands.txt b/build/config/ios/lldb_start_commands.txt
new file mode 100644
index 0000000..42e0b14
--- /dev/null
+++ b/build/config/ios/lldb_start_commands.txt
@@ -0,0 +1,4 @@
+breakpoint set --name UIApplicationMain
+breakpoint set --name objc_exception_throw
+continue
+script print "........ Debugger break on main() ........"
diff --git a/build/config/ios/rules.gni b/build/config/ios/rules.gni
new file mode 100644
index 0000000..429c153
--- /dev/null
+++ b/build/config/ios/rules.gni
@@ -0,0 +1,234 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+ios_app_script = "//build/config/ios/ios_app.py"
+
+template("code_sign_ios") {
+  assert(defined(invoker.entitlements_path),
+         "The path to the entitlements .xcent file")
+  assert(defined(invoker.identity),
+         "The code signing identity")
+  assert(defined(invoker.application_path),
+         "The application to code sign")
+  assert(defined(invoker.deps))
+
+  action(target_name) {
+    sources = [
+      invoker.entitlements_path,
+    ]
+
+    _application_path = invoker.application_path
+
+    script = ios_app_script
+
+    outputs = [
+      "$_application_path/_CodeSignature/CodeResources"
+    ]
+
+    args = [
+      "codesign",
+      "-p",
+      rebase_path(invoker.application_path, root_build_dir),
+      "-i",
+      invoker.identity,
+      "-e",
+      rebase_path(invoker.entitlements_path, root_build_dir),
+    ]
+
+    deps = invoker.deps
+  }
+}
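+
+# A typical invocation might look like the following (all values are
+# illustrative; the ios_app template below wires this up automatically):
+#
+#   code_sign_ios("my_app_codesign") {
+#     entitlements_path = "//build/config/ios/MyApp.xcent"
+#     identity = ios_code_signing_identity
+#     application_path = "$root_build_dir/MyApp.app"
+#     deps = [ ":my_app_copy" ]
+#   }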
+
+template("xcode_harness_ios") {
+  assert(defined(invoker.deps),
+         "The dependencies must be specified")
+  assert(defined(invoker.app_bundle),
+         "The app bundle must be defined")
+  assert(defined(invoker.app_name),
+         "The application name must be defined")
+  app_name = invoker.app_name
+
+  xcode_project_gen_target_name = app_name + "_xcode"
+  copy(xcode_project_gen_target_name) {
+    sources = [
+      "//build/config/ios/XcodeHarness/FakeMain.m",
+      "//build/config/ios/XcodeHarness/Harness.xcodeproj",
+    ]
+    outputs = [
+      "$root_build_dir/$xcode_project_gen_target_name/{{source_file_part}}",
+    ]
+  }
+
+  bundle_copy_gen_target_name = app_name + "_bundle_copy"
+  copy(bundle_copy_gen_target_name) {
+    sources = [
+      invoker.app_bundle
+    ]
+    outputs = [
+      "$root_build_dir/$xcode_project_gen_target_name/Application",
+    ]
+
+    deps = invoker.deps
+  }
+
+  group(target_name) {
+    deps = [
+      ":$xcode_project_gen_target_name",
+      ":$bundle_copy_gen_target_name",
+    ]
+  }
+}
+
+template("resource_copy_ios") {
+  assert(defined(invoker.resources),
+         "The source list of resources to copy over")
+  assert(defined(invoker.bundle_directory),
+         "The directory within the bundle to place the sources in")
+  assert(defined(invoker.app_name),
+         "The name of the application")
+
+  _bundle_directory = invoker.bundle_directory
+  _app_name = invoker.app_name
+  _resources = invoker.resources
+
+  copy(target_name) {
+    set_sources_assignment_filter([])
+    sources = _resources
+    outputs = [ "$root_build_dir/$_app_name.app/$_bundle_directory/{{source_file_part}}" ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
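+
+# For example (hypothetical paths), to copy images into MyApp.app/assets:
+#
+#   resource_copy_ios("my_app_assets") {
+#     app_name = "MyApp"
+#     bundle_directory = "assets"
+#     resources = [ "//myapp/assets/icon.png" ]
+#   }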
+
+template("ios_app") {
+
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+  assert(defined(invoker.info_plist),
+         "The application plist file must be specified for $target_name")
+  assert(defined(invoker.app_name),
+         "The name of iOS application for $target_name")
+  assert(defined(invoker.entitlements_path),
+         "The entitlements path must be specified for $target_name")
+  assert(defined(invoker.code_signing_identity),
+         "The entitlements path must be specified for $target_name")
+
+  # We just create a variable so we can use the same value in interpolation.
+  app_name = invoker.app_name
+
+  # Generate the project structure
+
+  struct_gen_target_name = target_name + "_struct"
+
+  action(struct_gen_target_name) {
+
+    script = ios_app_script
+
+    sources = []
+    outputs = [ "$root_build_dir/$app_name.app" ]
+
+    args = [
+      "structure",
+      "-d",
+      rebase_path(root_build_dir),
+      "-n",
+      app_name
+    ]
+
+  }
+
+  # Generate the executable
+
+  bin_gen_target_name = target_name + "_bin"
+
+  executable(bin_gen_target_name) {
+    libs = [
+      "AudioToolbox.framework",
+      "AVFoundation.framework",
+      "OpenGLES.framework",
+      "QuartzCore.framework",
+      "UIKit.framework",
+    ]
+    deps = invoker.deps
+    output_name = app_name
+  }
+
+  # Process the Info.plist
+
+  plist_gen_target_name = target_name + "_plist"
+
+  action(plist_gen_target_name) {
+
+    script = ios_app_script
+
+    sources = [ invoker.info_plist ]
+    outputs = [ "$root_build_dir/plist/$app_name/Info.plist" ]
+
+    args = [
+      "plist",
+      "-i",
+      rebase_path(invoker.info_plist, root_build_dir),
+      "-o",
+      rebase_path("$root_build_dir/plist/$app_name"),
+    ]
+  }
+
+  # Copy the generated binaries and assets to their appropriate locations
+
+  copy_gen_target_name = target_name + "_copy"
+  copy(copy_gen_target_name) {
+    sources = [
+      "$root_build_dir/plist/$app_name/Info.plist",
+      "$root_build_dir/$app_name",
+    ]
+
+    outputs = [
+      "$root_build_dir/$app_name.app/{{source_file_part}}"
+    ]
+
+    deps = [
+      ":$struct_gen_target_name",
+      ":$bin_gen_target_name",
+      ":$plist_gen_target_name",
+    ]
+  }
+
+  # Generate the Xcode Harness for Profiling
+
+  xcode_harness_gen_target_name = app_name + "_harness"
+  xcode_harness_ios(xcode_harness_gen_target_name) {
+    app_bundle = "$root_build_dir/$app_name.app"
+    deps = [
+      ":$bin_gen_target_name",
+      ":$struct_gen_target_name",
+      ":$copy_gen_target_name",
+    ]
+  }
+
+  # Perform Code Signing
+
+  code_sign_gen_target_name = target_name + "_codesign"
+  code_sign_ios(code_sign_gen_target_name) {
+    entitlements_path = invoker.entitlements_path
+    identity = invoker.code_signing_identity
+    application_path = "$root_build_dir/$app_name.app"
+    deps = [ ":$copy_gen_target_name" ]
+  }
+
+  # Top level group
+
+  group(target_name) {
+    # Skip code signing if no identity is provided. This is useful for simulator
+    # builds
+    deps = [ ":$xcode_harness_gen_target_name" ]
+    if (invoker.code_signing_identity == "") {
+      deps += [ ":$copy_gen_target_name" ]
+    } else {
+      deps += [ ":$code_sign_gen_target_name" ]
+    }
+  }
+
+}
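+
+# A minimal application target using this template could look like the
+# following sketch (all names and paths are hypothetical):
+#
+#   ios_app("my_app") {
+#     app_name = "MyApp"
+#     info_plist = "Info.plist"
+#     entitlements_path = "Entitlements.xcent"
+#     code_signing_identity = ios_code_signing_identity
+#     deps = [ ":my_app_lib" ]
+#   }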
diff --git a/build/config/linux/BUILD.gn b/build/config/linux/BUILD.gn
new file mode 100644
index 0000000..71a73b2
--- /dev/null
+++ b/build/config/linux/BUILD.gn
@@ -0,0 +1,62 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+import("//build/config/features.gni")
+import("//build/config/sysroot.gni")
+import("//build/config/ui.gni")
+
+config("sdk") {
+  if (sysroot != "") {
+    cflags = [ "--sysroot=" + sysroot ]
+    ldflags = [ "--sysroot=" + sysroot ]
+
+    # Need to get some linker flags out of the sysroot.
+    ldflags += [ exec_script("sysroot_ld_path.py",
+                             [
+                               rebase_path("//build/linux/sysroot_ld_path.sh",
+                                           root_build_dir),
+                               sysroot,
+                             ],
+                             "value") ]
+  }
+}
+
+config("fontconfig") {
+  libs = [ "fontconfig" ]
+}
+
+pkg_config("freetype2") {
+  packages = [ "freetype2" ]
+}
+
+config("x11") {
+  libs = [
+    "X11",
+    "Xcomposite",
+    "Xcursor",
+    "Xdamage",
+    "Xext",
+    "Xfixes",
+    "Xi",
+    "Xrender",
+    "Xtst",
+  ]
+}
+
+config("xrandr") {
+  libs = [ "Xrandr" ]
+}
+
+config("xinerama") {
+  libs = [ "Xinerama" ]
+}
+
+config("xcomposite") {
+  libs = [ "Xcomposite" ]
+}
+
+config("xext") {
+  libs = [ "Xext" ]
+}
diff --git a/build/config/linux/gtk/BUILD.gn b/build/config/linux/gtk/BUILD.gn
new file mode 100644
index 0000000..9c9c696
--- /dev/null
+++ b/build/config/linux/gtk/BUILD.gn
@@ -0,0 +1,45 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+assert(is_linux, "This file should only be referenced on Linux")
+
+# Depend on //build/config/linux/gtk to use GTK.
+#
+# GN doesn't check visibility for configs so we give this an obviously internal
+# name to discourage random targets from accidentally depending on this and
+# bypassing the GTK target's visibility.
+pkg_config("gtk_internal_config") {
+  # Gtk requires gmodule, but it does not list it as a dependency in some
+  # misconfigured systems.
+  packages = [
+    "gmodule-2.0",
+    "gtk+-2.0",
+    "gthread-2.0",
+  ]
+}
+
+# Basically no parts of Chrome should depend on GTK. To prevent accidents, the
+# parts that explicitly need GTK are whitelisted on this target.
+group("gtk") {
+  visibility = [
+    "//chrome/browser/ui/libgtk2ui",
+    "//gpu/gles2_conform_support:gles2_conform_test_windowless",
+    "//remoting/host",
+    "//remoting/host/it2me:remote_assistance_host",
+    "//remoting/host:remoting_me2me_host_static",
+  ]
+  public_configs = [ ":gtk_internal_config" ]
+}
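+
+# A target on the whitelist above would then pull GTK in via, e.g.:
+#
+#   deps += [ "//build/config/linux/gtk" ]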
+
+# Depend on "gtkprint" to get this.
+pkg_config("gtkprint_internal_config") {
+  packages = [ "gtk+-unix-print-2.0" ]
+}
+
+group("gtkprint") {
+  visibility = [ "//chrome/browser/ui/libgtk2ui" ]
+  public_configs = [ ":gtkprint_internal_config" ]
+}
diff --git a/build/config/linux/pkg-config.py b/build/config/linux/pkg-config.py
new file mode 100644
index 0000000..fadcc0b
--- /dev/null
+++ b/build/config/linux/pkg-config.py
@@ -0,0 +1,200 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import subprocess
+import sys
+import re
+from optparse import OptionParser
+
+# This script runs pkg-config, optionally filtering out some results, and
+# returns the result.
+#
+# The result will be [ <includes>, <cflags>, <libs>, <lib_dirs>, <ldflags> ]
+# where each member is itself a list of strings.
+#
+# You can filter out matches using "-v <regexp>" where all results from
+# pkgconfig matching the given regular expression will be ignored. You can
+# specify more than one regular expression by specifying "-v" more than once.
+#
+# You can specify a sysroot using "-s <sysroot>" where sysroot is the absolute
+# system path to the sysroot used for compiling. This script will attempt to
+# generate correct paths for the sysroot.
+#
+# When using a sysroot, you must also specify the architecture via
+# "-a <arch>" where arch is either "x86" or "x64".
+#
+# Additionally, you can specify the option --atleast-version. This will skip
+# the normal outputting of a dictionary and instead print true or false,
+# depending on the return value of pkg-config for the given package.
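+#
+# Example invocation (sysroot path, architecture, filter, and package are all
+# illustrative):
+#   python pkg-config.py -s /path/to/sysroot -a x64 -v pangoft2 gtk+-2.0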
+
+# If this is run on non-Linux platforms, just return nothing and indicate
+# success. This allows us to "kind of emulate" a Linux build from other
+# platforms.
+if sys.platform.find("linux") == -1:
+  print "[[],[],[],[],[]]"
+  sys.exit(0)
+
+
+def SetConfigPath(options):
+  """Set the PKG_CONFIG_PATH environment variable.
+  This takes into account any sysroot and architecture specification from the
+  options on the given command line."""
+
+  sysroot = options.sysroot
+  if not sysroot:
+    sysroot = ""
+
+  # Compute the library path name based on the architecture.
+  arch = options.arch
+  if sysroot and not arch:
+    print "You must specify an architecture via -a if using a sysroot."
+    sys.exit(1)
+  if arch == 'x64':
+    libpath = 'lib64'
+  else:
+    libpath = 'lib'
+
+  # Add the sysroot path to the environment's PKG_CONFIG_PATH
+  config_path = sysroot + '/usr/' + libpath + '/pkgconfig'
+  config_path += ':' + sysroot + '/usr/share/pkgconfig'
+  if 'PKG_CONFIG_PATH' in os.environ:
+    os.environ['PKG_CONFIG_PATH'] += ':' + config_path
+  else:
+    os.environ['PKG_CONFIG_PATH'] = config_path
+
+
+def GetPkgConfigPrefixToStrip(args):
+  """Returns the prefix from pkg-config where packages are installed.
+  This returned prefix is the one that should be stripped from the beginning of
+  directory names to take into account sysroots."""
+  # Some sysroots, like the Chromium OS ones, may generate paths that are not
+  # relative to the sysroot. For example,
+  # /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all
+  # paths relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr)
+  # instead of relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+  # To support this correctly, it's necessary to extract the prefix to strip
+  # from pkg-config's |prefix| variable.
+  prefix = subprocess.check_output(["pkg-config", "--variable=prefix"] + args,
+      env=os.environ).strip()
+  if prefix.endswith('/usr'):
+    return prefix[:-4]
+  return prefix
+
+
+def MatchesAnyRegexp(flag, list_of_regexps):
+  """Returns true if the first argument matches any regular expression in the
+  given list."""
+  for regexp in list_of_regexps:
+    if regexp.search(flag) != None:
+      return True
+  return False
+
+
+def RewritePath(path, strip_prefix, sysroot):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(strip_prefix):
+      path = path[len(strip_prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+parser = OptionParser()
+parser.add_option('-p', action='store', dest='pkg_config', type='string',
+                  default='pkg-config')
+parser.add_option('-v', action='append', dest='strip_out', type='string')
+parser.add_option('-s', action='store', dest='sysroot', type='string')
+parser.add_option('-a', action='store', dest='arch', type='string')
+parser.add_option('--atleast-version', action='store',
+                  dest='atleast_version', type='string')
+parser.add_option('--libdir', action='store_true', dest='libdir')
+(options, args) = parser.parse_args()
+
+# Make a list of regular expressions to strip out.
+strip_out = []
+if options.strip_out != None:
+  for regexp in options.strip_out:
+    strip_out.append(re.compile(regexp))
+
+SetConfigPath(options)
+if options.sysroot:
+  prefix = GetPkgConfigPrefixToStrip(args)
+else:
+  prefix = ''
+
+if options.atleast_version:
+  # When asking for the return value, just run pkg-config and print the return
+  # value, no need to do other work.
+  if not subprocess.call([options.pkg_config,
+                          "--atleast-version=" + options.atleast_version] +
+                          args,
+                         env=os.environ):
+    print "true"
+  else:
+    print "false"
+  sys.exit(0)
+
+if options.libdir:
+  try:
+    libdir = subprocess.check_output([options.pkg_config,
+                                      "--variable=libdir"] +
+                                     args,
+                                     env=os.environ)
+  except:
+    print "Error from pkg-config."
+    sys.exit(1)
+  sys.stdout.write(libdir.strip())
+  sys.exit(0)
+
+try:
+  flag_string = subprocess.check_output(
+      [ options.pkg_config, "--cflags", "--libs-only-l", "--libs-only-L" ] +
+      args, env=os.environ)
+  # For now just split on spaces to get the args out. This will break if
+  # pkgconfig returns quoted things with spaces in them, but that doesn't seem
+  # to happen in practice.
+  all_flags = flag_string.strip().split(' ')
+except:
+  print "Could not run pkg-config."
+  sys.exit(1)
+
+
+sysroot = options.sysroot
+if not sysroot:
+  sysroot = ''
+
+includes = []
+cflags = []
+libs = []
+lib_dirs = []
+ldflags = []
+
+for flag in all_flags[:]:
+  if len(flag) == 0 or MatchesAnyRegexp(flag, strip_out):
+    continue;
+
+  if flag[:2] == '-l':
+    libs.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:2] == '-L':
+    lib_dirs.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:2] == '-I':
+    includes.append(RewritePath(flag[2:], prefix, sysroot))
+  elif flag[:3] == '-Wl':
+    ldflags.append(flag)
+  elif flag == '-pthread':
+    # Many libs specify "-pthread" which we don't need since we always include
+    # this anyway. Removing it here prevents a bunch of duplicate inclusions on
+    # the command line.
+    pass
+  else:
+    cflags.append(flag)
+
+# Output a GN-compatible list of lists in the order [includes, cflags, libs,
+# lib_dirs, ldflags]. The JSON formatter prints GN compatible lists when
+# everything is a list of strings.
+print json.dumps([includes, cflags, libs, lib_dirs, ldflags])
diff --git a/build/config/linux/pkg_config.gni b/build/config/linux/pkg_config.gni
new file mode 100644
index 0000000..34ed1af
--- /dev/null
+++ b/build/config/linux/pkg_config.gni
@@ -0,0 +1,83 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+# Defines a config specifying the result of running pkg-config for the given
+# packages. Put the package names you want to query in the "packages" variable
+# inside the template invocation.
+#
+# You can also add defines via the "defines" variable. This can be useful for
+# passing defines that the library expects users of its headers to set.
+#
+# Example:
+#   pkg_config("mything") {
+#     packages = [ "mything1", "mything2" ]
+#     defines = [ "ENABLE_AWESOME" ]
+#   }
+#
+# You can also use "extra args" to filter out results (see pkg-config.py):
+#   extra_args = [ "-v, "foo" ]
+# To ignore libs and ldflags (only cflags/defines will be set, which is useful
+# when doing manual dynamic linking), set:
+#   ignore_libs = true
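+#
+# For example (illustrative package name):
+#   pkg_config("glib") {
+#     packages = [ "glib-2.0" ]
+#     ignore_libs = true
+#   }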
+
+declare_args() {
+  # A pkg-config wrapper to call instead of trying to find and call the right
+  # pkg-config directly. Wrappers like this are common in cross-compilation
+  # environments.
+  # Leaving it blank defaults to searching PATH for 'pkg-config' and relying on
+  # the sysroot mechanism to find the right .pc files.
+  pkg_config = ""
+}
+
+pkg_config_script = "//build/config/linux/pkg-config.py"
+
+# Define the args we pass to the pkg-config script for other build files that
+# need to invoke it manually.
+if (sysroot != "") {
+  # Pass the sysroot if we're using one (it requires the CPU arch also).
+  pkg_config_args = [
+    "-s",
+    sysroot,
+    "-a",
+    current_cpu,
+  ]
+} else if (pkg_config != "") {
+  pkg_config_args = [
+    "-p",
+    pkg_config,
+  ]
+} else {
+  pkg_config_args = []
+}
+
+template("pkg_config") {
+  assert(defined(invoker.packages),
+         "Variable |packages| must be defined to be a list in pkg_config.")
+  config(target_name) {
+    args = pkg_config_args + invoker.packages
+    if (defined(invoker.extra_args)) {
+      args += invoker.extra_args
+    }
+
+    pkgresult = exec_script(pkg_config_script, args, "value")
+    include_dirs = pkgresult[0]
+    cflags = pkgresult[1]
+
+    if (!defined(invoker.ignore_libs) || !invoker.ignore_libs) {
+      libs = pkgresult[2]
+      lib_dirs = pkgresult[3]
+      ldflags = pkgresult[4]
+    }
+
+    if (defined(invoker.defines)) {
+      defines = invoker.defines
+    }
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+  }
+}
diff --git a/build/config/linux/sysroot_ld_path.py b/build/config/linux/sysroot_ld_path.py
new file mode 100644
index 0000000..4bce7ee
--- /dev/null
+++ b/build/config/linux/sysroot_ld_path.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file takes two arguments, the relative location of the shell script that
+# does the checking, and the name of the sysroot.
+
+# TODO(brettw) the build/linux/sysroot_ld_path.sh script should be rewritten in
+# Python in this file.
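+#
+# Example invocation (illustrative paths):
+#   sysroot_ld_path.py build/linux/sysroot_ld_path.sh /path/to/sysroot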
+
+import subprocess
+import sys
+
+if len(sys.argv) != 3:
+  print "Need two arguments"
+  sys.exit(1)
+
+result = subprocess.check_output([sys.argv[1], sys.argv[2]]).strip()
+
+print '"' + result + '"'
diff --git a/build/config/locales.gni b/build/config/locales.gni
new file mode 100644
index 0000000..a628007
--- /dev/null
+++ b/build/config/locales.gni
@@ -0,0 +1,118 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Note: keep in sync with below.
+locales = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en-GB",
+  "en-US",
+  "es-419",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt-BR",
+  "pt-PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh-CN",
+  "zh-TW",
+]
+
+# Same as the locales list but in the format Mac expects for output files:
+# it uses underscores instead of hyphens, and "en" instead of "en-US".
+locales_as_mac_outputs = [
+  "am",
+  "ar",
+  "bg",
+  "bn",
+  "ca",
+  "cs",
+  "da",
+  "de",
+  "el",
+  "en_GB",
+  "en",
+  "es_419",
+  "es",
+  "et",
+  "fa",
+  "fi",
+  "fil",
+  "fr",
+  "gu",
+  "he",
+  "hi",
+  "hr",
+  "hu",
+  "id",
+  "it",
+  "ja",
+  "kn",
+  "ko",
+  "lt",
+  "lv",
+  "ml",
+  "mr",
+  "ms",
+  "nb",
+  "nl",
+  "pl",
+  "pt_BR",
+  "pt_PT",
+  "ro",
+  "ru",
+  "sk",
+  "sl",
+  "sr",
+  "sv",
+  "sw",
+  "ta",
+  "te",
+  "th",
+  "tr",
+  "uk",
+  "vi",
+  "zh_CN",
+  "zh_TW",
+]
diff --git a/build/config/mac/BUILD.gn b/build/config/mac/BUILD.gn
new file mode 100644
index 0000000..132e12c
--- /dev/null
+++ b/build/config/mac/BUILD.gn
@@ -0,0 +1,34 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+
+config("sdk") {
+  common_flags = [
+    "-stdlib=libc++",
+  ]
+
+  cflags = common_flags
+  ldflags = common_flags
+}
+
+# On Mac, this is used for everything except static libraries.
+config("mac_dynamic_flags") {
+  ldflags = [
+    "-Wl,-search_paths_first",
+    "-L.",
+
+    # Path for loading shared libraries for unbundled binaries.
+    "-Wl,-rpath,@loader_path/.",
+
+    # Path for loading shared libraries for bundled binaries. Get back from
+    # Binary.app/Contents/MacOS.
+    "-Wl,-rpath,@loader_path/../../..",
+  ]
+}
+
+# On Mac, this is used only for executables.
+config("mac_executable_flags") {
+  ldflags = [ "-Wl,-pie" ]  # Position independent.
+}
diff --git a/build/config/mac/mac_app.py b/build/config/mac/mac_app.py
new file mode 100644
index 0000000..909fa58
--- /dev/null
+++ b/build/config/mac/mac_app.py
@@ -0,0 +1,113 @@
+#!/usr/bin/python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import os
+import errno
+import subprocess
+import sys
+
+PLUTIL = [
+  '/usr/bin/env',
+  'xcrun',
+  'plutil'
+]
+
+IBTOOL = [
+  '/usr/bin/env',
+  'xcrun',
+  'ibtool',
+]
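+
+# Example invocations (illustrative paths):
+#   mac_app.py structure -d out/Release -n MyApp
+#   mac_app.py plist -i path/to/Info.plist -o out/Release/plist/MyApp
+#   mac_app.py nib -i MainMenu.xib -o out/Release/MyApp.app/Contents/Resources \
+#       -m MyApp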
+
+
+def MakeDirectories(path):
+  try:
+    os.makedirs(path)
+  except OSError as exc:
+    if exc.errno == errno.EEXIST and os.path.isdir(path):
+      return 0
+    else:
+      return -1
+
+  return 0
+
+
+def ProcessInfoPlist(args):
+  output_plist_file = os.path.abspath(os.path.join(args.output, 'Info.plist'))
+  return subprocess.check_call( PLUTIL + [
+    '-convert',
+    'binary1',
+    '-o',
+    output_plist_file,
+    '--',
+    args.input,
+  ])
+
+
+def ProcessNIB(args):
+  output_nib_file = os.path.join(os.path.abspath(args.output),
+      "%s.nib" % os.path.splitext(os.path.basename(args.input))[0])
+
+  return subprocess.check_call(IBTOOL + [
+    '--module',
+    args.module,
+    '--auto-activate-custom-fonts',
+    '--target-device',
+    'mac',
+    '--compile',
+    output_nib_file,
+    os.path.abspath(args.input),
+  ])
+
+
+def GenerateProjectStructure(args):
+  application_path = os.path.join( args.dir, args.name + ".app", "Contents" )
+  return MakeDirectories( application_path )
+
+
+def Main():
+  parser = argparse.ArgumentParser(description='A script that aids in '
+                                   'the creation of a Mac application')
+
+  subparsers = parser.add_subparsers()
+
+  # Plist Parser
+
+  plist_parser = subparsers.add_parser('plist',
+                                       help='Process the Info.plist')
+  plist_parser.set_defaults(func=ProcessInfoPlist)
+  
+  plist_parser.add_argument('-i', dest='input', help='The input plist path')
+  plist_parser.add_argument('-o', dest='output', help='The output plist dir')
+
+  # NIB Parser
+
+  plist_parser = subparsers.add_parser('nib',
+                                       help='Process a NIB file')
+  plist_parser.set_defaults(func=ProcessNIB)
+  
+  plist_parser.add_argument('-i', dest='input', help='The input nib path')
+  plist_parser.add_argument('-o', dest='output', help='The output nib dir')
+  plist_parser.add_argument('-m', dest='module', help='The module name')
+
+  # Directory Structure Parser
+
+  dir_struct_parser = subparsers.add_parser('structure',
+                      help='Creates the directory structure of a Mac application')
+
+  dir_struct_parser.set_defaults(func=GenerateProjectStructure)
+
+  dir_struct_parser.add_argument('-d', dest='dir', help='Out directory')
+  dir_struct_parser.add_argument('-n', dest='name', help='App name')
+
+  # Engage!
+
+  args = parser.parse_args()
+
+  return args.func(args)
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/build/config/mac/mac_sdk.gni b/build/config/mac/mac_sdk.gni
new file mode 100644
index 0000000..54b68c4
--- /dev/null
+++ b/build/config/mac/mac_sdk.gni
@@ -0,0 +1,41 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # Minimum supported version of the Mac SDK.
+  mac_sdk_min = "10.8"
+
+  # Path to a specific version of the Mac SDK, not including a slash at
+  # the end. If empty, the path to the lowest version greater than or equal to
+  # mac_sdk_min is used.
+  mac_sdk_path = ""
+}
+
+find_sdk_args = [ "--print_sdk_path" ]
+if (is_chrome_branded && is_official_build) {
+  find_sdk_args += [
+    "--verify",
+    mac_sdk_min,
+    "--sdk_path=" + mac_sdk_path,
+  ]
+} else {
+  find_sdk_args += [ mac_sdk_min ]
+}
+
+# The tool will print the SDK path on the first line, and the version on the
+# second line.
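+# For example (illustrative): [ "/path/to/MacOSX10.9.sdk", "10.9" ]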
+find_sdk_lines =
+    exec_script("//build/mac/find_sdk.py", find_sdk_args, "list lines")
+mac_sdk_version = find_sdk_lines[1]
+if (mac_sdk_path == "") {
+  # TODO(brettw) http://crbug.com/335325  when everybody moves to XCode 5 we
+  # can remove the --print_sdk_path argument to find_sdk and instead just use
+  # the following two lines to get the path. Although it looks longer here, it
+  # saves forking a process in find_sdk.py so will be faster.
+  #mac_sdk_root = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX"
+  #mac_sdk_path = mac_sdk_root + mac_sdk_version + ".sdk"
+  mac_sdk_path = find_sdk_lines[0]
+}
diff --git a/build/config/mac/rules.gni b/build/config/mac/rules.gni
new file mode 100644
index 0000000..66f0de4
--- /dev/null
+++ b/build/config/mac/rules.gni
@@ -0,0 +1,220 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+mac_app_script = "//build/config/mac/mac_app.py"
+
+template("code_sign_mac") {
+  assert(defined(invoker.entitlements_path),
+         "The path to the entitlements .xcent file")
+  assert(defined(invoker.identity),
+         "The code signing identity")
+  assert(defined(invoker.application_path),
+         "The application to code sign")
+  assert(defined(invoker.deps))
+
+  action(target_name) {
+    sources = [
+      invoker.entitlements_path,
+    ]
+
+    _application_path = invoker.application_path
+
+    script = mac_app_script
+
+    outputs = [
+      "$_application_path/_CodeSignature/CodeResources"
+    ]
+
+    args = [
+      "codesign",
+      "-p",
+      rebase_path(invoker.application_path, root_build_dir),
+      "-i",
+      invoker.identity,
+      "-e",
+      rebase_path(invoker.entitlements_path, root_build_dir),
+    ]
+
+    deps = invoker.deps
+  }
+}
+
+template("process_nibs_mac") {
+  assert(defined(invoker.sources),
+         "The nib sources must be specified")
+  assert(defined(invoker.module),
+         "The nib module must be specified")
+  assert(defined(invoker.output_dir),
+         "The output directory must be specified")
+
+  action_foreach(target_name) {
+    sources = invoker.sources
+
+    script = mac_app_script
+
+    invoker_out_dir = invoker.output_dir
+
+    outputs = [
+      "$root_build_dir/$invoker_out_dir/{{source_name_part}}.nib"
+    ]
+
+    args = [
+      "nib",
+      "-i",
+      "{{source}}",
+      "-o",
+      invoker_out_dir,
+      "-m",
+      invoker.module,
+    ]
+  }
+}
+
+template("resource_copy_mac") {
+  assert(defined(invoker.resources),
+         "The source list of resources to copy over")
+  assert(defined(invoker.bundle_directory),
+         "The directory within the bundle to place the sources in")
+  assert(defined(invoker.app_name),
+         "The name of the application")
+
+  _bundle_directory = invoker.bundle_directory
+  _app_name = invoker.app_name
+  _resources = invoker.resources
+
+  copy(target_name) {
+    set_sources_assignment_filter([])
+    sources = _resources
+    outputs = [ "$root_build_dir/$_app_name.app/$_bundle_directory/Contents/Resources/{{source_file_part}}" ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
+
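+# Bundles an executable built from |deps|, the given Info.plist, and compiled
+# XIB files into a .app directory under the build output.
+#
+# Example (illustrative):
+#   mac_app("my_app") {
+#     app_name = "MyApp"
+#     info_plist = "Info.plist"
+#     xibs = [ "MainMenu.xib" ]
+#     deps = [ ":my_app_lib" ]
+#   }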
+template("mac_app") {
+
+  assert(defined(invoker.deps),
+         "Dependencies must be specified for $target_name")
+  assert(defined(invoker.info_plist),
+         "The application plist file must be specified for $target_name")
+  assert(defined(invoker.app_name),
+         "The name of Mac application for $target_name")
+  assert(defined(invoker.xibs),
+         "The list of XIB files must be specified for $target_name")
+  # assert(defined(invoker.entitlements_path),
+  #        "The entitlements path must be specified for $target_name")
+  # assert(defined(invoker.code_signing_identity),
+  #        "The entitlements path must be specified for $target_name")
+
+  # We just create a variable so we can use the same in interpolation
+  app_name = invoker.app_name
+
+  # Generate the project structure
+
+  struct_gen_target_name = target_name + "_struct"
+
+  action(struct_gen_target_name) {
+
+    script = mac_app_script
+
+    sources = []
+    outputs = [ "$root_build_dir/$app_name.app" ]
+
+    args = [
+      "structure",
+      "-d",
+      rebase_path(root_build_dir),
+      "-n",
+      app_name
+    ]
+
+  }
+
+  # Generate the executable
+
+  bin_gen_target_name = target_name + "_bin"
+
+  executable(bin_gen_target_name) {
+    deps = invoker.deps
+    output_name = app_name
+  }
+
+  # Process the Info.plist
+
+  plist_gen_target_name = target_name + "_plist"
+
+  action(plist_gen_target_name) {
+
+    script = mac_app_script
+
+    sources = [ invoker.info_plist ]
+    outputs = [ "$root_build_dir/plist/$app_name/Info.plist" ]
+
+    args = [
+      "plist",
+      "-i",
+      rebase_path(invoker.info_plist, root_build_dir),
+      "-o",
+      rebase_path("$root_build_dir/plist/$app_name"),
+    ]
+  }
+
+  # Copy the generated binaries and assets to their appropriate locations
+
+  copy_plist_gen_target_name = target_name + "_plist_copy"
+  copy(copy_plist_gen_target_name) {
+    sources = [
+      "$root_build_dir/plist/$app_name/Info.plist",
+    ]
+
+    outputs = [
+      "$root_build_dir/$app_name.app/Contents/{{source_file_part}}"
+    ]
+
+    deps = [
+      ":$plist_gen_target_name",
+    ]
+  }
+
+  copy_bin_target_name = target_name + "_bin_copy"
+  copy(copy_bin_target_name) {
+    sources = [
+      "$root_build_dir/$app_name",
+    ]
+
+    outputs = [
+      "$root_build_dir/$app_name.app/Contents/MacOS/{{source_file_part}}"
+    ]
+
+    deps = [
+      ":$bin_gen_target_name",
+    ]
+  }
+
+  copy_xib_target_name = target_name + "_xib_copy"
+  process_nibs_mac(copy_xib_target_name) {
+    sources = invoker.xibs
+    module = app_name
+    output_dir = "$app_name.app/Contents/Resources"
+  }
+
+  copy_all_target_name = target_name + "_all_copy"
+  group(copy_all_target_name) {
+    deps = [
+      ":$struct_gen_target_name",
+      ":$copy_plist_gen_target_name",
+      ":$copy_bin_target_name",
+      ":$copy_xib_target_name",
+    ]
+  }
+
+  # Top level group
+
+  group(target_name) {
+    deps = [ ":$copy_all_target_name" ]
+  }
+
+}
diff --git a/build/config/mips.gni b/build/config/mips.gni
new file mode 100644
index 0000000..1b40657
--- /dev/null
+++ b/build/config/mips.gni
@@ -0,0 +1,43 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (current_cpu == "mipsel") {
+  declare_args() {
+    # MIPS arch variant. Possible values are:
+    #   "r1"
+    #   "r2"
+    #   "r6"
+    mips_arch_variant = "r1"
+
+    # MIPS DSP ASE revision. Possible values are:
+    #   0: unavailable
+    #   1: revision 1
+    #   2: revision 2
+    mips_dsp_rev = 0
+
+    # MIPS floating-point ABI. Possible values are:
+    #   "hard": sets the GCC -mhard-float option.
+    #   "soft": sets the GCC -msoft-float option.
+    mips_float_abi = "hard"
+
+    # MIPS32 floating-point register width. Possible values are:
+    #   "fp32": sets the GCC -mfp32 option.
+    #   "fp64": sets the GCC -mfp64 option.
+    #   "fpxx": sets the GCC -mfpxx option.
+    mips_fpu_mode = "fp32"
+  }
+} else if (current_cpu == "mips64el") {
+  # MIPS arch variant. Possible values are:
+  #   "r2"
+  #   "r6"
+  if (is_android) {
+    declare_args() {
+      mips_arch_variant = "r6"
+    }
+  } else {
+    declare_args() {
+      mips_arch_variant = "r2"
+    }
+  }
+}
diff --git a/build/config/sanitizers/BUILD.gn b/build/config/sanitizers/BUILD.gn
new file mode 100644
index 0000000..8996951
--- /dev/null
+++ b/build/config/sanitizers/BUILD.gn
@@ -0,0 +1,59 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+# Contains the dependencies needed for sanitizers to link into executables and
+# shared_libraries. Unconditionally depend upon this target as it is empty if
+# |is_asan|, |is_lsan|, |is_tsan|, |is_msan| and |use_custom_libcxx| are false.
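+#
+# Example (illustrative):
+#   executable("my_binary") {
+#     deps = [ "//build/config/sanitizers:deps" ]
+#   }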
+group("deps") {
+  deps = [
+    "//third_party/instrumented_libraries:deps",
+  ]
+  if (is_asan || is_lsan || is_tsan || is_msan) {
+    public_configs = [ ":sanitizer_options_link_helper" ]
+    deps += [ ":options_sources" ]
+  }
+  if (use_custom_libcxx) {
+    deps += [ "//buildtools/third_party/libc++:libcxx_proxy" ]
+  }
+}
+
+config("sanitizer_options_link_helper") {
+  ldflags = [ "-Wl,-u_sanitizer_options_link_helper" ]
+  if (is_asan) {
+    ldflags += [ "-fsanitize=address" ]
+  }
+  if (is_lsan) {
+    ldflags += [ "-fsanitize=leak" ]
+  }
+  if (is_tsan) {
+    ldflags += [ "-fsanitize=thread" ]
+  }
+  if (is_msan) {
+    ldflags += [ "-fsanitize=memory" ]
+  }
+}
+
+source_set("options_sources") {
+  visibility = [
+    ":deps",
+    "//:gn_visibility",
+  ]
+  sources = [
+    "//build/sanitizers/sanitizer_options.cc",
+  ]
+
+  if (is_asan) {
+    sources += [ "//build/sanitizers/asan_suppressions.cc" ]
+  }
+
+  if (is_lsan) {
+    sources += [ "//build/sanitizers/lsan_suppressions.cc" ]
+  }
+
+  if (is_tsan) {
+    sources += [ "//build/sanitizers/tsan_suppressions.cc" ]
+  }
+}
diff --git a/build/config/sanitizers/sanitizers.gni b/build/config/sanitizers/sanitizers.gni
new file mode 100644
index 0000000..a7b9658
--- /dev/null
+++ b/build/config/sanitizers/sanitizers.gni
@@ -0,0 +1,20 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Use libc++ (buildtools/third_party/libc++ and
+  # buildtools/third_party/libc++abi) instead of stdlibc++ as standard library.
+  # This is intended to be used for instrumented builds.
+  use_custom_libcxx = (is_asan && is_linux) || is_tsan || is_msan
+
+  # Track where uninitialized memory originates from. From fastest to slowest:
+  # 0 - no tracking, 1 - track only the initial allocation site, 2 - track the
+  # chain of stores leading from allocation site to use site.
+  msan_track_origins = 2
+
+  # Use dynamic libraries instrumented by one of the sanitizers instead of the
+  # standard system libraries. Set this flag to download prebuilt binaries from
+  # GCS.
+  use_prebuilt_instrumented_libraries = false
+}
diff --git a/build/config/sysroot.gni b/build/config/sysroot.gni
new file mode 100644
index 0000000..5bce02e
--- /dev/null
+++ b/build/config/sysroot.gni
@@ -0,0 +1,60 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This header file defines the "sysroot" variable which is the absolute path
+# of the sysroot. If no sysroot applies, the variable will be an empty string.
+
+import("//build/config/chrome_build.gni")
+
+declare_args() {
+  # The absolute path of the sysroot that is applied when compiling using
+  # the target toolchain.
+  target_sysroot = ""
+}
+
+if (current_toolchain == default_toolchain && target_sysroot != "") {
+  sysroot = target_sysroot
+} else if (is_android) {
+  import("//build/config/android/config.gni")
+  if (current_cpu == "x86") {
+    sysroot = rebase_path("$android_ndk_root/$x86_android_sysroot_subdir")
+  } else if (current_cpu == "arm") {
+    sysroot = rebase_path("$android_ndk_root/$arm_android_sysroot_subdir")
+  } else if (current_cpu == "mipsel") {
+    sysroot = rebase_path("$android_ndk_root/$mips_android_sysroot_subdir")
+  } else if (current_cpu == "x64") {
+    sysroot = rebase_path("$android_ndk_root/$x86_64_android_sysroot_subdir")
+  } else if (current_cpu == "arm64") {
+    sysroot = rebase_path("$android_ndk_root/$arm64_android_sysroot_subdir")
+  } else if (current_cpu == "mips64") {
+    sysroot = rebase_path("$android_ndk_root/$mips64_android_sysroot_subdir")
+  } else {
+    sysroot = ""
+  }
+} else if (is_linux && is_chrome_branded && is_official_build && !is_chromeos) {
+  # For official builds, use the sysroot checked into the internal source repo
+  # so that the builds work on older versions of Linux.
+  if (current_cpu == "x64") {
+    sysroot = rebase_path("//build/linux/debian_wheezy_amd64-sysroot")
+  } else if (current_cpu == "x86") {
+    sysroot = rebase_path("//build/linux/debian_wheezy_i386-sysroot")
+  } else {
+    # Any other builds don't use a sysroot.
+    sysroot = ""
+  }
+} else if (is_linux && !is_chromeos) {
+  if (current_cpu == "mipsel") {
+    sysroot = rebase_path("//mipsel-sysroot/sysroot")
+  } else {
+    sysroot = ""
+  }
+} else if (is_mac) {
+  import("//build/config/mac/mac_sdk.gni")
+  sysroot = mac_sdk_path
+} else if (is_ios) {
+  import("//build/config/ios/ios_sdk.gni")
+  sysroot = ios_sdk_path
+} else {
+  sysroot = ""
+}
diff --git a/build/config/templates/templates.gni b/build/config/templates/templates.gni
new file mode 100644
index 0000000..ae00fca
--- /dev/null
+++ b/build/config/templates/templates.gni
@@ -0,0 +1,56 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Declare a target for processing a template.
+#
+# Variables
+#   input: The template file to be processed.
+#   output: Where to save the result.
+#   variables: Space-separated variable assignments to make available to the
+#     template processing environment, e.g. "name=foo color=red".
+#
+# Example
+#   file_template("chrome_shell_manifest") {
+#     input = "shell/java/AndroidManifest.xml"
+#     output = "$target_gen_dir/AndroidManifest.xml"
+#     variables = "app_name=chrome_shell app_version=1"
+#   }
+template("file_template") {
+  set_sources_assignment_filter([])
+
+  if (defined(invoker.testonly)) {
+    testonly = invoker.testonly
+  }
+
+  assert(defined(invoker.input),
+      "The input file must be specified")
+  assert(defined(invoker.output),
+      "The output file must be specified")
+  assert(defined(invoker.variables),
+      "The variable used for substitution in templates must be specified")
+
+  variables = invoker.variables
+
+  action(target_name) {
+    if(defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//build/android/gyp/jinja_template.py"
+    depfile = "$target_gen_dir/$target_name.d"
+
+    sources = [ invoker.input ]
+    outputs = [ invoker.output, depfile ]
+
+    args = [
+      "--inputs",
+      rebase_path(invoker.input, root_build_dir),
+      "--output",
+      rebase_path(invoker.output, root_build_dir),
+      "--depfile",
+      rebase_path(depfile, root_build_dir),
+      "--variables=${variables}"
+    ]
+  }
+}
diff --git a/build/config/ui.gni b/build/config/ui.gni
new file mode 100644
index 0000000..c2dff4a
--- /dev/null
+++ b/build/config/ui.gni
@@ -0,0 +1,68 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file contains UI-related build flags. It should theoretically be in the
+# src/ui directory and only things that depend on the ui module should get the
+# definitions.
+#
+# However, today we have many "bad" dependencies on some of these flags from,
+# e.g. base, so they need to be global.
+#
+# See also build/config/features.gni
+
+declare_args() {
+  # Indicates if Ash is enabled. Ash is the Aura Shell which provides a
+  # desktop-like environment for Aura. Requires use_aura = true
+  use_ash = is_win || is_linux
+
+  # Indicates if Ozone is enabled. Ozone is a low-level library layer for Linux
+  # that does not require X11.
+  use_ozone = false
+
+  # Indicates if GLFW is enabled. GLFW is an abstraction layer for the
+  # windowing system and OpenGL rendering, providing cross-platform support
+  # for creating windows and OpenGL surfaces and contexts, and handling
+  # window system events and input.
+  use_glfw = false
+
+  # Support ChromeOS touchpad gestures with ozone.
+  use_evdev_gestures = false
+
+  # Indicates if Aura is enabled. Aura is a low-level windowing library, sort
+  # of a replacement for GDI or GTK.
+  use_aura = is_win || is_linux
+
+  # True means the UI is built using the "views" framework.
+  toolkit_views = is_mac || is_win || is_chromeos || use_aura
+
+  # Whether the entire browser uses toolkit-views on Mac instead of Cocoa.
+  mac_views_browser = false
+
+  # Whether we should use glib, a low level C utility library.
+  use_glib = is_linux && !use_ozone
+}
+
+# Additional dependent variables -----------------------------------------------
+#
+# These variables depend on other variables and can't be set externally.
+
+use_cairo = false
+use_pango = false
+
+# Use GPU accelerated cross process image transport by default on linux builds
+# with the Aura window manager.
+ui_compositor_image_transport = use_aura && is_linux
+
+use_default_render_theme = use_aura || is_linux
+
+# Indicates if the UI toolkit depends on X11.
+use_x11 = is_linux && !use_ozone && !use_glfw
+
+use_ozone_evdev = use_ozone
+
+use_clipboard_aurax11 = is_linux && use_aura && use_x11
+
+enable_hidpi = is_mac || is_chromeos || is_win || is_linux
+
+enable_topchrome_md = false
diff --git a/build/config/win/BUILD.gn b/build/config/win/BUILD.gn
new file mode 100644
index 0000000..261a1ac
--- /dev/null
+++ b/build/config/win/BUILD.gn
@@ -0,0 +1,181 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/win/visual_studio_version.gni")
+
+# Compiler setup for the Windows SDK. Applied to all targets.
+config("sdk") {
+  # The include path is the stuff returned by the script.
+  #include_dirs = msvc_config[0]  TODO(brettw) make this work.
+
+  defines = [
+    "_ATL_NO_OPENGL",
+    "_WINDOWS",
+    "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS",
+    "NTDDI_VERSION=0x06030000",
+    "PSAPI_VERSION=1",
+    "WIN32",
+    "_SECURE_ATL",
+
+    # This is required for ATL to use XP-safe versions of its functions.
+    "_USING_V110_SDK71_",
+  ]
+}
+
+# Sets the default Windows build version. This is separated because some
+# targets need to manually override it for their compiles.
+config("winver") {
+  defines = [
+    "_WIN32_WINNT=0x0603",
+    "WINVER=0x0603",
+  ]
+}
+
+# Linker flags for Windows SDK setup, this is applied only to EXEs and DLLs.
+config("sdk_link") {
+  if (current_cpu == "x64") {
+    ldflags = [ "/MACHINE:X64" ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x64",
+      "$visual_studio_path\VC\lib\amd64",
+      "$visual_studio_path\VC\atlmfc\lib\amd64",
+    ]
+  } else {
+    ldflags = [
+      "/MACHINE:X86",
+      "/SAFESEH",  # Not compatible with x64 so use only for x86.
+    ]
+    lib_dirs = [
+      "$windows_sdk_path\Lib\winv6.3\um\x86",
+      "$visual_studio_path\VC\lib",
+      "$visual_studio_path\VC\atlmfc\lib",
+    ]
+    if (!is_asan) {
+      ldflags += [ "/largeaddressaware" ]
+    }
+  }
+}
+
+# This default linker setup is provided separately from the SDK setup so
+# targets who want different library configurations can remove this and specify
+# their own.
+config("common_linker_setup") {
+  ldflags = [
+    "/FIXED:NO",
+    "/ignore:4199",
+    "/ignore:4221",
+    "/NXCOMPAT",
+
+    # Suggested by Microsoft Devrel to avoid
+    #   LINK : fatal error LNK1248: image size (80000000)
+    #   exceeds maximum allowable size (80000000)
+    # which started happening more regularly after VS2013 Update 4.
+    "/maxilksize:2147483647",
+  ]
+
+  # ASLR makes debugging with windbg difficult because Chrome.exe and
+  # Chrome.dll share the same base name. As result, windbg will name the
+  # Chrome.dll module like chrome_<base address>, where <base address>
+  # typically changes with each launch. This in turn means that breakpoints in
+  # Chrome.dll don't stick from one launch to the next. For this reason, we
+  # turn ASLR off in debug builds.
+  if (is_debug) {
+    ldflags += [ "/DYNAMICBASE:NO" ]
+  } else {
+    ldflags += [ "/DYNAMICBASE" ]
+  }
+
+  # Delay loaded DLLs.
+  ldflags += [
+    "/DELAYLOAD:dbghelp.dll",
+    "/DELAYLOAD:dwmapi.dll",
+    "/DELAYLOAD:shell32.dll",
+    "/DELAYLOAD:uxtheme.dll",
+  ]
+}
+
+# Subsystem --------------------------------------------------------------------
+
+# This is appended to the subsystem to specify a minimum version.
+if (current_cpu == "x64") {
+  # The number after the comma is the minimum required OS version.
+  # 5.02 = Windows Server 2003.
+  subsystem_version_suffix = ",5.02"
+} else {
+  # 5.01 = Windows XP.
+  subsystem_version_suffix = ",5.01"
+}
+
+config("console") {
+  ldflags = [ "/SUBSYSTEM:CONSOLE$subsystem_version_suffix" ]
+}
+config("windowed") {
+  ldflags = [ "/SUBSYSTEM:WINDOWS$subsystem_version_suffix" ]
+}
+
+# Incremental linking ----------------------------------------------------------
+
+incremental_linking_on_switch = [ "/INCREMENTAL" ]
+incremental_linking_off_switch = [ "/INCREMENTAL:NO" ]
+if (is_debug) {
+  default_incremental_linking_switch = incremental_linking_on_switch
+} else {
+  default_incremental_linking_switch = incremental_linking_off_switch
+}
+
+# Applies incremental linking or not depending on the current configuration.
+config("default_incremental_linking") {
+  ldflags = default_incremental_linking_switch
+}
+
+# Explicitly turn incremental linking on or off.
+config("incremental_linking") {
+  ldflags = incremental_linking_on_switch
+}
+config("no_incremental_linking") {
+  ldflags = incremental_linking_off_switch
+}
+
+# Some large modules can't handle incremental linking in some situations. This
+# config should be applied to large modules to turn off incremental linking
+# when it won't work.
+config("default_large_module_incremental_linking") {
+  if (symbol_level > 0 && (current_cpu == "x86" || !is_component_build)) {
+    # When symbols are on, things get so large that the tools fail due to the
+    # size of the .ilk files.
+    ldflags = incremental_linking_off_switch
+  } else {
+    # Otherwise just do the default incremental linking for this build type.
+    ldflags = default_incremental_linking_switch
+  }
+}
+
+# Character set ----------------------------------------------------------------
+
+# Not including this config means "ansi" (8-bit system codepage).
+config("unicode") {
+  defines = [
+    "_UNICODE",
+    "UNICODE",
+  ]
+}
+
+# Lean and mean ----------------------------------------------------------------
+
+# Some third party code might not compile with WIN32_LEAN_AND_MEAN so we have
+# to have a separate config for it. Remove this config from your target to
+# get the "bloaty and accomodating" version of windows.h.
+config("lean_and_mean") {
+  defines = [ "WIN32_LEAN_AND_MEAN" ]
+}
+
+# Nominmax --------------------------------------------------------------------
+
+# Some third party code defines NOMINMAX before including windows.h, which
+# then causes warnings when it's been previously defined on the command line.
+# For such targets, this config can be removed.
+
+config("nominmax") {
+  defines = [ "NOMINMAX" ]
+}
diff --git a/build/config/win/visual_studio_version.gni b/build/config/win/visual_studio_version.gni
new file mode 100644
index 0000000..6a2828c
--- /dev/null
+++ b/build/config/win/visual_studio_version.gni
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to Visual Studio. If empty, the default is used which is to use the
+  # automatic toolchain in depot_tools. If set, you must also set the
+  # visual_studio_version and wdk_path.
+  visual_studio_path = ""
+
+  # Version of Visual Studio pointed to by the visual_studio_path.
+  # Use "2013" for Visual Studio 2013, or "2013e" for the Express version.
+  visual_studio_version = ""
+
+  # Directory of the Windows driver kit. If visual_studio_path is empty, this
+  # will be auto-filled.
+  wdk_path = ""
+
+  # Full path to the Windows SDK, not including a backslash at the end.
+  # This value is the default location, override if you have a different
+  # installation location.
+  windows_sdk_path = "C:\Program Files (x86)\Windows Kits\8.1"
+}
+
+if (visual_studio_path == "") {
+  toolchain_data =
+      exec_script("../../vs_toolchain.py", [ "get_toolchain_dir" ], "scope")
+  visual_studio_path = toolchain_data.vs_path
+  windows_sdk_path = toolchain_data.sdk_path
+  visual_studio_version = toolchain_data.vs_version
+  wdk_path = toolchain_data.wdk_dir
+  visual_studio_runtime_dirs = toolchain_data.runtime_dirs
+} else {
+  assert(visual_studio_version != "",
+         "You must set the visual_studio_version if you set the path")
+  assert(wdk_path != "",
+         "You must set the wdk_path if you set the visual studio path")
+  visual_studio_runtime_dirs = []
+}
diff --git a/build/copy_test_data_ios.gypi b/build/copy_test_data_ios.gypi
new file mode 100644
index 0000000..576a0f2
--- /dev/null
+++ b/build/copy_test_data_ios.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to copy test data files into
+# an iOS app bundle. To use this the following variables need to be defined:
+#   test_data_files: list: paths to test data files or directories
+#   test_data_prefix: string: a directory prefix that will be prepended to each
+#                             output path.  Generally, this should be the base
+#                             directory of the gypi file containing the unittest
+#                             target (e.g. "base" or "chrome").
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_unittests',
+#   'conditions': [
+#     ['OS == "ios"', {
+#       'actions': [
+#         {
+#           'action_name': 'copy_test_data',
+#           'variables': {
+#             'test_data_files': [
+#               'path/to/datafile.txt',
+#               'path/to/data/directory/',
+#             ]
+#             'test_data_prefix' : 'prefix',
+#           },
+#           'includes': ['path/to/this/gypi/file'],
+#         },
+#       ],
+#     }],
+# }
+#
+
+{
+  'inputs': [
+    # The |-o <(test_data_prefix)| is ignored; it is there to work around a
+    # caching bug in gyp (https://code.google.com/p/gyp/issues/detail?id=112).
+    # It caches command output when the string is the same, so if two copy
+    # steps have the same relative paths, there can be bogus cache hits that
+    # cause compile failures unless something varies.
+    '<!@pymod_do_main(copy_test_data_ios -o <(test_data_prefix) --inputs <(test_data_files))',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(copy_test_data_ios -o <(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix) --outputs <(test_data_files))',
+  ],
+  'action': [
+    'python',
+    '<(DEPTH)/build/copy_test_data_ios.py',
+    '-o', '<(PRODUCT_DIR)/<(_target_name).app/<(test_data_prefix)',
+    '<@(_inputs)',
+  ],
+}
diff --git a/build/copy_test_data_ios.py b/build/copy_test_data_ios.py
new file mode 100755
index 0000000..6f0302f
--- /dev/null
+++ b/build/copy_test_data_ios.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copies test data files or directories into a given output directory."""
+
+import optparse
+import os
+import shutil
+import sys
+
+class WrongNumberOfArgumentsException(Exception):
+  pass
+
+def EscapePath(path):
+  """Returns a path with spaces escaped."""
+  return path.replace(" ", "\\ ")
+
+def ListFilesForPath(path):
+  """Returns a list of all the files under a given path."""
+  output = []
+  # Ignore revision control metadata directories.
+  if (os.path.basename(path).startswith('.git') or
+      os.path.basename(path).startswith('.svn')):
+    return output
+
+  # Files get returned without modification.
+  if not os.path.isdir(path):
+    output.append(path)
+    return output
+
+  # Directories get recursively expanded.
+  contents = os.listdir(path)
+  for item in contents:
+    full_path = os.path.join(path, item)
+    output.extend(ListFilesForPath(full_path))
+  return output
+
+def CalcInputs(inputs):
+  """Computes the full list of input files for a set of command-line arguments.
+  """
+  # |inputs| is a list of paths, which may be directories.
+  output = []
+  for input in inputs:
+    output.extend(ListFilesForPath(input))
+  return output
+
+def CopyFiles(relative_filenames, output_basedir):
+  """Copies files to the given output directory."""
+  for file in relative_filenames:
+    relative_dirname = os.path.dirname(file)
+    output_dir = os.path.join(output_basedir, relative_dirname)
+    output_filename = os.path.join(output_basedir, file)
+
+    # In cases where a directory has turned into a file or vice versa, delete it
+    # before copying it below.
+    if os.path.exists(output_dir) and not os.path.isdir(output_dir):
+      os.remove(output_dir)
+    if os.path.exists(output_filename) and os.path.isdir(output_filename):
+      shutil.rmtree(output_filename)
+
+    if not os.path.exists(output_dir):
+      os.makedirs(output_dir)
+    shutil.copy(file, output_filename)
+
+def DoMain(argv):
+  parser = optparse.OptionParser()
+  usage = 'Usage: %prog -o <output_dir> [--inputs] [--outputs] <input_files>'
+  parser.set_usage(usage)
+  parser.add_option('-o', dest='output_dir')
+  parser.add_option('--inputs', action='store_true', dest='list_inputs')
+  parser.add_option('--outputs', action='store_true', dest='list_outputs')
+  options, arglist = parser.parse_args(argv)
+
+  if len(arglist) == 0:
+    raise WrongNumberOfArgumentsException('<input_files> required.')
+
+  files_to_copy = CalcInputs(arglist)
+  escaped_files = [EscapePath(x) for x in CalcInputs(arglist)]
+  if options.list_inputs:
+    return '\n'.join(escaped_files)
+
+  if not options.output_dir:
+    raise WrongNumberOfArgumentsException('-o required.')
+
+  if options.list_outputs:
+    outputs = [os.path.join(options.output_dir, x) for x in escaped_files]
+    return '\n'.join(outputs)
+
+  CopyFiles(files_to_copy, options.output_dir)
+  return
+
+def main(argv):
+  try:
+    result = DoMain(argv[1:])
+  except WrongNumberOfArgumentsException, e:
+    print >>sys.stderr, e
+    return 1
+  if result:
+    print result
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/cp.py b/build/cp.py
new file mode 100755
index 0000000..0f32536
--- /dev/null
+++ b/build/cp.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Copy a file.
+
+This module works much like the cp posix command - it takes 2 arguments:
+(src, dst) and copies the file with path |src| to |dst|.
+"""
+
+import os
+import shutil
+import sys
+
+
+def Main(src, dst):
+  # Use copy instead of copyfile to ensure the executable bit is copied.
+  return shutil.copy(src, os.path.normpath(dst))
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
diff --git a/build/detect_host_arch.py b/build/detect_host_arch.py
new file mode 100755
index 0000000..19579eb
--- /dev/null
+++ b/build/detect_host_arch.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def HostArch():
+  """Returns the host architecture with a predictable string."""
+  host_arch = platform.machine()
+
+  # Convert machine type to format recognized by gyp.
+  if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+    host_arch = 'ia32'
+  elif host_arch in ['x86_64', 'amd64']:
+    host_arch = 'x64'
+  elif host_arch.startswith('arm'):
+    host_arch = 'arm'
+
+  # platform.machine is based on running kernel. It's possible to use 64-bit
+  # kernel with 32-bit userland, e.g. to give linker slightly more memory.
+  # Distinguish between different userland bitness by querying
+  # the python binary.
+  if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+    host_arch = 'ia32'
+
+  return host_arch
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return HostArch()
+
+if __name__ == '__main__':
+  print DoMain([])
diff --git a/build/dir_exists.py b/build/dir_exists.py
new file mode 100755
index 0000000..70d367e
--- /dev/null
+++ b/build/dir_exists.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+  sys.stdout.write(_is_dir(sys.argv[1]))
+  return 0
+
+def _is_dir(dir_name):
+  return str(os.path.isdir(dir_name))
+
+def DoMain(args):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return _is_dir(args[0])
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/download_gold_plugin.py b/build/download_gold_plugin.py
new file mode 100755
index 0000000..cd7ca41
--- /dev/null
+++ b/build/download_gold_plugin.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download LLVM gold plugin from google storage."""
+
+import json
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(CHROME_SRC, 'tools'))
+
+import find_depot_tools
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+
+LLVM_BUILD_PATH = os.path.join(CHROME_SRC, 'third_party', 'llvm-build',
+                               'Release+Asserts')
+CLANG_UPDATE_PY = os.path.join(CHROME_SRC, 'tools', 'clang', 'scripts',
+                               'update.py')
+CLANG_REVISION = os.popen(CLANG_UPDATE_PY + ' --print-revision').read().rstrip()
+
+CLANG_BUCKET = 'gs://chromium-browser-clang/Linux_x64'
+
+def main():
+  targz_name = 'llvmgold-%s.tgz' % CLANG_REVISION
+  remote_path = '%s/%s' % (CLANG_BUCKET, targz_name)
+
+  os.chdir(LLVM_BUILD_PATH)
+
+  subprocess.check_call(['python', GSUTIL_PATH,
+                         'cp', remote_path, targz_name])
+  subprocess.check_call(['tar', 'xzf', targz_name])
+  os.remove(targz_name)
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/download_nacl_toolchains.py b/build/download_nacl_toolchains.py
new file mode 100755
index 0000000..b99b940
--- /dev/null
+++ b/build/download_nacl_toolchains.py
@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Shim to run nacl toolchain download script only if there is a nacl dir."""
+
+import os
+import shutil
+import sys
+
+
+def Main(args):
+  # Exit early if disable_nacl=1.
+  if 'disable_nacl=1' in os.environ.get('GYP_DEFINES', ''):
+    return 0
+  script_dir = os.path.dirname(os.path.abspath(__file__))
+  src_dir = os.path.dirname(script_dir)
+  nacl_dir = os.path.join(src_dir, 'native_client')
+  nacl_build_dir = os.path.join(nacl_dir, 'build')
+  package_version_dir = os.path.join(nacl_build_dir, 'package_version')
+  package_version = os.path.join(package_version_dir, 'package_version.py')
+  if not os.path.exists(package_version):
+    print "Can't find '%s'" % package_version
+    print 'Presumably you are intentionally building without NativeClient.'
+    print 'Skipping NativeClient toolchain download.'
+    sys.exit(0)
+  sys.path.insert(0, package_version_dir)
+  import package_version
+
+  # BUG:
+  # We remove the --optional-pnacl argument and, in most cases, exclude the
+  # pnacl_newlib package from the download.  However, if the bot name is a
+  # pnacl sdk bot then we go ahead and download it.  This prevents increasing
+  # the gclient sync time for developers, or standard Chrome bots.
+  if '--optional-pnacl' in args:
+    args.remove('--optional-pnacl')
+    use_pnacl = False
+    buildbot_name = os.environ.get('BUILDBOT_BUILDERNAME', '')
+    if 'pnacl' in buildbot_name and 'sdk' in buildbot_name:
+      use_pnacl = True
+    if use_pnacl:
+      print '\n*** DOWNLOADING PNACL TOOLCHAIN ***\n'
+    else:
+      args = ['--exclude', 'pnacl_newlib'] + args
+
+  # Only download the ARM gcc toolchain if we are building for ARM
+  # TODO(olonho): we need to invent more reliable way to get build
+  # configuration info, to know if we're building for ARM.
+  if 'target_arch=arm' not in os.environ.get('GYP_DEFINES', ''):
+      args = ['--exclude', 'nacl_arm_newlib'] + args
+
+  package_version.main(args)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/download_sdk_extras.py b/build/download_sdk_extras.py
new file mode 100755
index 0000000..d7c5d6c
--- /dev/null
+++ b/build/download_sdk_extras.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Script to download sdk/extras packages on the bots from google storage.
+
+The script expects arguments that specify zips file in the google storage
+bucket named: <dir in SDK extras>_<package name>_<version>.zip. The file will
+be extracted in the android_tools/sdk/extras directory on the test bots. This
+script will not do anything for developers.
+
+TODO(navabi): Move this script (crbug.com/459819).
+"""
+
+import json
+import os
+import shutil
+import subprocess
+import sys
+import zipfile
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.abspath(os.path.join(SCRIPT_DIR, os.pardir))
+sys.path.insert(0, os.path.join(SCRIPT_DIR, 'android'))
+sys.path.insert(1, os.path.join(CHROME_SRC, 'tools'))
+
+from pylib import constants
+import find_depot_tools
+
+DEPOT_PATH = find_depot_tools.add_depot_tools_to_path()
+GSUTIL_PATH = os.path.join(DEPOT_PATH, 'gsutil.py')
+SDK_EXTRAS_BUCKET = 'gs://chrome-sdk-extras'
+SDK_EXTRAS_PATH = os.path.join(constants.ANDROID_SDK_ROOT, 'extras')
+SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
+                                    'android_sdk_extras.json')
+
+
+def clean_and_extract(dir_name, package_name, zip_file):
+  local_dir = '%s/%s/%s' % (SDK_EXTRAS_PATH, dir_name, package_name)
+  if os.path.exists(local_dir):
+    shutil.rmtree(local_dir)
+  local_zip = '%s/%s' % (SDK_EXTRAS_PATH, zip_file)
+  with zipfile.ZipFile(local_zip) as z:
+    z.extractall(path=SDK_EXTRAS_PATH)
+
+
+def main():
+  if not os.environ.get('CHROME_HEADLESS'):
+    # This is not a buildbot checkout.
+    return 0
+  # Update the android_sdk_extras.json file to update downloaded packages.
+  with open(SDK_EXTRAS_JSON_FILE) as json_file:
+    packages = json.load(json_file)
+  for package in packages:
+    local_zip = '%s/%s' % (SDK_EXTRAS_PATH, package['zip'])
+    if not os.path.exists(local_zip):
+      package_zip = '%s/%s' % (SDK_EXTRAS_BUCKET, package['zip'])
+      try:
+        subprocess.check_call(['python', GSUTIL_PATH, '--force-version', '4.7',
+                               'cp', package_zip, local_zip])
+      except subprocess.CalledProcessError:
+        print ('WARNING: Failed to download SDK packages. If this bot compiles '
+               'for Android, it may have errors.')
+        return 0
+    # Always clean dir and extract zip to ensure correct contents.
+    clean_and_extract(package['dir_name'], package['package'], package['zip'])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/env_dump.py b/build/env_dump.py
new file mode 100755
index 0000000..21edfe6
--- /dev/null
+++ b/build/env_dump.py
@@ -0,0 +1,56 @@
+#!/usr/bin/python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script can either source a file and dump the environment changes done by
+# it, or just simply dump the current environment as JSON into a file.
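+#
+# Example invocation (illustrative paths):
+#   env_dump.py --output-json env.json path/to/envsetup.sh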
+
+import json
+import optparse
+import os
+import pipes
+import subprocess
+import sys
+
+
+def main():
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--output-json',
+                    help='File to dump the environment as JSON into.')
+  parser.add_option(
+      '-d', '--dump-mode', action='store_true',
+      help='Dump the environment to sys.stdout and exit immediately.')
+
+  parser.disable_interspersed_args()
+  options, args = parser.parse_args()
+  if options.dump_mode:
+    if args or options.output_json:
+      parser.error('Cannot specify args or --output-json with --dump-mode.')
+    json.dump(dict(os.environ), sys.stdout)
+  else:
+    if not options.output_json:
+      parser.error('Requires --output-json option.')
+
+    envsetup_cmd = ' '.join(map(pipes.quote, args))
+    full_cmd = [
+        'bash', '-c',
+        '. %s > /dev/null; %s -d' % (envsetup_cmd, os.path.abspath(__file__))
+    ]
+    try:
+      output = subprocess.check_output(full_cmd)
+    except Exception as e:
+      sys.exit('Error running %s and dumping environment.' % envsetup_cmd)
+
+    env_diff = {}
+    new_env = json.loads(output)
+    for k, val in new_env.items():
+      if k == '_' or (k in os.environ and os.environ[k] == val):
+        continue
+      env_diff[k] = val
+    with open(options.output_json, 'w') as f:
+      json.dump(env_diff, f)
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/extract_from_cab.py b/build/extract_from_cab.py
new file mode 100755
index 0000000..080370c
--- /dev/null
+++ b/build/extract_from_cab.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Extracts a single file from a CAB archive."""
+
+import os
+import shutil
+import subprocess
+import sys
+import tempfile
+
+def run_quiet(*args):
+  """Run 'expand' suppressing noisy output. Returns returncode from process."""
+  popen = subprocess.Popen(args, stdout=subprocess.PIPE)
+  out, _ = popen.communicate()
+  if popen.returncode:
+    # expand emits errors to stdout, so if we fail, then print that out.
+    print out
+  return popen.returncode
+
+def main():
+  if len(sys.argv) != 4:
+    print 'Usage: extract_from_cab.py cab_path archived_file output_dir'
+    return 1
+
+  [cab_path, archived_file, output_dir] = sys.argv[1:]
+
+  # Expand.exe does its work in a fixed-named temporary directory created within
+  # the given output directory. This is a problem for concurrent extractions, so
+  # create a unique temp dir within the desired output directory to work around
+  # this limitation.
+  temp_dir = tempfile.mkdtemp(dir=output_dir)
+
+  try:
+    # Invoke the Windows expand utility to extract the file.
+    level = run_quiet('expand', cab_path, '-F:' + archived_file, temp_dir)
+    if level == 0:
+      # Move the output file into place, preserving expand.exe's behavior of
+      # paving over any preexisting file.
+      output_file = os.path.join(output_dir, archived_file)
+      try:
+        os.remove(output_file)
+      except OSError:
+        pass
+      os.rename(os.path.join(temp_dir, archived_file), output_file)
+  finally:
+    shutil.rmtree(temp_dir, True)
+
+  if level != 0:
+    return level
+
+  # The expand utility preserves the modification date and time of the archived
+  # file. Touch the extracted file. This helps build systems that compare the
+  # modification times of input and output files to determine whether to do an
+  # action.
+  os.utime(os.path.join(output_dir, archived_file), None)
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/filename_rules.gypi b/build/filename_rules.gypi
new file mode 100644
index 0000000..f67287f
--- /dev/null
+++ b/build/filename_rules.gypi
@@ -0,0 +1,106 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This gypi file defines the patterns used for determining whether a
+# file is excluded from the build on a given platform.  It is
+# included by common.gypi for chromium_code.
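+#
+# For example, on non-Windows platforms the rules below exclude files such as
+# foo_win.cc, win/foo.cc, and win_util.cc (illustrative file names).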
+
+{
+  'target_conditions': [
+    ['OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_win(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)win/'],
+                    ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
+    }],
+    ['OS!="mac" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)(cocoa|mac)/'] ],
+    }],
+    ['OS!="ios" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
+                    ['exclude', '(^|/)ios/'] ],
+    }],
+    ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '\\.mm?$' ] ],
+    }],
+    # Do not exclude the linux files on *BSD since most of them can be
+    # shared at this point.
+    # In case a file is not needed, it is going to be excluded later on.
+    # TODO(evan): the above is not correct; we shouldn't build _linux
+    # files on non-linux.
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)linux/'],
+      ],
+    }],
+    ['OS!="android" or _toolset=="host" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_android(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)android/'],
+      ],
+    }],
+    ['OS=="win" and >(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_posix(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)posix/'],
+      ],
+    }],
+    ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)chromeos/'],
+      ],
+    }],
+    ['>(nacl_untrusted_build)==0', {
+      'sources/': [
+        ['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
+      ],
+    }],
+    ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
+      'sources/': [
+        ['exclude', '_(x|x11)(_interactive_uitest|_unittest)?\\.(h|cc)$'],
+        ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
+        ['exclude', '(^|/)x11/'],
+        ['exclude', '(^|/)x/'],
+      ],
+    }],
+    ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_views(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aura(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)aura/'],
+      ]
+    }],
+    ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
+    }],
+    ['<(use_aura)==0 or OS!="linux" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_auralinux\\.(h|cc)$'] ]
+    }],
+    ['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'],
+                    ['exclude', '(^|/)ash/'],
+      ]
+    }],
+    ['<(use_ash)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ashwin\\.(h|cc)$'] ]
+    }],
+    ['<(use_ozone)==0 or >(nacl_untrusted_build)==1', {
+      'sources/': [ ['exclude', '_ozone(_browsertest|_unittest)?\\.(h|cc)$'] ]
+    }],
+    ['<(use_pango)==0', {
+      'sources/': [ ['exclude', '(^|_)pango(_util|_browsertest|_unittest)?\\.(h|cc)$'], ],
+    }],
+  ]
+}
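
A minimal sketch of how a couple of the exclusion patterns above behave, evaluated outside of GYP with Python's re module (GYP applies them to entries in 'sources' lists; the filenames here are made up):

    import re

    exclude_win = re.compile(r'_win(_browsertest|_unittest)?\.(h|cc)$')
    exclude_posix = re.compile(r'_posix(_unittest)?\.(h|cc)$')

    for path in ['lock_win.cc', 'lock_win_unittest.cc', 'lock_posix.cc', 'lock.cc']:
        # On non-Windows builds the first two would be excluded; on Windows
        # the third would be.
        print('%s: win=%s posix=%s' % (path,
                                       bool(exclude_win.search(path)),
                                       bool(exclude_posix.search(path))))
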
diff --git a/build/find_isolated_tests.py b/build/find_isolated_tests.py
new file mode 100755
index 0000000..c5b3ab7
--- /dev/null
+++ b/build/find_isolated_tests.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Scans build output directory for .isolated files, calculates their SHA1
+hashes, stores final list in JSON document and then removes *.isolated files
+found (to ensure no stale *.isolated stay around on the next build).
+
+Used to figure out what tests were build in isolated mode to trigger these
+tests to run on swarming.
+
+For more info see:
+https://sites.google.com/a/chromium.org/dev/developers/testing/isolated-testing
+"""
+
+import glob
+import hashlib
+import json
+import optparse
+import os
+import re
+import sys
+
+
+def hash_file(filepath):
+  """Calculates the hash of a file without reading it all in memory at once."""
+  digest = hashlib.sha1()
+  with open(filepath, 'rb') as f:
+    while True:
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      digest.update(chunk)
+  return digest.hexdigest()
+
+
+def main():
+  parser = optparse.OptionParser(
+      usage='%prog --build-dir <path> --output-json <path>',
+      description=sys.modules[__name__].__doc__)
+  parser.add_option(
+      '--build-dir',
+      help='Path to a directory to search for *.isolated files.')
+  parser.add_option(
+      '--output-json',
+      help='File to dump JSON results into.')
+
+  options, _ = parser.parse_args()
+  if not options.build_dir:
+    parser.error('--build-dir option is required')
+  if not options.output_json:
+    parser.error('--output-json option is required')
+
+  result = {}
+
+  # Get the file hash values and output the pair.
+  pattern = os.path.join(options.build_dir, '*.isolated')
+  for filepath in sorted(glob.glob(pattern)):
+    test_name = os.path.splitext(os.path.basename(filepath))[0]
+    if re.match(r'^.+?\.\d$', test_name):
+      # It's a split .isolated file, e.g. foo.0.isolated. Ignore these.
+      continue
+
+    # TODO(csharp): Remove deletion once the isolate tracked dependencies are
+    # inputs for the isolated files.
+    sha1_hash = hash_file(filepath)
+    os.remove(filepath)
+    result[test_name] = sha1_hash
+
+  with open(options.output_json, 'wb') as f:
+    json.dump(result, f)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
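
For reference, the JSON written via --output-json maps each .isolated basename to its SHA1 digest. A sketch of the shape, with made-up test names and hashes:

    example_output = {
        'base_unittests': 'da39a3ee5e6b4b0d3255bfef95601890afd80709',
        'net_unittests':  '356a192b7913b04c54574d18c28d46e6395428ab',
    }
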
diff --git a/build/gdb-add-index b/build/gdb-add-index
new file mode 100755
index 0000000..992ac16
--- /dev/null
+++ b/build/gdb-add-index
@@ -0,0 +1,162 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Saves the gdb index for a given binary and its shared library dependencies.
+#
+# This will run gdb index in parallel on a number of binaries using SIGUSR1
+# as the communication mechanism to simulate a semaphore. Because of the
+# nature of this technique, using "set -e" is very difficult. The SIGUSR1
+# terminates a "wait" with an error which we need to interpret.
+#
+# When modifying this code, most of the real logic is in the index_one_file
+# function. The rest is cleanup + semaphore plumbing.
+
+# Cleanup temp directory and ensure all child jobs are dead-dead.
+function on_exit {
+  trap "" EXIT USR1  # Avoid reentrancy.
+
+  local jobs=$(jobs -p)
+  if [ -n "$jobs" ]; then
+    echo -n "Killing outstanding index jobs..."
+    kill -KILL $(jobs -p)
+    wait
+    echo "done"
+  fi
+
+  if [ -f "$DIRECTORY" ]; then
+    echo -n "Removing temp directory $DIRECTORY..."
+    rm -rf $DIRECTORY
+    echo done
+  fi
+}
+
+# Add index to one binary.
+function index_one_file {
+  local file=$1
+  local basename=$(basename "$file")
+  local should_index="${SHOULD_INDEX}"
+
+  local readelf_out=$(${TOOLCHAIN_PREFIX}readelf -S "$file")
+  if [[ $readelf_out =~ "gdb_index" ]]; then
+    if [ "${REMOVE_INDEX}" = 1 ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --remove-section .gdb_index "$file"
+      echo "Removed index from $basename."
+    else
+      echo "Skipped $basename -- already contains index."
+      should_index=0
+    fi
+  fi
+
+  if [ "${should_index}" = 1 ]; then
+    local start=$(date +"%s%N")
+    echo "Adding index to $basename..."
+
+    ${TOOLCHAIN_PREFIX}gdb -batch "$file" -ex "save gdb-index $DIRECTORY" \
+      -ex "quit"
+    local index_file="$DIRECTORY/$basename.gdb-index"
+    if [ -f "$index_file" ]; then
+      ${TOOLCHAIN_PREFIX}objcopy --add-section .gdb_index="$index_file" \
+        --set-section-flags .gdb_index=readonly "$file" "$file"
+      local finish=$(date +"%s%N")
+      local elapsed=$(((finish - start)/1000000))
+      echo "   ...$basename indexed. [${elapsed}ms]"
+    else
+      echo "   ...$basename unindexable."
+    fi
+  fi
+}
+
+# Functions that, when combined, concurrently index all files in the
+# FILES_TO_INDEX array. The global FILES_TO_INDEX is declared in the main
+# body of the script.
+function async_index {
+  # Start a background subshell to run the index command.
+  {
+    index_one_file $1
+    kill -SIGUSR1 $$  # $$ resolves to the parent script.
+    exit 129  # See comment above wait loop at bottom.
+  } &
+}
+
+CUR_FILE_NUM=0
+function index_next {
+  if (( CUR_FILE_NUM >= ${#FILES_TO_INDEX[@]} )); then
+    return
+  fi
+
+  async_index "${FILES_TO_INDEX[CUR_FILE_NUM]}"
+  ((CUR_FILE_NUM += 1)) || true
+}
+
+
+########
+### Main body of the script.
+
+REMOVE_INDEX=0
+SHOULD_INDEX=1
+while getopts ":f:r" opt; do
+  case $opt in
+    f)
+      REMOVE_INDEX=1
+      shift
+      ;;
+    r)
+      REMOVE_INDEX=1
+      SHOULD_INDEX=0
+      shift
+      ;;
+    *)
+      echo "Invalid option: -$OPTARG" >&2
+      ;;
+  esac
+done
+
+if [[ ! $# == 1 ]]; then
+  echo "Usage: $0 [-f] [-r] path-to-binary"
+  echo "  -f forces replacement of an existing index."
+  echo "  -r removes the index section."
+  exit 1
+fi
+
+FILENAME="$1"
+if [[ ! -f "$FILENAME" ]]; then
+  echo "Path $FILENAME does not exist."
+  exit 1
+fi
+
+# Ensure we clean up on exit.
+trap on_exit EXIT
+
+# We're good to go! Create temp directory for index files.
+DIRECTORY=$(mktemp -d)
+echo "Made temp directory $DIRECTORY."
+
+# Create array with the filename and all shared libraries that
+# have the same dirname. The dirname is a signal that these
+# shared libraries were part of the same build as the binary.
+declare -a FILES_TO_INDEX=($FILENAME
+ $(ldd "$FILENAME" 2>/dev/null \
+  | grep $(dirname "$FILENAME") \
+  | sed "s/.*[ \t]\(.*\) (.*/\1/")
+)
+
+# Start concurrent indexing.
+trap index_next USR1
+
+# 4 is an arbitrary default. When changing, remember we are likely IO bound
+# so basing this off the number of cores is not sensible.
+INDEX_TASKS=${INDEX_TASKS:-4}
+for ((i=0;i<${INDEX_TASKS};i++)); do
+  index_next
+done
+
+# Do a wait loop. Bash waits that terminate due to a trap have an exit
+# code > 128. We also ensure that our subshell's "normal" exit occurs with
+# an exit code > 128. This allows us to treat a > 128 exit code as an
+# indication that the loop should continue. Unfortunately, it also means
+# we cannot use set -e, since technically the "wait" is failing.
+wait
+while (( $? > 128 )); do
+  wait
+done
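
A hedged usage sketch for the script above; the binary path is a placeholder, and INDEX_TASKS is the environment variable the script reads to bound concurrency (default 4):

    import os
    import subprocess

    # Indexing is largely IO bound, so raise the task count with care.
    env = dict(os.environ, INDEX_TASKS='8')
    subprocess.check_call(['build/gdb-add-index', 'out/Debug/content_shell'],
                          env=env)
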
diff --git a/build/get_landmines.py b/build/get_landmines.py
new file mode 100755
index 0000000..92f81fe
--- /dev/null
+++ b/build/get_landmines.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+import sys
+
+import landmine_utils
+
+
+builder = landmine_utils.builder
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines():
+  """
+  ALL LANDMINES ARE EMITTED FROM HERE.
+  """
+  # DO NOT add landmines as part of a regular CL. Landmines are a last-effort
+  # bandaid fix if a CL that got landed has a build dependency bug and all bots
+  # need to be cleaned up. If you're writing a new CL that causes build
+  # dependency problems, fix the dependency problems instead of adding a
+  # landmine.
+
+  print 'Lets start a new landmines file.'
+
+
+def main():
+  print_landmines()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/get_sdk_extras_packages.py b/build/get_sdk_extras_packages.py
new file mode 100755
index 0000000..a90b8a8
--- /dev/null
+++ b/build/get_sdk_extras_packages.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import sys
+
+SDK_EXTRAS_JSON_FILE = os.path.join(os.path.dirname(__file__),
+                                    'android_sdk_extras.json')
+
+def main():
+  with open(SDK_EXTRAS_JSON_FILE) as json_file:
+    packages = json.load(json_file)
+
+  out = []
+  for package in packages:
+    out.append(package['package_id'])
+
+  print ','.join(out)
+
+if __name__ == '__main__':
+  sys.exit(main())
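
The script above only relies on each entry in android_sdk_extras.json having a 'package_id' key. A sketch of the assumed shape (the ids are illustrative):

    example_packages = [
        {'package_id': 'extra-google-google_play_services'},
        {'package_id': 'extra-android-support'},
    ]
    # get_sdk_extras_packages.py would then print:
    #   extra-google-google_play_services,extra-android-support
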
diff --git a/build/get_syzygy_binaries.py b/build/get_syzygy_binaries.py
new file mode 100755
index 0000000..1cab3fc
--- /dev/null
+++ b/build/get_syzygy_binaries.py
@@ -0,0 +1,487 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""A utility script for downloading versioned Syzygy binaries."""
+
+import hashlib
+import errno
+import json
+import logging
+import optparse
+import os
+import re
+import shutil
+import stat
+import sys
+import subprocess
+import tempfile
+import time
+import zipfile
+
+
+_LOGGER = logging.getLogger(os.path.basename(__file__))
+
+# The relative path where official builds are archived in their GS bucket.
+_SYZYGY_ARCHIVE_PATH = ('/builds/official/%(revision)s')
+
+# A JSON file containing the state of the download directory. If this file and
+# directory state do not agree, then the binaries will be downloaded and
+# installed again.
+_STATE = '.state'
+
+# This matches an integer (an SVN revision number) or a SHA1 value (a GIT hash).
+# The archive exclusively uses lowercase GIT hashes.
+_REVISION_RE = re.compile('^(?:\d+|[a-f0-9]{40})$')
+
+# This matches an MD5 hash.
+_MD5_RE = re.compile('^[a-f0-9]{32}$')
+
+# List of resources to be downloaded and installed. These are tuples with the
+# following format:
+# (basename, logging name, relative installation path, extraction filter)
+_RESOURCES = [
+  ('benchmark.zip', 'benchmark', '', None),
+  ('binaries.zip', 'binaries', 'exe', None),
+  ('symbols.zip', 'symbols', 'exe',
+      lambda x: x.filename.endswith('.dll.pdb'))]
+
+
+def _LoadState(output_dir):
+  """Loads the contents of the state file for a given |output_dir|, returning
+  None if it doesn't exist.
+  """
+  path = os.path.join(output_dir, _STATE)
+  if not os.path.exists(path):
+    _LOGGER.debug('No state file found.')
+    return None
+  with open(path, 'rb') as f:
+    _LOGGER.debug('Reading state file: %s', path)
+    try:
+      return json.load(f)
+    except ValueError:
+      _LOGGER.debug('Invalid state file.')
+      return None
+
+
+def _SaveState(output_dir, state, dry_run=False):
+  """Saves the |state| dictionary to the given |output_dir| as a JSON file."""
+  path = os.path.join(output_dir, _STATE)
+  _LOGGER.debug('Writing state file: %s', path)
+  if dry_run:
+    return
+  with open(path, 'wb') as f:
+    f.write(json.dumps(state, sort_keys=True, indent=2))
+
+
+def _Md5(path):
+  """Returns the MD5 hash of the file at |path|, which must exist."""
+  return hashlib.md5(open(path, 'rb').read()).hexdigest()
+
+
+def _StateIsValid(state):
+  """Returns true if the given state structure is valid."""
+  if not isinstance(state, dict):
+    _LOGGER.debug('State must be a dict.')
+    return False
+  r = state.get('revision', None)
+  if not isinstance(r, basestring) or not _REVISION_RE.match(r):
+    _LOGGER.debug('State contains an invalid revision.')
+    return False
+  c = state.get('contents', None)
+  if not isinstance(c, dict):
+    _LOGGER.debug('State must contain a contents dict.')
+    return False
+  for (relpath, md5) in c.iteritems():
+    if not isinstance(relpath, basestring) or len(relpath) == 0:
+      _LOGGER.debug('State contents dict contains an invalid path.')
+      return False
+    if not isinstance(md5, basestring) or not _MD5_RE.match(md5):
+      _LOGGER.debug('State contents dict contains an invalid MD5 digest.')
+      return False
+  return True
+
+
+def _BuildActualState(stored, revision, output_dir):
+  """Builds the actual state using the provided |stored| state as a template.
+  Only examines files listed in the stored state, causing the script to ignore
+  files that have been added to the directories locally. |stored| must be a
+  valid state dictionary.
+  """
+  contents = {}
+  state = { 'revision': revision, 'contents': contents }
+  for relpath, md5 in stored['contents'].iteritems():
+    abspath = os.path.abspath(os.path.join(output_dir, relpath))
+    if os.path.isfile(abspath):
+      m = _Md5(abspath)
+      contents[relpath] = m
+
+  return state
+
+
+def _StatesAreConsistent(stored, actual):
+  """Validates whether two state dictionaries are consistent. Both must be valid
+  state dictionaries. Additional entries in |actual| are ignored.
+  """
+  if stored['revision'] != actual['revision']:
+    _LOGGER.debug('Mismatched revision number.')
+    return False
+  cont_stored = stored['contents']
+  cont_actual = actual['contents']
+  for relpath, md5 in cont_stored.iteritems():
+    if relpath not in cont_actual:
+      _LOGGER.debug('Missing content: %s', relpath)
+      return False
+    if md5 != cont_actual[relpath]:
+      _LOGGER.debug('Modified content: %s', relpath)
+      return False
+  return True
+
+
+def _GetCurrentState(revision, output_dir):
+  """Loads the current state and checks to see if it is consistent. Returns
+  a tuple (state, bool). The returned state will always be valid, even if an
+  invalid state is present on disk.
+  """
+  stored = _LoadState(output_dir)
+  if not _StateIsValid(stored):
+    _LOGGER.debug('State is invalid.')
+    # Return a valid but empty state.
+    return ({'revision': '0', 'contents': {}}, False)
+  actual = _BuildActualState(stored, revision, output_dir)
+  # If the script has been modified consider the state invalid.
+  path = os.path.join(output_dir, _STATE)
+  if os.path.getmtime(__file__) > os.path.getmtime(path):
+    return (stored, False)
+  # Otherwise, explicitly validate the state.
+  if not _StatesAreConsistent(stored, actual):
+    return (stored, False)
+  return (stored, True)
+
+
+def _DirIsEmpty(path):
+  """Returns true if the given directory is empty, false otherwise."""
+  for root, dirs, files in os.walk(path):
+    return not dirs and not files
+
+
+def _RmTreeHandleReadOnly(func, path, exc):
+  """An error handling function for use with shutil.rmtree. This will
+  detect failures to remove read-only files, and will change their properties
+  prior to removing them. This is necessary on Windows as os.remove will return
+  an access error for read-only files, and git repos contain read-only
+  pack/index files.
+  """
+  excvalue = exc[1]
+  if func in (os.rmdir, os.remove) and excvalue.errno == errno.EACCES:
+    _LOGGER.debug('Removing read-only path: %s', path)
+    os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+    func(path)
+  else:
+    raise
+
+
+def _RmTree(path):
+  """A wrapper of shutil.rmtree that handles read-only files."""
+  shutil.rmtree(path, ignore_errors=False, onerror=_RmTreeHandleReadOnly)
+
+
+def _CleanState(output_dir, state, dry_run=False):
+  """Cleans up files/directories in |output_dir| that are referenced by
+  the given |state|. Raises an error if there are local changes. Returns a
+  dictionary of files that were deleted.
+  """
+  _LOGGER.debug('Deleting files from previous installation.')
+  deleted = {}
+
+  # Generate a list of files to delete, relative to |output_dir|.
+  contents = state['contents']
+  files = sorted(contents.keys())
+
+  # Try to delete the files. Keep track of directories to delete as well.
+  dirs = {}
+  for relpath in files:
+    fullpath = os.path.join(output_dir, relpath)
+    fulldir = os.path.dirname(fullpath)
+    dirs[fulldir] = True
+    if os.path.exists(fullpath):
+      # If somehow the file has become a directory complain about it.
+      if os.path.isdir(fullpath):
+        raise Exception('Directory exists where file expected: %s' % fullpath)
+
+      # Double check that the file doesn't have local changes. If it does
+      # then refuse to delete it.
+      if relpath in contents:
+        stored_md5 = contents[relpath]
+        actual_md5 = _Md5(fullpath)
+        if actual_md5 != stored_md5:
+          raise Exception('File has local changes: %s' % fullpath)
+
+      # The file is unchanged so it can safely be deleted.
+      _LOGGER.debug('Deleting file "%s".', fullpath)
+      deleted[relpath] = True
+      if not dry_run:
+        os.unlink(fullpath)
+
+  # Sort directories from longest name to shortest. This lets us remove empty
+  # directories from the most nested paths first.
+  dirs = sorted(dirs.keys(), key=lambda x: len(x), reverse=True)
+  for p in dirs:
+    if os.path.exists(p) and _DirIsEmpty(p):
+      _LOGGER.debug('Deleting empty directory "%s".', p)
+      if not dry_run:
+        _RmTree(p)
+
+  return deleted
+
+
+def _FindGsUtil():
+  """Looks for depot_tools and returns the absolute path to gsutil.py."""
+  for path in os.environ['PATH'].split(os.pathsep):
+    path = os.path.abspath(path)
+    git_cl = os.path.join(path, 'git_cl.py')
+    gs_util = os.path.join(path, 'gsutil.py')
+    if os.path.exists(git_cl) and os.path.exists(gs_util):
+      return gs_util
+  return None
+
+
+def _GsUtil(*cmd):
+  """Runs the given command in gsutil with exponential backoff and retries."""
+  gs_util = _FindGsUtil()
+  cmd = [sys.executable, gs_util] + list(cmd)
+
+  retries = 3
+  timeout = 4  # Seconds.
+  while True:
+    _LOGGER.debug('Running %s', cmd)
+    prog = subprocess.Popen(cmd, shell=False)
+    prog.communicate()
+
+    # Stop retrying on success.
+    if prog.returncode == 0:
+      return
+
+    # Raise a permanent failure if retries have been exhausted.
+    if retries == 0:
+      raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
+
+    _LOGGER.debug('Sleeping %d seconds and trying again.', timeout)
+    time.sleep(timeout)
+    retries -= 1
+    timeout *= 2
+
+
+def _Download(resource):
+  """Downloads the given GS resource to a temporary file, returning its path."""
+  tmp = tempfile.mkstemp(suffix='syzygy_archive')
+  os.close(tmp[0])
+  url = 'gs://syzygy-archive' + resource
+  _GsUtil('cp', url, tmp[1])
+  return tmp[1]
+
+
+def _InstallBinaries(options, deleted={}):
+  """Installs Syzygy binaries. This assumes that the output directory has
+  already been cleaned, as it will refuse to overwrite existing files."""
+  contents = {}
+  state = { 'revision': options.revision, 'contents': contents }
+  archive_path = _SYZYGY_ARCHIVE_PATH % { 'revision': options.revision }
+  if options.resources:
+    resources = [(resource, resource, '', None)
+                 for resource in options.resources]
+  else:
+    resources = _RESOURCES
+  for (base, name, subdir, filt) in resources:
+    # Create the output directory if it doesn't exist.
+    fulldir = os.path.join(options.output_dir, subdir)
+    if os.path.isfile(fulldir):
+      raise Exception('File exists where a directory needs to be created: %s' %
+                      fulldir)
+    if not os.path.exists(fulldir):
+      _LOGGER.debug('Creating directory: %s', fulldir)
+      if not options.dry_run:
+        os.makedirs(fulldir)
+
+    # Download and read the archive.
+    resource = archive_path + '/' + base
+    _LOGGER.debug('Retrieving %s archive at "%s".', name, resource)
+    path = _Download(resource)
+
+    _LOGGER.debug('Unzipping %s archive.', name)
+    with open(path, 'rb') as data:
+      archive = zipfile.ZipFile(data)
+      for entry in archive.infolist():
+        if not filt or filt(entry):
+          fullpath = os.path.normpath(os.path.join(fulldir, entry.filename))
+          relpath = os.path.relpath(fullpath, options.output_dir)
+          if os.path.exists(fullpath):
+            # If in a dry-run take into account the fact that the file *would*
+            # have been deleted.
+            if options.dry_run and relpath in deleted:
+              pass
+            else:
+              raise Exception('Path already exists: %s' % fullpath)
+
+          # Extract the file and update the state dictionary.
+          _LOGGER.debug('Extracting "%s".', fullpath)
+          if not options.dry_run:
+            archive.extract(entry.filename, fulldir)
+            md5 = _Md5(fullpath)
+            contents[relpath] = md5
+            if sys.platform == 'cygwin':
+              os.chmod(fullpath, os.stat(fullpath).st_mode | stat.S_IXUSR)
+
+    _LOGGER.debug('Removing temporary file "%s".', path)
+    os.remove(path)
+
+  return state
+
+
+def _ParseCommandLine():
+  """Parses the command-line and returns an options structure."""
+  option_parser = optparse.OptionParser()
+  option_parser.add_option('--dry-run', action='store_true', default=False,
+      help='If true then will simply list actions that would be performed.')
+  option_parser.add_option('--force', action='store_true', default=False,
+      help='Force an installation even if the binaries are up to date.')
+  option_parser.add_option('--no-cleanup', action='store_true', default=False,
+      help='Allow installation on non-Windows platforms, and skip the forced '
+           'cleanup step.')
+  option_parser.add_option('--output-dir', type='string',
+      help='The path where the binaries will be replaced. Existing binaries '
+           'will only be overwritten if not up to date.')
+  option_parser.add_option('--overwrite', action='store_true', default=False,
+      help='If specified then the installation will happily delete and rewrite '
+           'the entire output directory, blasting any local changes.')
+  option_parser.add_option('--revision', type='string',
+      help='The SVN revision or GIT hash associated with the required version.')
+  option_parser.add_option('--revision-file', type='string',
+      help='A text file containing an SVN revision or GIT hash.')
+  option_parser.add_option('--resource', type='string', action='append',
+      dest='resources', help='A resource to be downloaded.')
+  option_parser.add_option('--verbose', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.DEBUG,
+      help='Enables verbose logging.')
+  option_parser.add_option('--quiet', dest='log_level', action='store_const',
+      default=logging.INFO, const=logging.ERROR,
+      help='Disables all output except for errors.')
+  options, args = option_parser.parse_args()
+  if args:
+    option_parser.error('Unexpected arguments: %s' % args)
+  if not options.output_dir:
+    option_parser.error('Must specify --output-dir.')
+  if not options.revision and not options.revision_file:
+    option_parser.error('Must specify one of --revision or --revision-file.')
+  if options.revision and options.revision_file:
+    option_parser.error('Must not specify both --revision and --revision-file.')
+
+  # Configure logging.
+  logging.basicConfig(level=options.log_level)
+
+  # If a revision file has been specified then read it.
+  if options.revision_file:
+    options.revision = open(options.revision_file, 'rb').read().strip()
+    _LOGGER.debug('Parsed revision "%s" from file "%s".',
+                 options.revision, options.revision_file)
+
+  # Ensure that the specified SVN revision or GIT hash is valid.
+  if not _REVISION_RE.match(options.revision):
+    option_parser.error('Must specify a valid SVN or GIT revision.')
+
+  # This just makes output prettier to read.
+  options.output_dir = os.path.normpath(options.output_dir)
+
+  return options
+
+
+def _RemoveOrphanedFiles(options):
+  """This is run on non-Windows systems to remove orphaned files that may have
+  been downloaded by a previous version of this script.
+  """
+  # Reconfigure logging to output info messages. This will allow inspection of
+  # cleanup status on non-Windows buildbots.
+  _LOGGER.setLevel(logging.INFO)
+
+  output_dir = os.path.abspath(options.output_dir)
+
+  # We only want to clean up the folder in 'src/third_party/syzygy', and we
+  # expect to be called with that as an output directory. This is an attempt to
+  # not start deleting random things if the script is run from an alternate
+  # location, or not called from the gclient hooks.
+  expected_syzygy_dir = os.path.abspath(os.path.join(
+      os.path.dirname(__file__), '..', 'third_party', 'syzygy'))
+  expected_output_dir = os.path.join(expected_syzygy_dir, 'binaries')
+  if expected_output_dir != output_dir:
+    _LOGGER.info('Unexpected output directory, skipping cleanup.')
+    return
+
+  if not os.path.isdir(expected_syzygy_dir):
+    _LOGGER.info('Output directory does not exist, skipping cleanup.')
+    return
+
+  def OnError(function, path, excinfo):
+    """Logs error encountered by shutil.rmtree."""
+    _LOGGER.error('Error when running %s(%s)', function, path, exc_info=excinfo)
+
+  _LOGGER.info('Removing orphaned files from %s', expected_syzygy_dir)
+  if not options.dry_run:
+    shutil.rmtree(expected_syzygy_dir, True, OnError)
+
+
+def main():
+  options = _ParseCommandLine()
+
+  if options.dry_run:
+    _LOGGER.debug('Performing a dry-run.')
+
+  # We only care about Windows platforms, as the Syzygy binaries aren't used
+  # elsewhere. However, there was a short period of time where this script
+  # wasn't gated on OS types, and those OSes downloaded and installed binaries.
+  # This will clean up orphaned files on those operating systems.
+  if sys.platform not in ('win32', 'cygwin'):
+    if options.no_cleanup:
+      _LOGGER.debug('Skipping usual cleanup for non-Windows platforms.')
+    else:
+      return _RemoveOrphanedFiles(options)
+
+  # Load the current installation state, and validate it against the
+  # requested installation.
+  state, is_consistent = _GetCurrentState(options.revision, options.output_dir)
+
+  # Decide whether or not an install is necessary.
+  if options.force:
+    _LOGGER.debug('Forcing reinstall of binaries.')
+  elif is_consistent:
+    # Avoid doing any work if the contents of the directory are consistent.
+    _LOGGER.debug('State unchanged, no reinstall necessary.')
+    return
+
+  # Under normal logging this is the only message that will be reported.
+  _LOGGER.info('Installing revision %s Syzygy binaries.',
+               options.revision[0:12])
+
+  # Clean up the old state to begin with.
+  deleted = []
+  if options.overwrite:
+    if os.path.exists(options.output_dir):
+      # If overwrite was specified then take a heavy-handed approach.
+      _LOGGER.debug('Deleting entire installation directory.')
+      if not options.dry_run:
+        _RmTree(options.output_dir)
+  else:
+    # Otherwise only delete things that the previous installation put in place,
+    # and take care to preserve any local changes.
+    deleted = _CleanState(options.output_dir, state, options.dry_run)
+
+  # Install the new binaries. In a dry-run this will actually download the
+  # archives, but it won't write anything to disk.
+  state = _InstallBinaries(options, deleted)
+
+  # Build and save the state for the directory.
+  _SaveState(options.output_dir, state, options.dry_run)
+
+
+if __name__ == '__main__':
+  main()
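
For reference, the .state file that drives the consistency check above is a JSON document with a 'revision' and a 'contents' map of relative paths to MD5 digests. A sketch of the shape (paths and digests are illustrative):

    example_state = {
        'revision': '0123456789abcdef0123456789abcdef01234567',
        'contents': {
            'exe/instrument.exe': 'd41d8cd98f00b204e9800998ecf8427e',
            'exe/syzyasan_rtl.dll.pdb': '900150983cd24fb0d6963f7d28e17f72',
        },
    }
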
diff --git a/build/git-hooks/OWNERS b/build/git-hooks/OWNERS
new file mode 100644
index 0000000..3e327dc
--- /dev/null
+++ b/build/git-hooks/OWNERS
@@ -0,0 +1,3 @@
+set noparent
+szager@chromium.org
+cmp@chromium.org
diff --git a/build/git-hooks/pre-commit b/build/git-hooks/pre-commit
new file mode 100755
index 0000000..41b5963
--- /dev/null
+++ b/build/git-hooks/pre-commit
@@ -0,0 +1,60 @@
+#!/bin/sh
+
+submodule_diff() {
+  if test -n "$2"; then
+    git diff-tree -r --ignore-submodules=dirty "$1" "$2" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  else
+    git diff-index --cached --ignore-submodules=dirty "$1" | grep -e '^:160000' -e '^:...... 160000' | xargs
+  fi
+}
+
+if git rev-parse --verify --quiet --no-revs MERGE_HEAD; then
+  merge_base=$(git merge-base HEAD MERGE_HEAD)
+  if test -z "$(submodule_diff $merge_base HEAD)"; then
+    # Most up-to-date submodules are in MERGE_HEAD.
+    head_ref=MERGE_HEAD
+  else
+    # Most up-to-date submodules are in HEAD.
+    head_ref=HEAD
+  fi
+else
+  # No merge in progress. Submodules must match HEAD.
+  head_ref=HEAD
+fi
+
+submods=$(submodule_diff $head_ref)
+if test "$submods"; then
+  echo "You are trying to commit changes to the following submodules:" 1>&2
+  echo 1>&2
+  echo $submods | cut -d ' ' -f 6 | sed 's/^/  /g' 1>&2
+  cat <<EOF 1>&2
+
+Submodule commits are not allowed.  Please run:
+
+  git status --ignore-submodules=dirty
+
+and/or:
+
+  git diff-index --cached --ignore-submodules=dirty HEAD
+
+... to see what's in your index.
+
+If you're really and truly trying to roll the version of a submodule, you should
+commit the new version to DEPS, instead.
+EOF
+  exit 1
+fi
+
+gitmodules_diff() {
+  git diff-index --cached "$1" .gitmodules
+}
+
+if [ "$(git ls-files .gitmodules)" ] && [ "$(gitmodules_diff $head_ref)" ]; then
+  cat <<EOF 1>&2
+You are trying to commit a change to .gitmodules.  That is not allowed.
+To make changes to submodule names/paths, edit DEPS.
+EOF
+  exit 1
+fi
+
+exit 0
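
The ':160000' patterns in the hook above match the gitlink mode in git's raw diff output. A small sketch (SHAs shortened and made up) of which lines the combined pattern picks out:

    import re

    lines = [
        ':160000 160000 1111111 2222222 M\tthird_party/some_submodule',  # updated
        ':000000 160000 0000000 3333333 A\tthird_party/new_submodule',   # added
        ':100644 100644 4444444 5555555 M\tDEPS',                        # plain file
    ]
    pattern = re.compile(r'^:160000|^:...... 160000')
    print([l.split('\t')[1] for l in lines if pattern.search(l)])
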
diff --git a/build/gn_helpers.py b/build/gn_helpers.py
new file mode 100644
index 0000000..3b0647d
--- /dev/null
+++ b/build/gn_helpers.py
@@ -0,0 +1,39 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper functions useful when writing scripts that are run from GN's
+exec_script function."""
+
+class GNException(Exception):
+  pass
+
+
+def ToGNString(value, allow_dicts = True):
+  """Prints the given value to stdout.
+
+  allow_dicts indicates if this function will allow converting dictionaries
+  to GN scopes. This is only possible at the top level, you can't nest a
+  GN scope in a list, so this should be set to False for recursive calls."""
+  if isinstance(value, str):
+    if value.find('\n') >= 0:
+      raise GNException("Trying to print a string with a newline in it.")
+    return '"' + value.replace('"', '\\"') + '"'
+
+  if isinstance(value, list):
+    return '[ %s ]' % ', '.join(ToGNString(v) for v in value)
+
+  if isinstance(value, dict):
+    if not allow_dicts:
+      raise GNException("Attempting to recursively print a dictionary.")
+    result = ""
+    for key in value:
+      if not isinstance(key, str):
+        raise GNException("Dictionary key is not a string.")
+      result += "%s = %s\n" % (key, ToGNString(value[key], False))
+    return result
+
+  if isinstance(value, int):
+    return str(value)
+
+  raise GNException("Unsupported type when printing to GN.")
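
A small sketch of what ToGNString produces for the supported input types, assuming the module above is importable as gn_helpers:

    import gn_helpers

    print(gn_helpers.ToGNString('obj/foo'))       # "obj/foo"
    print(gn_helpers.ToGNString([1, 'two']))      # [ 1, "two" ]
    print(gn_helpers.ToGNString({'deps': ['a']})) # deps = [ "a" ]  (plus a trailing newline)
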
diff --git a/build/gn_migration.gypi b/build/gn_migration.gypi
new file mode 100644
index 0000000..2527b2e
--- /dev/null
+++ b/build/gn_migration.gypi
@@ -0,0 +1,726 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines five targets that we are using to track the progress of the
+# GYP->GN migration:
+#
+# 'both_gn_and_gyp' lists what GN is currently capable of building and should
+# match the 'both_gn_and_gyp' target in //BUILD.gn.
+#
+# 'gyp_all' should include everything built when building "all"; i.e., if you
+# type 'ninja gyp_all' and then 'ninja all', the second build should do
+# nothing. 'gyp_all' should just depend on the other four targets.
+#
+# 'gyp_only' lists any targets that are not meant to be ported over to the GN
+# build.
+#
+# 'gyp_remaining' lists all of the targets that still need to be converted,
+# i.e., all of the other (non-empty) targets that a GYP build will build.
+#
+# TODO(GYP): crbug.com/481694. Add a build step to the bot that enforces the
+# above contracts.
+
+{
+  'targets': [
+    {
+      'target_name': 'gyp_all',
+      'type': 'none',
+      'dependencies': [
+        'both_gn_and_gyp',
+        'gyp_only',
+        'gyp_remaining',
+      ]
+    },
+    {
+      # This target should mirror the structure of //:both_gn_and_gyp
+      # in src/BUILD.gn as closely as possible, for ease of comparison.
+      'target_name': 'both_gn_and_gyp',
+      'type': 'none',
+      'dependencies': [
+        '../base/base.gyp:base_i18n_perftests',
+        '../base/base.gyp:base_perftests',
+        '../base/base.gyp:base_unittests',
+        '../base/base.gyp:build_utf8_validator_tables#host',
+        '../base/base.gyp:check_example',
+        '../cc/cc_tests.gyp:cc_perftests',
+        '../cc/cc_tests.gyp:cc_unittests',
+        '../cc/blink/cc_blink_tests.gyp:cc_blink_unittests',
+        '../chrome/chrome.gyp:chrome',
+        '../chrome/chrome.gyp:browser_tests',
+        '../chrome/chrome.gyp:chrome_app_unittests',
+        '../chrome/chrome.gyp:chromedriver',
+        '../chrome/chrome.gyp:chromedriver_tests',
+        '../chrome/chrome.gyp:chromedriver_unittests',
+        '../chrome/chrome.gyp:interactive_ui_tests',
+        '../chrome/chrome.gyp:load_library_perf_tests',
+        '../chrome/chrome.gyp:performance_browser_tests',
+        '../chrome/chrome.gyp:sync_integration_tests',
+        '../chrome/chrome.gyp:sync_performance_tests',
+        '../chrome/chrome.gyp:unit_tests',
+        '../chrome/tools/profile_reset/jtl_compiler.gyp:jtl_compiler',
+        '../cloud_print/cloud_print.gyp:cloud_print_unittests',
+        '../components/components.gyp:network_hints_browser',
+        '../components/components.gyp:policy_templates',
+        '../components/components_tests.gyp:components_browsertests',
+        '../components/components_tests.gyp:components_perftests',
+        '../components/components_tests.gyp:components_unittests',
+        '../content/content.gyp:content_app_browser',
+        '../content/content.gyp:content_app_child',
+        '../content/content_shell_and_tests.gyp:content_browsertests',
+        '../content/content_shell_and_tests.gyp:content_gl_benchmark',
+        '../content/content_shell_and_tests.gyp:content_gl_tests',
+        '../content/content_shell_and_tests.gyp:content_perftests',
+        '../content/content_shell_and_tests.gyp:content_shell',
+        '../content/content_shell_and_tests.gyp:content_unittests',
+        '../courgette/courgette.gyp:courgette',
+        '../courgette/courgette.gyp:courgette_fuzz',
+        '../courgette/courgette.gyp:courgette_minimal_tool',
+        '../courgette/courgette.gyp:courgette_unittests',
+        '../crypto/crypto.gyp:crypto_unittests',
+        '../extensions/extensions_tests.gyp:extensions_browsertests',
+        '../extensions/extensions_tests.gyp:extensions_unittests',
+        '../device/device_tests.gyp:device_unittests',
+        '../gin/gin.gyp:gin_v8_snapshot_fingerprint',
+        '../gin/gin.gyp:gin_shell',
+        '../gin/gin.gyp:gin_unittests',
+        '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+        '../google_apis/gcm/gcm.gyp:mcs_probe',
+        '../google_apis/google_apis.gyp:google_apis_unittests',
+        '../gpu/gpu.gyp:angle_unittests',
+        '../gpu/gpu.gyp:gl_tests',
+        '../gpu/gpu.gyp:gpu_perftests',
+        '../gpu/gpu.gyp:gpu_unittests',
+        '../gpu/gles2_conform_support/gles2_conform_support.gyp:gles2_conform_support',  # TODO(GYP) crbug.com/471920
+        '../gpu/gles2_conform_support/gles2_conform_test.gyp:gles2_conform_test',  # TODO(GYP) crbug.com/471920
+        '../gpu/khronos_glcts_support/khronos_glcts_test.gyp:khronos_glcts_test',  # TODO(GYP) crbug.com/471903 to make this complete.
+        '../ipc/ipc.gyp:ipc_perftests',
+        '../ipc/ipc.gyp:ipc_tests',
+        '../ipc/mojo/ipc_mojo.gyp:ipc_mojo_unittests',
+        '../jingle/jingle.gyp:jingle_unittests',
+        '../media/media.gyp:ffmpeg_regression_tests',  # TODO(GYP) this should be conditional on media_use_ffmpeg
+        '../media/media.gyp:media_perftests',
+        '../media/media.gyp:media_unittests',
+        '../media/midi/midi.gyp:midi_unittests',
+        '../media/cast/cast.gyp:cast_benchmarks',
+        '../media/cast/cast.gyp:cast_unittests',
+        '../media/cast/cast.gyp:generate_barcode_video',
+        '../media/cast/cast.gyp:generate_timecode_audio',
+        '../mojo/mojo.gyp:mojo',
+        '../mojo/mojo_base.gyp:mojo_application_base',
+        '../mojo/mojo_base.gyp:mojo_common_unittests',
+        '../net/net.gyp:crash_cache',
+        '../net/net.gyp:crl_set_dump',
+        '../net/net.gyp:dns_fuzz_stub',
+        '../net/net.gyp:dump_cache',
+        '../net/net.gyp:gdig',
+        '../net/net.gyp:get_server_time',
+        '../net/net.gyp:hpack_example_generator',
+        '../net/net.gyp:hpack_fuzz_mutator',
+        '../net/net.gyp:hpack_fuzz_wrapper',
+        '../net/net.gyp:net_perftests',
+        '../net/net.gyp:net_unittests',
+        '../net/net.gyp:net_watcher',  # TODO(GYP): This should be conditional on use_v8_in_net
+        '../net/net.gyp:run_testserver',
+        '../net/net.gyp:stress_cache',
+        '../net/net.gyp:tld_cleanup',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_audio',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_audio_input',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_c_stub',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_cc_stub',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_compositor',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_crxfs',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_enumerate_devices',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_file_chooser',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_flash_topmost',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_gamepad',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_gles2',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_gles2_spinning_cube',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_graphics_2d',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_ime',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_input',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_media_stream_audio',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_media_stream_video',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_mouse_cursor',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_mouse_lock',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_paint_manager',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_post_message',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_printing',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_scaling',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_scroll',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_simple_font',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_threading',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_url_loader',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_url_loader_file',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_vc',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_decode_dev',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_effects',
+        '../ppapi/ppapi_internal.gyp:ppapi_example_video_encode',
+        '../ppapi/ppapi_internal.gyp:ppapi_tests',
+        '../ppapi/ppapi_internal.gyp:ppapi_perftests',
+        '../ppapi/ppapi_internal.gyp:ppapi_unittests',
+        '../ppapi/tools/ppapi_tools.gyp:pepper_hash_for_uma',
+        '../printing/printing.gyp:printing_unittests',
+        '../skia/skia_tests.gyp:skia_unittests',
+        '../skia/skia.gyp:filter_fuzz_stub',
+        '../skia/skia.gyp:image_operations_bench',
+        '../sql/sql.gyp:sql_unittests',
+        '../sync/sync.gyp:run_sync_testserver',
+        '../sync/sync.gyp:sync_unit_tests',
+        '../sync/tools/sync_tools.gyp:sync_client',
+        '../sync/tools/sync_tools.gyp:sync_listen_notifications',
+        '../testing/gmock.gyp:gmock_main',
+        '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests',
+        '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests',
+        '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests',
+        '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests',
+        '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests',
+        '../third_party/codesighs/codesighs.gyp:codesighs',
+        '../third_party/codesighs/codesighs.gyp:maptsvdifftool',
+        '../third_party/leveldatabase/leveldatabase.gyp:env_chromium_unittests',
+        '../third_party/libphonenumber/libphonenumber.gyp:libphonenumber_unittests',
+        '../third_party/libaddressinput/libaddressinput.gyp:libaddressinput_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_system_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_environment_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_perftests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_system_unittests',
+        '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_utility_unittests',
+        '../third_party/pdfium/samples/samples.gyp:pdfium_diff',
+        '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+        '../third_party/smhasher/smhasher.gyp:pmurhash',
+        '../tools/gn/gn.gyp:gn',
+        '../tools/gn/gn.gyp:generate_test_gn_data',
+        '../tools/gn/gn.gyp:gn_unittests',
+        '../tools/imagediff/image_diff.gyp:image_diff',
+        '../tools/perf/clear_system_cache/clear_system_cache.gyp:clear_system_cache',
+        '../tools/telemetry/telemetry.gyp:bitmaptools#host',
+        '../ui/accessibility/accessibility.gyp:accessibility_unittests',
+        '../ui/app_list/app_list.gyp:app_list_unittests',
+        '../ui/base/ui_base_tests.gyp:ui_base_unittests',
+        '../ui/compositor/compositor.gyp:compositor_unittests',
+        '../ui/display/display.gyp:display_unittests',
+        '../ui/events/events.gyp:events_unittests',
+        '../ui/gfx/gfx_tests.gyp:gfx_unittests',
+        '../ui/gl/gl_tests.gyp:gl_unittests',
+        '../ui/message_center/message_center.gyp:message_center_unittests',
+        '../ui/snapshot/snapshot.gyp:snapshot_unittests',
+        '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests',
+        '../ui/views/examples/examples.gyp:views_examples_with_content_exe',
+        '../url/url.gyp:url_unittests',
+        '../v8/tools/gyp/v8.gyp:v8_snapshot',
+        '../v8/tools/gyp/v8.gyp:postmortem-metadata',
+      ],
+      'conditions': [
+        ['clang==1', {
+          'dependencies': [
+            '../build/sanitizers/sanitizers.gyp:llvm-symbolizer',
+          ],
+        }],
+        ['disable_nacl==0 and disable_nacl_untrusted==0', {
+          'dependencies': [
+            '../components/nacl.gyp:nacl_loader_unittests',
+          ]
+        }],
+        ['enable_extensions==1 and OS!="mac"', {
+          'dependencies': [
+            '../extensions/shell/app_shell.gyp:app_shell',
+            '../extensions/shell/app_shell.gyp:app_shell_unittests',
+          ],
+        }],
+        ['enable_mdns==1', {
+          'dependencies': [
+            '../chrome/chrome.gyp:service_discovery_sniffer',
+          ]
+        }],
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/remoting_all.gyp:remoting_all',
+          ],
+        }],
+        ['remoting==1 and chromeos==0 and use_x11==1', {
+          'dependencies': [
+            '../remoting/remoting.gyp:remoting_me2me_host',
+            '../remoting/remoting.gyp:remoting_me2me_native_messaging_host',
+          ],
+        }],
+        ['toolkit_views==1', {
+          'dependencies': [
+            '../ui/app_list/app_list.gyp:app_list_demo',
+            '../ui/views/views.gyp:views_unittests',
+          ],
+        }],
+        ['use_ash==1', {
+          'dependencies': [
+            '../ash/ash.gyp:ash_shell',
+            '../ash/ash.gyp:ash_shell_unittests',
+            '../ash/ash.gyp:ash_unittests',
+          ],
+        }],
+        ['use_ash==1 or chromeos== 1', {
+          'dependencies': [
+            '../components/components.gyp:session_manager_component',
+          ]
+        }],
+        ['use_aura==1', {
+          'dependencies': [
+            '../ui/aura/aura.gyp:aura_bench',
+            '../ui/aura/aura.gyp:aura_demo',
+            '../ui/aura/aura.gyp:aura_unittests',
+            '../ui/keyboard/keyboard.gyp:keyboard_unittests',
+            '../ui/wm/wm.gyp:wm_unittests',
+          ],
+        }],
+        ['use_ozone==1', {
+          'dependencies': [
+            '../ui/ozone/ozone.gyp:ozone',
+          ],
+        }],
+        ['use_x11==1', {
+          'dependencies': [
+            '../tools/xdisplaycheck/xdisplaycheck.gyp:xdisplaycheck',
+          ],
+          'conditions': [
+            ['target_arch!="arm"', {
+              'dependencies': [
+                '../gpu/tools/tools.gyp:compositor_model_bench',
+              ],
+            }],
+          ],
+        }],
+        ['OS=="android"', {
+          'dependencies': [
+            '../base/base.gyp:chromium_android_linker',
+            '../breakpad/breakpad.gyp:dump_syms',
+            '../build/android/rezip.gyp:rezip_apk_jar',
+            '../chrome/chrome.gyp:chrome_public_apk',
+            '../chrome/chrome.gyp:chrome_public_test_apk',
+            '../chrome/chrome.gyp:chrome_shell_apk',
+            '../chrome/chrome.gyp:chromedriver_webview_shell_apk',
+            #"//clank" TODO(GYP) - conditional somehow?
+            '../tools/imagediff/image_diff.gyp:image_diff#host',
+            '../tools/telemetry/telemetry.gyp:bitmaptools#host',
+
+            # TODO(GYP): Remove these when the components_unittests work.
+            #"//components/history/core/test:test",
+            #"//components/policy:policy_component_test_support",
+            #"//components/policy:test_support",
+            #"//components/rappor:test_support",
+            #"//components/signin/core/browser:test_support",
+            #"//components/sync_driver:test_support",
+            #"//components/user_manager",
+            #"//components/wallpaper",
+
+            '../content/content_shell_and_tests.gyp:content_shell_apk',
+
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_heap_unittests_apk',
+            '../third_party/WebKit/Source/platform/blink_platform_tests.gyp:blink_platform_unittests_apk',
+            '../third_party/WebKit/Source/web/web_tests.gyp:webkit_unit_tests_apk',
+            '../third_party/WebKit/Source/wtf/wtf_tests.gyp:wtf_unittests_apk',
+            # TODO(GYP): Are these needed, or will they be pulled in automatically?
+            #"//third_party/android_tools:android_gcm_java",
+            #"//third_party/android_tools:uiautomator_java",
+            #"//third_party/android_tools:android_support_v13_java",
+            #"//third_party/android_tools:android_support_v7_appcompat_java",
+            #"//third_party/android_tools:android_support_v7_mediarouter_java",
+            #"//third_party/mesa",
+            #"//third_party/mockito:mockito_java",
+            #"//third_party/openmax_dl/dl",
+            #"//third_party/speex",
+            #"//ui/android:ui_java",
+
+            # TODO(GYP): Are these needed?
+            #"//chrome/test:test_support_unit",
+            #"//third_party/smhasher:murmurhash3",
+            #"//ui/message_center:test_support",
+          ],
+          'dependencies!': [
+            '../breakpad/breakpad.gyp:symupload',
+            '../chrome/chrome.gyp:browser_tests',
+            '../chrome/chrome.gyp:chromedriver',
+            '../chrome/chrome.gyp:chromedriver_unitests',
+            '../chrome/chrome.gyp:interactive_ui_tests',
+            '../chrome/chrome.gyp:performance_browser_tests',
+            '../chrome/chrome.gyp:sync_integration_tests',
+            '../chrome/chrome.gyp:unit_tests',
+            '../extensions/extensions_tests.gyp:extensions_browsertests',
+            '../extensions/extensions_tests.gyp:extensions_unittests',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests',
+            '../ipc/ipc.gyp:ipc_tests',
+            '../jingle/jingle.gyp:jingle_unittests',
+            '../net/net.gyp:net_unittests',
+            #"//ppapi/examples",
+            '../third_party/pdfium/samples/samples.gyp:pdfium_test',
+            '../tools/gn/gn.gyp:gn',
+            '../tools/gn/gn.gyp:gn_unittests',
+            '../tools/imagediff/image_diff.gyp:image_diff',
+            '../tools/gn/gn.gyp:gn',
+            '../tools/gn/gn.gyp:gn_unittests',
+            '../ui/app_list/app_list.gyp:app_list_unittests',
+            '../url/url.gyp:url_unittests',
+          ],
+        }],
+        ['OS=="android" or OS=="linux"', {
+          'dependencies': [
+            '../net/net.gyp:disk_cache_memory_test',
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../chromeos/chromeos.gyp:chromeos_unittests',
+            '../ui/chromeos/ui_chromeos.gyp:ui_chromeos_unittests',
+          ]
+        }],
+        ['chromeos==1 or OS=="win" or OS=="mac"', {
+          'dependencies': [
+            '../rlz/rlz.gyp:rlz_id',
+            '../rlz/rlz.gyp:rlz_lib',
+            '../rlz/rlz.gyp:rlz_unittests',
+          ],
+        }],
+        ['OS=="android" or OS=="linux" or os_bsd==1', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:core-2-minidump',
+            '../breakpad/breakpad.gyp:microdump_stackwalk',
+            '../breakpad/breakpad.gyp:minidump_dump',
+            '../breakpad/breakpad.gyp:minidump_stackwalk',
+            '../breakpad/breakpad.gyp:symupload',
+            '../third_party/codesighs/codesighs.gyp:nm2tsv',
+          ],
+        }],
+        ['OS=="linux"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:breakpad_unittests',
+            '../breakpad/breakpad.gyp:dump_syms#host',
+            '../breakpad/breakpad.gyp:generate_test_dump',
+            '../breakpad/breakpad.gyp:minidump-2-core',
+            '../dbus/dbus.gyp:dbus_test_server',
+            '../dbus/dbus.gyp:dbus_unittests',
+            '../media/cast/cast.gyp:tap_proxy',
+            '../net/net.gyp:disk_cache_memory_test',
+            '../net/net.gyp:flip_in_mem_edsm_server',
+            '../net/net.gyp:flip_in_mem_edsm_server_unittests',
+            '../net/net.gyp:epoll_quic_client',
+            '../net/net.gyp:epoll_quic_server',
+            '../net/net.gyp:hpack_example_generator',
+            '../net/net.gyp:hpack_fuzz_mutator',
+            '../net/net.gyp:hpack_fuzz_wrapper',
+            '../net/net.gyp:net_perftests',
+            '../net/net.gyp:quic_client',
+            '../net/net.gyp:quic_server',
+            '../sandbox/sandbox.gyp:chrome_sandbox',
+            '../sandbox/sandbox.gyp:sandbox_linux_unittests',
+            '../sandbox/sandbox.gyp:sandbox_linux_jni_unittests',
+            '../third_party/sqlite/sqlite.gyp:sqlite_shell',
+         ],
+        }],
+        ['OS=="mac"', {
+          'dependencies': [
+            '../breakpad/breakpad.gyp:crash_inspector',
+            '../breakpad/breakpad.gyp:dump_syms',
+            '../breakpad/breakpad.gyp:symupload',
+            '../third_party/apple_sample_code/apple_sample_code.gyp:apple_sample_code',
+            '../third_party/molokocacao/molokocacao.gyp:molokocacao',
+
+            # TODO(GYP): remove these when the corresponding root targets work.
+            #"//cc/blink",
+            #"//components/ui/zoom:ui_zoom",
+            #"//content",
+            #"//content/test:test_support",
+            #"//device/battery",
+            #"//device/bluetooth",
+            #"//device/nfc",
+            #"//device/usb",
+            #"//device/vibration",
+            #"//media/blink",
+            #"//pdf",
+            #"//storage/browser",
+            #"//third_party/brotli",
+            #"//third_party/flac",
+            #"//third_party/hunspell",
+            #//third_party/iccjpeg",
+            #"//third_party/libphonenumber",
+            #"//third_party/ots",
+            #"//third_party/qcms",
+            #"//third_party/smhasher:murmurhash3",
+            #"//third_party/speex",
+            #"//third_party/webrtc/system_wrappers",
+            #"//ui/native_theme",
+            #"//ui/snapshot",
+            #"//ui/surface",
+          ],
+          'dependencies!': [
+            #"//chrome",  # TODO(GYP)
+            #"//chrome/test:browser_tests",  # TODO(GYP)
+            #"//chrome/test:interactive_ui_tests",  # TODO(GYP)
+            #"//chrome/test:sync_integration_tests",  # TODO(GYP)
+            #"//chrome/test:unit_tests",  # TODO(GYP)
+            #"//components:components_unittests",  # TODO(GYP)
+            #"//content/test:content_browsertests",  # TODO(GYP)
+            #"//content/test:content_perftests",  # TODO(GYP)
+            #"//content/test:content_unittests",  # TODO(GYP)
+            #"//extensions:extensions_browsertests",  # TODO(GYP)
+            #"//extensions:extensions_unittests",  # TODO(GYP)
+            #"//net:net_unittests",  # TODO(GYP)
+            #"//third_party/usrsctp",  # TODO(GYP)
+            #"//ui/app_list:app_list_unittests",  # TODO(GYP)
+            #"//ui/gfx:gfx_unittests",  # TODO(GYP)
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            '../base/base.gyp:pe_image_test',
+            '../chrome/chrome.gyp:crash_service',
+            '../chrome/chrome.gyp:setup_unittests',
+            '../chrome_elf/chrome_elf.gyp:chrome_elf_unittests',
+            '../chrome_elf/chrome_elf.gyp:dll_hash_main',
+            '../components/components.gyp:wifi_test',
+            '../net/net.gyp:quic_client',
+            '../net/net.gyp:quic_server',
+            '../sandbox/sandbox.gyp:pocdll',
+            '../sandbox/sandbox.gyp:sandbox_poc',
+            '../sandbox/sandbox.gyp:sbox_integration_tests',
+            '../sandbox/sandbox.gyp:sbox_unittests',
+            '../sandbox/sandbox.gyp:sbox_validation_tests',
+            '../testing/gtest.gyp:gtest_main',
+            '../third_party/codesighs/codesighs.gyp:msdump2symdb',
+            '../third_party/codesighs/codesighs.gyp:msmap2tsv',
+            '../third_party/pdfium/samples/samples.gyp:pdfium_diff',
+            '../win8/win8.gyp:metro_viewer',
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'gyp_only',
+      'type': 'none',
+      'conditions': [
+        ['OS=="linux" or OS=="win"', {
+          'conditions': [
+            ['disable_nacl==0 and disable_nacl_untrusted==0', {
+              'dependencies': [
+                '../mojo/mojo_nacl.gyp:monacl_shell',  # This should not be built in chromium.
+              ]
+            }],
+          ]
+        }],
+      ],
+    },
+    {
+      'target_name': 'gyp_remaining',
+      'type': 'none',
+      'conditions': [
+        ['remoting==1', {
+          'dependencies': [
+            '../remoting/app_remoting_webapp.gyp:ar_sample_app',  # crbug.com/471916
+          ],
+        }],
+        ['test_isolation_mode!="noop"', {
+          'dependencies': [
+            '../base/base.gyp:base_unittests_run',
+            '../cc/cc_tests.gyp:cc_unittests_run',
+            '../chrome/chrome.gyp:browser_tests_run',
+            '../chrome/chrome.gyp:chrome_run',
+            '../chrome/chrome.gyp:interactive_ui_tests_run',
+            '../chrome/chrome.gyp:sync_integration_tests_run',
+            '../chrome/chrome.gyp:unit_tests_run',
+            '../components/components_tests.gyp:components_browsertests_run',
+            '../components/components_tests.gyp:components_unittests_run',
+            '../content/content_shell_and_tests.gyp:content_browsertests_run',
+            '../content/content_shell_and_tests.gyp:content_unittests_run',
+            '../courgette/courgette.gyp:courgette_unittests_run',
+            '../crypto/crypto.gyp:crypto_unittests_run',
+            '../google_apis/gcm/gcm.gyp:gcm_unit_tests_run',
+            '../gpu/gpu.gyp:gpu_unittests_run',
+            '../ipc/ipc.gyp:ipc_tests_run',
+            '../media/cast/cast.gyp:cast_unittests_run',
+            '../media/media.gyp:media_unittests_run',
+            '../media/midi/midi.gyp:midi_unittests_run',
+            '../net/net.gyp:net_unittests_run',
+            '../printing/printing.gyp:printing_unittests_run',
+            '../remoting/remoting.gyp:remoting_unittests_run',
+            '../skia/skia_tests.gyp:skia_unittests_run',
+            '../sql/sql.gyp:sql_unittests_run',
+            '../sync/sync.gyp:sync_unit_tests_run',
+            '../third_party/cacheinvalidation/cacheinvalidation.gyp:cacheinvalidation_unittests_run',
+            '../third_party/mojo/mojo_edk_tests.gyp:mojo_public_bindings_unittests_run',
+            '../tools/gn/gn.gyp:gn_unittests_run',
+            '../ui/accessibility/accessibility.gyp:accessibility_unittests_run',
+            '../ui/app_list/app_list.gyp:app_list_unittests_run',
+            '../ui/compositor/compositor.gyp:compositor_unittests_run',
+            '../ui/events/events.gyp:events_unittests_run',
+            '../ui/gl/gl_tests.gyp:gl_unittests_run',
+            '../ui/message_center/message_center.gyp:message_center_unittests_run',
+            '../ui/touch_selection/ui_touch_selection.gyp:ui_touch_selection_unittests_run',
+            '../url/url.gyp:url_unittests_run',
+          ],
+          'conditions': [
+            ['OS=="linux"', {
+              'dependencies': [
+                '../sandbox/sandbox.gyp:sandbox_linux_unittests_run',
+                '../ui/display/display.gyp:display_unittests_run',
+              ],
+            }],
+            ['OS=="mac"', {
+              'dependencies': [
+                '../sandbox/sandbox.gyp:sandbox_mac_unittests_run',
+              ],
+            }],
+            ['OS=="win"', {
+              'dependencies': [
+                '../chrome/chrome.gyp:installer_util_unittests_run',
+                '../chrome/chrome.gyp:setup_unittests_run',
+                '../sandbox/sandbox.gyp:sbox_integration_tests',
+                '../sandbox/sandbox.gyp:sbox_unittests',
+                '../sandbox/sandbox.gyp:sbox_validation_tests',
+              ],
+            }],
+            ['use_ash==1', {
+              'dependencies': [
+                '../ash/ash.gyp:ash_unittests_run',
+              ],
+            }],
+            ['use_aura==1', {
+              'dependencies': [
+                '../ui/aura/aura.gyp:aura_unittests_run',
+                '../ui/wm/wm.gyp:wm_unittests_run',
+              ],
+            }],
+            ['enable_webrtc==1 or OS!="android"', {
+              'dependencies': [
+                '../jingle/jingle.gyp:jingle_unittests_run',
+              ],
+            }],
+            ['disable_nacl==0 and disable_nacl_untrusted==0', {
+              'dependencies': [
+                '../components/nacl.gyp:nacl_loader_unittests_run',
+              ]
+            }],
+          ],
+        }],
+        ['use_openssl==1', {
+          'dependencies': [
+            # TODO(GYP): All of these targets still need to be converted.
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_ecdsa_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_bn_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pqueue_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_digest_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_cipher_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_hkdf_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_constant_time_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_thread_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_base64_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_gcm_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_bytestring_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_evp_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_dsa_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_rsa_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_hmac_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_aead_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_ssl_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_err_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_lhash_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pbkdf_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_dh_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pkcs12_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_example_mul',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_ec_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_bio_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_pkcs7_test',
+            '../third_party/boringssl/boringssl_tests.gyp:boringssl_unittests',
+          ],
+        }],
+        ['chromeos==1', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:jpeg_decode_accelerator_unittest',
+            '../content/content_shell_and_tests.gyp:video_encode_accelerator_unittest',
+          ],
+        }],
+        ['chromeos==1 and target_arch != "arm"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:vaapi_jpeg_decoder_unittest',
+          ],
+        }],
+        ['chromeos==1 or OS=="win" or OS=="android"', {
+          'dependencies': [
+            '../content/content_shell_and_tests.gyp:video_decode_accelerator_unittest',
+          ],
+        }],
+        ['OS=="linux" or OS=="win"', {
+          'dependencies': [
+            # TODO(GYP): Figure out which of these run on android/mac/win/ios/etc.
+            '../net/net.gyp:net_docs',
+            '../remoting/remoting.gyp:ar_sample_test_driver',
+
+            # TODO(GYP): in progress - see tfarina.
+            '../third_party/webrtc/tools/tools.gyp:frame_analyzer',
+            '../third_party/webrtc/tools/tools.gyp:rgba_to_i420_converter',
+          ],
+        }],
+        ['OS=="win"', {
+          'dependencies': [
+            # TODO(GYP): All of these targets still need to be converted.
+            '../base/base.gyp:debug_message',
+            '../chrome/chrome.gyp:app_shim',
+            '../chrome/chrome.gyp:gcapi_dll',
+            '../chrome/chrome.gyp:gcapi_test',
+            '../chrome/chrome.gyp:installer_util_unittests',
+            '../chrome/chrome.gyp:pack_policy_templates',
+            '../chrome/chrome.gyp:sb_sigutil',
+            '../chrome/chrome.gyp:setup',
+            '../chrome/installer/mini_installer.gyp:mini_installer',
+            '../chrome/tools/crash_service/caps/caps.gyp:caps',
+            '../cloud_print/gcp20/prototype/gcp20_device.gyp:gcp20_device',
+            '../cloud_print/gcp20/prototype/gcp20_device.gyp:gcp20_device_unittests',
+            '../cloud_print/service/win/service.gyp:cloud_print_service',
+            '../cloud_print/service/win/service.gyp:cloud_print_service_config',
+            '../cloud_print/service/win/service.gyp:cloud_print_service_setup',
+            '../cloud_print/virtual_driver/win/install/virtual_driver_install.gyp:virtual_driver_setup',
+            '../cloud_print/virtual_driver/win/virtual_driver.gyp:gcp_portmon',
+            '../components/test_runner/test_runner.gyp:layout_test_helper',
+            '../content/content_shell_and_tests.gyp:content_shell_crash_service',
+            '../gpu/gpu.gyp:angle_end2end_tests',
+            '../gpu/gpu.gyp:angle_perftests',
+            '../net/net.gyp:net_docs',
+            '../ppapi/ppapi_internal.gyp:ppapi_perftests',
+            '../remoting/remoting.gyp:ar_sample_test_driver',
+            '../remoting/remoting.gyp:remoting_breakpad_tester',
+            '../remoting/remoting.gyp:remoting_console',
+            '../remoting/remoting.gyp:remoting_desktop',
+            '../rlz/rlz.gyp:rlz',
+            '../tools/win/static_initializers/static_initializers.gyp:static_initializers',
+          ],
+        }],
+        ['OS=="win" and win_use_allocator_shim==1', {
+          'dependencies': [
+            '../base/allocator/allocator.gyp:allocator_unittests',
+          ]
+        }],
+        ['OS=="win" and target_arch=="ia32"', {
+          'dependencies': [
+            # TODO(GYP): All of these targets need to be ported over.
+            '../base/base.gyp:base_win64',
+            '../base/base.gyp:base_i18n_nacl_win64',
+            '../chrome/chrome.gyp:crash_service_win64',
+            '../chrome/chrome.gyp:launcher_support64',
+            '../components/components.gyp:breakpad_win64',
+            '../courgette/courgette.gyp:courgette64',
+            '../crypto/crypto.gyp:crypto_nacl_win64',
+            '../ipc/ipc.gyp:ipc_win64',
+            '../sandbox/sandbox.gyp:sandbox_win64',
+            '../cloud_print/virtual_driver/win/virtual_driver64.gyp:gcp_portmon64',
+            '../cloud_print/virtual_driver/win/virtual_driver64.gyp:virtual_driver_lib64',
+          ],
+        }],
+        ['OS=="win" and target_arch=="ia32" and configuration_policy==1', {
+          'dependencies': [
+            # TODO(GYP): All of these targets need to be ported over.
+            '../components/components.gyp:policy_win64',
+          ]
+        }],
+      ],
+    },
+  ]
+}
+
diff --git a/build/gn_run_binary.py b/build/gn_run_binary.py
new file mode 100644
index 0000000..7d83f61
--- /dev/null
+++ b/build/gn_run_binary.py
@@ -0,0 +1,22 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Helper script for GN to run an arbitrary binary. See compiled_action.gni.
+
+Run with:
+  python gn_run_binary.py <binary_name> [args ...]
+"""
+
+import sys
+import subprocess
+
+# This script is designed to run binaries produced by the current build. We
+# always prefix it with "./" to avoid picking up system versions that might
+# also be on the path.
+path = './' + sys.argv[1]
+
+# The rest of the arguments are passed directly to the executable.
+args = [path] + sys.argv[2:]
+
+sys.exit(subprocess.call(args))
diff --git a/build/grit_action.gypi b/build/grit_action.gypi
new file mode 100644
index 0000000..b24f0f8
--- /dev/null
+++ b/build/grit_action.gypi
@@ -0,0 +1,71 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+#   grit_grd_file: string: grd file path
+#   grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
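+# A hedged illustration (target and file names here are hypothetical, not part
+# of this file): an action that includes this .gypi might look roughly like:
+#
+#   'actions': [
+#     {
+#       'action_name': 'my_resources',
+#       'variables': {
+#         'grit_grd_file': 'my_resources.grd',
+#         'grit_out_dir': '<(SHARED_INTERMEDIATE_DIR)/my_resources',
+#       },
+#       'includes': [ '../build/grit_action.gypi' ],
+#     },
+#   ],
+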
+{
+  'variables': {
+    'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+    'grit_resource_ids%': '<(DEPTH)/tools/gritsettings/resource_ids',
+    # This makes it possible to add more defines in specific targets,
+    # instead of build/common.gypi .
+    'grit_additional_defines%': [],
+    'grit_rc_header_format%': [],
+    'grit_whitelist%': '',
+
+    'conditions': [
+      # These scripts can skip writing generated files if they are identical
+      # to the already existing files, which avoids further build steps, like
+      # recompilation. However, a dependency (earlier build step) having a
+      # newer timestamp than an output (later build step) confuses some build
+      # systems, so only use this on ninja, which explicitly supports this use
+      # case (gyp turns all actions into ninja restat rules).
+      ['"<(GENERATOR)"=="ninja"', {
+        'write_only_new': '1',
+      }, {
+        'write_only_new': '0',
+      }],
+    ],
+  },
+  'conditions': [
+    ['"<(grit_whitelist)"==""', {
+      'variables': {
+        'grit_whitelist_flag': [],
+      }
+    }, {
+      'variables': {
+        'grit_whitelist_flag': ['-w', '<(grit_whitelist)'],
+      }
+    }]
+  ],
+  'inputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+        '<@(grit_whitelist_flag) --inputs <(grit_grd_file) '
+        '-f "<(grit_resource_ids)")',
+  ],
+  'outputs': [
+    '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+        '<@(grit_whitelist_flag) --outputs \'<(grit_out_dir)\' '
+        '<(grit_grd_file) -f "<(grit_resource_ids)")',
+  ],
+  'action': ['<@(grit_cmd)',
+             '-i', '<(grit_grd_file)', 'build',
+             '-f', '<(grit_resource_ids)',
+             '-o', '<(grit_out_dir)',
+             '--write-only-new=<(write_only_new)',
+             '<@(grit_defines)',
+             '<@(grit_whitelist_flag)',
+             '<@(grit_additional_defines)',
+             '<@(grit_rc_header_format)'],
+  'message': 'Generating resources from <(grit_grd_file)',
+}
diff --git a/build/grit_target.gypi b/build/grit_target.gypi
new file mode 100644
index 0000000..179f986
--- /dev/null
+++ b/build/grit_target.gypi
@@ -0,0 +1,31 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target that will have one or more
+# uses of grit_action.gypi. To use this the following variables need to be
+# defined:
+#   grit_out_dir: string: the output directory path
+
+# DO NOT USE THIS FILE. Instead, use qualified includes.
+# TODO: Convert everything to qualified includes, and delete this file,
+# http://crbug.com/401588
+{
+  'conditions': [
+    # If the target is a direct binary, it needs to be able to find the header,
+    # otherwise it is probably a supporting target just for grit, so the
+    # include dir needs to be set on anything that depends on this action.
+    ['_type=="executable" or _type=="shared_library" or \
+      _type=="loadable_module" or _type=="static_library"', {
+      'include_dirs': [
+        '<(grit_out_dir)',
+      ],
+    }, {
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(grit_out_dir)',
+        ],
+      },
+    }],
+  ],
+}
diff --git a/build/gyp_chromium b/build/gyp_chromium
new file mode 100755
index 0000000..9dac871
--- /dev/null
+++ b/build/gyp_chromium
@@ -0,0 +1,333 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+import argparse
+import glob
+import gyp_environment
+import os
+import re
+import shlex
+import subprocess
+import string
+import sys
+import vs_toolchain
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+# Assume this file is in a one-level-deep subdirectory of the source root.
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Add paths so that pymod_do_main(...) can import files.
+sys.path.insert(1, os.path.join(chrome_src, 'android_webview', 'tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'build', 'android', 'gyp'))
+sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'chromecast', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'ios', 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src',
+    'build_tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit',
+    'Source', 'build', 'scripts'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
+
+# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
+# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
+# seconds.  Conversely, memory usage of build/gyp_chromium with Psyco
+# maxes out at about 158 MB vs. 132 MB without it.
+#
+# Psyco uses native libraries, so we need to load a different
+# installation depending on which OS we are running under. It has not
+# been tested whether using Psyco on our Mac and Linux builds is worth
+# it (the GYP running time is a lot shorter, so the JIT startup cost
+# may not be worth it).
+if sys.platform == 'win32':
+  try:
+    sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
+    import psyco
+  except:
+    psyco = None
+else:
+  psyco = None
+
+
+def GetSupplementalFiles():
+  """Returns a list of the supplemental files that are included in all GYP
+  sources."""
+  return glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
+
+
+def ProcessGypDefinesItems(items):
+  """Converts a list of strings to a list of key-value pairs."""
+  result = []
+  for item in items:
+    tokens = item.split('=', 1)
+    # Some GYP variables have hyphens, which we don't support.
+    if len(tokens) == 2:
+      result += [(tokens[0], tokens[1])]
+    else:
+      # No value supplied, treat it as a boolean and set it. Note that we
+      # use the string '1' here so we have a consistent definition whether
+      # you do 'foo=1' or 'foo'.
+      result += [(tokens[0], '1')]
+  return result
+
+
+def GetGypVars(supplemental_files):
+  """Returns a dictionary of all GYP vars."""
+  # Find the .gyp directory in the user's home directory.
+  home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
+  if home_dot_gyp:
+    home_dot_gyp = os.path.expanduser(home_dot_gyp)
+  if not home_dot_gyp:
+    home_vars = ['HOME']
+    if sys.platform in ('cygwin', 'win32'):
+      home_vars.append('USERPROFILE')
+    for home_var in home_vars:
+      home = os.getenv(home_var)
+      if home != None:
+        home_dot_gyp = os.path.join(home, '.gyp')
+        if not os.path.exists(home_dot_gyp):
+          home_dot_gyp = None
+        else:
+          break
+
+  if home_dot_gyp:
+    include_gypi = os.path.join(home_dot_gyp, "include.gypi")
+    if os.path.exists(include_gypi):
+      supplemental_files += [include_gypi]
+
+  # GYP defines from the supplemental.gypi files.
+  supp_items = []
+  for supplement in supplemental_files:
+    with open(supplement, 'r') as f:
+      try:
+        file_data = eval(f.read(), {'__builtins__': None}, None)
+      except SyntaxError, e:
+        e.filename = os.path.abspath(supplement)
+        raise
+      variables = file_data.get('variables', [])
+      for v in variables:
+        supp_items += [(v, str(variables[v]))]
+
+  # GYP defines from the environment.
+  env_items = ProcessGypDefinesItems(
+      shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+  # GYP defines from the command line.
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-D', dest='defines', action='append', default=[])
+  cmdline_input_items = parser.parse_known_args()[0].defines
+  cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
+
+  vars_dict = dict(supp_items + env_items + cmdline_items)
+  return vars_dict
+
+
+def GetOutputDirectory():
+  """Returns the output directory that GYP will use."""
+
+  # Handle command line generator flags.
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-G', dest='genflags', default=[], action='append')
+  genflags = parser.parse_known_args()[0].genflags
+
+  # Handle generator flags from the environment.
+  genflags += shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))
+
+  needle = 'output_dir='
+  for item in genflags:
+    if item.startswith(needle):
+      return item[len(needle):]
+
+  return 'out'
+
+
+def additional_include_files(supplemental_files, args=[]):
+  """
+  Returns a list of additional (.gypi) files to include, without duplicating
+  ones that are already specified on the command line. The list of supplemental
+  include files is passed in as an argument.
+  """
+  # Determine the include files specified on the command line.
+  # This doesn't cover all the different option formats you can use,
+  # but it's mainly intended to avoid duplicating flags on the automatic
+  # makefile regeneration which only uses this format.
+  specified_includes = set()
+  for arg in args:
+    if arg.startswith('-I') and len(arg) > 2:
+      specified_includes.add(os.path.realpath(arg[2:]))
+
+  result = []
+  def AddInclude(path):
+    if os.path.realpath(path) not in specified_includes:
+      result.append(path)
+
+  if os.environ.get('GYP_INCLUDE_FIRST') != None:
+    AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_FIRST')))
+
+  # Always include common.gypi.
+  AddInclude(os.path.join(script_dir, 'common.gypi'))
+
+  # Optionally add supplemental .gypi files if present.
+  for supplement in supplemental_files:
+    AddInclude(supplement)
+
+  if os.environ.get('GYP_INCLUDE_LAST') != None:
+    AddInclude(os.path.join(chrome_src, os.environ.get('GYP_INCLUDE_LAST')))
+
+  return result
+
+
+if __name__ == '__main__':
+  # Disabling garbage collection saves about 1 second out of 16 on a Linux
+  # z620 workstation. Since this is a short-lived process it's not a problem to
+  # leak a few cyclic references in order to spare the CPU cycles for
+  # scanning the heap.
+  import gc
+  gc.disable()
+
+  args = sys.argv[1:]
+
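+  # Analyzer mode is selected positionally; an invocation looks roughly like
+  # (paths illustrative): gyp_chromium --analyzer config.json output.json ...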
+  use_analyzer = len(args) and args[0] == '--analyzer'
+  if use_analyzer:
+    args.pop(0)
+    os.environ['GYP_GENERATORS'] = 'analyzer'
+    args.append('-Gconfig_path=' + args.pop(0))
+    args.append('-Ganalyzer_output_path=' + args.pop(0))
+
+  if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+    print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
+    sys.exit(0)
+
+  # Use the Psyco JIT if available.
+  if psyco:
+    psyco.profile()
+    print "Enabled Psyco JIT."
+
+  # Fall back on hermetic python if we happen to get run under cygwin.
+  # TODO(bradnelson): take this out once this issue is fixed:
+  #    http://code.google.com/p/gyp/issues/detail?id=177
+  if sys.platform == 'cygwin':
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
+                                               'python2*_bin')))[-1]
+    env = os.environ.copy()
+    env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
+    cmd = [os.path.join(python_dir, 'python.exe')] + sys.argv
+    sys.exit(subprocess.call(cmd, env=env))
+
+  # This could give false positives since it doesn't actually do real option
+  # parsing.  Oh well.
+  gyp_file_specified = any(arg.endswith('.gyp') for arg in args)
+
+  gyp_environment.SetEnvironment()
+
+  # If we didn't get a file, check an env var, and then fall back to
+  # assuming 'all.gyp' from the same directory as the script.
+  if not gyp_file_specified:
+    gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
+    if gyp_file:
+      # Note that CHROMIUM_GYP_FILE values can't have backslashes as
+      # path separators even on Windows due to the use of shlex.split().
+      args.extend(shlex.split(gyp_file))
+    else:
+      args.append(os.path.join(script_dir, 'all.gyp'))
+
+  supplemental_includes = GetSupplementalFiles()
+  gyp_vars_dict = GetGypVars(supplemental_includes)
+  # There shouldn't be a circular dependency relationship between .gyp files,
+  # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
+  # currently exist.  The check for circular dependencies is currently
+  # bypassed on other platforms, but is left enabled on iOS, where a violation
+  # of the rule causes Xcode to misbehave badly.
+  # TODO(mark): Find and kill remaining circular dependencies, and remove this
+  # option.  http://crbug.com/35878.
+  # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
+  # list.
+  if gyp_vars_dict.get('OS') != 'ios':
+    args.append('--no-circular-check')
+
+  # libtool on Mac warns about duplicate basenames in static libraries, so
+  # they're disallowed in general by gyp. We are lax on this point, so disable
+  # this check other than on Mac. GN does not use static libraries as heavily,
+  # so over time this restriction will mostly go away anyway, even on Mac.
+  # https://code.google.com/p/gyp/issues/detail?id=384
+  if sys.platform != 'darwin':
+    args.append('--no-duplicate-basename-check')
+
+  # We explicitly don't support the make gyp generator (crbug.com/348686). Be
+  # nice and fail here, rather than choking in gyp.
+  if re.search(r'(^|,|\s)make($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: make gyp generator not supported (check GYP_GENERATORS).'
+    sys.exit(1)
+
+  # We explicitly don't support the native msvs gyp generator. Be nice and
+  # fail here, rather than generating broken projects.
+  if re.search(r'(^|,|\s)msvs($|,|\s)', os.environ.get('GYP_GENERATORS', '')):
+    print 'Error: msvs gyp generator not supported (check GYP_GENERATORS).'
+    print 'Did you mean to use the `msvs-ninja` generator?'
+    sys.exit(1)
+
+  # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
+  # to enforce syntax checking.
+  syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
+  if syntax_check and int(syntax_check):
+    args.append('--check')
+
+  # TODO(dmikurube): Remove these checks and messages after a while.
+  if ('linux_use_tcmalloc' in gyp_vars_dict or
+      'android_use_tcmalloc' in gyp_vars_dict):
+    print '*****************************************************************'
+    print '"linux_use_tcmalloc" and "android_use_tcmalloc" are deprecated!'
+    print '-----------------------------------------------------------------'
+    print 'You specify "linux_use_tcmalloc" or "android_use_tcmalloc" in'
+    print 'your GYP_DEFINES. Please switch them into "use_allocator" now.'
+    print 'See http://crbug.com/345554 for the details.'
+    print '*****************************************************************'
+
+  # Automatically turn on crosscompile support for platforms that need it.
+  # (The Chrome OS build sets CC_host / CC_target which implicitly enables
+  # this mode.)
+  if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
+          gyp_vars_dict.get('OS') in ['android', 'ios'],
+          'GYP_CROSSCOMPILE' not in os.environ)):
+    os.environ['GYP_CROSSCOMPILE'] = '1'
+  if gyp_vars_dict.get('OS') == 'android':
+    args.append('--check')
+
+  args.extend(
+      ['-I' + i for i in additional_include_files(supplemental_includes, args)])
+
+  args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])
+
+  if not use_analyzer:
+    print 'Updating projects from gyp files...'
+    sys.stdout.flush()
+
+  # Off we go...
+  gyp_rc = gyp.main(args)
+
+  if not use_analyzer:
+    vs2013_runtime_dll_dirs = vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
+    if vs2013_runtime_dll_dirs:
+      x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+      vs_toolchain.CopyVsRuntimeDlls(
+        os.path.join(chrome_src, GetOutputDirectory()),
+        (x86_runtime, x64_runtime))
+
+  sys.exit(gyp_rc)
diff --git a/build/gyp_chromium.py b/build/gyp_chromium.py
new file mode 100644
index 0000000..f9e8ac8
--- /dev/null
+++ b/build/gyp_chromium.py
@@ -0,0 +1,18 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is (possibly, depending on python version) imported by
+# gyp_chromium when GYP_PARALLEL=1, because gyp_chromium creates
+# sub-processes through the multiprocessing library.
+
+# Importing in Python 2.6 (fixed in 2.7) on Windows doesn't search for
+# imports that don't end in .py (and aren't directories with an
+# __init__.py). This wrapper makes "import gyp_chromium" work with
+# those old versions and makes it possible to execute gyp_chromium.py
+# directly on Windows where the extension is useful.
+
+import os
+
+path = os.path.abspath(os.path.split(__file__)[0])
+execfile(os.path.join(path, 'gyp_chromium'))
diff --git a/build/gyp_chromium_test.py b/build/gyp_chromium_test.py
new file mode 100755
index 0000000..0c0e479
--- /dev/null
+++ b/build/gyp_chromium_test.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+import unittest
+
+SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
+SRC_DIR = os.path.dirname(SCRIPT_DIR)
+
+sys.path.append(os.path.join(SRC_DIR, 'third_party', 'pymock'))
+
+import mock
+
+# TODO(sbc): Make gyp_chromium more testable by putting the code in
+# a .py file.
+gyp_chromium = __import__('gyp_chromium')
+
+
+class TestGetOutputDirectory(unittest.TestCase):
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__])
+  def testDefaultValue(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'out')
+
+  @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'})
+  @mock.patch('sys.argv', [__file__])
+  def testEnvironment(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'envfoo')
+
+  @mock.patch('os.environ', {'GYP_GENERATOR_FLAGS': 'output_dir=envfoo'})
+  @mock.patch('sys.argv', [__file__, '-Goutput_dir=cmdfoo'])
+  def testGFlagOverridesEnv(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'cmdfoo')
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-G', 'output_dir=foo'])
+  def testGFlagWithSpace(self):
+    self.assertEqual(gyp_chromium.GetOutputDirectory(), 'foo')
+
+
+class TestGetGypVars(unittest.TestCase):
+  @mock.patch('os.environ', {})
+  def testDefault(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {})
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-D', 'foo=bar'])
+  def testDFlags(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar'})
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-D', 'foo'])
+  def testDFlagsNoValue(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': '1'})
+
+  @mock.patch('os.environ', {})
+  @mock.patch('sys.argv', [__file__, '-D', 'foo=bar', '-Dbaz'])
+  def testDFlagMulti(self):
+    self.assertEqual(gyp_chromium.GetGypVars([]), {'foo': 'bar', 'baz': '1'})
+
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/build/gyp_environment.py b/build/gyp_environment.py
new file mode 100644
index 0000000..fb50645
--- /dev/null
+++ b/build/gyp_environment.py
@@ -0,0 +1,33 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Sets up various automatic gyp environment variables. These are used by
+gyp_chromium and landmines.py which run at different stages of runhooks. To
+make sure settings are consistent between them, all setup should happen here.
+"""
+
+import gyp_helper
+import os
+import sys
+import vs_toolchain
+
+def SetEnvironment():
+  """Sets defaults for GYP_* variables."""
+  gyp_helper.apply_chromium_gyp_env()
+
+  # Default to ninja on linux and windows, but only if no generator has
+  # explicitly been set.
+  # Also default to ninja on mac, but only when not building chrome/ios.
+  # . -f / --format has precedence over the env var, no need to check for it
+  # . set the env var only if it hasn't been set yet
+  # . chromium.gyp_env has been applied to os.environ at this point already
+  if sys.platform.startswith(('linux', 'win', 'freebsd')) and \
+      not os.environ.get('GYP_GENERATORS'):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+  elif sys.platform == 'darwin' and not os.environ.get('GYP_GENERATORS') and \
+      not 'OS=ios' in os.environ.get('GYP_DEFINES', []):
+    os.environ['GYP_GENERATORS'] = 'ninja'
+
+  vs_toolchain.SetEnvironmentAndGetRuntimeDllDirs()
diff --git a/build/gyp_helper.py b/build/gyp_helper.py
new file mode 100644
index 0000000..c840f2d
--- /dev/null
+++ b/build/gyp_helper.py
@@ -0,0 +1,68 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+  """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+  if not os.path.exists(file_path):
+    return
+  with open(file_path, 'rU') as f:
+    file_contents = f.read()
+  try:
+    file_data = eval(file_contents, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = os.path.abspath(file_path)
+    raise
+  supported_vars = (
+      'CC',
+      'CC_wrapper',
+      'CC.host_wrapper',
+      'CHROMIUM_GYP_FILE',
+      'CHROMIUM_GYP_SYNTAX_CHECK',
+      'CXX',
+      'CXX_wrapper',
+      'CXX.host_wrapper',
+      'GYP_DEFINES',
+      'GYP_GENERATOR_FLAGS',
+      'GYP_CROSSCOMPILE',
+      'GYP_GENERATOR_OUTPUT',
+      'GYP_GENERATORS',
+      'GYP_INCLUDE_FIRST',
+      'GYP_INCLUDE_LAST',
+      'GYP_MSVS_VERSION',
+  )
+  for var in supported_vars:
+    file_val = file_data.get(var)
+    if file_val:
+      if var in os.environ:
+        behavior = 'replaces'
+        if var == 'GYP_DEFINES':
+          result = file_val + ' ' + os.environ[var]
+          behavior = 'merges with, and individual components override,'
+        else:
+          result = os.environ[var]
+        print 'INFO: Environment value for "%s" %s value in %s' % (
+            var, behavior, os.path.abspath(file_path)
+        )
+        string_padding = max(len(var), len(file_path), len('result'))
+        print '      %s: %s' % (var.rjust(string_padding), os.environ[var])
+        print '      %s: %s' % (file_path.rjust(string_padding), file_val)
+        os.environ[var] = result
+      else:
+        os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+  if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+    # Update the environment based on chromium.gyp_env
+    path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+    apply_gyp_environment_from_file(path)
diff --git a/build/gypi_to_gn.py b/build/gypi_to_gn.py
new file mode 100644
index 0000000..a107f94
--- /dev/null
+++ b/build/gypi_to_gn.py
@@ -0,0 +1,167 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Converts a given gypi file to a python scope and writes the result to stdout.
+
+It is assumed that the file contains a toplevel dictionary, and this script
+will return that dictionary as a GN "scope" (see example below). This script
+does not know anything about GYP and it will not expand variables or execute
+conditions.
+
+It will strip conditions blocks.
+
+A variables block at the top level will be flattened so that the variables
+appear in the root dictionary. This way they can be returned to the GN code.
+
+Say your_file.gypi looked like this:
+  {
+     'sources': [ 'a.cc', 'b.cc' ],
+     'defines': [ 'ENABLE_DOOM_MELON' ],
+  }
+
+You would call it like this:
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi") ],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+Notes:
+ - The rebase_path call converts the gypi file from being relative to the
+   current build file to being system absolute for calling the script, which
+   will have a different current directory than this file.
+
+ - The "scope" parameter tells GN to interpret the result as a series of GN
+   variable assignments.
+
+ - The last file argument to exec_script tells GN that the given file is a
+   dependency of the build so Ninja can automatically re-run GN if the file
+   changes.
+
+Read the values into a target like this:
+  component("mycomponent") {
+    sources = gypi_values.sources
+    defines = gypi_values.defines
+  }
+
+Sometimes your .gypi file will include paths relative to a different
+directory than the current .gn file. In this case, you can rebase them to
+be relative to the current directory.
+  sources = rebase_path(gypi_values.sources, ".",
+                        "//path/gypi/input/values/are/relative/to")
+
+This script tolerates a 'variables' block in the toplevel dictionary or not.
+If the toplevel dictionary just contains one item called 'variables', it will
+be collapsed away and the result will be the contents of that dictionary. Some
+.gypi files are written with or without this, depending on how they expect to
+be embedded into a .gyp file.
+
+This script also has the ability to replace certain substrings in the input.
+Generally this is used to emulate GYP variable expansion. If you passed the
+argument "--replace=<(foo)=bar" then all instances of "<(foo)" in strings in
+the input will be replaced with "bar":
+
+  gypi_values = exec_script("//build/gypi_to_gn.py",
+                            [ rebase_path("your_file.gypi"),
+                              "--replace=<(foo)=bar"],
+                            "scope",
+                            [ "your_file.gypi" ])
+
+"""
+
+import gn_helpers
+from optparse import OptionParser
+import sys
+
+def LoadPythonDictionary(path):
+  file_string = open(path).read()
+  try:
+    file_data = eval(file_string, {'__builtins__': None}, None)
+  except SyntaxError, e:
+    e.filename = path
+    raise
+  except Exception, e:
+    raise Exception("Unexpected error while reading %s: %s" % (path, str(e)))
+
+  assert isinstance(file_data, dict), "%s does not eval to a dictionary" % path
+
+  # Flatten any variables to the top level.
+  if 'variables' in file_data:
+    file_data.update(file_data['variables'])
+    del file_data['variables']
+
+  # Strip any conditions.
+  if 'conditions' in file_data:
+    del file_data['conditions']
+  if 'target_conditions' in file_data:
+    del file_data['target_conditions']
+
+  # Strip targets in the toplevel, since some files define these and we can't
+  # slurp them in.
+  if 'targets' in file_data:
+    del file_data['targets']
+
+  return file_data
+
+
+def ReplaceSubstrings(values, search_for, replace_with):
+  """Recursively replaces substrings in a value.
+
+  Replaces all occurrences of "search_for" with "replace_with" for all
+  strings occurring in "values". This is done by recursively iterating into
+  lists as well as the keys and values of dictionaries."""
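+  # Illustrative example: ReplaceSubstrings(['<(foo)/a.cc'], '<(foo)', 'x')
+  # returns ['x/a.cc'].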
+  if isinstance(values, str):
+    return values.replace(search_for, replace_with)
+
+  if isinstance(values, list):
+    return [ReplaceSubstrings(v, search_for, replace_with) for v in values]
+
+  if isinstance(values, dict):
+    # For dictionaries, do the search for both the key and values.
+    result = {}
+    for key, value in values.items():
+      new_key = ReplaceSubstrings(key, search_for, replace_with)
+      new_value = ReplaceSubstrings(value, search_for, replace_with)
+      result[new_key] = new_value
+    return result
+
+  # Assume everything else is unchanged.
+  return values
+
+def main():
+  parser = OptionParser()
+  parser.add_option("-r", "--replace", action="append",
+    help="Replaces substrings. If passed a=b, replaces all substrs a with b.")
+  (options, args) = parser.parse_args()
+
+  if len(args) != 1:
+    raise Exception("Need one argument which is the .gypi file to read.")
+
+  data = LoadPythonDictionary(args[0])
+  if options.replace:
+    # Do replacements for all specified patterns.
+    for replace in options.replace:
+      split = replace.split('=')
+      # Allow "foo=" to replace with nothing.
+      if len(split) == 1:
+        split.append('')
+      assert len(split) == 2, "Replacement must be of the form 'key=value'."
+      data = ReplaceSubstrings(data, split[0], split[1])
+
+  # Sometimes .gypi files use the GYP syntax with percents at the end of the
+  # variable name (to indicate not to overwrite a previously-defined value):
+  #   'foo%': 'bar',
+  # Convert these to regular variables.
+  for key in data:
+    if len(key) > 1 and key[len(key) - 1] == '%':
+      data[key[:-1]] = data[key]
+      del data[key]
+
+  print gn_helpers.ToGNString(data)
+
+if __name__ == '__main__':
+  try:
+    main()
+  except Exception, e:
+    print str(e)
+    sys.exit(1)
diff --git a/build/host_jar.gypi b/build/host_jar.gypi
new file mode 100644
index 0000000..a47f6bb
--- /dev/null
+++ b/build/host_jar.gypi
@@ -0,0 +1,146 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to build
+# a JAR file for use on a host in a consistent manner. If a main class is
+# specified, this file will also generate an executable to run the jar in the
+# output folder's /bin/ directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_jar',
+#   'type': 'none',
+#   'variables': {
+#     'src_paths': [
+#       'path/to/directory',
+#       'path/to/other/directory',
+#       'path/to/individual_file.java',
+#       ...
+#     ],
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#   src_paths - A list of all paths containing java files that should be
+#     included in the jar. Paths can be either directories or files.
+# Optional/automatic variables:
+#   excluded_src_paths - A list of all paths that should be excluded from
+#     the jar.
+#   generated_src_dirs - Directories containing additional .java files
+#     generated at build time.
+#   input_jars_paths - A list of paths to the jars that should be included
+#     in the classpath.
+#   main_class - The class containing the main() function that should be called
+#     when running the jar file.
+#   jar_excluded_classes - A list of .class files that should be excluded
+#     from the jar.
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs',
+  ],
+  'variables': {
+    'classes_dir': '<(intermediate_dir)/classes',
+    'excluded_src_paths': [],
+    'generated_src_dirs': [],
+    'input_jars_paths': [],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_excluded_classes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_path': '<(jar_dir)/<(jar_name)',
+    'main_class%': '',
+    'stamp': '<(intermediate_dir)/jar.stamp',
+    'enable_errorprone%': '0',
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(jar_path)']
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'java_sources': [ '<!@(find <@(src_paths) -name "*.java")' ],
+        'conditions': [
+          ['"<(excluded_src_paths)" != ""', {
+            'java_sources!': ['<!@(find <@(excluded_src_paths) -name "*.java")']
+          }],
+          ['"<(jar_excluded_classes)" != ""', {
+            'extra_args': ['--jar-excluded-classes=<(jar_excluded_classes)']
+          }],
+          ['main_class != ""', {
+            'extra_args': ['--main-class=>(main_class)']
+          }],
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '^@(java_sources)',
+        '>@(input_jars_paths)',
+        '<@(extra_inputs)',
+      ],
+      'outputs': [
+        '<(jar_path)',
+        '<(stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--chromium-code=<(chromium_code)',
+        '--stamp=<(stamp)',
+        '--jar-path=<(jar_path)',
+        '<@(extra_args)',
+        '^@(java_sources)',
+      ],
+    },
+  ],
+  'conditions': [
+    ['main_class != ""', {
+      'actions': [
+        {
+          'action_name': 'create_java_binary_script_<(_target_name)',
+          'message': 'Creating java binary script <(_target_name)',
+          'variables': {
+            'output': '<(PRODUCT_DIR)/bin/<(_target_name)',
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/create_java_binary_script.py',
+            '<(jar_path)',
+          ],
+          'outputs': [
+            '<(output)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/create_java_binary_script.py',
+            '--classpath=>(input_jars_paths)',
+            '--jar-path=<(jar_path)',
+            '--output=<(output)',
+            '--main-class=>(main_class)',
+          ]
+        }
+      ]
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:chromium_errorprone',
+      ],
+    }],
+  ]
+}
+
diff --git a/build/host_prebuilt_jar.gypi b/build/host_prebuilt_jar.gypi
new file mode 100644
index 0000000..feed5ca
--- /dev/null
+++ b/build/host_prebuilt_jar.gypi
@@ -0,0 +1,50 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule to
+# copy a prebuilt JAR for use on a host to the output directory.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_prebuilt_jar',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/prebuilt.jar',
+#   },
+#   'includes': [ 'path/to/this/gypi/file' ],
+# }
+#
+# Required variables:
+#   jar_path - The path to the prebuilt jar.
+
+{
+  'dependencies': [
+  ],
+  'variables': {
+    'dest_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).jar',
+    'src_path': '<(jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': [
+        '<(dest_path)',
+      ]
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'copy_prebuilt_jar',
+      'message': 'Copy <(src_path) to <(dest_path)',
+      'inputs': [
+        '<(src_path)',
+      ],
+      'outputs': [
+        '<(dest_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/cp.py', '<(src_path)', '<(dest_path)',
+      ],
+    }
+  ]
+}
diff --git a/build/install-android-sdks.sh b/build/install-android-sdks.sh
new file mode 100755
index 0000000..1119b7d
--- /dev/null
+++ b/build/install-android-sdks.sh
@@ -0,0 +1,20 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install SDKs needed to build chromium on android.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+echo 'checking for sdk packages install'
+# Use absolute path to call 'android' so script can be run from any directory.
+cwd=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )
+# Get the SDK extras packages to install from the DEPS file 'sdkextras' hook.
+packages="$(python ${cwd}/get_sdk_extras_packages.py)"
+if [[ -n "${packages}" ]]; then
+  ${cwd}/../third_party/android_tools/sdk/tools/android update sdk --no-ui \
+      --filter ${packages}
+fi
+
+echo "install-android-sdks.sh complete."
diff --git a/build/install-build-deps-android.sh b/build/install-build-deps-android.sh
new file mode 100755
index 0000000..cf87381
--- /dev/null
+++ b/build/install-build-deps-android.sh
@@ -0,0 +1,100 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium on android, including
+# items requiring sudo privileges.
+# See http://code.google.com/p/chromium/wiki/AndroidBuildInstructions
+
+# This script installs the sun-java6 packages (bin, jre and jdk). Sun requires
+# a license agreement, so upon installation it will prompt the user. To get
+# past the curses-based dialog press TAB <ret> TAB <ret> to agree.
+
+args="$@"
+if test "$1" = "--skip-sdk-packages"; then
+  skip_inst_sdk_packages=1
+  args="${@:2}"
+else
+  skip_inst_sdk_packages=0
+fi
+
+if ! uname -m | egrep -q "i686|x86_64"; then
+  echo "Only x86 architectures are currently supported" >&2
+  exit
+fi
+
+# Install first the default Linux build deps.
+"$(dirname "${BASH_SOURCE[0]}")/install-build-deps.sh" \
+  --no-syms --lib32 --no-arm --no-chromeos-fonts --no-nacl --no-prompt "${args}"
+
+lsb_release=$(lsb_release --codename --short)
+
+# The temporary directory used to store output of update-java-alternatives
+TEMPDIR=$(mktemp -d)
+cleanup() {
+  local status=${?}
+  trap - EXIT
+  rm -rf "${TEMPDIR}"
+  exit ${status}
+}
+trap cleanup EXIT
+
+# Fix deps
+sudo apt-get -f install
+
+# Install deps
+# This step differs depending on what Ubuntu release we are running
+# on since the package names are different, and Sun's Java must
+# be installed manually on late-model versions.
+
+# common
+sudo apt-get -y install lighttpd python-pexpect xvfb x11-utils
+
+# Some binaries in the Android SDK require 32-bit libraries on the host.
+# See https://developer.android.com/sdk/installing/index.html?pkg=tools
+if [[ $lsb_release == "precise" ]]; then
+  sudo apt-get -y install ia32-libs
+else
+  sudo apt-get -y install libncurses5:i386 libstdc++6:i386 zlib1g:i386
+fi
+
+sudo apt-get -y install ant
+
+# Install the OpenJDK 7 JRE and JDK packages.
+sudo apt-get -y install openjdk-7-jre openjdk-7-jdk
+
+# Switch version of Java to openjdk 7.
+# Some Java plugins (e.g. for firefox, mozilla) are not required to build, and
+# thus are treated only as warnings. Any errors in updating java alternatives
+# which are not '*-javaplugin.so' will cause errors and stop the script from
+# completing successfully.
+if ! sudo update-java-alternatives -s java-1.7.0-openjdk-amd64 \
+           >& "${TEMPDIR}"/update-java-alternatives.out
+then
+  # Check that there are the expected javaplugin.so errors for the update
+  if grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out >& \
+      /dev/null
+  then
+    # Print as warnings all the javaplugin.so errors
+    echo 'WARNING: java-1.7.0-openjdk has no alternatives for the following plugins:'
+    grep 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+  fi
+  # Check if there are any errors that are not javaplugin.so
+  if grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out \
+      >& /dev/null
+  then
+    # If there are non-javaplugin.so errors, treat as errors and exit
+    echo 'ERRORS: Failed to update alternatives for java-6-sun:'
+    grep -v 'javaplugin.so' "${TEMPDIR}"/update-java-alternatives.out
+    exit 1
+  fi
+fi
+
+# Install SDK packages for android
+if test "$skip_inst_sdk_packages" != 1; then
+  "$(dirname "${BASH_SOURCE[0]}")/install-android-sdks.sh"
+fi
+
+echo "install-build-deps-android.sh complete."
diff --git a/build/install-build-deps.py b/build/install-build-deps.py
new file mode 100755
index 0000000..7cc3760
--- /dev/null
+++ b/build/install-build-deps.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import argparse
+import operator
+import os
+import platform
+import re
+import subprocess
+import sys
+
+
+SUPPORTED_UBUNTU_VERSIONS = (
+  {'number': '12.04', 'codename': 'precise'},
+  {'number': '14.04', 'codename': 'trusty'},
+  {'number': '14.10', 'codename': 'utopic'},
+  {'number': '15.04', 'codename': 'vivid'},
+)
+
+
+# Packages needed for chromeos only.
+_packages_chromeos_dev = (
+  'libbluetooth-dev',
+  'libxkbcommon-dev',
+  'realpath',
+)
+
+
+# Packages needed for development.
+_packages_dev = (
+  'apache2.2-bin',
+  'bison',
+  'cdbs',
+  'curl',
+  'devscripts',
+  'dpkg-dev',
+  'elfutils',
+  'fakeroot',
+  'flex',
+  'fonts-thai-tlwg',
+  'g++',
+  'git-core',
+  'git-svn',
+  'gperf',
+  'language-pack-da',
+  'language-pack-fr',
+  'language-pack-he',
+  'language-pack-zh-hant',
+  'libapache2-mod-php5',
+  'libasound2-dev',
+  'libav-tools',
+  'libbrlapi-dev',
+  'libbz2-dev',
+  'libcairo2-dev',
+  'libcap-dev',
+  'libcups2-dev',
+  'libcurl4-gnutls-dev',
+  'libdrm-dev',
+  'libelf-dev',
+  'libexif-dev',
+  'libgconf2-dev',
+  'libglib2.0-dev',
+  'libglu1-mesa-dev',
+  'libgnome-keyring-dev',
+  'libgtk2.0-dev',
+  'libkrb5-dev',
+  'libnspr4-dev',
+  'libnss3-dev',
+  'libpam0g-dev',
+  'libpci-dev',
+  'libpulse-dev',
+  'libsctp-dev',
+  'libspeechd-dev',
+  'libsqlite3-dev',
+  'libssl-dev',
+  'libudev-dev',
+  'libwww-perl',
+  'libxslt1-dev',
+  'libxss-dev',
+  'libxt-dev',
+  'libxtst-dev',
+  'openbox',
+  'patch',
+  'perl',
+  'php5-cgi',
+  'pkg-config',
+  'python',
+  'python-cherrypy3',
+  'python-crypto',
+  'python-dev',
+  'python-numpy',
+  'python-opencv',
+  'python-openssl',
+  'python-psutil',
+  'python-yaml',
+  'rpm',
+  'ruby',
+  'subversion',
+  'ttf-dejavu-core',
+  'ttf-indic-fonts',
+  'ttf-kochi-gothic',
+  'ttf-kochi-mincho',
+  'wdiff',
+  'xfonts-mathml',
+  'zip',
+)
+
+
+# Run-time libraries required by chromeos only.
+_packages_chromeos_lib = (
+  'libbz2-1.0',
+  'libpulse0',
+)
+
+
+# Full list of required run-time libraries.
+_packages_lib = (
+  'libasound2',
+  'libatk1.0-0',
+  'libc6',
+  'libcairo2',
+  'libcap2',
+  'libcups2',
+  'libexif12',
+  'libexpat1',
+  'libfontconfig1',
+  'libfreetype6',
+  'libglib2.0-0',
+  'libgnome-keyring0',
+  'libgtk2.0-0',
+  'libpam0g',
+  'libpango1.0-0',
+  'libpci3',
+  'libpcre3',
+  'libpixman-1-0',
+  'libpng12-0',
+  'libspeechd2',
+  'libsqlite3-0',
+  'libstdc++6',
+  'libx11-6',
+  'libxau6',
+  'libxcb1',
+  'libxcomposite1',
+  'libxcursor1',
+  'libxdamage1',
+  'libxdmcp6',
+  'libxext6',
+  'libxfixes3',
+  'libxi6',
+  'libxinerama1',
+  'libxrandr2',
+  'libxrender1',
+  'libxtst6',
+  'zlib1g',
+)
+
+
+# Debugging symbols for all of the run-time libraries.
+_packages_dbg = (
+  'libatk1.0-dbg',
+  'libc6-dbg',
+  'libcairo2-dbg',
+  'libfontconfig1-dbg',
+  'libglib2.0-0-dbg',
+  'libgtk2.0-0-dbg',
+  'libpango1.0-0-dbg',
+  'libpcre3-dbg',
+  'libpixman-1-0-dbg',
+  'libsqlite3-0-dbg',
+  'libx11-6-dbg',
+  'libxau6-dbg',
+  'libxcb1-dbg',
+  'libxcomposite1-dbg',
+  'libxcursor1-dbg',
+  'libxdamage1-dbg',
+  'libxdmcp6-dbg',
+  'libxext6-dbg',
+  'libxfixes3-dbg',
+  'libxi6-dbg',
+  'libxinerama1-dbg',
+  'libxrandr2-dbg',
+  'libxrender1-dbg',
+  'libxtst6-dbg',
+  'zlib1g-dbg',
+)
+
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf.
+_packages_lib32 = (
+  'linux-libc-dev:i386',
+)
+
+
+# arm cross toolchain packages needed to build chrome on armhf.
+_packages_arm = (
+  'g++-arm-linux-gnueabihf',
+  'libc6-dev-armhf-cross',
+  'linux-libc-dev-armhf-cross',
+)
+
+
+# Packages to build NaCl, its toolchains, and its ports.
+_packages_naclports = (
+  'ant',
+  'autoconf',
+  'bison',
+  'cmake',
+  'gawk',
+  'intltool',
+  'xsltproc',
+  'xutils-dev',
+)
+_packages_nacl = (
+  'g++-mingw-w64-i686',
+  'lib32ncurses5-dev',
+  'lib32z1-dev',
+  'libasound2:i386',
+  'libcap2:i386',
+  'libelf-dev:i386',
+  'libexif12:i386',
+  'libfontconfig1:i386',
+  'libgconf-2-4:i386',
+  'libglib2.0-0:i386',
+  'libgpm2:i386',
+  'libgtk2.0-0:i386',
+  'libncurses5:i386',
+  'libnss3:i386',
+  'libpango1.0-0:i386',
+  'libssl1.0.0:i386',
+  'libtinfo-dev',
+  'libtinfo-dev:i386',
+  'libtool',
+  'libxcomposite1:i386',
+  'libxcursor1:i386',
+  'libxdamage1:i386',
+  'libxi6:i386',
+  'libxrandr2:i386',
+  'libxss1:i386',
+  'libxtst6:i386',
+  'texinfo',
+  'xvfb',
+)
+
+
+def is_userland_64_bit():
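+  # The bitness of the running Python interpreter is used as a proxy for the
+  # bitness of the userland.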
+  return platform.architecture()[0] == '64bit'
+
+
+def package_exists(pkg):
+  return pkg in subprocess.check_output(['apt-cache', 'pkgnames']).splitlines()
+
+
+def lsb_release_short_codename():
+  return subprocess.check_output(
+      ['lsb_release', '--codename', '--short']).strip()
+
+
+def write_error(message):
+  sys.stderr.write('ERROR: %s\n' % message)
+  sys.stderr.flush()
+
+
+def nonfatal_get_output(*popenargs, **kwargs):
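+  # Run a command and return (returncode, stdout, stderr). Unlike
+  # subprocess.check_output, a non-zero exit status is not treated as fatal.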
+  process = subprocess.Popen(
+      stdout=subprocess.PIPE, stderr=subprocess.PIPE, *popenargs, **kwargs)
+  stdout, stderr = process.communicate()
+  retcode = process.poll()
+  return retcode, stdout, stderr
+
+
+def compute_dynamic_package_lists():
+  global _packages_arm
+  global _packages_dbg
+  global _packages_dev
+  global _packages_lib
+  global _packages_lib32
+  global _packages_nacl
+
+  if is_userland_64_bit():
+    # 64-bit systems need a minimum set of 32-bit compat packages
+    # for the pre-built NaCl binaries.
+    _packages_dev += (
+      'lib32gcc1',
+      'lib32stdc++6',
+      'libc6-i386',
+    )
+
+    # When cross building for arm/Android on 64-bit systems the host binaries
+    # that are part of v8 need to be compiled with -m32 which means
+    # that basic multilib support is needed.
+    # gcc-multilib conflicts with the arm cross compiler (at least in trusty)
+    # but g++-X.Y-multilib gives us the 32-bit support that we need. Find out
+    # the appropriate value of X and Y by seeing what version the current
+    # distribution's g++-multilib package depends on.
+    output = subprocess.check_output(['apt-cache', 'depends', 'g++-multilib'])
+    multilib_package = re.search(r'g\+\+-[0-9.]+-multilib', output).group()
+    _packages_lib32 += (multilib_package,)
+
+  lsb_codename = lsb_release_short_codename()
+
+  # Find the proper version of libstdc++6-4.x-dbg.
+  if lsb_codename == 'precise':
+    _packages_dbg += ('libstdc++6-4.6-dbg',)
+  elif lsb_codename == 'trusty':
+    _packages_dbg += ('libstdc++6-4.8-dbg',)
+  else:
+    _packages_dbg += ('libstdc++6-4.9-dbg',)
+
+  # Workaround for a dependency issue on Ubuntu/Trusty: http://crbug.com/435056.
+  if lsb_codename == 'trusty':
+    _packages_arm += (
+      'g++-4.8-multilib-arm-linux-gnueabihf',
+      'gcc-4.8-multilib-arm-linux-gnueabihf',
+    )
+
+  # Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+  # it depends on mesa, and only one version of mesa can exist on the system.
+  # Hence we must match the installed version or this entire script will fail.
+  mesa_variant = ''
+  for variant in ('-lts-trusty', '-lts-utopic'):
+    rc, stdout, stderr = nonfatal_get_output(
+        ['dpkg-query', '-W', '-f', '${Status}', 'libgl1-mesa-glx' + variant])
+    if 'ok installed' in stdout:
+      mesa_variant = variant
+  _packages_dev += (
+    'libgbm-dev' + mesa_variant,
+    'libgl1-mesa-dev' + mesa_variant,
+    'libgles2-mesa-dev' + mesa_variant,
+    'mesa-common-dev' + mesa_variant,
+  )
+
+  if package_exists('ttf-mscorefonts-installer'):
+    _packages_dev += ('ttf-mscorefonts-installer',)
+  else:
+    _packages_dev += ('msttcorefonts',)
+
+  if package_exists('libnspr4-dbg'):
+    _packages_dbg += ('libnspr4-dbg', 'libnss3-dbg')
+    _packages_lib += ('libnspr4', 'libnss3')
+  else:
+    _packages_dbg += ('libnspr4-0d-dbg', 'libnss3-1d-dbg')
+    _packages_lib += ('libnspr4-0d', 'libnss3-1d')
+
+  if package_exists('libjpeg-dev'):
+    _packages_dev += ('libjpeg-dev',)
+  else:
+    _packages_dev += ('libjpeg62-dev',)
+
+  if package_exists('libudev1'):
+    _packages_dev += ('libudev1',)
+    _packages_nacl += ('libudev1:i386',)
+  else:
+    _packages_dev += ('libudev0',)
+    _packages_nacl += ('libudev0:i386',)
+
+  if package_exists('libbrlapi0.6'):
+    _packages_dev += ('libbrlapi0.6',)
+  else:
+    _packages_dev += ('libbrlapi0.5',)
+
+  # Some packages are only needed if the distribution actually supports
+  # installing them.
+  if package_exists('appmenu-gtk'):
+    _packages_lib += ('appmenu-gtk',)
+
+  _packages_dev += _packages_chromeos_dev
+  _packages_lib += _packages_chromeos_lib
+  _packages_nacl += _packages_naclports
+
+
+def quick_check(packages):
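+  # Report whether dpkg-query knows about every requested package: return 0
+  # if it ran cleanly, otherwise print its stderr output and return 1.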
+  rc, stdout, stderr = nonfatal_get_output([
+      'dpkg-query', '-W', '-f', '${PackageSpec}:${Status}\n'] + list(packages))
+  if rc == 0 and not stderr:
+    return 0
+  print stderr
+  return 1
+
+
+def main(argv):
+  parser = argparse.ArgumentParser()
+  parser.add_argument('--quick-check', action='store_true',
+                      help='quickly try to determine if dependencies are '
+                           'installed (this avoids interactive prompts and '
+                           'sudo commands so might not be 100% accurate)')
+  parser.add_argument('--unsupported', action='store_true',
+                      help='attempt installation even on unsupported systems')
+  args = parser.parse_args(argv)
+
+  lsb_codename = lsb_release_short_codename()
+  if not args.unsupported and not args.quick_check:
+    if lsb_codename not in map(
+        operator.itemgetter('codename'), SUPPORTED_UBUNTU_VERSIONS):
+      supported_ubuntus = ['%(number)s (%(codename)s)' % v
+                           for v in SUPPORTED_UBUNTU_VERSIONS]
+      write_error('Only Ubuntu %s are currently supported.' %
+                  ', '.join(supported_ubuntus))
+      return 1
+
+    if platform.machine() not in ('i686', 'x86_64'):
+      write_error('Only x86 architectures are currently supported.')
+      return 1
+
+  if os.geteuid() != 0 and not args.quick_check:
+    print 'Running as non-root user.'
+    print 'You might have to enter your password one or more times'
+    print 'for \'sudo\'.'
+    print
+
+  compute_dynamic_package_lists()
+
+  packages = (_packages_dev + _packages_lib + _packages_dbg + _packages_lib32 +
+              _packages_arm + _packages_nacl)
+  def packages_key(pkg):
+    # Sort the ":i386" (architecture-qualified) packages to the front,
+    # mirroring the "sort -r -s -t: -k2" in install-build-deps.sh, so that
+    # dpkg-query is not confused (crbug.com/446172).
+    s = pkg.rsplit(':', 1)
+    if len(s) == 1:
+      return (1, s[0], '')
+    return (0, s[0], s[1])
+  packages = sorted(set(packages), key=packages_key)
+
+  if args.quick_check:
+    return quick_check(packages)
+
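+  # Note: only the --quick-check path is implemented here; the actual package
+  # installation is still handled by install-build-deps.sh.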
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/install-build-deps.sh b/build/install-build-deps.sh
new file mode 100755
index 0000000..57f7216
--- /dev/null
+++ b/build/install-build-deps.sh
@@ -0,0 +1,477 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install everything needed to build chromium (well, ideally, anyway)
+# See http://code.google.com/p/chromium/wiki/LinuxBuildInstructions
+# and http://code.google.com/p/chromium/wiki/LinuxBuild64Bit
+
+usage() {
+  echo "Usage: $0 [--options]"
+  echo "Options:"
+  echo "--[no-]syms: enable or disable installation of debugging symbols"
+  echo "--lib32: enable installation of 32-bit libraries, e.g. for V8 snapshot"
+  echo "--[no-]arm: enable or disable installation of arm cross toolchain"
+  echo "--[no-]chromeos-fonts: enable or disable installation of Chrome OS"\
+       "fonts"
+  echo "--[no-]nacl: enable or disable installation of prerequisites for"\
+       "building standalone NaCl and all its toolchains"
+  echo "--no-prompt: silently select standard options/defaults"
+  echo "--quick-check: quickly try to determine if dependencies are installed"
+  echo "               (this avoids interactive prompts and sudo commands,"
+  echo "               so might not be 100% accurate)"
+  echo "--unsupported: attempt installation even on unsupported systems"
+  echo "Script will prompt interactively if options not given."
+  exit 1
+}
+
+# Checks whether a particular package is available in the repos.
+# USAGE: $ package_exists <package name>
+package_exists() {
+  apt-cache pkgnames | grep -x "$1" > /dev/null 2>&1
+}
+
+# These default to on because (some) bots need them and it keeps things
+# simple for the bot setup if all bots just run the script in its default
+# mode.  Developers who don't want stuff they don't need installed on their
+# own workstations can pass --no-arm --no-nacl when running the script.
+do_inst_arm=1
+do_inst_nacl=1
+
+while test "$1" != ""
+do
+  case "$1" in
+  --syms)                   do_inst_syms=1;;
+  --no-syms)                do_inst_syms=0;;
+  --lib32)                  do_inst_lib32=1;;
+  --arm)                    do_inst_arm=1;;
+  --no-arm)                 do_inst_arm=0;;
+  --chromeos-fonts)         do_inst_chromeos_fonts=1;;
+  --no-chromeos-fonts)      do_inst_chromeos_fonts=0;;
+  --nacl)                   do_inst_nacl=1;;
+  --no-nacl)                do_inst_nacl=0;;
+  --no-prompt)              do_default=1
+                            do_quietly="-qq --assume-yes"
+    ;;
+  --quick-check)            do_quick_check=1;;
+  --unsupported)            do_unsupported=1;;
+  *) usage;;
+  esac
+  shift
+done
+
+if test "$do_inst_arm" = "1"; then
+  do_inst_lib32=1
+fi
+
+# Check for lsb_release command in $PATH
+if ! which lsb_release > /dev/null; then
+  echo "ERROR: lsb_release not found in \$PATH" >&2
+  exit 1;
+fi
+
+distro=$(lsb_release --id --short)
+codename=$(lsb_release --codename --short)
+ubuntu_codenames="(precise|trusty|utopic|vivid)"
+debian_codenames="(stretch)"
+if [ 0 -eq "${do_unsupported-0}" ] && [ 0 -eq "${do_quick_check-0}" ] ; then
+  if [[ ! $codename =~ $ubuntu_codenames && ! $codename =~ $debian_codenames ]]; then
+    echo "ERROR: Only Ubuntu 12.04 (precise), 14.04 (trusty), " \
+      "14.10 (utopic) and 15.04 (vivid), and Debian Testing (stretch) are currently supported" >&2
+    exit 1
+  fi
+
+  if ! uname -m | egrep -q "i686|x86_64"; then
+    echo "Only x86 architectures are currently supported" >&2
+    exit
+  fi
+fi
+
+if [ "x$(id -u)" != x0 ] && [ 0 -eq "${do_quick_check-0}" ]; then
+  echo "Running as non-root user."
+  echo "You might have to enter your password one or more times for 'sudo'."
+  echo
+fi
+
+# Packages needed for chromeos only
+chromeos_dev_list="libbluetooth-dev libxkbcommon-dev realpath"
+
+# Packages needed for development
+if [[ $distro = Debian ]] ; then
+  # Debian-specific package names
+  dev_list="apache2-bin fonts-indic fonts-lyx"
+else
+  # Ubuntu-specific package names
+  dev_list="apache2.2-bin ttf-indic-fonts xfonts-mathml language-pack-da
+            language-pack-fr language-pack-he language-pack-zh-hant"
+fi
+dev_list="$dev_list bison cdbs curl dpkg-dev elfutils devscripts fakeroot
+          flex fonts-thai-tlwg g++ git-core git-svn gperf libapache2-mod-php5
+          libasound2-dev libbrlapi-dev libav-tools
+          libbz2-dev libcairo2-dev libcap-dev libcups2-dev libcurl4-gnutls-dev
+          libdrm-dev libelf-dev libexif-dev libgconf2-dev libglib2.0-dev
+          libglu1-mesa-dev libgnome-keyring-dev libgtk2.0-dev libkrb5-dev
+          libnspr4-dev libnss3-dev libpam0g-dev libpci-dev libpulse-dev
+          libsctp-dev libspeechd-dev libsqlite3-dev libssl-dev libudev-dev
+          libwww-perl libxslt1-dev libxss-dev libxt-dev libxtst-dev openbox
+          patch perl php5-cgi pkg-config python python-cherrypy3 python-crypto
+          python-dev python-numpy python-opencv python-openssl python-psutil
+          python-yaml rpm ruby subversion ttf-dejavu-core
+          ttf-kochi-gothic ttf-kochi-mincho wdiff zip
+          $chromeos_dev_list"
+
+# 64-bit systems need a minimum set of 32-bit compat packages for the pre-built
+# NaCl binaries.
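+# (Checking /sbin/init's ELF class detects the userland bitness, which can
+# differ from the kernel bitness reported by "uname -m".)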
+if file /sbin/init | grep -q 'ELF 64-bit'; then
+  dev_list="${dev_list} libc6-i386 lib32gcc1 lib32stdc++6"
+fi
+
+# Run-time libraries required by chromeos only
+chromeos_lib_list="libpulse0 libbz2-1.0"
+
+# Full list of required run-time libraries
+lib_list="libatk1.0-0 libc6 libasound2 libcairo2 libcap2 libcups2 libexpat1
+          libexif12 libfontconfig1 libfreetype6 libglib2.0-0 libgnome-keyring0
+          libgtk2.0-0 libpam0g libpango1.0-0 libpci3 libpcre3 libpixman-1-0
+          libpng12-0 libspeechd2 libstdc++6 libsqlite3-0 libx11-6
+          libxau6 libxcb1 libxcomposite1 libxcursor1 libxdamage1 libxdmcp6
+          libxext6 libxfixes3 libxi6 libxinerama1 libxrandr2 libxrender1
+          libxtst6 zlib1g $chromeos_lib_list"
+
+# Debugging symbols for all of the run-time libraries
+dbg_list="libatk1.0-dbg libc6-dbg libcairo2-dbg libfontconfig1-dbg
+          libglib2.0-0-dbg libgtk2.0-0-dbg libpango1.0-0-dbg libpcre3-dbg
+          libpixman-1-0-dbg libsqlite3-0-dbg libx11-6-dbg libxau6-dbg
+          libxcb1-dbg libxcomposite1-dbg libxcursor1-dbg libxdamage1-dbg
+          libxdmcp6-dbg libxext6-dbg libxfixes3-dbg libxi6-dbg libxinerama1-dbg
+          libxrandr2-dbg libxrender1-dbg libxtst6-dbg zlib1g-dbg"
+
+# Find the proper version of libstdc++6-4.x-dbg.
+if [ "x$codename" = "xprecise" ]; then
+  dbg_list="${dbg_list} libstdc++6-4.6-dbg"
+elif [ "x$codename" = "xtrusty" ]; then
+  dbg_list="${dbg_list} libstdc++6-4.8-dbg"
+else
+  dbg_list="${dbg_list} libstdc++6-4.9-dbg"
+fi
+
+# 32-bit libraries needed e.g. to compile V8 snapshot for Android or armhf
+lib32_list="linux-libc-dev:i386"
+
+# arm cross toolchain packages needed to build chrome on armhf
+arm_list="libc6-dev-armhf-cross
+          linux-libc-dev-armhf-cross"
+
+# Workaround for a dependency issue on Debian/Stretch
+if [ "x$codename" = "xstretch" ]; then
+  arm_list+=" g++-5-arm-linux-gnueabihf"
+else
+  arm_list+=" g++-arm-linux-gnueabihf"
+fi
+
+# Workaround for a dependency issue on Ubuntu/Trusty: http://crbug.com/435056
+if [ "x$codename" = "xtrusty" ]; then
+  arm_list+=" g++-4.8-multilib-arm-linux-gnueabihf
+              gcc-4.8-multilib-arm-linux-gnueabihf"
+fi
+
+# Packages to build NaCl, its toolchains, and its ports.
+naclports_list="ant autoconf bison cmake gawk intltool xutils-dev xsltproc"
+nacl_list="g++-mingw-w64-i686 lib32z1-dev
+           libasound2:i386 libcap2:i386 libelf-dev:i386 libexif12:i386
+           libfontconfig1:i386 libgconf-2-4:i386 libglib2.0-0:i386 libgpm2:i386
+           libgtk2.0-0:i386 libncurses5:i386 lib32ncurses5-dev
+           libnss3:i386 libpango1.0-0:i386
+           libssl1.0.0:i386 libtinfo-dev libtinfo-dev:i386 libtool
+           libxcomposite1:i386 libxcursor1:i386 libxdamage1:i386 libxi6:i386
+           libxrandr2:i386 libxss1:i386 libxtst6:i386 texinfo xvfb
+           ${naclports_list}"
+
+# Find the proper version of libgbm-dev. We can't just install libgbm-dev as
+# it depends on mesa, and only one version of mesa can exist on the system.
+# Hence we must match the installed version or this entire script will fail.
+mesa_variant=""
+for variant in "-lts-trusty" "-lts-utopic"; do
+  if $(dpkg-query -Wf'${Status}' libgl1-mesa-glx${variant} 2>/dev/null | \
+       grep -q " ok installed"); then
+    mesa_variant="${variant}"
+  fi
+done
+dev_list="${dev_list} libgbm-dev${mesa_variant}
+          libgles2-mesa-dev${mesa_variant} libgl1-mesa-dev${mesa_variant}
+          mesa-common-dev${mesa_variant}"
+nacl_list="${nacl_list} libgl1-mesa-glx${mesa_variant}:i386"
+
+# Some package names have changed over time
+if package_exists ttf-mscorefonts-installer; then
+  dev_list="${dev_list} ttf-mscorefonts-installer"
+else
+  dev_list="${dev_list} msttcorefonts"
+fi
+if package_exists libnspr4-dbg; then
+  dbg_list="${dbg_list} libnspr4-dbg libnss3-dbg"
+  lib_list="${lib_list} libnspr4 libnss3"
+else
+  dbg_list="${dbg_list} libnspr4-0d-dbg libnss3-1d-dbg"
+  lib_list="${lib_list} libnspr4-0d libnss3-1d"
+fi
+if package_exists libjpeg-dev; then
+  dev_list="${dev_list} libjpeg-dev"
+else
+  dev_list="${dev_list} libjpeg62-dev"
+fi
+if package_exists libudev1; then
+  dev_list="${dev_list} libudev1"
+  nacl_list="${nacl_list} libudev1:i386"
+else
+  dev_list="${dev_list} libudev0"
+  nacl_list="${nacl_list} libudev0:i386"
+fi
+if package_exists libbrlapi0.6; then
+  dev_list="${dev_list} libbrlapi0.6"
+else
+  dev_list="${dev_list} libbrlapi0.5"
+fi
+
+
+# Some packages are only needed if the distribution actually supports
+# installing them.
+if package_exists appmenu-gtk; then
+  lib_list="$lib_list appmenu-gtk"
+fi
+
+# When cross building for arm/Android on 64-bit systems the host binaries
+# that are part of v8 need to be compiled with -m32 which means
+# that basic multilib support is needed.
+if file /sbin/init | grep -q 'ELF 64-bit'; then
+  # gcc-multilib conflicts with the arm cross compiler (at least in trusty) but
+  # g++-X.Y-multilib gives us the 32-bit support that we need. Find out the
+  # appropriate value of X and Y by seeing what version the current
+  # distribution's g++-multilib package depends on.
+  multilib_package=$(apt-cache depends g++-multilib --important | \
+      grep -E --color=never --only-matching '\bg\+\+-[0-9.]+-multilib\b')
+  lib32_list="$lib32_list $multilib_package"
+fi
+
+# Waits for the user to press 'Y' or 'N'. Either uppercase or lowercase is
+# accepted. Returns 0 for 'Y' and 1 for 'N'. If an optional parameter has
+# been provided to yes_no(), the function also accepts RETURN as a user input.
+# The parameter specifies the exit code that should be returned in that case.
+# The function will echo the user's selection followed by a newline character.
+# Users can abort the function by pressing CTRL-C. This will call "exit 1".
+yes_no() {
+  if [ 0 -ne "${do_default-0}" ] ; then
+    [ $1 -eq 0 ] && echo "Y" || echo "N"
+    return $1
+  fi
+  local c
+  while :; do
+    c="$(trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT
+         stty -echo iuclc -icanon 2>/dev/null
+         dd count=1 bs=1 2>/dev/null | od -An -tx1)"
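+    # od prints the pressed key as a hex byte: " 0a" is RETURN, " 79" is "y"
+    # and " 6e" is "n".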
+    case "$c" in
+      " 0a") if [ -n "$1" ]; then
+               [ $1 -eq 0 ] && echo "Y" || echo "N"
+               return $1
+             fi
+             ;;
+      " 79") echo "Y"
+             return 0
+             ;;
+      " 6e") echo "N"
+             return 1
+             ;;
+      "")    echo "Aborted" >&2
+             exit 1
+             ;;
+      *)     # The user pressed an unrecognized key. As we are not echoing
+             # any incorrect user input, alert the user by ringing the bell.
+             (tput bel) 2>/dev/null
+             ;;
+    esac
+  done
+}
+
+if test "$do_inst_syms" = "" && test 0 -eq ${do_quick_check-0}
+then
+  echo "This script installs all tools and libraries needed to build Chromium."
+  echo ""
+  echo "For most of the libraries, it can also install debugging symbols, which"
+  echo "will allow you to debug code in the system libraries. Most developers"
+  echo "won't need these symbols."
+  echo -n "Do you want me to install them for you (y/N) "
+  if yes_no 1; then
+    do_inst_syms=1
+  fi
+fi
+if test "$do_inst_syms" = "1"; then
+  echo "Including debugging symbols."
+else
+  echo "Skipping debugging symbols."
+  dbg_list=
+fi
+
+if test "$do_inst_lib32" = "1" ; then
+  echo "Including 32-bit libraries for ARM/Android."
+else
+  echo "Skipping 32-bit libraries for ARM/Android."
+  lib32_list=
+fi
+
+if test "$do_inst_arm" = "1" ; then
+  echo "Including ARM cross toolchain."
+else
+  echo "Skipping ARM cross toolchain."
+  arm_list=
+fi
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Including NaCl, NaCl toolchain, NaCl ports dependencies."
+else
+  echo "Skipping NaCl, NaCl toolchain, NaCl ports dependencies."
+  nacl_list=
+fi
+
+# The `sort -r -s -t: -k2` sorts all the :i386 packages to the front, to avoid
+# confusing dpkg-query (crbug.com/446172).
+packages="$(
+  echo "${dev_list} ${lib_list} ${dbg_list} ${lib32_list} ${arm_list}"\
+       "${nacl_list}" | tr " " "\n" | sort -u | sort -r -s -t: -k2 | tr "\n" " "
+)"
+
+if [ 1 -eq "${do_quick_check-0}" ] ; then
+  failed_check="$(dpkg-query -W -f '${PackageSpec}:${Status}\n' \
+    ${packages} 2>&1 | grep -v "ok installed" || :)"
+  if [ -n "${failed_check}" ]; then
+    echo
+    nomatch="$(echo "${failed_check}" | \
+      sed -e "s/^No packages found matching \(.*\).$/\1/;t;d")"
+    missing="$(echo "${failed_check}" | \
+      sed -e "/^No packages found matching/d;s/^\(.*\):.*$/\1/")"
+    if [ "$nomatch" ]; then
+      # Distinguish between packages that actually aren't available to the
+      # system (i.e. not in any repo) and packages that just aren't known to
+      # dpkg (i.e. managed by apt).
+      unknown=""
+      for p in ${nomatch}; do
+        if apt-cache show ${p} > /dev/null 2>&1; then
+          missing="${p}\n${missing}"
+        else
+          unknown="${p}\n${unknown}"
+        fi
+      done
+      if [ -n "${unknown}" ]; then
+        echo "WARNING: The following packages are unknown to your system"
+        echo "(maybe missing a repo or need to 'sudo apt-get update'):"
+        echo -e "${unknown}" | sed -e "s/^/  /"
+      fi
+    fi
+    if [ -n "${missing}" ]; then
+      echo "WARNING: The following packages are not installed:"
+      echo -e "${missing}" | sed -e "s/^/  /"
+    fi
+    exit 1
+  fi
+  exit 0
+fi
+
+if test "$do_inst_lib32" = "1" || test "$do_inst_nacl" = "1"; then
+  if [[ ! $codename =~ (precise) ]]; then
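+    # (precise is skipped here, presumably because i386 is already enabled as
+    # a foreign architecture there and its dpkg predates --add-architecture.)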
+    sudo dpkg --add-architecture i386
+  fi
+fi
+sudo apt-get update
+
+# We initially run "apt-get" with the --reinstall option and parse its output.
+# This way, we can find all the packages that need to be newly installed
+# without accidentally promoting any packages from "auto" to "manual".
+# We then re-run "apt-get" with just the list of missing packages.
+echo "Finding missing packages..."
+# Intentionally leaving $packages unquoted so it's more readable.
+echo "Packages required: " $packages
+echo
+new_list_cmd="sudo apt-get install --reinstall $(echo $packages)"
+if new_list="$(yes n | LANGUAGE=en LANG=C $new_list_cmd)"; then
+  # We probably never hit the following line.
+  echo "No missing packages, and the packages are up-to-date."
+elif [ $? -eq 1 ]; then
+  # We expect apt-get to have exit status of 1.
+  # This indicates that we cancelled the install with "yes n|".
+  new_list=$(echo "$new_list" |
+    sed -e '1,/The following NEW packages will be installed:/d;s/^  //;t;d')
+  new_list=$(echo "$new_list" | sed 's/ *$//')
+  if [ -z "$new_list" ] ; then
+    echo "No missing packages, and the packages are up-to-date."
+  else
+    echo "Installing missing packages: $new_list."
+    sudo apt-get install ${do_quietly-} ${new_list}
+  fi
+  echo
+else
+  # An apt-get exit status of 100 indicates that a real error has occurred.
+
+  # I am intentionally leaving out the '"'s around new_list_cmd,
+  # as this makes it easier to cut and paste the output
+  echo "The following command failed: " ${new_list_cmd}
+  echo
+  echo "It produces the following output:"
+  yes n | $new_list_cmd || true
+  echo
+  echo "You will have to install the above packages yourself."
+  echo
+  exit 100
+fi
+
+# Install the Chrome OS default fonts. This must go after running
+# apt-get, since install-chromeos-fonts depends on curl.
+if test "$do_inst_chromeos_fonts" != "0"; then
+  echo
+  echo "Installing Chrome OS fonts."
+  dir=`echo $0 | sed -r -e 's/\/[^/]+$//'`
+  if ! sudo $dir/linux/install-chromeos-fonts.py; then
+    echo "ERROR: The installation of the Chrome OS default fonts failed."
+    if [ `stat -f -c %T $dir` == "nfs" ]; then
+      echo "The reason is that your repo is installed on a remote file system."
+    else
+      echo "This is expected if your repo is installed on a remote file system."
+    fi
+    echo "It is recommended to install your repo on a local file system."
+    echo "You can skip the installation of the Chrome OS default founts with"
+    echo "the command line option: --no-chromeos-fonts."
+    exit 1
+  fi
+else
+  echo "Skipping installation of Chrome OS fonts."
+fi
+
+# $1 - target name
+# $2 - link name
+create_library_symlink() {
+  target=$1
+  linkname=$2
+  if [ -L $linkname ]; then
+    if [ "$(basename $(readlink $linkname))" != "$(basename $target)" ]; then
+      sudo rm $linkname
+    fi
+  fi
+  if [ ! -r $linkname ]; then
+    echo "Creating link: $linkname"
+    sudo ln -fs $target $linkname
+  fi
+}
+
+if test "$do_inst_nacl" = "1"; then
+  echo "Installing symbolic links for NaCl."
+  # naclports needs to cross build python for i386, but libssl1.0.0:i386
+  # only contains libcrypto.so.1.0.0 and not the symlink needed for
+  # linking (libcrypto.so).
+  create_library_symlink /lib/i386-linux-gnu/libcrypto.so.1.0.0 \
+      /usr/lib/i386-linux-gnu/libcrypto.so
+
+  create_library_symlink /lib/i386-linux-gnu/libssl.so.1.0.0 \
+      /usr/lib/i386-linux-gnu/libssl.so
+else
+  echo "Skipping symbolic links for NaCl."
+fi
diff --git a/build/install-chroot.sh b/build/install-chroot.sh
new file mode 100755
index 0000000..99451ed
--- /dev/null
+++ b/build/install-chroot.sh
@@ -0,0 +1,888 @@
+#!/bin/bash -e
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script installs Debian-derived distributions in a chroot environment.
+# It can for example be used to have an accurate 32bit build and test
+# environment when otherwise working on a 64bit machine.
+# N. B. it is unlikely that this script will ever work on anything other than a
+# Debian-derived system.
+
+# Older Debian based systems had both "admin" and "adm" groups, with "admin"
+# apparently being used in more places. Newer distributions have standardized
+# on just the "adm" group. Check /etc/group for the preferred name of the
+# administrator group.
+admin=$(grep '^admin:' /etc/group >&/dev/null && echo admin || echo adm)
+
+usage() {
+  echo "usage: ${0##*/} [-m mirror] [-g group,...] [-s] [-c]"
+  echo "-b dir       additional directories that should be bind mounted,"
+  echo '             or "NONE".'
+  echo "             Default: if local filesystems present, ask user for help"
+  echo "-g group,... groups that can use the chroot unauthenticated"
+  echo "             Default: '${admin}' and current user's group ('$(id -gn)')"
+  echo "-l           List all installed chroot environments"
+  echo "-m mirror    an alternate repository mirror for package downloads"
+  echo "-s           configure default deb-srcs"
+  echo "-c           always copy 64bit helper binaries to 32bit chroot"
+  echo "-h           this help message"
+}
+
+process_opts() {
+  local OPTNAME OPTIND OPTERR OPTARG
+  while getopts ":b:g:lm:sch" OPTNAME; do
+    case "$OPTNAME" in
+      b)
+        if [ "${OPTARG}" = "NONE" -a -z "${bind_mounts}" ]; then
+          bind_mounts="${OPTARG}"
+        else
+          if [ "${bind_mounts}" = "NONE" -o "${OPTARG}" = "${OPTARG#/}" -o \
+               ! -d "${OPTARG}" ]; then
+            echo "Invalid -b option(s)"
+            usage
+            exit 1
+          fi
+          bind_mounts="${bind_mounts}
+${OPTARG} ${OPTARG} none rw,bind 0 0"
+        fi
+        ;;
+      g)
+        [ -n "${OPTARG}" ] &&
+          chroot_groups="${chroot_groups}${chroot_groups:+,}${OPTARG}"
+        ;;
+      l)
+        list_all_chroots
+        exit
+        ;;
+      m)
+        if [ -n "${mirror}" ]; then
+          echo "You can only specify exactly one mirror location"
+          usage
+          exit 1
+        fi
+        mirror="$OPTARG"
+        ;;
+      s)
+        add_srcs="y"
+        ;;
+      c)
+        copy_64="y"
+        ;;
+      h)
+        usage
+        exit 0
+        ;;
+      \:)
+        echo "'-$OPTARG' needs an argument."
+        usage
+        exit 1
+        ;;
+      *)
+        echo "invalid command-line option: $OPTARG"
+        usage
+        exit 1
+        ;;
+    esac
+  done
+
+  if [ $# -ge ${OPTIND} ]; then
+    eval echo "Unexpected command line argument: \${${OPTIND}}"
+    usage
+    exit 1
+  fi
+}
+
+list_all_chroots() {
+  for i in /var/lib/chroot/*; do
+    i="${i##*/}"
+    [ "${i}" = "*" ] && continue
+    [ -x "/usr/local/bin/${i%bit}" ] || continue
+    grep -qs "^\[${i%bit}\]\$" /etc/schroot/schroot.conf || continue
+    [ -r "/etc/schroot/script-${i}" -a \
+      -r "/etc/schroot/mount-${i}" ] || continue
+    echo "${i%bit}"
+  done
+}
+
+getkey() {
+  (
+    trap 'stty echo -iuclc icanon 2>/dev/null' EXIT INT TERM QUIT HUP
+    stty -echo iuclc -icanon 2>/dev/null
+    dd count=1 bs=1 2>/dev/null
+  )
+}
+
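+# chr: print the character whose decimal ASCII code is $1 (e.g. 65 -> "A").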
+chr() {
+  printf "\\$(printf '%03o' "$1")"
+}
+
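+# ord: print the decimal ASCII code of the character $1 (e.g. "A" -> 65).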
+ord() {
+  printf '%d' $(printf '%c' "$1" | od -tu1 -An)
+}
+
+is_network_drive() {
+  stat -c %T -f "$1/" 2>/dev/null |
+    egrep -qs '^nfs|cifs|smbfs'
+}
+
+# Check that we are running as a regular user
+[ "$(id -nu)" = root ] && {
+  echo "Run this script as a regular user and provide your \"sudo\""           \
+       "password if requested" >&2
+  exit 1
+}
+
+process_opts "$@"
+
+echo "This script will help you through the process of installing a"
+echo "Debian or Ubuntu distribution in a chroot environment. You will"
+echo "have to provide your \"sudo\" password when requested."
+echo
+
+# Error handler
+trap 'exit 1' INT TERM QUIT HUP
+trap 'sudo apt-get clean; tput bel; echo; echo Failed' EXIT
+
+# Install any missing applications that this script relies on. If these packages
+# are already installed, don't force another "apt-get install". That would
+# prevent them from being auto-removed, if they ever become eligible for that.
+# And as this script only needs the packages once, there is no good reason to
+# introduce a hard dependency on things such as dchroot and debootstrap.
+dep=
+for i in dchroot debootstrap libwww-perl; do
+  [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+done
+[ -n "$dep" ] && sudo apt-get -y install $dep
+sudo apt-get -y install schroot
+
+# Create directory for chroot
+sudo mkdir -p /var/lib/chroot
+
+# Find chroot environments that can be installed with debootstrap
+targets="$(cd /usr/share/debootstrap/scripts
+           ls | grep '^[a-z]*$')"
+
+# Ask user to pick one of the available targets
+echo "The following targets are available to be installed in a chroot:"
+j=1; for i in $targets; do
+  printf '%4d: %s\n' "$j" "$i"
+  j=$(($j+1))
+done
+while :; do
+  printf "Which target would you like to install: "
+  read n
+  [ "$n" -gt 0 -a "$n" -lt "$j" ] >&/dev/null && break
+done
+j=1; for i in $targets; do
+  [ "$j" -eq "$n" ] && { distname="$i"; break; }
+  j=$(($j+1))
+done
+echo
+
+# On x86-64, ask whether the user wants to install x86-32 or x86-64
+archflag=
+arch=
+if [ "$(uname -m)" = x86_64 ]; then
+  while :; do
+    echo "You are running a 64bit kernel. This allows you to install either a"
+    printf "32bit or a 64bit chroot environment. %s"                           \
+           "Which one do you want (32, 64) "
+    read arch
+    [ "${arch}" == 32 -o "${arch}" == 64 ] && break
+  done
+  [ "${arch}" == 32 ] && archflag="--arch i386" || archflag="--arch amd64"
+  arch="${arch}bit"
+  echo
+fi
+target="${distname}${arch}"
+
+# Don't accidentally overwrite an existing installation
+[ -d /var/lib/chroot/"${target}" ] && {
+  while :; do
+    echo "This chroot already exists on your machine."
+    if schroot -l --all-sessions 2>&1 |
+       sed 's/^session://' |
+       grep -qs "^${target%bit}-"; then
+      echo "And it appears to be in active use. Terminate all programs that"
+      echo "are currently using the chroot environment and then re-run this"
+      echo "script."
+      echo "If you still get an error message, you might have stale mounts"
+      echo "that you forgot to delete. You can always clean up mounts by"
+      echo "executing \"${target%bit} -c\"."
+      exit 1
+    fi
+    echo "I can abort installation, I can overwrite the existing chroot,"
+    echo "or I can delete the old one and then exit. What would you like to"
+    printf "do (a/o/d)? "
+    read choice
+    case "${choice}" in
+      a|A) exit 1;;
+      o|O) sudo rm -rf "/var/lib/chroot/${target}"; break;;
+      d|D) sudo rm -rf "/var/lib/chroot/${target}"      \
+                       "/usr/local/bin/${target%bit}"   \
+                       "/etc/schroot/mount-${target}"   \
+                       "/etc/schroot/script-${target}"  \
+                       "/etc/schroot/${target}"
+           sudo sed -ni '/^[[]'"${target%bit}"']$/,${
+                         :1;n;/^[[]/b2;b1;:2;p;n;b2};p' \
+                       "/etc/schroot/schroot.conf"
+           trap '' INT TERM QUIT HUP
+           trap '' EXIT
+           echo "Deleted!"
+           exit 0;;
+    esac
+  done
+  echo
+}
+sudo mkdir -p /var/lib/chroot/"${target}"
+
+# Offer to include additional standard repositories for Ubuntu-based chroots.
+alt_repos=
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" && {
+  while :; do
+    echo "Would you like to add ${distname}-updates and ${distname}-security "
+    printf "to the chroot's sources.list (y/n)? "
+    read alt_repos
+    case "${alt_repos}" in
+      y|Y)
+        alt_repos="y"
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+}
+
+# Check for non-standard file system mount points and ask the user whether
+# they should be imported into the chroot environment.
+# We limit this to the first 26 mount points that match some basic heuristics,
+# because a) that allows us to enumerate choices with a single character,
+# and b) if we find more than 26 mount points, then these are probably
+# false-positives and something is very unusual about the system's
+# configuration. No need to spam the user with even more information that
+# is likely completely irrelevant.
+if [ -z "${bind_mounts}" ]; then
+  mounts="$(awk '$2 != "/" && $2 !~ "^/boot" && $2 !~ "^/home" &&
+                 $2 !~ "^/media" && $2 !~ "^/run" &&
+                 ($3 ~ "ext[2-4]" || $3 == "reiserfs" || $3 == "btrfs" ||
+                 $3 == "xfs" || $3 == "jfs" || $3 == "u?msdos" ||
+                 $3 == "v?fat" || $3 == "hfs" || $3 == "ntfs" ||
+                 $3 ~ "nfs[4-9]?" || $3 == "smbfs" || $3 == "cifs") {
+                   print $2
+                 }' /proc/mounts |
+            head -n26)"
+  if [ -n "${mounts}" ]; then
+    echo "You appear to have non-standard mount points that you"
+    echo "might want to import into the chroot environment:"
+    echo
+    sel=
+    while :; do
+      # Print a menu, listing all non-default mounts of local or network
+      # file systems.
+      j=1; for m in ${mounts}; do
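+        # Convert the 1-based index into a menu letter (1 -> A, 2 -> B, ...).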
+        c="$(printf $(printf '\\%03o' $((64+$j))))"
+        echo "$sel" | grep -qs $c &&
+          state="mounted in chroot" || state="$(tput el)"
+        printf "   $c) %-40s${state}\n" "$m"
+        j=$(($j+1))
+      done
+      # Allow user to interactively (de-)select any of the entries
+      echo
+      printf "Select mount points that you want to be included or press %s" \
+             "SPACE to continue"
+      c="$(getkey | tr a-z A-Z)"
+      [ "$c" == " " ] && { echo; echo; break; }
+      if [ -z "$c" ] ||
+         [ "$c" '<' 'A' -o $(ord "$c") -gt $((64 + $(ord "$j"))) ]; then
+          # Invalid input, ring the console bell
+          tput bel
+      else
+        # Toggle the selection for the given entry
+        if echo "$sel" | grep -qs $c; then
+          sel="$(printf "$sel" | sed "s/$c//")"
+        else
+          sel="$sel$c"
+        fi
+      fi
+      # Reposition cursor to the top of the list of entries
+      tput cuu $(($j + 1))
+      echo
+    done
+  fi
+  j=1; for m in ${mounts}; do
+    c="$(chr $(($j + 64)))"
+    if echo "$sel" | grep -qs $c; then
+      bind_mounts="${bind_mounts}$m $m none rw,bind 0 0
+"
+    fi
+    j=$(($j+1))
+  done
+fi
+
+# Remove stale entry from /etc/schroot/schroot.conf. Entries start
+# with the target name in square brackets, followed by an arbitrary
+# number of lines. The entry stops when either the end of file has
+# been reached, or when the beginning of a new target is encountered.
+# This means, we cannot easily match for a range of lines in
+# "sed". Instead, we actually have to iterate over each line and check
+# whether it is the beginning of a new entry.
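+# In short: with "-n", everything from the "[<target>]" header up to (but not
+# including) the next "[...]" header or end of file is dropped, and all other
+# lines are printed unchanged.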
+sudo sed -ni '/^[[]'"${target%bit}"']$/,${:1;n;/^[[]/b2;b1;:2;p;n;b2};p'       \
+         /etc/schroot/schroot.conf
+
+# Download base system. This takes some time
+if [ -z "${mirror}" ]; then
+ grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+   mirror="http://archive.ubuntu.com/ubuntu" ||
+   mirror="http://ftp.us.debian.org/debian"
+fi
+
+sudo ${http_proxy:+http_proxy="${http_proxy}"} debootstrap ${archflag} \
+    "${distname}" "/var/lib/chroot/${target}"  "$mirror"
+
+# Add new entry to /etc/schroot/schroot.conf
+grep -qs ubuntu.com /usr/share/debootstrap/scripts/"${distname}" &&
+  brand="Ubuntu" || brand="Debian"
+if [ -z "${chroot_groups}" ]; then
+  chroot_groups="${admin},$(id -gn)"
+fi
+
+if [ -d '/etc/schroot/default' ]; then
+  new_version=1
+  fstab="/etc/schroot/${target}/fstab"
+else
+  new_version=0
+  fstab="/etc/schroot/mount-${target}"
+fi
+
+if [ "$new_version" = "1" ]; then
+  sudo cp -ar /etc/schroot/default /etc/schroot/${target}
+
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+profile=${target}
+
+EOF
+  [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+    printf "${bind_mounts}" |
+      sudo sh -c "cat >>${fstab}"
+else
+  # Older versions of schroot wanted a "priority=" line, whereas recent
+  # versions deprecate "priority=" and warn if they see it. We don't have
+  # a good feature test, but scanning for the string "priority=" in the
+  # existing "schroot.conf" file is a good indication of what to do.
+  priority=$(grep -qs 'priority=' /etc/schroot/schroot.conf &&
+           echo 'priority=3' || :)
+  sudo sh -c 'cat >>/etc/schroot/schroot.conf' <<EOF
+[${target%bit}]
+description=${brand} ${distname} ${arch}
+type=directory
+directory=/var/lib/chroot/${target}
+users=root
+groups=${chroot_groups}
+root-groups=${chroot_groups}
+personality=linux$([ "${arch}" != 64bit ] && echo 32)
+script-config=script-${target}
+${priority}
+
+EOF
+
+  # Set up a list of mount points that is specific to this
+  # chroot environment.
+  sed '/^FSTAB=/s,"[^"]*","'"${fstab}"'",' \
+           /etc/schroot/script-defaults |
+    sudo sh -c 'cat >/etc/schroot/script-'"${target}"
+  sed '\,^/home[/[:space:]],s/\([,[:space:]]\)bind[[:space:]]/\1rbind /' \
+    /etc/schroot/mount-defaults |
+    sudo sh -c "cat > ${fstab}"
+fi
+
+# Add the extra mount points that the user told us about
+[ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] &&
+  printf "${bind_mounts}" |
+    sudo sh -c 'cat >>'"${fstab}"
+
+# If this system has a "/media" mountpoint, import it into the chroot
+# environment. Most modern distributions use this mount point to
+# automatically mount devices such as CDROMs, USB sticks, etc...
+if [ -d /media ] &&
+   ! grep -qs '^/media' "${fstab}"; then
+  echo '/media /media none rw,rbind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Share /dev/shm, /run and /run/shm.
+grep -qs '^/dev/shm' "${fstab}" ||
+  echo '/dev/shm /dev/shm none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+if [ ! -d "/var/lib/chroot/${target}/run" ] &&
+   ! grep -qs '^/run' "${fstab}"; then
+  echo '/run /run none rw,bind 0 0' |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+if ! grep -qs '^/run/shm' "${fstab}"; then
+  { [ -d /run ] && echo '/run/shm /run/shm none rw,bind 0 0' ||
+                   echo '/dev/shm /run/shm none rw,bind 0 0'; } |
+    sudo sh -c 'cat >>'"${fstab}"
+fi
+
+# Set up a special directory that changes contents depending on the target
+# that is executing.
+d="$(readlink -f "${HOME}/chroot" 2>/dev/null || echo "${HOME}/chroot")"
+s="${d}/.${target}"
+echo "${s} ${d} none rw,bind 0 0" |
+  sudo sh -c 'cat >>'"${target}"
+mkdir -p "${s}"
+
+# Install a helper script to launch commands in the chroot
+sudo sh -c 'cat >/usr/local/bin/'"${target%bit}" <<'EOF'
+#!/bin/bash
+
+chroot="${0##*/}"
+
+wrap() {
+  # Word-wrap the text passed-in on stdin. Optionally, on continuation lines
+  # insert the same number of spaces as the number of characters in the
+  # parameter(s) passed to this function.
+  # If the "fold" program cannot be found, or if the actual width of the
+  # terminal cannot be determined, this function doesn't attempt to do any
+  # wrapping.
+  local f="$(type -P fold)"
+  [ -z "${f}" ] && { cat; return; }
+  local c="$(stty -a </dev/tty 2>/dev/null |
+             sed 's/.*columns[[:space:]]*\([0-9]*\).*/\1/;t;d')"
+  [ -z "${c}" ] && { cat; return; }
+  local i="$(echo "$*"|sed 's/./ /g')"
+  local j="$(printf %s "${i}"|wc -c)"
+  if [ "${c}" -gt "${j}" ]; then
+    dd bs=1 count="${j}" 2>/dev/null
+    "${f}" -sw "$((${c}-${j}))" | sed '2,$s/^/'"${i}"'/'
+  else
+    "${f}" -sw "${c}"
+  fi
+}
+
+help() {
+  echo "Usage ${0##*/} [-h|--help] [-c|--clean] [-C|--clean-all] [-l|--list] [--] args" | wrap "Usage ${0##*/} "
+  echo "  help:      print this message"                                                | wrap "             "
+  echo "  list:      list all known chroot environments"                                | wrap "             "
+  echo "  clean:     remove all old chroot sessions for \"${chroot}\""                  | wrap "             "
+  echo "  clean-all: remove all old chroot sessions for all environments"               | wrap "             "
+  exit 0
+}
+
+clean() {
+  local s t rc
+  rc=0
+  for s in $(schroot -l --all-sessions); do
+    if [ -n "$1" ]; then
+      t="${s#session:}"
+      [ "${t#${chroot}-}" == "${t}" ] && continue
+    fi
+    if ls -l /proc/*/{cwd,fd} 2>/dev/null |
+       fgrep -qs "/var/lib/schroot/mount/${t}"; then
+      echo "Session \"${t}\" still has active users, not cleaning up" | wrap
+      rc=1
+      continue
+    fi
+    sudo schroot -c "${s}" -e || rc=1
+  done
+  exit ${rc}
+}
+
+list() {
+  for e in $(schroot -l); do
+    e="${e#chroot:}"
+    [ -x "/usr/local/bin/${e}" ] || continue
+    if schroot -l --all-sessions 2>/dev/null |
+       sed 's/^session://' |
+       grep -qs "^${e}-"; then
+      echo "${e} is currently active"
+    else
+      echo "${e}"
+    fi
+  done
+  exit 0
+}
+
+while [ "$#" -ne 0 ]; do
+  case "$1" in
+    --)             shift; break;;
+    -h|--help)      shift; help;;
+    -l|--list)      shift; list;;
+    -c|--clean)     shift; clean "${chroot}";;
+    -C|--clean-all) shift; clean;;
+    *)              break;;
+  esac
+done
+
+# Start a new chroot session and keep track of the session id. We inject this
+# id into all processes that run inside the chroot. Unless they go out of their
+# way to clear their environment, we can then later identify our child and
+# grand-child processes by scanning their environment.
+session="$(schroot -c "${chroot}" -b)"
+export CHROOT_SESSION_ID="${session}"
+
+# Set GOMA_TMP_DIR for better handling of goma inside chroot.
+export GOMA_TMP_DIR="/tmp/goma_tmp_$CHROOT_SESSION_ID"
+mkdir -p "$GOMA_TMP_DIR"
+
+if [ $# -eq 0 ]; then
+  # Run an interactive shell session
+  schroot -c "${session}" -r -p
+else
+  # Run a command inside of the chroot environment
+  p="$1"; shift
+  schroot -c "${session}" -r -p "$p" -- "$@"
+fi
+rc=$?
+
+# Compute the inode of the root directory inside of the chroot environment.
+i=$(schroot -c "${session}" -r -p ls -- -id /proc/self/root/. |
+     awk '{ print $1 }') 2>/dev/null
+other_pids=
+while [ -n "$i" ]; do
+  # Identify processes by the inode number of their root directory. Then
+  # remove all processes that we know belong to other sessions. We use
+  # "sort | uniq -u" to do what amounts to a "set substraction operation".
+  pids=$({ ls -id1 /proc/*/root/. 2>/dev/null |
+         sed -e 's,^[^0-9]*'$i'.*/\([1-9][0-9]*\)/.*$,\1,
+                 t
+                 d';
+         echo "${other_pids}";
+         echo "${other_pids}"; } | sort | uniq -u) >/dev/null 2>&1
+  # Kill all processes that are still left running in the session. This is
+  # typically an assortment of daemon processes that were started
+  # automatically. They result in us being unable to tear down the session
+  # cleanly.
+  [ -z "${pids}" ] && break
+  for j in $pids; do
+    # Unfortunately, the way that schroot sets up sessions has the
+    # side-effect of being unable to tell one session apart from another.
+    # This can result in us attempting to kill processes in other sessions.
+    # We make a best-effort to avoid doing so.
+    k="$( ( xargs -0 -n1 </proc/$j/environ ) 2>/dev/null |
+         sed 's/^CHROOT_SESSION_ID=/x/;t1;d;:1;q')"
+    if [ -n "${k}" -a "${k#x}" != "${session}" ]; then
+      other_pids="${other_pids}
+${j}"
+      continue
+    fi
+    kill -9 $j
+  done
+done
+# End the chroot session. This should clean up all temporary files. But if we
+# earlier failed to terminate all (daemon) processes inside of the session,
+# deleting the session could fail. When that happens, the user has to manually
+# clean up the stale files by invoking us with "--clean" after having killed
+# all running processes.
+schroot -c "${session}" -e
+# Since no goma processes are running, we can remove goma directory.
+rm -rf "$GOMA_TMP_DIR"
+exit $rc
+EOF
+sudo chown root:root /usr/local/bin/"${target%bit}"
+sudo chmod 755 /usr/local/bin/"${target%bit}"
+
+# Add the standard Ubuntu update repositories if requested.
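+# (The sed below expands each "deb ... main" line into three: the original, a
+# copy pointing at the "-security" pocket and a copy pointing at "-updates".)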
+[ "${alt_repos}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb .* [^ -]\+ main$/p
+             s/^\(deb .* [^ -]\+\) main/\1-security main/
+             p
+             t1
+             d
+             :1;s/-security main/-updates main/
+             t
+             d' "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add a few more repositories to the chroot
+[ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i 's/ main$/ main restricted universe multiverse/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Add the Ubuntu "partner" repository, if available
+if [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+   HEAD "http://archive.canonical.com/ubuntu/dists/${distname}/partner" \
+   >&/dev/null; then
+  sudo sh -c '
+    echo "deb http://archive.canonical.com/ubuntu" \
+         "'"${distname}"' partner" \
+      >>"/var/lib/chroot/'"${target}"'/etc/apt/sources.list"'
+fi
+
+# Add source repositories, if the user requested we do so
+[ "${add_srcs}" = "y" -a \
+  -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] &&
+sudo sed -i '/^deb[^-]/p
+             s/^deb\([^-]\)/deb-src\1/' \
+         "/var/lib/chroot/${target}/etc/apt/sources.list"
+
+# Set apt proxy if host has set http_proxy
+if [ -n "${http_proxy}" ]; then
+  sudo sh -c '
+    echo "Acquire::http::proxy \"'"${http_proxy}"'\";" \
+        >>"/var/lib/chroot/'"${target}"'/etc/apt/apt.conf"'
+fi
+
+# Update packages
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  apt-get update; apt-get -y dist-upgrade' || :
+
+# Install a couple of missing packages
+for i in debian-keyring ubuntu-keyring locales sudo; do
+  [ -d "/var/lib/chroot/${target}/usr/share/doc/$i" ] ||
+    sudo "/usr/local/bin/${target%bit}" apt-get -y install "$i" || :
+done
+
+# Configure locales
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  l='"${LANG:-en_US}"'; l="${l%%.*}"
+  [ -r /etc/locale.gen ] &&
+    sed -i "s/^# \($l\)/\1/" /etc/locale.gen
+  locale-gen $LANG en_US en_US.UTF-8' || :
+
+# Enable multi-arch support, if available
+sudo "/usr/local/bin/${target%bit}" dpkg --assert-multi-arch >&/dev/null &&
+  [ -r "/var/lib/chroot/${target}/etc/apt/sources.list" ] && {
+  sudo sed -i 's/ / [arch=amd64,i386] /' \
+              "/var/lib/chroot/${target}/etc/apt/sources.list"
+  [ -d /var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/ ] &&
+  sudo "/usr/local/bin/${target%bit}" dpkg --add-architecture \
+      $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) >&/dev/null ||
+    echo foreign-architecture \
+        $([ "${arch}" = "32bit" ] && echo amd64 || echo i386) |
+      sudo sh -c \
+        "cat >'/var/lib/chroot/${target}/etc/dpkg/dpkg.cfg.d/multiarch'"
+}
+
+# Configure "sudo" package
+sudo "/usr/local/bin/${target%bit}" /bin/sh -c '
+  egrep -qs '"'^$(id -nu) '"' /etc/sudoers ||
+  echo '"'$(id -nu) ALL=(ALL) ALL'"' >>/etc/sudoers'
+
+# Install a few more commonly used packages
+sudo "/usr/local/bin/${target%bit}" apt-get -y install                         \
+  autoconf automake1.9 dpkg-dev g++-multilib gcc-multilib gdb less libtool     \
+  lsof strace
+
+# If running a 32bit environment on a 64bit machine, install a few binaries
+# as 64bit. This is only done automatically if the chroot distro is the same as
+# the host, otherwise there might be incompatibilities in build settings or
+# runtime dependencies. The user can force it with the '-c' flag.
+host_distro=$(grep -s DISTRIB_CODENAME /etc/lsb-release | \
+  cut -d "=" -f 2)
+if [ "${copy_64}" = "y" -o \
+    "${host_distro}" = "${distname}" -a "${arch}" = 32bit ] && \
+    file /bin/bash 2>/dev/null | grep -q x86-64; then
+  readlinepkg=$(sudo "/usr/local/bin/${target%bit}" sh -c \
+    'apt-cache search "lib64readline.\$" | sort | tail -n 1 | cut -d " " -f 1')
+  sudo "/usr/local/bin/${target%bit}" apt-get -y install                       \
+    lib64expat1 lib64ncurses5 ${readlinepkg} lib64z1 lib64stdc++6
+  dep=
+  for i in binutils gdb; do
+    [ -d /usr/share/doc/"$i" ] || dep="$dep $i"
+  done
+  [ -n "$dep" ] && sudo apt-get -y install $dep
+  sudo mkdir -p "/var/lib/chroot/${target}/usr/local/lib/amd64"
+  for i in libbfd libpython; do
+    lib="$({ ldd /usr/bin/ld; ldd /usr/bin/gdb; } |
+           grep -s "$i" | awk '{ print $3 }')"
+    if [ -n "$lib" -a -r "$lib" ]; then
+      sudo cp "$lib" "/var/lib/chroot/${target}/usr/local/lib/amd64"
+    fi
+  done
+  for lib in libssl libcrypt; do
+    for path in /usr/lib /usr/lib/x86_64-linux-gnu; do
+      sudo cp $path/$lib* \
+              "/var/lib/chroot/${target}/usr/local/lib/amd64/" >&/dev/null || :
+    done
+  done
+  for i in gdb ld; do
+    sudo cp /usr/bin/$i "/var/lib/chroot/${target}/usr/local/lib/amd64/"
+    sudo sh -c "cat >'/var/lib/chroot/${target}/usr/local/bin/$i'" <<EOF
+#!/bin/sh
+exec /lib64/ld-linux-x86-64.so.2 --library-path /usr/local/lib/amd64 \
+  /usr/local/lib/amd64/$i "\$@"
+EOF
+    sudo chmod 755 "/var/lib/chroot/${target}/usr/local/bin/$i"
+  done
+fi
+
+
+# If the install-build-deps.sh script can be found, offer to run it now
+script="$(dirname $(readlink -f "$0"))/install-build-deps.sh"
+if [ -x "${script}" ]; then
+  while :; do
+    echo
+    echo "If you plan on building Chrome inside of the new chroot environment,"
+    echo "you now have to install the build dependencies. Do you want me to"
+    printf "start the script that does this for you (y/n)? "
+    read install_deps
+    case "${install_deps}" in
+      y|Y)
+        echo
+        # We prefer running the script in-place, but this might not be
+        # possible, if it lives on a network filesystem that denies
+        # access to root.
+        tmp_script=
+        if ! sudo /usr/local/bin/"${target%bit}" \
+            sh -c "[ -x '${script}' ]" >&/dev/null; then
+          tmp_script="/tmp/${script##*/}"
+          cp "${script}" "${tmp_script}"
+        fi
+        # Some distributions automatically start an instance of the system-
+        # wide dbus daemon, cron daemon or of the logging daemon, when
+        # installing the Chrome build dependencies. This prevents the chroot
+        # session from being closed.  So, we always try to shut down any running
+        # instance of dbus and rsyslog.
+        sudo /usr/local/bin/"${target%bit}" sh -c "${script};
+              rc=$?;
+              /etc/init.d/cron stop >/dev/null 2>&1 || :;
+              /etc/init.d/rsyslog stop >/dev/null 2>&1 || :;
+              /etc/init.d/dbus stop >/dev/null 2>&1 || :;
+              exit $rc"
+        rc=$?
+        [ -n "${tmp_script}" ] && rm -f "${tmp_script}"
+        [ $rc -ne 0 ] && exit $rc
+        break
+      ;;
+      n|N)
+        break
+      ;;
+    esac
+  done
+  echo
+fi
+
+# Check whether ~/chroot is on a (slow) network file system and offer to
+# relocate it. Also offer relocation, if the user appears to have multiple
+# spindles (as indicated by "${bind_mounts}" being non-empty).
+# We only offer this option, if it doesn't look as if a chroot environment
+# is currently active. Otherwise, relocation is unlikely to work and it
+# can be difficult for the user to recover from the failed attempt to relocate
+# the ~/chroot directory.
+# We don't aim to solve this problem for every configuration,
+# but try to help with the common cases. For more advanced configuration
+# options, the user can always manually adjust things.
+mkdir -p "${HOME}/chroot/"
+if [ ! -h "${HOME}/chroot" ] &&
+   ! egrep -qs '^[^[:space:]]*/chroot' /etc/fstab &&
+   { [ -n "${bind_mounts}" -a "${bind_mounts}" != "NONE" ] ||
+     is_network_drive "${HOME}/chroot"; } &&
+   ! egrep -qs '/var/lib/[^/]*chroot/.*/chroot' /proc/mounts; then
+  echo "${HOME}/chroot is currently located on the same device as your"
+  echo "home directory."
+  echo "This might not be what you want. Do you want me to move it somewhere"
+  echo "else?"
+  # If the computer has multiple spindles, many users configure all or part of
+  # the secondary hard disk to be writable by the primary user of this machine.
+  # Make some reasonable effort to detect this type of configuration and
+  # then offer a good location for where to put the ~/chroot directory.
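+  # (The search below walks each configured bind mount and up to two levels of
+  # sub-directories beneath it, looking for a writable, non-network location
+  # that either has no "chroot" directory yet or has a writable one.)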
+  suggest=
+  for i in $(echo "${bind_mounts}"|cut -d ' ' -f 1); do
+    if [ -d "$i" -a -w "$i" -a \( ! -a "$i/chroot" -o -w "$i/chroot/." \) ] &&
+       ! is_network_drive "$i"; then
+      suggest="$i"
+    else
+      for j in "$i/"*; do
+        if [ -d "$j" -a -w "$j" -a \
+             \( ! -a "$j/chroot" -o -w "$j/chroot/." \) ] &&
+           ! is_network_drive "$j"; then
+          suggest="$j"
+        else
+          for k in "$j/"*; do
+            if [ -d "$k" -a -w "$k" -a \
+                 \( ! -a "$k/chroot" -o -w "$k/chroot/." \) ] &&
+               ! is_network_drive "$k"; then
+              suggest="$k"
+              break
+            fi
+          done
+        fi
+        [ -n "${suggest}" ] && break
+      done
+    fi
+    [ -n "${suggest}" ] && break
+  done
+  def_suggest="${HOME}"
+  if [ -n "${suggest}" ]; then
+    # For home directories that reside on network drives, make our suggestion
+    # the default option. For home directories that reside on a local drive,
+    # require that the user manually enters the new location.
+    if is_network_drive "${HOME}"; then
+      def_suggest="${suggest}"
+    else
+      echo "A good location would probably be in \"${suggest}\""
+    fi
+  fi
+  while :; do
+    printf "Physical location [${def_suggest}]: "
+    read dir
+    [ -z "${dir}" ] && dir="${def_suggest}"
+    [ "${dir%%/}" == "${HOME%%/}" ] && break
+    if ! [ -d "${dir}" -a -w "${dir}" ] ||
+       [ -a "${dir}/chroot" -a ! -w "${dir}/chroot/." ]; then
+      echo "Cannot write to ${dir}/chroot. Please try again"
+    else
+      mv "${HOME}/chroot" "${dir}/chroot"
+      ln -s "${dir}/chroot" "${HOME}/chroot"
+      for i in $(list_all_chroots); do
+        sudo "$i" mkdir -p "${dir}/chroot"
+      done
+      sudo sed -i "s,${HOME}/chroot,${dir}/chroot,g" /etc/schroot/mount-*
+      break
+    fi
+  done
+fi
+
+# Clean up package files
+sudo schroot -c "${target%bit}" -p -- apt-get clean
+sudo apt-get clean
+
+trap '' INT TERM QUIT HUP
+trap '' EXIT
+
+# Let the user know what we did
+cat <<EOF
+
+
+Successfully installed ${distname} ${arch}
+
+You can run programs inside of the chroot by invoking the
+"/usr/local/bin/${target%bit}" command.
+
+This command can be used with arguments, in order to just run a single
+program inside of the chroot environment (e.g. "${target%bit} make chrome")
+or without arguments, in order to run an interactive shell session inside
+of the chroot environment.
+
+If you need to run things as "root", you can use "sudo" (e.g. try
+"sudo ${target%bit} apt-get update").
+
+Your home directory is shared between the host and the chroot. But I
+configured "${HOME}/chroot" to be private to the chroot environment.
+You can use it for files that need to differ between environments. This
+would be a good place to store binaries that you have built from your
+source files.
+
+For Chrome, this probably means you want to make your "out" directory a
+symbolic link that points somewhere inside of "${HOME}/chroot".
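+(For example, you could create "${HOME}/chroot/chrome_out" and symlink your
+checkout's "out" directory to it; the exact location under "${HOME}/chroot"
+is up to you.)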
+
+You still need to run "gclient runhooks" whenever you switch from building
+outside of the chroot to inside of the chroot. But you will find that you
+don't have to repeatedly erase and then completely rebuild all your object
+and binary files.
+
+EOF
diff --git a/build/internal/README.chromium b/build/internal/README.chromium
new file mode 100644
index 0000000..4624830
--- /dev/null
+++ b/build/internal/README.chromium
@@ -0,0 +1,24 @@
+Internal property sheets:
+  essential.vsprops
+    Contains the common settings used throughout the projects. Is included by either ..\debug.vsprops or ..\release.vsprops, so in general, it is not included directly.
+
+  release_defaults.vsprops
+    Included by ..\release.vsprops. Its settings are overridden by release_impl$(CHROME_BUILD_TYPE).vsprops. Uses the default VS setting which is "Maximize Speed". Results in relatively fast build with reasonable optimization level but without whole program optimization to reduce build time.
+
+  release_impl.vsprops
+    Included by ..\release.vsprops by default when CHROME_BUILD_TYPE is undefined. Includes release_defaults.vsprops.
+
+  release_impl_checksenabled.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_checksenabled. Matches what release_defaults.vsprops does, but doesn't actually inherit from it as we couldn't quite get that working. The only difference is that _DEBUG is set instead of NDEBUG. Used for keeping debug checks enabled with a build that is fast enough to dogfood with.
+
+  release_impl_official.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_official. Includes release_defaults.vsprops. Enables Whole Program Optimizations (WPO), which doubles the build time. Results in much more optimized build. Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_instrument.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_instrument. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) instrumentation (first pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_pgo_optimize.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_pgo_optimize. Includes release_defaults.vsprops. Enables Profile Guided Optimization (PGO) optimization (second pass). Uses "Full Optimization" and "Flavor small code".
+
+  release_impl_purify.vsprops
+    Included by ..\release.vsprops when CHROME_BUILD_TYPE=_purify. Includes release_defaults.vsprops. Disables optimizations. Used with Purify to test without debug tools and without optimization; i.e. NDEBUG is defined but the compiler doesn't optimize the binary.
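+
+Include relationships, as described above:
+  ..\debug.vsprops and ..\release.vsprops -> essential.vsprops
+  ..\release.vsprops -> release_defaults.vsprops
+  ..\release.vsprops -> release_impl$(CHROME_BUILD_TYPE).vsprops, most of which
+    also include release_defaults.vsprops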
diff --git a/build/internal/release_defaults.gypi b/build/internal/release_defaults.gypi
new file mode 100644
index 0000000..1bf674a
--- /dev/null
+++ b/build/internal/release_defaults.gypi
@@ -0,0 +1,18 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'StringPooling': 'true',
+    },
+    'VCLinkerTool': {
+      # No incremental linking.
+      'LinkIncremental': '1',
+      # Eliminate Unreferenced Data (/OPT:REF).
+      'OptimizeReferences': '2',
+      # Folding on (/OPT:ICF).
+      'EnableCOMDATFolding': '2',
+    },
+  },
+}
diff --git a/build/internal/release_impl.gypi b/build/internal/release_impl.gypi
new file mode 100644
index 0000000..5ac0e09
--- /dev/null
+++ b/build/internal/release_impl.gypi
@@ -0,0 +1,17 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+  },
+}
diff --git a/build/internal/release_impl_official.gypi b/build/internal/release_impl_official.gypi
new file mode 100644
index 0000000..36d5d78
--- /dev/null
+++ b/build/internal/release_impl_official.gypi
@@ -0,0 +1,41 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'includes': ['release_defaults.gypi'],
+  'defines': ['OFFICIAL_BUILD'],
+  'msvs_settings': {
+    'VCCLCompilerTool': {
+      'InlineFunctionExpansion': '2',
+      'EnableIntrinsicFunctions': 'true',
+      'OmitFramePointers': 'false',
+      # The above is not sufficient (http://crbug.com/106711): it
+      # simply eliminates an explicit "/Oy", but both /O2 and /Ox
+      # perform FPO regardless, so we must explicitly disable.
+      # We still want the false setting above to avoid having
+      # "/Oy /Oy-" and warnings about overriding.
+      'AdditionalOptions': ['/Oy-'],
+    },
+    'VCLibrarianTool': {
+      'AdditionalOptions': [
+        '/ltcg',
+        '/expectedoutputsize:120000000'
+      ],
+    },
+    'VCLinkerTool': {
+      'AdditionalOptions': [
+        '/time',
+        # This may reduce memory fragmentation during linking.
+        # The expected size is 40*1024*1024, which gives us about 10M of
+        # headroom as of Dec 16, 2011.
+        '/expectedoutputsize:41943040',
+      ],
+      # The /PROFILE flag causes the linker to add a "FIXUP" debug stream to
+      # the generated PDB. According to MSDN documentation, this flag is only
+      # available (or perhaps supported) in the Enterprise (team development)
+      # version of Visual Studio. If this blocks your official build, simply
+      # comment out this line, then re-run "gclient runhooks".
+      'Profile': 'true',
+    },
+  },
+}
diff --git a/build/inverse_depth.py b/build/inverse_depth.py
new file mode 100755
index 0000000..ce7a6ab
--- /dev/null
+++ b/build/inverse_depth.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+
+def DoMain(argv):
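+  """Returns the path from the |depth| directory back to the current directory.
+
+  For example, if the current directory is <src>/build/ios and |depth| is
+  "../..", this returns "build/ios".
+  """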
+  depth = argv[0]
+  return os.path.relpath(os.getcwd(), os.path.abspath(depth))
+
+
+def main(argv):
+  if len(argv) < 2:
+    print "USAGE: inverse_depth.py depth"
+    return 1
+  print DoMain(argv[1:])
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/ios/OWNERS b/build/ios/OWNERS
new file mode 100644
index 0000000..4caf405
--- /dev/null
+++ b/build/ios/OWNERS
@@ -0,0 +1,4 @@
+rohitrao@chromium.org
+stuartmorgan@chromium.org
+
+per-file grit_whitelist.txt=*
diff --git a/build/ios/PRESUBMIT.py b/build/ios/PRESUBMIT.py
new file mode 100644
index 0000000..bbd17b3
--- /dev/null
+++ b/build/ios/PRESUBMIT.py
@@ -0,0 +1,42 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+
+"""Chromium presubmit script for src/tools/ios.
+
+See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
+for more details on the presubmit API built into depot_tools.
+"""
+
+WHITELIST_FILE = 'build/ios/grit_whitelist.txt'
+
+def _CheckWhitelistSorted(input_api, output_api):
+  for path in input_api.LocalPaths():
+    if WHITELIST_FILE == path:
+      lines = open(os.path.join('../..', WHITELIST_FILE)).readlines()
+      i = 0
+      while i < len(lines) - 1 and lines[i] <= lines[i + 1]:
+        i += 1
+      if i < len(lines) - 1:
+        return [output_api.PresubmitError(
+            'The file ' + WHITELIST_FILE + ' must be sorted.  ' +
+            'First offending line: #' + str(i + 2))]
+  return []
+
+def _CommonChecks(input_api, output_api):
+  """Checks common to both upload and commit."""
+  results = []
+  results.extend(_CheckWhitelistSorted(input_api, output_api))
+  return results
+
+def CheckChangeOnUpload(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
+
+def CheckChangeOnCommit(input_api, output_api):
+  results = []
+  results.extend(_CommonChecks(input_api, output_api))
+  return results
diff --git a/build/ios/chrome_ios.croc b/build/ios/chrome_ios.croc
new file mode 100644
index 0000000..938a2e9
--- /dev/null
+++ b/build/ios/chrome_ios.croc
@@ -0,0 +1,71 @@
+# -*- python -*-
+# Crocodile config file for Chromium iOS.
+#
+# Note that Chromium iOS also uses the config file at src/build/common.croc.
+#
+# See src/tools/code_coverage/example.croc for more info on config files.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Exclude everything to negate whatever is in src/build/common.croc
+    {
+      'regexp' : '.*',
+      'include' : 0,
+    },
+
+    # Include all directories (but not the files in the directories).
+    # This is a workaround for how croc.py walks the directory tree. See the
+    # TODO in the AddFiles method of src/tools/code_coverage/croc.py
+    {
+      'regexp' : '.*/$',
+      'include' : 1,
+    },
+
+    # Include any file with an 'ios' directory in the path.
+    {
+      'regexp' : '.*/ios/.*',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios.
+    {
+      'regexp' : '.*_ios\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+    },
+
+    # Include any file that ends with _ios_unittest (and label it a test).
+    {
+      'regexp' : '.*_ios_unittest\\.(c|cc|m|mm)$',
+      'include' : 1,
+      'add_if_missing' : 1,
+      'group' : 'test',
+    },
+
+    # Don't scan for executable lines in uninstrumented header files
+    {
+      'regexp' : '.*\\.(h|hpp)$',
+      'add_if_missing' : 0,
+    },
+
+    # Don't measure coverage of perftests.
+    {
+      'regexp' : '.*perftest\\.(c|cc|m|mm)$',
+      'include' : 0,
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/build/ios/clean_env.py b/build/ios/clean_env.py
new file mode 100755
index 0000000..548e2b9
--- /dev/null
+++ b/build/ios/clean_env.py
@@ -0,0 +1,77 @@
+#!/usr/bin/python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import sys
+
+def Main(argv):
+  """This is like 'env -i', but it uses a whitelist of env variables to allow
+  through to the command being run.  It attempts to strip off Xcode-added
+  values from PATH.
+  """
+  # Note: An attempt was made to do something like: env -i bash -lc '[command]'
+  # but that fails to set the things set by login (USER, etc.), so instead
+  # the only approach that seems to work is to have a whitelist.
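+  # Illustrative invocation (not taken from the original script):
+  #   clean_env.py ADD_TO_PATH=/opt/local/bin CC=clang xcodebuild -list
+  # runs "xcodebuild -list" with the scrubbed environment, CC=clang added, and
+  # /opt/local/bin prepended to the filtered PATH.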
+  env_key_whitelist = (
+    'HOME',
+    'LOGNAME',
+    # 'PATH' added below (but filtered).
+    'PWD',
+    'SHELL',
+    'TEMP',
+    'TMPDIR',
+    'USER'
+  )
+
+  # Need something to run.
+  # TODO(lliabraa): Make this output a usage string and exit (here and below).
+  assert(len(argv) > 0)
+
+  add_to_path = []
+  first_entry = argv[0]
+  if first_entry.startswith('ADD_TO_PATH='):
+    argv = argv[1:]
+    add_to_path = first_entry.replace('ADD_TO_PATH=', '', 1).split(':')
+
+  # Still need something to run.
+  assert(len(argv) > 0)
+
+  clean_env = {}
+
+  # Pull over the whitelisted keys.
+  for key in env_key_whitelist:
+    val = os.environ.get(key, None)
+    if not val is None:
+      clean_env[key] = val
+
+  # Collect the developer dir as set via Xcode, defaulting it.
+  dev_prefix = os.environ.get('DEVELOPER_DIR', '/Developer/')
+  if dev_prefix[-1:] != '/':
+    dev_prefix += '/'
+
+  # Now pull in PATH, but remove anything Xcode might have added.
+  initial_path = os.environ.get('PATH', '')
+  filtered_chunks = \
+      [x for x in initial_path.split(':') if not x.startswith(dev_prefix)]
+  if filtered_chunks:
+    clean_env['PATH'] = ':'.join(add_to_path + filtered_chunks)
+
+  # Add any KEY=VALUE args before the command to the cleaned environment.
+  args = argv[:]
+  while '=' in args[0]:
+    (key, val) = args[0].split('=', 1)
+    clean_env[key] = val
+    args = args[1:]
+
+  # Still need something to run.
+  assert(len(args) > 0)
+
+  # Off it goes...
+  os.execvpe(args[0], args, clean_env)
+  # Should never get here, so return a distinctive, non-zero status code.
+  return 66
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
diff --git a/build/ios/coverage.gypi b/build/ios/coverage.gypi
new file mode 100644
index 0000000..e822089
--- /dev/null
+++ b/build/ios/coverage.gypi
@@ -0,0 +1,32 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
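+# The 'enable_coverage%' default below can be overridden when gyp runs, e.g.
+# via GYP_DEFINES="enable_coverage=1" (assumed usage; the '%' suffix marks a
+# gyp variable that callers may set externally).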
+{
+  'variables': {
+    'enable_coverage%': 0,
+  },
+  'conditions': [
+    ['enable_coverage', {
+        'target_defaults': {
+          'defines': [
+            'ENABLE_TEST_CODE_COVERAGE=1'
+          ],
+          'link_settings': {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-fprofile-arcs',
+              ],
+            },
+          },
+          'xcode_settings': {
+            'OTHER_CFLAGS': [
+              '-fprofile-arcs',
+              '-ftest-coverage',
+            ],
+          },
+        },
+    }],
+  ],
+}
+
diff --git a/build/ios/grit_whitelist.txt b/build/ios/grit_whitelist.txt
new file mode 100644
index 0000000..a976daf
--- /dev/null
+++ b/build/ios/grit_whitelist.txt
@@ -0,0 +1,1155 @@
+IDR_ABOUT_DOM_DISTILLER_CSS
+IDR_ABOUT_DOM_DISTILLER_HTML
+IDR_ABOUT_DOM_DISTILLER_JS
+IDR_ABOUT_STATS_HTML
+IDR_ABOUT_STATS_JS
+IDR_ABOUT_VERSION_CSS
+IDR_ABOUT_VERSION_HTML
+IDR_ABOUT_VERSION_JS
+IDR_CONTEXTUAL_SEARCH_PROMO_HTML
+IDR_CONTROLLED_SETTING_MANDATORY
+IDR_CRASHES_HTML
+IDR_CRASHES_JS
+IDR_CREDITS_HTML
+IDR_CREDITS_JS
+IDR_CREDIT_CARD_CVC_HINT
+IDR_CREDIT_CARD_CVC_HINT_AMEX
+IDR_DATA_REDUCTION_PROXY_INTERSTITIAL_HTML
+IDR_DEFAULT_FAVICON
+IDR_DEFAULT_FAVICON_32
+IDR_DEFAULT_FAVICON_64
+IDR_DIR_HEADER_HTML
+IDR_DISTILLABLE_PAGE_SERIALIZED_MODEL
+IDR_DISTILLER_CSS
+IDR_DISTILLER_IOS_CSS
+IDR_DISTILLER_JS
+IDR_DOM_DISTILLER_VIEWER_HTML
+IDR_DOM_DISTILLER_VIEWER_JS
+IDR_EXTRACT_PAGE_FEATURES_JS
+IDR_FLAGS_FAVICON
+IDR_FLAGS_HTML
+IDR_FLAGS_JS
+IDR_GCM_INTERNALS_CSS
+IDR_GCM_INTERNALS_HTML
+IDR_GCM_INTERNALS_JS
+IDR_HISTORY_FAVICON
+IDR_HISTORY_HTML
+IDR_HISTORY_JS
+IDR_INCOGNITO_TAB_HTML
+IDR_INFOBAR_AUTOFILL_CC
+IDR_INFOBAR_AUTOLOGIN
+IDR_INFOBAR_RESTORE_SESSION
+IDR_INFOBAR_SAVE_PASSWORD
+IDR_INFOBAR_TRANSLATE_IOS
+IDR_INFOBAR_WARNING
+IDR_IS_DISTILLABLE_JS
+IDR_LOCATION_BAR_HTTP
+IDR_NET_ERROR_HTML
+IDR_NET_EXPORT_HTML
+IDR_NET_EXPORT_JS
+IDR_NET_INTERNALS_INDEX_HTML
+IDR_NET_INTERNALS_INDEX_JS
+IDR_OMAHA_HTML
+IDR_OMAHA_JS
+IDR_OMNIBOX_CALCULATOR
+IDR_OMNIBOX_CLEAR_IOS
+IDR_OMNIBOX_CLEAR_OTR_IOS
+IDR_OMNIBOX_CLEAR_OTR_PRESSED_IOS
+IDR_OMNIBOX_CLEAR_PRESSED_IOS
+IDR_OMNIBOX_EXTENSION_APP
+IDR_OMNIBOX_HISTORY
+IDR_OMNIBOX_HISTORY_INCOGNITO
+IDR_OMNIBOX_HTTP
+IDR_OMNIBOX_HTTPS_INVALID
+IDR_OMNIBOX_HTTPS_POLICY_WARNING
+IDR_OMNIBOX_HTTPS_VALID
+IDR_OMNIBOX_HTTPS_WARNING
+IDR_OMNIBOX_HTTP_INCOGNITO
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND_HIGHLIGHTED
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND_INCOGNITO
+IDR_OMNIBOX_KEYBOARD_VIEW_APPEND_INCOGNITO_HIGHLIGHTED
+IDR_OMNIBOX_SEARCH
+IDR_OMNIBOX_SEARCH_INCOGNITO
+IDR_OMNIBOX_SEARCH_SECURED
+IDR_OMNIBOX_STAR
+IDR_OMNIBOX_STAR_INCOGNITO
+IDR_OTHER_DEVICES_JS
+IDR_PAGEINFO_BAD
+IDR_PAGEINFO_GOOD
+IDR_PAGEINFO_INFO
+IDR_PAGEINFO_WARNING_MAJOR
+IDR_PAGEINFO_WARNING_MINOR
+IDR_POLICY_CSS
+IDR_POLICY_HTML
+IDR_POLICY_JS
+IDR_PRINTER_FAVICON
+IDR_SAD_FAVICON
+IDR_SAD_TAB
+IDR_SECURITY_INTERSTITIAL_HTML
+IDR_SIGNIN_INTERNALS_INDEX_HTML
+IDR_SIGNIN_INTERNALS_INDEX_JS
+IDR_SYNC_INTERNALS_ABOUT_JS
+IDR_SYNC_INTERNALS_CHROME_SYNC_JS
+IDR_SYNC_INTERNALS_DATA_JS
+IDR_SYNC_INTERNALS_EVENTS_JS
+IDR_SYNC_INTERNALS_INDEX_HTML
+IDR_SYNC_INTERNALS_INDEX_JS
+IDR_SYNC_INTERNALS_SEARCH_JS
+IDR_SYNC_INTERNALS_SYNC_LOG_JS
+IDR_SYNC_INTERNALS_SYNC_NODE_BROWSER_JS
+IDR_SYNC_INTERNALS_SYNC_SEARCH_JS
+IDR_SYNC_INTERNALS_TYPES_JS
+IDR_TOOLBAR_SHADOW_FULL_BLEED
+IDR_TRANSLATE_JS
+IDR_UBER_UTILS_JS
+IDR_WEBUI_CSS_TEXT_DEFAULTS
+IDR_WEBUI_I18N_TEMPLATE_JS
+IDR_WEBUI_JSTEMPLATE_JS
+IDR_WEBUI_JS_LOAD_TIME_DATA
+IDS_ABOUT_MAC
+IDS_ABOUT_VERSION_COMMAND_LINE
+IDS_ABOUT_VERSION_COMPANY_NAME
+IDS_ABOUT_VERSION_COPYRIGHT
+IDS_ABOUT_VERSION_EXECUTABLE_PATH
+IDS_ABOUT_VERSION_OFFICIAL
+IDS_ABOUT_VERSION_OS
+IDS_ABOUT_VERSION_PATH_NOTFOUND
+IDS_ABOUT_VERSION_PROFILE_PATH
+IDS_ABOUT_VERSION_REVISION
+IDS_ABOUT_VERSION_TITLE
+IDS_ABOUT_VERSION_UNOFFICIAL
+IDS_ABOUT_VERSION_USER_AGENT
+IDS_ABOUT_VERSION_VARIATIONS
+IDS_ACCEPT_LANGUAGES
+IDS_ACCNAME_BACK
+IDS_ACCNAME_CLEAR_TEXT
+IDS_ACCNAME_FORWARD
+IDS_ACCNAME_LOCATION
+IDS_ACCNAME_VOICE_SEARCH
+IDS_ALLOW_INSECURE_CONTENT_BUTTON
+IDS_ALTERNATE_NAV_URL_VIEW_LABEL
+IDS_ANNOTATED_SUGGESTION
+IDS_APP_CANCEL
+IDS_APP_OK
+IDS_APP_UNTITLED_SHORTCUT_FILE_NAME
+IDS_AUTOCOMPLETE_SEARCH_DESCRIPTION
+IDS_AUTOFILL_ADDRESS_LINE_SEPARATOR
+IDS_AUTOFILL_ADDRESS_SUMMARY_SEPARATOR
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_ERROR_NETWORK
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_ERROR_PERMANENT
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_ERROR_TRY_AGAIN
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS_AMEX
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS_EXPIRED
+IDS_AUTOFILL_CARD_UNMASK_PROMPT_INSTRUCTIONS_EXPIRED_AMEX
+IDS_AUTOFILL_CC_AMEX
+IDS_AUTOFILL_CC_AMEX_SHORT
+IDS_AUTOFILL_CC_DINERS
+IDS_AUTOFILL_CC_DISCOVER
+IDS_AUTOFILL_CC_GENERIC
+IDS_AUTOFILL_CC_INFOBAR_ACCEPT
+IDS_AUTOFILL_CC_INFOBAR_DENY
+IDS_AUTOFILL_CC_INFOBAR_TEXT
+IDS_AUTOFILL_CC_JCB
+IDS_AUTOFILL_CC_MASTERCARD
+IDS_AUTOFILL_CC_UNION_PAY
+IDS_AUTOFILL_CC_VISA
+IDS_AUTOFILL_CLEAR_FORM_MENU_ITEM
+IDS_AUTOFILL_DELETE_AUTOCOMPLETE_SUGGESTION_CONFIRMATION_BODY
+IDS_AUTOFILL_DELETE_CREDIT_CARD_SUGGESTION_CONFIRMATION_BODY
+IDS_AUTOFILL_DELETE_PROFILE_SUGGESTION_CONFIRMATION_BODY
+IDS_AUTOFILL_DIALOG_PRIVACY_POLICY_LINK
+IDS_AUTOFILL_FIELD_LABEL_AREA
+IDS_AUTOFILL_FIELD_LABEL_COUNTY
+IDS_AUTOFILL_FIELD_LABEL_DEPARTMENT
+IDS_AUTOFILL_FIELD_LABEL_DISTRICT
+IDS_AUTOFILL_FIELD_LABEL_EMIRATE
+IDS_AUTOFILL_FIELD_LABEL_ISLAND
+IDS_AUTOFILL_FIELD_LABEL_PARISH
+IDS_AUTOFILL_FIELD_LABEL_POSTAL_CODE
+IDS_AUTOFILL_FIELD_LABEL_PREFECTURE
+IDS_AUTOFILL_FIELD_LABEL_PROVINCE
+IDS_AUTOFILL_FIELD_LABEL_STATE
+IDS_AUTOFILL_FIELD_LABEL_ZIP_CODE
+IDS_AUTOFILL_OPTIONS_POPUP
+IDS_AUTOFILL_PASSWORD_FIELD_SUGGESTIONS_TITLE
+IDS_AUTOFILL_SCAN_CREDIT_CARD
+IDS_AUTOFILL_WARNING_FORM_DISABLED
+IDS_AUTOFILL_WARNING_INSECURE_CONNECTION
+IDS_AUTOLOGIN_INFOBAR_CANCEL_BUTTON
+IDS_AUTOLOGIN_INFOBAR_MESSAGE
+IDS_AUTOLOGIN_INFOBAR_OK_BUTTON
+IDS_BLOCKED_DISPLAYING_INSECURE_CONTENT
+IDS_BLOCK_INSECURE_CONTENT_BUTTON
+IDS_BOOKMARK_ADD_EDITOR_TITLE
+IDS_BOOKMARK_ALL_TABS_DIALOG_TITLE
+IDS_BOOKMARK_BAR_FOLDER_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DEFAULT_NAME
+IDS_BOOKMARK_BAR_MANAGED_FOLDER_DOMAIN_NAME
+IDS_BOOKMARK_BAR_MOBILE_FOLDER_NAME
+IDS_BOOKMARK_BAR_OTHER_FOLDER_NAME
+IDS_BOOKMARK_BAR_REDO
+IDS_BOOKMARK_BAR_REDO_ADD
+IDS_BOOKMARK_BAR_REDO_DELETE
+IDS_BOOKMARK_BAR_REDO_EDIT
+IDS_BOOKMARK_BAR_REDO_MOVE
+IDS_BOOKMARK_BAR_REDO_REORDER
+IDS_BOOKMARK_BAR_SUPERVISED_FOLDER_DEFAULT_NAME
+IDS_BOOKMARK_BAR_UNDO
+IDS_BOOKMARK_BAR_UNDO_ADD
+IDS_BOOKMARK_BAR_UNDO_DELETE
+IDS_BOOKMARK_BAR_UNDO_EDIT
+IDS_BOOKMARK_BAR_UNDO_MOVE
+IDS_BOOKMARK_BAR_UNDO_REORDER
+IDS_BOOKMARK_BUBBLE_CHOOSER_ANOTHER_FOLDER
+IDS_BOOKMARK_BUBBLE_REMOVE_BOOKMARK
+IDS_BOOKMARK_EDITOR_CONFIRM_DELETE
+IDS_BOOKMARK_EDITOR_NEW_FOLDER_NAME
+IDS_BOOKMARK_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_CHOOSER_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE
+IDS_BOOKMARK_FOLDER_EDITOR_WINDOW_TITLE_NEW
+IDS_BOOKMARK_MANAGER_FOLDER_SECTION
+IDS_BOOKMARK_MANAGER_FOLDER_TITLE
+IDS_BOOKMARK_MANAGER_NAME_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_MANAGER_REMOVE_TITLE
+IDS_BOOKMARK_MANAGER_URL_INPUT_PLACE_HOLDER
+IDS_BOOKMARK_NEW_FOLDER_BUTTON_TITLE
+IDS_CANCEL
+IDS_CERT_ERROR_AUTHORITY_INVALID_DESCRIPTION
+IDS_CERT_ERROR_AUTHORITY_INVALID_DETAILS
+IDS_CERT_ERROR_AUTHORITY_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_AUTHORITY_INVALID_TITLE
+IDS_CERT_ERROR_CHAIN_EXPIRED_DESCRIPTION
+IDS_CERT_ERROR_CHAIN_EXPIRED_DETAILS
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DESCRIPTION
+IDS_CERT_ERROR_COMMON_NAME_INVALID_DETAILS
+IDS_CERT_ERROR_COMMON_NAME_INVALID_EXTRA_INFO_2
+IDS_CERT_ERROR_COMMON_NAME_INVALID_TITLE
+IDS_CERT_ERROR_CONTAINS_ERRORS_DESCRIPTION
+IDS_CERT_ERROR_CONTAINS_ERRORS_DETAILS
+IDS_CERT_ERROR_CONTAINS_ERRORS_EXTRA_INFO_2
+IDS_CERT_ERROR_CONTAINS_ERRORS_TITLE
+IDS_CERT_ERROR_EXPIRED_DESCRIPTION
+IDS_CERT_ERROR_EXPIRED_DETAILS
+IDS_CERT_ERROR_EXPIRED_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_EXPIRED_TITLE
+IDS_CERT_ERROR_EXTRA_INFO_1
+IDS_CERT_ERROR_EXTRA_INFO_TITLE
+IDS_CERT_ERROR_INVALID_CERT_DESCRIPTION
+IDS_CERT_ERROR_INVALID_CERT_DETAILS
+IDS_CERT_ERROR_INVALID_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_INVALID_CERT_TITLE
+IDS_CERT_ERROR_NAME_CONSTRAINT_VIOLATION_DESCRIPTION
+IDS_CERT_ERROR_NAME_CONSTRAINT_VIOLATION_DETAILS
+IDS_CERT_ERROR_NAME_CONSTRAINT_VIOLATION_TITLE
+IDS_CERT_ERROR_NOT_YET_VALID_DESCRIPTION
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS
+IDS_CERT_ERROR_NOT_YET_VALID_DETAILS_EXTRA_INFO_2
+IDS_CERT_ERROR_NOT_YET_VALID_TITLE
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DESCRIPTION
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_DETAILS
+IDS_CERT_ERROR_NO_REVOCATION_MECHANISM_TITLE
+IDS_CERT_ERROR_REVOKED_CERT_DESCRIPTION
+IDS_CERT_ERROR_REVOKED_CERT_DETAILS
+IDS_CERT_ERROR_REVOKED_CERT_EXTRA_INFO_2
+IDS_CERT_ERROR_REVOKED_CERT_TITLE
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DESCRIPTION
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_DETAILS
+IDS_CERT_ERROR_UNABLE_TO_CHECK_REVOCATION_TITLE
+IDS_CERT_ERROR_UNKNOWN_ERROR_DESCRIPTION
+IDS_CERT_ERROR_UNKNOWN_ERROR_DETAILS
+IDS_CERT_ERROR_UNKNOWN_ERROR_TITLE
+IDS_CERT_ERROR_WEAK_KEY_DESCRIPTION
+IDS_CERT_ERROR_WEAK_KEY_DETAILS
+IDS_CERT_ERROR_WEAK_KEY_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_KEY_TITLE
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DESCRIPTION
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_DETAILS
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_EXTRA_INFO_2
+IDS_CERT_ERROR_WEAK_SIGNATURE_ALGORITHM_TITLE
+IDS_CHROME_TO_DEVICE_PRINT_TO_PHONE
+IDS_CHROME_TO_DEVICE_SNAPSHOTS
+IDS_CLOSE
+IDS_CONTEXTUAL_SEARCH_HEADER
+IDS_CONTEXTUAL_SEARCH_PROMO_DESCRIPTION_1
+IDS_CONTEXTUAL_SEARCH_PROMO_DESCRIPTION_2
+IDS_CONTEXTUAL_SEARCH_PROMO_FEATURE_NAME
+IDS_CONTEXTUAL_SEARCH_PROMO_OPTIN
+IDS_CONTEXTUAL_SEARCH_PROMO_OPTOUT
+IDS_COULDNT_OPEN_PROFILE_ERROR
+IDS_CRASHES_BUG_LINK_LABEL
+IDS_CRASHES_CRASH_COUNT_BANNER_FORMAT
+IDS_CRASHES_CRASH_HEADER_FORMAT
+IDS_CRASHES_CRASH_TIME_FORMAT
+IDS_CRASHES_DISABLED_HEADER
+IDS_CRASHES_DISABLED_MESSAGE
+IDS_CRASHES_NO_CRASHES_MESSAGE
+IDS_CRASHES_TITLE
+IDS_CRASHES_UPLOAD_MESSAGE
+IDS_DATA_REDUCTION_PROXY_BACK_BUTTON
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_HEADING
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_PRIMARY_PARAGRAPH
+IDS_DATA_REDUCTION_PROXY_CANNOT_PROXY_SECONDARY_PARAGRAPH
+IDS_DATA_REDUCTION_PROXY_CONTINUE_BUTTON
+IDS_DATA_REDUCTION_PROXY_TITLE
+IDS_DEFAULT_AVATAR_NAME_10
+IDS_DEFAULT_AVATAR_NAME_11
+IDS_DEFAULT_AVATAR_NAME_12
+IDS_DEFAULT_AVATAR_NAME_13
+IDS_DEFAULT_AVATAR_NAME_14
+IDS_DEFAULT_AVATAR_NAME_15
+IDS_DEFAULT_AVATAR_NAME_16
+IDS_DEFAULT_AVATAR_NAME_17
+IDS_DEFAULT_AVATAR_NAME_18
+IDS_DEFAULT_AVATAR_NAME_19
+IDS_DEFAULT_AVATAR_NAME_20
+IDS_DEFAULT_AVATAR_NAME_21
+IDS_DEFAULT_AVATAR_NAME_22
+IDS_DEFAULT_AVATAR_NAME_23
+IDS_DEFAULT_AVATAR_NAME_24
+IDS_DEFAULT_AVATAR_NAME_25
+IDS_DEFAULT_AVATAR_NAME_26
+IDS_DEFAULT_AVATAR_NAME_8
+IDS_DEFAULT_AVATAR_NAME_9
+IDS_DEFAULT_ENCODING
+IDS_DEFAULT_PROFILE_NAME
+IDS_DEFAULT_TAB_TITLE
+IDS_DELETE
+IDS_DISABLE_TOUCH_ADJUSTMENT_DESCRIPTION
+IDS_DISABLE_TOUCH_ADJUSTMENT_NAME
+IDS_DOM_DISTILLER_JAVASCRIPT_DISABLED_CONTENT
+IDS_DOM_DISTILLER_QUALITY_ANSWER_NO
+IDS_DOM_DISTILLER_QUALITY_ANSWER_YES
+IDS_DOM_DISTILLER_QUALITY_QUESTION
+IDS_DOM_DISTILLER_VIEWER_CLOSE_READER_VIEW
+IDS_DOM_DISTILLER_VIEWER_FAILED_TO_FIND_ARTICLE_CONTENT
+IDS_DOM_DISTILLER_VIEWER_FAILED_TO_FIND_ARTICLE_TITLE
+IDS_DOM_DISTILLER_VIEWER_LOADING_STRING
+IDS_DOM_DISTILLER_VIEWER_LOADING_TITLE
+IDS_DOM_DISTILLER_VIEWER_NO_DATA_CONTENT
+IDS_DOM_DISTILLER_VIEWER_NO_DATA_TITLE
+IDS_DOM_DISTILLER_VIEWER_VIEW_ORIGINAL
+IDS_DOM_DISTILLER_WEBUI_ENTRY_ADD
+IDS_DOM_DISTILLER_WEBUI_ENTRY_ADD_FAILED
+IDS_DOM_DISTILLER_WEBUI_ENTRY_URL
+IDS_DOM_DISTILLER_WEBUI_FETCHING_ENTRIES
+IDS_DOM_DISTILLER_WEBUI_REFRESH
+IDS_DOM_DISTILLER_WEBUI_TITLE
+IDS_DOM_DISTILLER_WEBUI_VIEW_URL
+IDS_DOM_DISTILLER_WEBUI_VIEW_URL_FAILED
+IDS_DONE
+IDS_EASY_UNLOCK_SCREENLOCK_USER_POD_AUTH_VALUE
+IDS_EDIT_FIND_MAC
+IDS_EMPTY_KEYWORD_VALUE
+IDS_ERRORPAGES_BUTTON_LESS
+IDS_ERRORPAGES_BUTTON_MORE
+IDS_ERRORPAGES_BUTTON_RELOAD
+IDS_ERRORPAGES_BUTTON_SHOW_SAVED_COPY
+IDS_ERRORPAGES_BUTTON_SHOW_SAVED_COPY_HELP
+IDS_ERRORPAGES_DETAILS_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_DETAILS_BAD_GATEWAY
+IDS_ERRORPAGES_DETAILS_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_DETAILS_BLOCKED
+IDS_ERRORPAGES_DETAILS_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_DETAILS_BLOCKED_ENROLLMENT_CHECK_PENDING
+IDS_ERRORPAGES_DETAILS_CACHE_MISS
+IDS_ERRORPAGES_DETAILS_CACHE_READ_FAILURE
+IDS_ERRORPAGES_DETAILS_CONNECTION_CLOSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_CONNECTION_REFUSED
+IDS_ERRORPAGES_DETAILS_CONNECTION_RESET
+IDS_ERRORPAGES_DETAILS_DNS_PROBE_RUNNING
+IDS_ERRORPAGES_DETAILS_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_DETAILS_EMPTY_RESPONSE
+IDS_ERRORPAGES_DETAILS_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_FILE_NOT_FOUND
+IDS_ERRORPAGES_DETAILS_FORBIDDEN
+IDS_ERRORPAGES_DETAILS_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_DETAILS_GONE
+IDS_ERRORPAGES_DETAILS_HTTP_VERSION_NOT_SUPPORTED
+IDS_ERRORPAGES_DETAILS_ICANN_NAME_COLLISION
+IDS_ERRORPAGES_DETAILS_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_DETAILS_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_DETAILS_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_DETAILS_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_DETAILS_NETWORK_CHANGED
+IDS_ERRORPAGES_DETAILS_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_DETAILS_NOT_IMPLEMENTED
+IDS_ERRORPAGES_DETAILS_PINNING_FAILURE
+IDS_ERRORPAGES_DETAILS_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_DISPOSITION
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_CONTENT_LENGTH
+IDS_ERRORPAGES_DETAILS_RESPONSE_HEADERS_MULTIPLE_LOCATION
+IDS_ERRORPAGES_DETAILS_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_DETAILS_SSL_FALLBACK_BEYOND_MINIMUM_VERSION
+IDS_ERRORPAGES_DETAILS_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_DETAILS_SSL_VERSION_OR_CIPHER_MISMATCH
+IDS_ERRORPAGES_DETAILS_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_DETAILS_TIMED_OUT
+IDS_ERRORPAGES_DETAILS_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_DETAILS_UNKNOWN
+IDS_ERRORPAGES_ERROR_CODE
+IDS_ERRORPAGES_HEADING_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_HEADING_BLOCKED
+IDS_ERRORPAGES_HEADING_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_HEADING_CACHE_MISS
+IDS_ERRORPAGES_HEADING_CACHE_READ_FAILURE
+IDS_ERRORPAGES_HEADING_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_HEADING_DUPLICATE_HEADERS
+IDS_ERRORPAGES_HEADING_EMPTY_RESPONSE
+IDS_ERRORPAGES_HEADING_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_HTTP_SERVER_ERROR
+IDS_ERRORPAGES_HEADING_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_HEADING_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_HEADING_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_HEADING_NOT_AVAILABLE
+IDS_ERRORPAGES_HEADING_NOT_FOUND
+IDS_ERRORPAGES_HEADING_PINNING_FAILURE
+IDS_ERRORPAGES_HEADING_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_HEADING_SSL_FALLBACK_BEYOND_MINIMUM_VERSION
+IDS_ERRORPAGES_HEADING_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_HEADING_SSL_VERSION_OR_CIPHER_MISMATCH
+IDS_ERRORPAGES_HEADING_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_HEADING_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_HTTP_POST_WARNING
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION_BODY
+IDS_ERRORPAGES_SUGGESTION_CHECK_CONNECTION_HEADER
+IDS_ERRORPAGES_SUGGESTION_CONTACT_ADMINISTRATOR
+IDS_ERRORPAGES_SUGGESTION_DNS_CONFIG
+IDS_ERRORPAGES_SUGGESTION_FIREWALL_CONFIG
+IDS_ERRORPAGES_SUGGESTION_GOOGLE_SEARCH
+IDS_ERRORPAGES_SUGGESTION_LEARNMORE_BODY
+IDS_ERRORPAGES_SUGGESTION_NETWORK_PREDICTION
+IDS_ERRORPAGES_SUGGESTION_PROXY_CONFIG
+IDS_ERRORPAGES_SUGGESTION_PROXY_DISABLE_PLATFORM
+IDS_ERRORPAGES_SUGGESTION_RELOAD
+IDS_ERRORPAGES_SUGGESTION_RELOAD_REPOST_BODY
+IDS_ERRORPAGES_SUGGESTION_RELOAD_REPOST_HEADER
+IDS_ERRORPAGES_SUGGESTION_VIEW_POLICIES
+IDS_ERRORPAGES_SUMMARY_ADDRESS_UNREACHABLE
+IDS_ERRORPAGES_SUMMARY_BAD_GATEWAY
+IDS_ERRORPAGES_SUMMARY_BAD_SSL_CLIENT_AUTH_CERT
+IDS_ERRORPAGES_SUMMARY_BLOCKED
+IDS_ERRORPAGES_SUMMARY_BLOCKED_BY_ADMINISTRATOR
+IDS_ERRORPAGES_SUMMARY_BLOCKED_ENROLLMENT_CHECK_PENDING
+IDS_ERRORPAGES_SUMMARY_CACHE_MISS
+IDS_ERRORPAGES_SUMMARY_CACHE_READ_FAILURE
+IDS_ERRORPAGES_SUMMARY_CONNECTION_REFUSED
+IDS_ERRORPAGES_SUMMARY_CONNECTION_RESET
+IDS_ERRORPAGES_SUMMARY_DNS_PROBE_RUNNING
+IDS_ERRORPAGES_SUMMARY_DOWNLOAD_FILE_TYPE_ERROR
+IDS_ERRORPAGES_SUMMARY_DUPLICATE_HEADERS
+IDS_ERRORPAGES_SUMMARY_EMPTY_RESPONSE
+IDS_ERRORPAGES_SUMMARY_FILE_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_FORBIDDEN
+IDS_ERRORPAGES_SUMMARY_GATEWAY_TIMEOUT
+IDS_ERRORPAGES_SUMMARY_GONE
+IDS_ERRORPAGES_SUMMARY_ICANN_NAME_COLLISION
+IDS_ERRORPAGES_SUMMARY_INTERNAL_SERVER_ERROR
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_INSTRUCTIONS_TEMPLATE
+IDS_ERRORPAGES_SUMMARY_INTERNET_DISCONNECTED_PLATFORM
+IDS_ERRORPAGES_SUMMARY_NAME_NOT_RESOLVED
+IDS_ERRORPAGES_SUMMARY_NETWORK_ACCESS_DENIED
+IDS_ERRORPAGES_SUMMARY_NETWORK_CHANGED
+IDS_ERRORPAGES_SUMMARY_NETWORK_IO_SUSPENDED
+IDS_ERRORPAGES_SUMMARY_NOT_AVAILABLE
+IDS_ERRORPAGES_SUMMARY_NOT_FOUND
+IDS_ERRORPAGES_SUMMARY_PINNING_FAILURE
+IDS_ERRORPAGES_SUMMARY_PROXY_CONNECTION_FAILED
+IDS_ERRORPAGES_SUMMARY_SERVICE_UNAVAILABLE
+IDS_ERRORPAGES_SUMMARY_SSL_FALLBACK_BEYOND_MINIMUM_VERSION
+IDS_ERRORPAGES_SUMMARY_SSL_PROTOCOL_ERROR
+IDS_ERRORPAGES_SUMMARY_SSL_VERSION_OR_CIPHER_MISMATCH
+IDS_ERRORPAGES_SUMMARY_TEMPORARILY_THROTTLED
+IDS_ERRORPAGES_SUMMARY_TIMED_OUT
+IDS_ERRORPAGES_SUMMARY_TOO_MANY_REDIRECTS
+IDS_ERRORPAGES_SUMMARY_WEAK_SERVER_EPHEMERAL_DH_KEY
+IDS_ERRORPAGES_SUMMARY_WEBSITE_CANNOT_HANDLE
+IDS_ERRORPAGES_TITLE_ACCESS_DENIED
+IDS_ERRORPAGES_TITLE_BLOCKED
+IDS_ERRORPAGES_TITLE_LOAD_FAILED
+IDS_ERRORPAGES_TITLE_NOT_AVAILABLE
+IDS_ERRORPAGES_TITLE_NOT_FOUND
+IDS_ERRORPAGE_NET_BUTTON_DETAILS
+IDS_ERRORPAGE_NET_BUTTON_HIDE_DETAILS
+IDS_EXTENSION_KEYWORD_COMMAND
+IDS_FEEDBACK_REPORT_PAGE_TITLE
+IDS_FEEDBACK_REPORT_URL_LABEL
+IDS_FEEDBACK_SEND_REPORT
+IDS_FEEDBACK_USER_EMAIL_LABEL
+IDS_FIND_IN_PAGE_CLOSE_TOOLTIP
+IDS_FIND_IN_PAGE_COUNT
+IDS_FIND_IN_PAGE_NEXT_TOOLTIP
+IDS_FIND_IN_PAGE_PREVIOUS_TOOLTIP
+IDS_FLAGS_ACCELERATED_FIXED_ROOT_BACKGROUND_DESCRIPTION
+IDS_FLAGS_ACCELERATED_FIXED_ROOT_BACKGROUND_NAME
+IDS_FLAGS_ALLOW_NACL_SOCKET_API_DESCRIPTION
+IDS_FLAGS_ALLOW_NACL_SOCKET_API_NAME
+IDS_FLAGS_ALLOW_TOUCHPAD_THREE_FINGER_CLICK_DESCRIPTION
+IDS_FLAGS_ALLOW_TOUCHPAD_THREE_FINGER_CLICK_NAME
+IDS_FLAGS_COMPOSITED_LAYER_BORDERS
+IDS_FLAGS_COMPOSITED_LAYER_BORDERS_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_DESCRIPTION
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_HIGH_DPI
+IDS_FLAGS_COMPOSITING_FOR_FIXED_POSITION_NAME
+IDS_FLAGS_CONFLICTS_CHECK_DESCRIPTION
+IDS_FLAGS_CONFLICTS_CHECK_NAME
+IDS_FLAGS_DEBUG_PACKED_APP_DESCRIPTION
+IDS_FLAGS_DEBUG_PACKED_APP_NAME
+IDS_FLAGS_DEBUG_SHORTCUTS_DESCRIPTION
+IDS_FLAGS_DEBUG_SHORTCUTS_NAME
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_DESCRIPTION
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_GRANDE
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_NAME
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_SHORT
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_TALL
+IDS_FLAGS_DEFAULT_TILE_HEIGHT_VENTI
+IDS_FLAGS_DEFAULT_TILE_WIDTH_DESCRIPTION
+IDS_FLAGS_DEFAULT_TILE_WIDTH_GRANDE
+IDS_FLAGS_DEFAULT_TILE_WIDTH_NAME
+IDS_FLAGS_DEFAULT_TILE_WIDTH_SHORT
+IDS_FLAGS_DEFAULT_TILE_WIDTH_TALL
+IDS_FLAGS_DEFAULT_TILE_WIDTH_VENTI
+IDS_FLAGS_DISABLE
+IDS_FLAGS_DISABLE_ACCELERATED_2D_CANVAS_DESCRIPTION
+IDS_FLAGS_DISABLE_ACCELERATED_2D_CANVAS_NAME
+IDS_FLAGS_DISABLE_ACCELERATED_VIDEO_DECODE_DESCRIPTION
+IDS_FLAGS_DISABLE_ACCELERATED_VIDEO_DECODE_NAME
+IDS_FLAGS_DISABLE_BOOT_ANIMATION
+IDS_FLAGS_DISABLE_BOOT_ANIMATION_DESCRIPTION
+IDS_FLAGS_DISABLE_GESTURE_REQUIREMENT_FOR_MEDIA_PLAYBACK_DESCRIPTION
+IDS_FLAGS_DISABLE_GESTURE_REQUIREMENT_FOR_MEDIA_PLAYBACK_NAME
+IDS_FLAGS_DISABLE_HYPERLINK_AUDITING_DESCRIPTION
+IDS_FLAGS_DISABLE_HYPERLINK_AUDITING_NAME
+IDS_FLAGS_DISABLE_PNACL_DESCRIPTION
+IDS_FLAGS_DISABLE_PNACL_NAME
+IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_DESCRIPTION
+IDS_FLAGS_DISABLE_SOFTWARE_RASTERIZER_NAME
+IDS_FLAGS_DISABLE_WEBGL_DESCRIPTION
+IDS_FLAGS_DISABLE_WEBGL_NAME
+IDS_FLAGS_DISABLE_WEBRTC_DESCRIPTION
+IDS_FLAGS_DISABLE_WEBRTC_NAME
+IDS_FLAGS_ENABLE
+IDS_FLAGS_ENABLE_ACCELERATED_MJPEG_DECODE_DESCRIPTION
+IDS_FLAGS_ENABLE_ACCELERATED_MJPEG_DECODE_NAME
+IDS_FLAGS_ENABLE_APPS_SHOW_ON_FIRST_PAINT_DESCRIPTION
+IDS_FLAGS_ENABLE_APPS_SHOW_ON_FIRST_PAINT_NAME
+IDS_FLAGS_ENABLE_CONTEXTUAL_SEARCH
+IDS_FLAGS_ENABLE_CONTEXTUAL_SEARCH_DESCRIPTION
+IDS_FLAGS_ENABLE_DEVTOOLS_EXPERIMENTS_DESCRIPTION
+IDS_FLAGS_ENABLE_DEVTOOLS_EXPERIMENTS_NAME
+IDS_FLAGS_ENABLE_DOWNLOAD_RESUMPTION_DESCRIPTION
+IDS_FLAGS_ENABLE_DOWNLOAD_RESUMPTION_NAME
+IDS_FLAGS_ENABLE_ENHANCED_BOOKMARKS_DESCRIPTION
+IDS_FLAGS_ENABLE_ENHANCED_BOOKMARKS_NAME
+IDS_FLAGS_ENABLE_EXPERIMENTAL_CANVAS_FEATURES_DESCRIPTION
+IDS_FLAGS_ENABLE_EXPERIMENTAL_CANVAS_FEATURES_NAME
+IDS_FLAGS_ENABLE_GESTURE_TAP_HIGHLIGHTING_DESCRIPTION
+IDS_FLAGS_ENABLE_GESTURE_TAP_HIGHLIGHTING_NAME
+IDS_FLAGS_ENABLE_ICON_NTP_DESCRIPTION
+IDS_FLAGS_ENABLE_ICON_NTP_NAME
+IDS_FLAGS_ENABLE_JAVASCRIPT_HARMONY_DESCRIPTION
+IDS_FLAGS_ENABLE_JAVASCRIPT_HARMONY_NAME
+IDS_FLAGS_ENABLE_NACL_DEBUG_DESCRIPTION
+IDS_FLAGS_ENABLE_NACL_DEBUG_NAME
+IDS_FLAGS_ENABLE_NACL_DESCRIPTION
+IDS_FLAGS_ENABLE_NACL_NAME
+IDS_FLAGS_ENABLE_PANELS_DESCRIPTION
+IDS_FLAGS_ENABLE_PANELS_NAME
+IDS_FLAGS_ENABLE_PASSWORD_GENERATION_DESCRIPTION
+IDS_FLAGS_ENABLE_PASSWORD_GENERATION_NAME
+IDS_FLAGS_ENABLE_PINCH_SCALE_DESCRIPTION
+IDS_FLAGS_ENABLE_PINCH_SCALE_NAME
+IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_DESCRIPTION
+IDS_FLAGS_ENABLE_REQUEST_TABLET_SITE_NAME
+IDS_FLAGS_ENABLE_SCREEN_CAPTURE_DESCRIPTION
+IDS_FLAGS_ENABLE_SCREEN_CAPTURE_NAME
+IDS_FLAGS_ENABLE_SIMPLE_CACHE_BACKEND_DESCRIPTION
+IDS_FLAGS_ENABLE_SIMPLE_CACHE_BACKEND_NAME
+IDS_FLAGS_ENABLE_SMOOTH_SCROLLING_DESCRIPTION
+IDS_FLAGS_ENABLE_SMOOTH_SCROLLING_NAME
+IDS_FLAGS_ENABLE_STALE_WHILE_REVALIDATE_DESCRIPTION
+IDS_FLAGS_ENABLE_STALE_WHILE_REVALIDATE_NAME
+IDS_FLAGS_ENABLE_SUGGESTIONS_SERVICE_DESCRIPTION
+IDS_FLAGS_ENABLE_SUGGESTIONS_SERVICE_NAME
+IDS_FLAGS_ENABLE_SYNCED_NOTIFICATIONS_DESCRIPTION
+IDS_FLAGS_ENABLE_SYNCED_NOTIFICATIONS_NAME
+IDS_FLAGS_ENABLE_TCP_FAST_OPEN_DESCRIPTION
+IDS_FLAGS_ENABLE_TCP_FAST_OPEN_NAME
+IDS_FLAGS_ENABLE_TOUCH_DRAG_DROP_DESCRIPTION
+IDS_FLAGS_ENABLE_TOUCH_DRAG_DROP_NAME
+IDS_FLAGS_ENABLE_TOUCH_EDITING_DESCRIPTION
+IDS_FLAGS_ENABLE_TOUCH_EDITING_NAME
+IDS_FLAGS_ENABLE_TRANSLATE_NEW_UX_DESCRIPTION
+IDS_FLAGS_ENABLE_TRANSLATE_NEW_UX_NAME
+IDS_FLAGS_EXPERIMENTAL_EXTENSION_APIS_DESCRIPTION
+IDS_FLAGS_EXPERIMENTAL_EXTENSION_APIS_NAME
+IDS_FLAGS_EXPERIMENTAL_WEB_PLATFORM_FEATURES_DESCRIPTION
+IDS_FLAGS_EXPERIMENTAL_WEB_PLATFORM_FEATURES_NAME
+IDS_FLAGS_EXTENSIONS_ON_CHROME_URLS_DESCRIPTION
+IDS_FLAGS_EXTENSIONS_ON_CHROME_URLS_NAME
+IDS_FLAGS_FORCE_ACCELERATED_OVERFLOW_SCROLL_MODE_DESCRIPTION
+IDS_FLAGS_FORCE_ACCELERATED_OVERFLOW_SCROLL_MODE_NAME
+IDS_FLAGS_FORCE_HIGH_DPI_DESCRIPTION
+IDS_FLAGS_FORCE_HIGH_DPI_NAME
+IDS_FLAGS_IGNORE_GPU_BLACKLIST_DESCRIPTION
+IDS_FLAGS_IGNORE_GPU_BLACKLIST_NAME
+IDS_FLAGS_LONG_TITLE
+IDS_FLAGS_NACL_DEBUG_MASK_DESCRIPTION
+IDS_FLAGS_NACL_DEBUG_MASK_NAME
+IDS_FLAGS_NOT_AVAILABLE
+IDS_FLAGS_NO_EXPERIMENTS_AVAILABLE
+IDS_FLAGS_NO_UNSUPPORTED_EXPERIMENTS
+IDS_FLAGS_NTP_OTHER_SESSIONS_MENU_DESCRIPTION
+IDS_FLAGS_NTP_OTHER_SESSIONS_MENU_NAME
+IDS_FLAGS_PERFORMANCE_MONITOR_GATHERING_DESCRIPTION
+IDS_FLAGS_PERFORMANCE_MONITOR_GATHERING_NAME
+IDS_FLAGS_RELAUNCH_BUTTON
+IDS_FLAGS_RELAUNCH_NOTICE
+IDS_FLAGS_RESET_ALL_BUTTON
+IDS_FLAGS_SAVE_PAGE_AS_MHTML_DESCRIPTION
+IDS_FLAGS_SAVE_PAGE_AS_MHTML_NAME
+IDS_FLAGS_SHOW_AUTOFILL_TYPE_PREDICTIONS_DESCRIPTION
+IDS_FLAGS_SHOW_AUTOFILL_TYPE_PREDICTIONS_NAME
+IDS_FLAGS_SHOW_FPS_COUNTER
+IDS_FLAGS_SHOW_FPS_COUNTER_DESCRIPTION
+IDS_FLAGS_SHOW_TOUCH_HUD_DESCRIPTION
+IDS_FLAGS_SHOW_TOUCH_HUD_NAME
+IDS_FLAGS_SILENT_DEBUGGER_EXTENSION_API_DESCRIPTION
+IDS_FLAGS_SILENT_DEBUGGER_EXTENSION_API_NAME
+IDS_FLAGS_SPELLCHECK_AUTOCORRECT
+IDS_FLAGS_SPELLCHECK_AUTOCORRECT_DESCRIPTION
+IDS_FLAGS_STACKED_TAB_STRIP_DESCRIPTION
+IDS_FLAGS_STACKED_TAB_STRIP_NAME
+IDS_FLAGS_TABLE_TITLE
+IDS_FLAGS_THREADED_COMPOSITING_MODE_DESCRIPTION
+IDS_FLAGS_THREADED_COMPOSITING_MODE_NAME
+IDS_FLAGS_TOUCH_SCROLLING_MODE_ABSORB_TOUCHMOVE
+IDS_FLAGS_TOUCH_SCROLLING_MODE_DESCRIPTION
+IDS_FLAGS_TOUCH_SCROLLING_MODE_NAME
+IDS_FLAGS_TOUCH_SCROLLING_MODE_SYNC_TOUCHMOVE
+IDS_FLAGS_TOUCH_SCROLLING_MODE_TOUCHCANCEL
+IDS_FLAGS_UNSUPPORTED_TABLE_TITLE
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_DESCRIPTION
+IDS_FLAGS_WALLET_SERVICE_USE_SANDBOX_NAME
+IDS_FLAGS_WARNING_HEADER
+IDS_FLAGS_WARNING_TEXT
+IDS_FULLSCREEN
+IDS_GENERIC_EXPERIMENT_CHOICE_AUTOMATIC
+IDS_GENERIC_EXPERIMENT_CHOICE_DEFAULT
+IDS_GENERIC_EXPERIMENT_CHOICE_DISABLED
+IDS_GENERIC_EXPERIMENT_CHOICE_ENABLED
+IDS_GROUP_BY_DOMAIN_LABEL
+IDS_GUEST_PROFILE_NAME
+IDS_HARMFUL_V3_EXPLANATION_PARAGRAPH
+IDS_HARMFUL_V3_HEADING
+IDS_HARMFUL_V3_PRIMARY_PARAGRAPH
+IDS_HARMFUL_V3_PROCEED_PARAGRAPH
+IDS_HISTORY_ACTION_MENU_DESCRIPTION
+IDS_HISTORY_BLOCKED_VISIT_TEXT
+IDS_HISTORY_BROWSERESULTS
+IDS_HISTORY_CONTINUED
+IDS_HISTORY_DATE_WITH_RELATIVE_TIME
+IDS_HISTORY_DELETE_PRIOR_VISITS_CONFIRM_BUTTON
+IDS_HISTORY_DELETE_PRIOR_VISITS_WARNING
+IDS_HISTORY_FILTER_ALLOWED
+IDS_HISTORY_FILTER_ALLOW_ITEMS
+IDS_HISTORY_FILTER_BLOCKED
+IDS_HISTORY_FILTER_BLOCK_ITEMS
+IDS_HISTORY_HAS_SYNCED_RESULTS
+IDS_HISTORY_INTERVAL
+IDS_HISTORY_IN_CONTENT_PACK
+IDS_HISTORY_LOADING
+IDS_HISTORY_LOCK_BUTTON
+IDS_HISTORY_MORE_FROM_SITE
+IDS_HISTORY_NEWER
+IDS_HISTORY_NEWEST
+IDS_HISTORY_NO_RESULTS
+IDS_HISTORY_NO_SEARCH_RESULTS
+IDS_HISTORY_NO_SYNCED_RESULTS
+IDS_HISTORY_NUMBER_VISITS
+IDS_HISTORY_OLDER
+IDS_HISTORY_OPEN_CLEAR_BROWSING_DATA_DIALOG
+IDS_HISTORY_OTHER_SESSIONS_COLLAPSE_SESSION
+IDS_HISTORY_OTHER_SESSIONS_EXPAND_SESSION
+IDS_HISTORY_OTHER_SESSIONS_OPEN_ALL
+IDS_HISTORY_RANGE_ALL_TIME
+IDS_HISTORY_RANGE_LABEL
+IDS_HISTORY_RANGE_MONTH
+IDS_HISTORY_RANGE_NEXT
+IDS_HISTORY_RANGE_PREVIOUS
+IDS_HISTORY_RANGE_TODAY
+IDS_HISTORY_RANGE_WEEK
+IDS_HISTORY_REMOVE_BOOKMARK
+IDS_HISTORY_REMOVE_PAGE
+IDS_HISTORY_REMOVE_SELECTED_ITEMS
+IDS_HISTORY_SEARCHRESULTSFOR
+IDS_HISTORY_SEARCH_BUTTON
+IDS_HISTORY_TITLE
+IDS_HISTORY_UNKNOWN_DEVICE
+IDS_HISTORY_UNLOCK_BUTTON
+IDS_HTTP_POST_WARNING
+IDS_HTTP_POST_WARNING_RESEND
+IDS_HTTP_POST_WARNING_TITLE
+IDS_IMPORT_FROM_FIREFOX
+IDS_IMPORT_FROM_ICEWEASEL
+IDS_JAVASCRIPT_ALERT_DEFAULT_TITLE
+IDS_JAVASCRIPT_ALERT_TITLE
+IDS_JAVASCRIPT_MESSAGEBOX_DEFAULT_TITLE
+IDS_JAVASCRIPT_MESSAGEBOX_TITLE
+IDS_KEYWORD_SEARCH
+IDS_LEARN_MORE
+IDS_LEGACY_DEFAULT_PROFILE_NAME
+IDS_LIBADDRESSINPUT_ADDRESS_LINE_1_LABEL
+IDS_LIBADDRESSINPUT_AREA
+IDS_LIBADDRESSINPUT_COUNTRY_OR_REGION_LABEL
+IDS_LIBADDRESSINPUT_COUNTY
+IDS_LIBADDRESSINPUT_DEPARTMENT
+IDS_LIBADDRESSINPUT_DISTRICT
+IDS_LIBADDRESSINPUT_DO_SI
+IDS_LIBADDRESSINPUT_EMIRATE
+IDS_LIBADDRESSINPUT_ISLAND
+IDS_LIBADDRESSINPUT_LOCALITY_LABEL
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_POSTAL_CODE
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_POSTAL_CODE_URL
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_ZIP
+IDS_LIBADDRESSINPUT_MISMATCHING_VALUE_ZIP_URL
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_FIELD
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_POSTAL_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_POSTAL_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_ZIP_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_MISSING_REQUIRED_ZIP_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_NEIGHBORHOOD
+IDS_LIBADDRESSINPUT_OBLAST
+IDS_LIBADDRESSINPUT_ORGANIZATION_LABEL
+IDS_LIBADDRESSINPUT_PARISH
+IDS_LIBADDRESSINPUT_PIN_CODE_LABEL
+IDS_LIBADDRESSINPUT_POSTAL_CODE_LABEL
+IDS_LIBADDRESSINPUT_POST_TOWN
+IDS_LIBADDRESSINPUT_PO_BOX_FORBIDDEN_VALUE
+IDS_LIBADDRESSINPUT_PREFECTURE
+IDS_LIBADDRESSINPUT_PROVINCE
+IDS_LIBADDRESSINPUT_RECIPIENT_LABEL
+IDS_LIBADDRESSINPUT_STATE
+IDS_LIBADDRESSINPUT_SUBURB
+IDS_LIBADDRESSINPUT_UNKNOWN_VALUE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_POSTAL_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP_CODE_EXAMPLE
+IDS_LIBADDRESSINPUT_UNRECOGNIZED_FORMAT_ZIP_CODE_EXAMPLE_AND_URL
+IDS_LIBADDRESSINPUT_VILLAGE_TOWNSHIP
+IDS_LIBADDRESSINPUT_ZIP_CODE_LABEL
+IDS_LINK_FROM_CLIPBOARD
+IDS_LOGIN_DIALOG_OK_BUTTON_LABEL
+IDS_LOGIN_DIALOG_PASSWORD_FIELD
+IDS_LOGIN_DIALOG_TITLE
+IDS_LOGIN_DIALOG_USERNAME_FIELD
+IDS_MALWARE_V3_ADVICE_HEADING
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_ADVICE
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_HISTORY
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_SUBRESOURCE
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_SUBRESOURCE_ADVICE
+IDS_MALWARE_V3_EXPLANATION_PARAGRAPH_SUBRESOURCE_HISTORY
+IDS_MALWARE_V3_HEADING
+IDS_MALWARE_V3_PRIMARY_PARAGRAPH
+IDS_MALWARE_V3_PROCEED_PARAGRAPH
+IDS_MALWARE_V3_PROCEED_PARAGRAPH_NOT_RECOMMEND
+IDS_MALWARE_V3_PROCEED_PARAGRAPH_SOCIAL
+IDS_MANAGED_USER_AVATAR_LABEL
+IDS_MIDI_SYSEX_INFOBAR_QUESTION
+IDS_MIDI_SYSEX_PERMISSION_FRAGMENT
+IDS_MOBILE_WELCOME_URL
+IDS_NACL_DEBUG_MASK_CHOICE_DEBUG_ALL
+IDS_NACL_DEBUG_MASK_CHOICE_EXCLUDE_UTILS_PNACL
+IDS_NACL_DEBUG_MASK_CHOICE_INCLUDE_DEBUG
+IDS_NETWORK_PREDICTION_ENABLED_DESCRIPTION
+IDS_NET_EXPORT_NO_EMAIL_ACCOUNTS_ALERT_MESSAGE
+IDS_NET_EXPORT_NO_EMAIL_ACCOUNTS_ALERT_TITLE
+IDS_NEW_INCOGNITO_WINDOW_MAC
+IDS_NEW_NUMBERED_PROFILE_NAME
+IDS_NEW_TAB_CHROME_WELCOME_PAGE_TITLE
+IDS_NEW_TAB_MOST_VISITED
+IDS_NEW_TAB_RECENTLY_CLOSED
+IDS_NEW_TAB_RESTORE_THUMBNAILS_SHORT_LINK
+IDS_NEW_TAB_THUMBNAIL_REMOVED_NOTIFICATION
+IDS_NEW_TAB_TITLE
+IDS_NEW_TAB_UNDO_THUMBNAIL_REMOVE
+IDS_NUMBERED_PROFILE_NAME
+IDS_OK
+IDS_OMNIBOX_EMPTY_HINT
+IDS_ONE_CLICK_SIGNIN_CONFIRM_EMAIL_DIALOG_CANCEL_BUTTON
+IDS_OPEN_TABS_NOTYETSYNCED
+IDS_OPEN_TABS_PROMOCOMPUTER
+IDS_OPTIONS_ADVANCED_SECTION_TITLE_PRIVACY
+IDS_OPTIONS_DISABLE_WEB_SERVICES
+IDS_OPTIONS_ENABLE_LOGGING
+IDS_OPTIONS_IMPROVE_BROWSING_EXPERIENCE
+IDS_OPTIONS_PROXIES_CONFIGURE_BUTTON
+IDS_OTHER_DEVICES_X_MORE
+IDS_PAGEINFO_ADDRESS
+IDS_PAGEINFO_CERT_INFO_BUTTON
+IDS_PAGEINFO_PARTIAL_ADDRESS
+IDS_PAGE_INFO_HELP_CENTER_LINK
+IDS_PAGE_INFO_INTERNAL_PAGE
+IDS_PAGE_INFO_SECURITY_BUTTON_ACCESSIBILITY_LABEL
+IDS_PAGE_INFO_SECURITY_TAB_DEPRECATED_SIGNATURE_ALGORITHM
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_ERROR
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_INSECURE_CONTENT_WARNING
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTED_SENTENCE_LINK
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS
+IDS_PAGE_INFO_SECURITY_TAB_ENCRYPTION_DETAILS_AEAD
+IDS_PAGE_INFO_SECURITY_TAB_FALLBACK_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_FIRST_VISITED_TODAY
+IDS_PAGE_INFO_SECURITY_TAB_INSECURE_IDENTITY
+IDS_PAGE_INFO_SECURITY_TAB_NON_UNIQUE_NAME
+IDS_PAGE_INFO_SECURITY_TAB_NOT_ENCRYPTED_CONNECTION_TEXT
+IDS_PAGE_INFO_SECURITY_TAB_NO_REVOCATION_MECHANISM
+IDS_PAGE_INFO_SECURITY_TAB_RENEGOTIATION_MESSAGE
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY_EV_NO_CT
+IDS_PAGE_INFO_SECURITY_TAB_SECURE_IDENTITY_NO_CT
+IDS_PAGE_INFO_SECURITY_TAB_SSL_VERSION
+IDS_PAGE_INFO_SECURITY_TAB_UNABLE_TO_CHECK_REVOCATION
+IDS_PAGE_INFO_SECURITY_TAB_UNKNOWN_PARTY
+IDS_PAGE_INFO_SECURITY_TAB_WEAK_ENCRYPTION_CONNECTION_TEXT
+IDS_PASSWORDS_EXCEPTIONS_TAB_TITLE
+IDS_PASSWORDS_SHOW_PASSWORDS_TAB_TITLE
+IDS_PASSWORD_MANAGER_BLACKLIST_BUTTON
+IDS_PASSWORD_MANAGER_EMPTY_LOGIN
+IDS_PASSWORD_MANAGER_SAVE_BUTTON
+IDS_PASSWORD_MANAGER_SAVE_PASSWORD_PROMPT
+IDS_PAST_TIME_TODAY
+IDS_PAST_TIME_YESTERDAY
+IDS_PDF_INFOBAR_ALWAYS_USE_READER_BUTTON
+IDS_PERMISSION_ALLOW
+IDS_PERMISSION_DENY
+IDS_PHISHING_V3_EXPLANATION_PARAGRAPH
+IDS_PHISHING_V3_HEADING
+IDS_PHISHING_V3_PRIMARY_PARAGRAPH
+IDS_PHISHING_V3_PROCEED_PARAGRAPH
+IDS_PLATFORM_LABEL
+IDS_PLUGIN_CONFIRM_INSTALL_DIALOG_ACCEPT_BUTTON
+IDS_PLUGIN_CONFIRM_INSTALL_DIALOG_TITLE
+IDS_PLUGIN_NOT_SUPPORTED
+IDS_POLICY_ASSOCIATION_STATE_ACTIVE
+IDS_POLICY_ASSOCIATION_STATE_DEPROVISIONED
+IDS_POLICY_ASSOCIATION_STATE_UNMANAGED
+IDS_POLICY_DEFAULT_SEARCH_DISABLED
+IDS_POLICY_DEPRECATED
+IDS_POLICY_DM_STATUS_HTTP_STATUS_ERROR
+IDS_POLICY_DM_STATUS_REQUEST_FAILED
+IDS_POLICY_DM_STATUS_REQUEST_INVALID
+IDS_POLICY_DM_STATUS_RESPONSE_DECODING_ERROR
+IDS_POLICY_DM_STATUS_SERVICE_ACTIVATION_PENDING
+IDS_POLICY_DM_STATUS_SERVICE_DEPROVISIONED
+IDS_POLICY_DM_STATUS_SERVICE_DEVICE_ID_CONFLICT
+IDS_POLICY_DM_STATUS_SERVICE_DEVICE_NOT_FOUND
+IDS_POLICY_DM_STATUS_SERVICE_DOMAIN_MISMATCH
+IDS_POLICY_DM_STATUS_SERVICE_INVALID_SERIAL_NUMBER
+IDS_POLICY_DM_STATUS_SERVICE_MANAGEMENT_NOT_SUPPORTED
+IDS_POLICY_DM_STATUS_SERVICE_MANAGEMENT_TOKEN_INVALID
+IDS_POLICY_DM_STATUS_SERVICE_MISSING_LICENSES
+IDS_POLICY_DM_STATUS_SERVICE_POLICY_NOT_FOUND
+IDS_POLICY_DM_STATUS_SUCCESS
+IDS_POLICY_DM_STATUS_TEMPORARY_UNAVAILABLE
+IDS_POLICY_DM_STATUS_UNKNOWN_ERROR
+IDS_POLICY_FILTER_PLACEHOLDER
+IDS_POLICY_HEADER_LEVEL
+IDS_POLICY_HEADER_NAME
+IDS_POLICY_HEADER_SCOPE
+IDS_POLICY_HEADER_STATUS
+IDS_POLICY_HEADER_VALUE
+IDS_POLICY_HIDE_EXPANDED_VALUE
+IDS_POLICY_INVALID_BOOKMARK
+IDS_POLICY_INVALID_PROXY_MODE_ERROR
+IDS_POLICY_INVALID_SEARCH_URL_ERROR
+IDS_POLICY_LABEL_ASSET_ID
+IDS_POLICY_LABEL_CLIENT_ID
+IDS_POLICY_LABEL_DIRECTORY_API_ID
+IDS_POLICY_LABEL_DOMAIN
+IDS_POLICY_LABEL_LOCATION
+IDS_POLICY_LABEL_REFRESH_INTERVAL
+IDS_POLICY_LABEL_STATUS
+IDS_POLICY_LABEL_TIME_SINCE_LAST_REFRESH
+IDS_POLICY_LABEL_USERNAME
+IDS_POLICY_LEVEL_ERROR
+IDS_POLICY_LEVEL_MANDATORY
+IDS_POLICY_LEVEL_RECOMMENDED
+IDS_POLICY_LIST_ENTRY_ERROR
+IDS_POLICY_NEVER_FETCHED
+IDS_POLICY_NOT_SPECIFIED
+IDS_POLICY_NOT_SPECIFIED_ERROR
+IDS_POLICY_NO_POLICIES_SET
+IDS_POLICY_OK
+IDS_POLICY_OUT_OF_RANGE_ERROR
+IDS_POLICY_OVERRIDDEN
+IDS_POLICY_PROXY_BOTH_SPECIFIED_ERROR
+IDS_POLICY_PROXY_MODE_AUTO_DETECT_ERROR
+IDS_POLICY_PROXY_MODE_DISABLED_ERROR
+IDS_POLICY_PROXY_MODE_FIXED_SERVERS_ERROR
+IDS_POLICY_PROXY_MODE_PAC_URL_ERROR
+IDS_POLICY_PROXY_MODE_SYSTEM_ERROR
+IDS_POLICY_PROXY_NEITHER_SPECIFIED_ERROR
+IDS_POLICY_RELOAD_POLICIES
+IDS_POLICY_SCHEMA_VALIDATION_ERROR
+IDS_POLICY_SCOPE_DEVICE
+IDS_POLICY_SCOPE_USER
+IDS_POLICY_SHOW_EXPANDED_VALUE
+IDS_POLICY_SHOW_UNSET
+IDS_POLICY_STATUS
+IDS_POLICY_STATUS_DEVICE
+IDS_POLICY_STATUS_USER
+IDS_POLICY_STORE_STATUS_BAD_STATE
+IDS_POLICY_STORE_STATUS_LOAD_ERROR
+IDS_POLICY_STORE_STATUS_OK
+IDS_POLICY_STORE_STATUS_PARSE_ERROR
+IDS_POLICY_STORE_STATUS_SERIALIZE_ERROR
+IDS_POLICY_STORE_STATUS_STORE_ERROR
+IDS_POLICY_STORE_STATUS_UNKNOWN_ERROR
+IDS_POLICY_STORE_STATUS_VALIDATION_ERROR
+IDS_POLICY_SUBKEY_ERROR
+IDS_POLICY_TITLE
+IDS_POLICY_TYPE_ERROR
+IDS_POLICY_UNKNOWN
+IDS_POLICY_UNSET
+IDS_POLICY_VALIDATION_BAD_INITIAL_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_KEY_VERIFICATION_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_SIGNATURE
+IDS_POLICY_VALIDATION_BAD_TIMESTAMP
+IDS_POLICY_VALIDATION_BAD_USERNAME
+IDS_POLICY_VALIDATION_ERROR_CODE_PRESENT
+IDS_POLICY_VALIDATION_OK
+IDS_POLICY_VALIDATION_PAYLOAD_PARSE_ERROR
+IDS_POLICY_VALIDATION_POLICY_PARSE_ERROR
+IDS_POLICY_VALIDATION_UNKNOWN_ERROR
+IDS_POLICY_VALIDATION_WRONG_POLICY_TYPE
+IDS_POLICY_VALIDATION_WRONG_SETTINGS_ENTITY_ID
+IDS_POLICY_VALIDATION_WRONG_TOKEN
+IDS_PREFERENCES_CORRUPT_ERROR
+IDS_PREFERENCES_UNREADABLE_ERROR
+IDS_PRINT
+IDS_PRIVACY_POLICY_URL
+IDS_PRODUCT_NAME
+IDS_PROFILES_GUEST_PROFILE_NAME
+IDS_PROFILES_LOCAL_PROFILE_STATE
+IDS_PROFILE_TOO_NEW_ERROR
+IDS_PUSH_MESSAGES_BUBBLE_FRAGMENT
+IDS_PUSH_MESSAGES_BUBBLE_TEXT
+IDS_PUSH_MESSAGES_PERMISSION_QUESTION
+IDS_RECENT_TABS_MENU
+IDS_SAD_TAB_MESSAGE
+IDS_SAD_TAB_RELOAD_LABEL
+IDS_SAD_TAB_TITLE
+IDS_SAFEBROWSING_OVERRIDABLE_SAFETY_BUTTON
+IDS_SAFEBROWSING_V3_CLOSE_DETAILS_BUTTON
+IDS_SAFEBROWSING_V3_OPEN_DETAILS_BUTTON
+IDS_SAFEBROWSING_V3_TITLE
+IDS_SAFE_BROWSING_MALWARE_BACK_BUTTON
+IDS_SAFE_BROWSING_MALWARE_BACK_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_COLLAB_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_DIAGNOSTIC_PAGE
+IDS_SAFE_BROWSING_MALWARE_FEAR_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_LABEL
+IDS_SAFE_BROWSING_MALWARE_QUESTION_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_REPORTING_AGREE
+IDS_SAFE_BROWSING_MALWARE_TITLE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION1
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION1_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION2
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION2_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_DESCRIPTION3
+IDS_SAFE_BROWSING_MALWARE_V2_DETAILS
+IDS_SAFE_BROWSING_MALWARE_V2_DETAILS_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_HEADLINE
+IDS_SAFE_BROWSING_MALWARE_V2_HEADLINE_SUBRESOURCE
+IDS_SAFE_BROWSING_MALWARE_V2_LEARN_MORE
+IDS_SAFE_BROWSING_MALWARE_V2_PROCEED_LINK
+IDS_SAFE_BROWSING_MALWARE_V2_REPORTING_AGREE
+IDS_SAFE_BROWSING_MALWARE_V2_SEE_MORE
+IDS_SAFE_BROWSING_MALWARE_V2_TITLE
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION2
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION3
+IDS_SAFE_BROWSING_MULTI_MALWARE_DESCRIPTION_AGREE
+IDS_SAFE_BROWSING_MULTI_MALWARE_PROCEED_BUTTON
+IDS_SAFE_BROWSING_MULTI_PHISHING_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_THREAT_DESCRIPTION1
+IDS_SAFE_BROWSING_MULTI_THREAT_DESCRIPTION2
+IDS_SAFE_BROWSING_MULTI_THREAT_TITLE
+IDS_SAFE_BROWSING_PHISHING_BACK_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_COLLAB_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_FEAR_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_LABEL
+IDS_SAFE_BROWSING_PHISHING_QUESTION_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_REPORT_ERROR
+IDS_SAFE_BROWSING_PHISHING_TITLE
+IDS_SAFE_BROWSING_PHISHING_V2_DESCRIPTION1
+IDS_SAFE_BROWSING_PHISHING_V2_DESCRIPTION2
+IDS_SAFE_BROWSING_PHISHING_V2_HEADLINE
+IDS_SAFE_BROWSING_PHISHING_V2_REPORT_ERROR
+IDS_SAFE_BROWSING_PHISHING_V2_TITLE
+IDS_SAFE_BROWSING_PRIVACY_POLICY_PAGE
+IDS_SAFE_BROWSING_PRIVACY_POLICY_PAGE_V2
+IDS_SAFE_BROWSING_PRIVACY_POLICY_URL
+IDS_SAVE
+IDS_SEARCH_BOX_EMPTY_HINT
+IDS_SECURE_CONNECTION_EV
+IDS_SESSION_CRASHED_VIEW_MESSAGE
+IDS_SESSION_CRASHED_VIEW_RESTORE_BUTTON
+IDS_SETTINGS_SHOW_ADVANCED_SETTINGS
+IDS_SHORT_PRODUCT_NAME
+IDS_SHOW_HISTORY
+IDS_SIGNED_IN_WITH_SYNC_DISABLED
+IDS_SIGNED_IN_WITH_SYNC_SUPPRESSED
+IDS_SIGNIN_ERROR_BUBBLE_VIEW_TITLE
+IDS_SINGLE_PROFILE_DISPLAY_NAME
+IDS_SSL_CLOCK_ERROR
+IDS_SSL_CLOCK_ERROR_EXPLANATION
+IDS_SSL_NONOVERRIDABLE_HSTS
+IDS_SSL_NONOVERRIDABLE_INVALID
+IDS_SSL_NONOVERRIDABLE_MORE
+IDS_SSL_NONOVERRIDABLE_MORE_INVALID_SP3
+IDS_SSL_NONOVERRIDABLE_PINNED
+IDS_SSL_NONOVERRIDABLE_REVOKED
+IDS_SSL_OVERRIDABLE_PRIMARY_PARAGRAPH
+IDS_SSL_OVERRIDABLE_PROCEED_LINK_TEXT
+IDS_SSL_OVERRIDABLE_PROCEED_PARAGRAPH
+IDS_SSL_OVERRIDABLE_SAFETY_BUTTON
+IDS_SSL_OVERRIDABLE_TITLE
+IDS_SSL_RELOAD
+IDS_SSL_V2_CLOCK_AHEAD_HEADING
+IDS_SSL_V2_CLOCK_BEHIND_HEADING
+IDS_SSL_V2_CLOCK_PRIMARY_PARAGRAPH
+IDS_SSL_V2_CLOCK_TITLE
+IDS_SSL_V2_CLOCK_UPDATE_DATE_AND_TIME
+IDS_SSL_V2_CLOSE_DETAILS_BUTTON
+IDS_SSL_V2_HEADING
+IDS_SSL_V2_OPEN_DETAILS_BUTTON
+IDS_SSL_V2_PRIMARY_PARAGRAPH
+IDS_SSL_V2_TITLE
+IDS_STARS_PROMO_LABEL_IOS
+IDS_SUPERVISED_USER_AVATAR_LABEL
+IDS_SUPERVISED_USER_NEW_AVATAR_LABEL
+IDS_SYNC_ACCOUNT_DETAILS_NOT_ENTERED
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER
+IDS_SYNC_ACCOUNT_SYNCING_TO_USER_WITH_MANAGE_LINK
+IDS_SYNC_AUTHENTICATING_LABEL
+IDS_SYNC_BASIC_ENCRYPTION_DATA
+IDS_SYNC_CLEAR_USER_DATA
+IDS_SYNC_CONFIGURE_ENCRYPTION
+IDS_SYNC_DATATYPE_AUTOFILL
+IDS_SYNC_DATATYPE_BOOKMARKS
+IDS_SYNC_DATATYPE_PASSWORDS
+IDS_SYNC_DATATYPE_PREFERENCES
+IDS_SYNC_DATATYPE_TABS
+IDS_SYNC_DATATYPE_TYPED_URLS
+IDS_SYNC_EMPTY_PASSPHRASE_ERROR
+IDS_SYNC_ENABLE_SYNC_ON_ACCOUNT
+IDS_SYNC_ENCRYPTION_SECTION_TITLE
+IDS_SYNC_ENTER_GOOGLE_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY
+IDS_SYNC_ENTER_PASSPHRASE_BODY_WITH_DATE
+IDS_SYNC_ENTER_PASSPHRASE_TITLE
+IDS_SYNC_ERROR_BUBBLE_VIEW_TITLE
+IDS_SYNC_ERROR_SIGNING_IN
+IDS_SYNC_FULL_ENCRYPTION_DATA
+IDS_SYNC_INVALID_USER_CREDENTIALS
+IDS_SYNC_LOGIN_INFO_OUT_OF_DATE
+IDS_SYNC_LOGIN_SETTING_UP
+IDS_SYNC_MENU_PRE_SYNCED_LABEL
+IDS_SYNC_MENU_SYNCED_LABEL
+IDS_SYNC_NTP_PASSWORD_ENABLE
+IDS_SYNC_NTP_PASSWORD_PROMO
+IDS_SYNC_NTP_SETUP_IN_PROGRESS
+IDS_SYNC_OPTIONS_GROUP_NAME
+IDS_SYNC_OTHER_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_PASSPHRASE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_PASSPHRASE_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_PASSPHRASE_LABEL
+IDS_SYNC_PASSPHRASE_MISMATCH_ERROR
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT_POSTFIX
+IDS_SYNC_PASSPHRASE_MSG_EXPLICIT_PREFIX
+IDS_SYNC_PASSWORD_SYNC_ATTENTION
+IDS_SYNC_PROMO_NTP_BUBBLE_MESSAGE
+IDS_SYNC_PROMO_TAB_TITLE
+IDS_SYNC_RELOGIN_LINK_LABEL
+IDS_SYNC_SERVER_IS_UNREACHABLE
+IDS_SYNC_SERVICE_UNAVAILABLE
+IDS_SYNC_SETUP_ERROR
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_SIGN_IN_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_SIGN_IN_ERROR_WRENCH_MENU_ITEM
+IDS_SYNC_START_SYNC_BUTTON_LABEL
+IDS_SYNC_STATUS_UNRECOVERABLE_ERROR
+IDS_SYNC_STOP_AND_RESTART_SYNC
+IDS_SYNC_TIME_JUST_NOW
+IDS_SYNC_TIME_NEVER
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_ACCEPT
+IDS_SYNC_UNAVAILABLE_ERROR_BUBBLE_VIEW_MESSAGE
+IDS_SYNC_UNRECOVERABLE_ERROR_HELP_URL
+IDS_SYNC_UPGRADE_CLIENT
+IDS_SYSTEM_FLAGS_OWNER_ONLY
+IDS_TERMS_HTML
+IDS_TIME_DAYS
+IDS_TIME_DAYS_1ST
+IDS_TIME_ELAPSED_DAYS
+IDS_TIME_ELAPSED_HOURS
+IDS_TIME_ELAPSED_MINS
+IDS_TIME_ELAPSED_SECS
+IDS_TIME_HOURS
+IDS_TIME_HOURS_1ST
+IDS_TIME_HOURS_2ND
+IDS_TIME_LONG_MINS
+IDS_TIME_LONG_MINS_1ST
+IDS_TIME_LONG_MINS_2ND
+IDS_TIME_LONG_SECS
+IDS_TIME_LONG_SECS_2ND
+IDS_TIME_MINS
+IDS_TIME_REMAINING_DAYS
+IDS_TIME_REMAINING_HOURS
+IDS_TIME_REMAINING_LONG_MINS
+IDS_TIME_REMAINING_LONG_SECS
+IDS_TIME_REMAINING_MINS
+IDS_TIME_REMAINING_SECS
+IDS_TIME_SECS
+IDS_TOOLTIP_STAR
+IDS_TOUCH_EVENTS_DESCRIPTION
+IDS_TOUCH_EVENTS_NAME
+IDS_TRANSLATE_INFOBAR_ACCEPT
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE
+IDS_TRANSLATE_INFOBAR_AFTER_MESSAGE_AUTODETERMINED_SOURCE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_ALWAYS_TRANSLATE
+IDS_TRANSLATE_INFOBAR_BEFORE_MESSAGE
+IDS_TRANSLATE_INFOBAR_BEFORE_MESSAGE_IOS
+IDS_TRANSLATE_INFOBAR_DENY
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_CONNECT
+IDS_TRANSLATE_INFOBAR_ERROR_CANT_TRANSLATE
+IDS_TRANSLATE_INFOBAR_ERROR_SAME_LANGUAGE
+IDS_TRANSLATE_INFOBAR_NEVER_MESSAGE_IOS
+IDS_TRANSLATE_INFOBAR_NEVER_TRANSLATE
+IDS_TRANSLATE_INFOBAR_OPTIONS_ABOUT
+IDS_TRANSLATE_INFOBAR_OPTIONS_ALWAYS
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_LANG
+IDS_TRANSLATE_INFOBAR_OPTIONS_NEVER_TRANSLATE_SITE
+IDS_TRANSLATE_INFOBAR_OPTIONS_REPORT_ERROR
+IDS_TRANSLATE_INFOBAR_RETRY
+IDS_TRANSLATE_INFOBAR_REVERT
+IDS_TRANSLATE_INFOBAR_TRANSLATING_TO
+IDS_TRANSLATE_INFOBAR_UNKNOWN_PAGE_LANGUAGE
+IDS_TRANSLATE_INFOBAR_UNSUPPORTED_PAGE_LANGUAGE
+IDS_UPGRADE_AVAILABLE
+IDS_UPGRADE_AVAILABLE_BUTTON
+IDS_WEB_FONT_FAMILY
+IDS_WEB_FONT_SIZE
diff --git a/build/ios/mac_build.gypi b/build/ios/mac_build.gypi
new file mode 100644
index 0000000..4da21eb
--- /dev/null
+++ b/build/ios/mac_build.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Xcode throws an error if an iOS target depends on a Mac OS X target. So
+# any place a utility program needs to be built and run, an action is
+# used to run ninja as a script to work around this.
+# Example:
+# {
+#   'target_name': 'foo',
+#   'type': 'none',
+#   'variables': {
+#     # The name of a directory used for ninja. This cannot be shared with
+#     # another mac build.
+#     'ninja_output_dir': 'ninja-foo',
+#     # The full path to the location in which the ninja executable should be
+#     # placed. This cannot be shared with another mac build.
+#     'ninja_product_dir':
+#      '<(DEPTH)/xcodebuild/<(ninja_output_dir)/<(CONFIGURATION_NAME)',
+#     # The list of all the gyp files that contain the targets to run.
+#     're_run_targets': [
+#       'foo.gyp',
+#     ],
+#   },
+#   'includes': ['path_to/mac_build.gypi'],
+#   'actions': [
+#     {
+#       'action_name': 'compile foo',
+#       'inputs': [],
+#       'outputs': [],
+#       'action': [
+#         '<@(ninja_cmd)',
+#         # All the targets to build.
+#         'foo1',
+#         'foo2',
+#       ],
+#     },
+#   ],
+# }
+{
+  'variables': {
+    'variables': {
+     'parent_generator%': '<(GENERATOR)',
+    },
+    'parent_generator%': '<(parent_generator)',
+    # Common ninja command line flags.
+    'ninja_cmd': [
+      # Bounce through clean_env to clean up the environment so things
+      # set by the iOS build don't pollute the Mac build.
+      '<(DEPTH)/build/ios/clean_env.py',
+      # ninja must be found in the PATH.
+      'ADD_TO_PATH=<!(echo $PATH)',
+      'ninja',
+      '-C',
+      '<(ninja_product_dir)',
+    ],
+
+    # Common syntax to rerun gyp to generate the Mac projects.
+    're_run_gyp': [
+      'build/gyp_chromium',
+      '--depth=.',
+      # Don't use anything set for the iOS side of things.
+      '--ignore-environment',
+      # Generate for ninja
+      '--format=ninja',
+      # Generate files into xcodebuild/ninja
+      '-Goutput_dir=xcodebuild/<(ninja_output_dir)',
+      # nacl isn't in the iOS checkout, so make sure it's turned off
+      '-Ddisable_nacl=1',
+      # Pass through the Mac SDK version.
+      '-Dmac_sdk=<(mac_sdk)',
+      '-Dparent_generator=<(parent_generator)'
+    ],
+
+    # Rerun gyp for each of the projects needed. This is what actually
+    # generates the projects on disk.
+    're_run_gyp_execution':
+      '<!(cd <(DEPTH) && <@(re_run_gyp) <@(re_run_targets))',
+  },
+  # Since these are used to generate things needed by other targets, make
+  # them hard dependencies so they are always built first.
+  'hard_dependency': 1,
+}
diff --git a/build/isolate.gypi b/build/isolate.gypi
new file mode 100644
index 0000000..69af5b0
--- /dev/null
+++ b/build/isolate.gypi
@@ -0,0 +1,125 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to "build" .isolate files into a .isolated file.
+#
+# To use this, create a gyp target with the following form:
+# 'conditions': [
+#   ['test_isolation_mode != "noop"', {
+#     'targets': [
+#       {
+#         'target_name': 'foo_test_run',
+#         'type': 'none',
+#         'dependencies': [
+#           'foo_test',
+#         ],
+#         'includes': [
+#           '../build/isolate.gypi',
+#         ],
+#         'sources': [
+#           'foo_test.isolate',
+#         ],
+#       },
+#     ],
+#   }],
+# ],
+#
+# Note: foo_test.isolate is both included and listed as a source file. This is
+# an inherent property of the .isolate format: it permits defining GYP
+# variables but is a stricter format than GYP, so isolate.py can read it.
+#
+# The generated .isolated file will be:
+#   <(PRODUCT_DIR)/foo_test.isolated
+#
+# See http://dev.chromium.org/developers/testing/isolated-testing/for-swes
+# for more information.
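+#
+# Purely as an illustrative sketch (not from the original documentation, and
+# assuming the 'command'/'files' variables form of the .isolate format), a
+# minimal foo_test.isolate might look something like:
+# {
+#   'variables': {
+#     'command': [ '<(PRODUCT_DIR)/foo_test' ],
+#     'files': [
+#       '<(PRODUCT_DIR)/foo_test',
+#       'data/',
+#     ],
+#   },
+# }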
+
+{
+  'includes': [
+    '../build/util/version.gypi',
+  ],
+  'rules': [
+    {
+      'rule_name': 'isolate',
+      'extension': 'isolate',
+      'inputs': [
+        # Files that are known to be involved in this step.
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(DEPTH)/tools/swarming_client/isolate.py',
+        '<(DEPTH)/tools/swarming_client/run_isolated.py',
+      ],
+      'outputs': [],
+      'action': [
+        'python',
+        '<(DEPTH)/tools/isolate_driver.py',
+        '<(test_isolation_mode)',
+        '--isolated', '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+        '--isolate', '<(RULE_INPUT_PATH)',
+
+        # Variables should use the -V FOO=<(FOO) form so frequent values,
+        # like '0' or '1', aren't stripped out by GYP. Run 'isolate.py help' for
+        # more details.
+
+        # Path variables are used to replace file paths when loading a .isolate
+        # file
+        '--path-variable', 'DEPTH', '<(DEPTH)',
+        '--path-variable', 'PRODUCT_DIR', '<(PRODUCT_DIR) ',
+
+        # Extra variables are replaced on the 'command' entry and on paths in
+        # the .isolate file but are not considered relative paths.
+        '--extra-variable', 'version_full=<(version_full)',
+
+        # Note: This list must match DefaultConfigVariables()
+        # in build/android/pylib/utils/isolator.py
+        '--config-variable', 'CONFIGURATION_NAME=<(CONFIGURATION_NAME)',
+        '--config-variable', 'OS=<(OS)',
+        '--config-variable', 'asan=<(asan)',
+        '--config-variable', 'branding=<(branding)',
+        '--config-variable', 'chromeos=<(chromeos)',
+        '--config-variable', 'component=<(component)',
+        '--config-variable', 'disable_nacl=<(disable_nacl)',
+        '--config-variable', 'enable_pepper_cdms=<(enable_pepper_cdms)',
+        '--config-variable', 'enable_plugins=<(enable_plugins)',
+        '--config-variable', 'fastbuild=<(fastbuild)',
+        '--config-variable', 'icu_use_data_file_flag=<(icu_use_data_file_flag)',
+        # TODO(kbr): move this to chrome_tests.gypi:gles2_conform_tests_run
+        # once support for user-defined config variables is added.
+        '--config-variable',
+          'internal_gles2_conform_tests=<(internal_gles2_conform_tests)',
+        '--config-variable', 'kasko=<(kasko)',
+        '--config-variable', 'libpeer_target_type=<(libpeer_target_type)',
+        '--config-variable', 'lsan=<(lsan)',
+        '--config-variable', 'msan=<(msan)',
+        '--config-variable', 'target_arch=<(target_arch)',
+        '--config-variable', 'tsan=<(tsan)',
+        '--config-variable', 'use_custom_libcxx=<(use_custom_libcxx)',
+        '--config-variable', 'use_instrumented_libraries=<(use_instrumented_libraries)',
+        '--config-variable',
+        'use_prebuilt_instrumented_libraries=<(use_prebuilt_instrumented_libraries)',
+        '--config-variable', 'use_openssl=<(use_openssl)',
+        '--config-variable', 'use_ozone=<(use_ozone)',
+        '--config-variable', 'use_x11=<(use_x11)',
+        '--config-variable', 'v8_use_external_startup_data=<(v8_use_external_startup_data)',
+      ],
+      'conditions': [
+        # Note: When gyp merges lists, it appends them to the old value.
+        ['OS=="mac"', {
+          'action': [
+            '--extra-variable', 'mac_product_name=<(mac_product_name)',
+          ],
+        }],
+        ["test_isolation_mode == 'prepare'", {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated.gen.json',
+          ],
+        }, {
+          'outputs': [
+            '<(PRODUCT_DIR)/<(RULE_INPUT_ROOT).isolated',
+          ],
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/jar_file_jni_generator.gypi b/build/jar_file_jni_generator.gypi
new file mode 100644
index 0000000..3d95b28
--- /dev/null
+++ b/build/jar_file_jni_generator.gypi
@@ -0,0 +1,67 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for system Java files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'android_jar_jni_headers',
+#   'type': 'none',
+#   'variables': {
+#     'jni_gen_package': 'chrome',
+#     'input_java_class': 'java/io/InputStream.class',
+#   },
+#   'includes': [ '../build/jar_file_jni_generator.gypi' ],
+# },
+#
+# Optional variables:
+#  input_jar_file - The input jar file; if omitted, android_sdk_jar will be used.
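+#
+# Purely as an illustrative sketch (the target name, class, and jar path below
+# are hypothetical), overriding the optional input_jar_file would look like:
+# {
+#   'target_name': 'support_jar_jni_headers',
+#   'type': 'none',
+#   'variables': {
+#     'jni_gen_package': 'chrome',
+#     'input_java_class': 'org/example/SomeClass.class',
+#     'input_jar_file': '<(DEPTH)/third_party/example/some_library.jar',
+#   },
+#   'includes': [ '../build/jar_file_jni_generator.gypi' ],
+# },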
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '--native_exports_optional',
+  },
+  'actions': [
+    {
+      'action_name': 'generate_jni_headers_from_jar_file',
+      'inputs': [
+        '<(jni_generator)',
+        '<(input_jar_file)',
+        '<(android_sdk_jar)',
+      ],
+      'variables': {
+        'java_class_name': '<!(basename <(input_java_class)|sed "s/\.class//")',
+        'input_jar_file%': '<(android_sdk_jar)'
+      },
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(java_class_name)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '-j',
+        '<(input_jar_file)',
+        '--input_file',
+        '<(input_java_class)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '--optimize_generation',
+        '<(optimize_jni_generation)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(input_jar_file)/<(input_java_class)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/java.gypi b/build/java.gypi
new file mode 100644
index 0000000..73c550d
--- /dev/null
+++ b/build/java.gypi
@@ -0,0 +1,368 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/package/root',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  java_in_dir - The top-level java directory. The src should be in
+#    <java_in_dir>/src.
+# Optional/automatic variables:
+#  add_to_dependents_classpaths - Set to 0 if the resulting jar file should not
+#    be added to its dependents' classpaths.
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  javac_includes - A list of specific files to include. This is by default
+#    empty, which leads to inclusion of all specified files. Entries may
+#    include wildcards, and '**/' is supported for recursive path wildcards,
+#    e.g.: '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
+#  has_java_resources - Set to 1 if the java target contains an
+#    Android-compatible resources folder named res.  If 1, R_package and
+#    R_package_relpath must also be set.
+#  R_package - The java package in which the R class (which maps resources to
+#    integer IDs) should be generated, e.g. org.chromium.content.
+#  R_package_relpath - Same as R_package, but replace each '.' with '/'.
+#  res_extra_dirs - A list of extra directories containing Android resources.
+#    These directories may be generated at build time.
+#  res_extra_files - A list of the files in res_extra_dirs.
+#  never_lint - Set to 1 to not run lint on this target.
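+#
+# Purely as an illustrative sketch (package and path names are hypothetical),
+# a target using some of the optional variables above might look like:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'java_in_dir': 'path/to/package/root',
+#     'additional_src_dirs': [ 'path/to/extra/src' ],
+#     'has_java_resources': 1,
+#     'R_package': 'org.example.mypackage',
+#     'R_package_relpath': 'org/example/mypackage',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }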
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+  ],
+  'variables': {
+    'add_to_dependents_classpaths%': 1,
+    'android_jar': '<(android_sdk)/android.jar',
+    'input_jars_paths': [ '<(android_jar)' ],
+    'additional_src_dirs': [],
+    'javac_includes': [],
+    'jar_name': '<(_target_name).jar',
+    'jar_dir': '<(PRODUCT_DIR)/lib.java',
+    'jar_path': '<(intermediate_dir)/<(jar_name)',
+    'jar_final_path': '<(jar_dir)/<(jar_name)',
+    'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ],
+    'instr_stamp': '<(intermediate_dir)/instr.stamp',
+    'additional_input_paths': [],
+    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+    'generated_src_dirs': ['>@(generated_R_dirs)'],
+    'generated_R_dirs': [],
+    'has_java_resources%': 0,
+    'res_extra_dirs': [],
+    'res_extra_files': [],
+    'res_v14_skip%': 0,
+    'resource_input_paths': ['>@(res_extra_files)'],
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'compile_stamp': '<(intermediate_dir)/compile.stamp',
+    'lint_stamp': '<(intermediate_dir)/lint.stamp',
+    'lint_result': '<(intermediate_dir)/lint_result.xml',
+    'lint_config': '<(intermediate_dir)/lint_config.xml',
+    'never_lint%': 0,
+    'findbugs_stamp': '<(intermediate_dir)/findbugs.stamp',
+    'run_findbugs%': 0,
+    'java_in_dir_suffix%': '/src',
+    'proguard_config%': '',
+    'proguard_preprocess%': '0',
+    'enable_errorprone%': '0',
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+    'variables': {
+      'variables': {
+        'proguard_preprocess%': 0,
+        'emma_never_instrument%': 0,
+      },
+      'conditions': [
+        ['proguard_preprocess == 1', {
+          'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+        }, {
+          'javac_jar_path': '<(jar_path)'
+        }],
+        ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', {
+          'emma_instrument': 1,
+        }, {
+          'emma_instrument': 0,
+        }],
+      ],
+    },
+    'emma_instrument': '<(emma_instrument)',
+    'javac_jar_path': '<(javac_jar_path)',
+  },
+  'conditions': [
+    ['add_to_dependents_classpaths == 1', {
+      # This all_dependent_settings is used for java targets only. This will add the
+      # jar path to the classpath of dependent java targets.
+      'all_dependent_settings': {
+        'variables': {
+          'input_jars_paths': ['<(jar_final_path)'],
+          'library_dexed_jars_paths': ['<(dex_path)'],
+        },
+      },
+    }],
+    ['has_java_resources == 1', {
+      'variables': {
+        'resource_dir': '<(java_in_dir)/res',
+        'res_input_dirs': ['<(resource_dir)', '<@(res_extra_dirs)'],
+        'resource_input_paths': ['<!@(find <(resource_dir) -type f)'],
+
+        'R_dir': '<(intermediate_dir)/java_R',
+        'R_text_file': '<(R_dir)/R.txt',
+
+        'generated_src_dirs': ['<(R_dir)'],
+        'additional_input_paths': ['<(resource_zip_path)', ],
+
+        'dependencies_res_zip_paths': [],
+        'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+      },
+      'all_dependent_settings': {
+        'variables': {
+          # Dependent libraries include this target's R.java file via
+          # generated_R_dirs.
+          'generated_R_dirs': ['<(R_dir)'],
+
+          # Dependent libraries and apks include this target's resources via
+          # dependencies_res_zip_paths.
+          'additional_input_paths': ['<(resource_zip_path)'],
+          'dependencies_res_zip_paths': ['<(resource_zip_path)'],
+
+          # additional_res_packages and additional_R_text_files are used to
+          # create this package's R.java files when building the APK.
+          'additional_res_packages': ['<(R_package)'],
+          'additional_R_text_files': ['<(R_text_file)'],
+        },
+      },
+      'actions': [
+        # Generate R.java and crunch image resources.
+        {
+          'action_name': 'process_resources',
+          'message': 'processing resources for <(_target_name)',
+          'variables': {
+            'android_manifest': '<(DEPTH)/build/android/AndroidManifest.xml',
+            # Write the inputs list to a file, so that its mtime is updated when
+            # the list of inputs changes.
+            'inputs_list_file': '>|(java_resources.<(_target_name).gypcmd >@(resource_input_paths))',
+            'process_resources_options': [],
+            'conditions': [
+              ['res_v14_skip == 1', {
+                'process_resources_options': ['--v14-skip']
+              }],
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/process_resources.py',
+            '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
+            '>@(resource_input_paths)',
+            '>@(dependencies_res_zip_paths)',
+            '>(inputs_list_file)',
+          ],
+          'outputs': [
+            '<(resource_zip_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+            '--android-sdk', '<(android_sdk)',
+            '--aapt-path', '<(android_aapt_path)',
+            '--non-constant-id',
+
+            '--android-manifest', '<(android_manifest)',
+            '--custom-package', '<(R_package)',
+
+            '--dependencies-res-zips', '>(dependencies_res_zip_paths)',
+            '--resource-dirs', '<(res_input_dirs)',
+
+            '--R-dir', '<(R_dir)',
+            '--resource-zip-out', '<(resource_zip_path)',
+
+            '<@(process_resources_options)',
+          ],
+        },
+      ],
+    }],
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(javac_jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '--input-path=<(javac_jar_path)',
+            '--output-path=<(jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=<(android_sdk_jar) >(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+    ['run_findbugs == 1', {
+      'actions': [
+        {
+          'action_name': 'findbugs_<(_target_name)',
+          'message': 'Running findbugs on <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/findbugs_diff.py',
+            '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+            '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+            '>@(input_jars_paths)',
+            '<(jar_final_path)',
+            '<(compile_stamp)',
+          ],
+          'outputs': [
+            '<(findbugs_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/findbugs_diff.py',
+            '--auxclasspath-gyp', '>(input_jars_paths)',
+            '--stamp', '<(findbugs_stamp)',
+            '<(jar_final_path)',
+          ],
+        },
+      ],
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:chromium_errorprone',
+      ],
+    }],
+  ],
+  'actions': [
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling <(_target_name) java sources',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java")'],
+        'conditions': [
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '>@(additional_input_paths)',
+        '<@(extra_inputs)',
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--bootclasspath=<(android_sdk_jar)',
+        '--classpath=>(input_jars_paths)',
+        '--src-gendirs=>(generated_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '>@(java_sources)',
+        '<@(extra_args)',
+      ]
+    },
+    {
+      'action_name': 'instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(jar_path)',
+        'output_path': '<(jar_final_path)',
+        'stamp_path': '<(instr_stamp)',
+        'instr_type': 'jar',
+      },
+      'outputs': [
+        '<(jar_final_path)',
+      ],
+      'inputs': [
+        '<(jar_path)',
+      ],
+      'includes': [ 'android/instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)<(java_in_dir_suffix)',
+          '>@(additional_src_dirs)',
+        ],
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+        'lint_jar_path': '<(jar_final_path)',
+      },
+      'inputs': [
+        '<(jar_final_path)',
+        '<(compile_stamp)',
+      ],
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'jar_toc_<(_target_name)',
+      'message': 'Creating <(_target_name) jar.TOC',
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/util/md5_check.py',
+        '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '<(jar_final_path)',
+      ],
+      'outputs': [
+        '<(jar_final_path).TOC',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
+        '--jar-path=<(jar_final_path)',
+        '--toc-path=<(jar_final_path).TOC',
+      ]
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'conditions': [
+          ['emma_instrument != 0', {
+            'dex_no_locals': 1,
+          }],
+        ],
+        'dex_input_paths': [ '<(jar_final_path)' ],
+        'output_path': '<(dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/java_aidl.gypi b/build/java_aidl.gypi
new file mode 100644
index 0000000..dda2894
--- /dev/null
+++ b/build/java_aidl.gypi
@@ -0,0 +1,79 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java aidl files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'aidl_aidl-file-name',
+#   'type': 'none',
+#   'variables': {
+#     'aidl_interface_file': '<interface-path>/<interface-file>.aidl',
+#     'aidl_import_include': '<(DEPTH)/<path-to-src-dir>',
+#   },
+#   'sources': [
+#     '<input-path1>/<input-file1>.aidl',
+#     '<input-path2>/<input-file2>.aidl',
+#     ...
+#   ],
+#   'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }
+#
+#
+# The generated java files will be:
+#   <(PRODUCT_DIR)/lib.java/<input-file1>.java
+#   <(PRODUCT_DIR)/lib.java/<input-file2>.java
+#   ...
+#
+# Optional variables:
+#  aidl_import_include - This should be an absolute path to your java src folder
+#    that contains the classes that are imported by your aidl files.
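+#
+# Purely as an illustrative sketch (all names and paths are hypothetical), a
+# target whose .aidl files import classes from another java source tree might
+# set:
+# {
+#   'target_name': 'aidl_my_interface',
+#   'type': 'none',
+#   'variables': {
+#     'aidl_interface_file': 'java/src/org/example/common.aidl',
+#     'aidl_import_include': '<(DEPTH)/other/package/java/src',
+#   },
+#   'sources': [
+#     'java/src/org/example/IMyService.aidl',
+#   ],
+#   'includes': ['<path-to-this-file>/java_aidl.gypi'],
+# }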
+#
+# TODO(cjhopman): dependents need to rebuild when this target's inputs have changed.
+
+{
+  'variables': {
+    'aidl_path%': '<(android_sdk_tools)/aidl',
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)/aidl',
+    'aidl_import_include%': '',
+    'additional_aidl_arguments': [],
+    'additional_aidl_input_paths': [],
+  },
+  'direct_dependent_settings': {
+    'variables': {
+      'generated_src_dirs': ['<(intermediate_dir)/'],
+    },
+  },
+  'conditions': [
+    ['aidl_import_include != ""', {
+      'variables': {
+        'additional_aidl_arguments': [ '-I<(aidl_import_include)' ],
+        'additional_aidl_input_paths': [ '<!@(find <(aidl_import_include) -name "*.java" | sort)' ],
+      }
+    }],
+  ],
+  'rules': [
+    {
+      'rule_name': 'compile_aidl',
+      'extension': 'aidl',
+      'inputs': [
+        '<(android_sdk)/framework.aidl',
+        '<(aidl_interface_file)',
+        '<@(additional_aidl_input_paths)',
+      ],
+      'outputs': [
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+      'action': [
+        '<(aidl_path)',
+        '-p<(android_sdk)/framework.aidl',
+        '-p<(aidl_interface_file)',
+        '<@(additional_aidl_arguments)',
+        '<(RULE_INPUT_PATH)',
+        '<(intermediate_dir)/<(RULE_INPUT_ROOT).java',
+      ],
+    },
+  ],
+}
diff --git a/build/java_apk.gypi b/build/java_apk.gypi
new file mode 100644
index 0000000..ff837c3
--- /dev/null
+++ b/build/java_apk.gypi
@@ -0,0 +1,1063 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Android APKs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'variables': {
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'path/to/package/root/res',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  apk_name - The final apk will be named <apk_name>.apk
+#  java_in_dir - The top-level java directory. The src should be in
+#    <(java_in_dir)/src.
+# Optional/automatic variables:
+#  additional_input_paths - These paths will be included in the 'inputs' list to
+#    ensure that this target is rebuilt when one of these paths changes.
+#  additional_res_packages - Package names of R.java files generated in addition
+#    to the default package name defined in AndroidManifest.xml.
+#  additional_src_dirs - Additional directories with .java files to be compiled
+#    and included in the output of this target.
+#  additional_bundled_libs - Additional libraries that will be stripped and
+#    bundled in the apk.
+#  asset_location - The directory where assets are located.
+#  create_abi_split - Whether to create abi-based splits. Splits
+#    are supported only for minSdkVersion >= 21.
+#  create_density_splits - Whether to create density-based apk splits.
+#  language_splits - List of languages to create apk splits for.
+#  generated_src_dirs - Same as additional_src_dirs except used for .java files
+#    that are generated at build time. This should be set automatically by a
+#    target's dependencies. The .java files in these directories are not
+#    included in the 'inputs' list (unlike additional_src_dirs).
+#  input_jars_paths - The path to jars to be included in the classpath. This
+#    should be filled automatically by depending on the appropriate targets.
+#  is_test_apk - Set to 1 if building a test apk.  This prevents resources from
+#    dependencies from being re-included.
+#  native_lib_target - The target_name of the target which generates the final
+#    shared library to be included in this apk. A stripped copy of the
+#    library will be included in the apk.
+#  resource_dir - The directory for resources.
+#  shared_resources - Make a resource package that can be loaded by a different
+#    application at runtime to access the package's resources.
+#  R_package - A custom Java package to generate the resource file R.java in.
+#    By default, the package given in AndroidManifest.xml will be used.
+#  include_all_resources - Set to 1 to include all resource IDs in all generated
+#    R.java files.
+#  use_chromium_linker - Enable the content dynamic linker that allows sharing the
+#    RELRO section of the native libraries between the different processes.
+#  load_library_from_zip - When using the dynamic linker, load the library
+#    directly out of the zip file.
+#  use_relocation_packer - Enable relocation packing. Relies on the chromium
+#    linker, so use_chromium_linker must also be enabled.
+#  enable_chromium_linker_tests - Enable the content dynamic linker test support
+#    code. This allows a test APK to inject a Linker.TestRunner instance at
+#    runtime. Should only be used by the chromium_linker_test_apk target!!
+#  never_lint - Set to 1 to not run lint on this target.
+#  java_in_dir_suffix - To override the /src suffix on java_in_dir.
+#  app_manifest_version_name - Set the app's 'human readable' version number.
+#  app_manifest_version_code - Set the app's version number.
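+#
+# Purely as an illustrative sketch (all names and paths are hypothetical), an
+# apk target that bundles a native library and a custom R package might look
+# like:
+# {
+#   'target_name': 'my_package_apk',
+#   'type': 'none',
+#   'dependencies': [ 'libmy_native_code' ],
+#   'variables': {
+#     'apk_name': 'MyPackage',
+#     'java_in_dir': 'path/to/package/root',
+#     'resource_dir': 'path/to/package/root/res',
+#     'native_lib_target': 'libmy_native_code',
+#     'R_package': 'org.example.mypackage',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }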
+{
+  'variables': {
+    'tested_apk_obfuscated_jar_path%': '/',
+    'tested_apk_dex_path%': '/',
+    'additional_input_paths': [],
+    'create_density_splits%': 0,
+    'language_splits': [],
+    'input_jars_paths': [],
+    'library_dexed_jars_paths': [],
+    'additional_src_dirs': [],
+    'generated_src_dirs': [],
+    'app_manifest_version_name%': '<(android_app_version_name)',
+    'app_manifest_version_code%': '<(android_app_version_code)',
+    # aapt generates this proguard.txt.
+    'generated_proguard_file': '<(intermediate_dir)/proguard.txt',
+    'proguard_enabled%': 'false',
+    'proguard_flags_paths': ['<(generated_proguard_file)'],
+    'jar_name': 'chromium_apk_<(_target_name).jar',
+    'resource_dir%':'<(DEPTH)/build/android/ant/empty/res',
+    'R_package%':'',
+    'include_all_resources%': 0,
+    'additional_R_text_files': [],
+    'dependencies_res_zip_paths': [],
+    'additional_res_packages': [],
+    'additional_bundled_libs%': [],
+    'is_test_apk%': 0,
+    # Allow icu data, v8 snapshots, and pak files to be loaded directly from the .apk.
+    # Note: These are actually suffix matches, not necessarily extensions.
+    'extensions_to_not_compress%': '.dat,.bin,.pak',
+    'resource_input_paths': [],
+    'intermediate_dir': '<(PRODUCT_DIR)/<(_target_name)',
+    'asset_location%': '<(intermediate_dir)/assets',
+    'codegen_stamp': '<(intermediate_dir)/codegen.stamp',
+    'package_input_paths': [],
+    'ordered_libraries_file': '<(intermediate_dir)/native_libraries.json',
+    'additional_ordered_libraries_file': '<(intermediate_dir)/additional_native_libraries.json',
+    'native_libraries_template': '<(DEPTH)/base/android/java/templates/NativeLibraries.template',
+    'native_libraries_java_dir': '<(intermediate_dir)/native_libraries_java/',
+    'native_libraries_java_file': '<(native_libraries_java_dir)/NativeLibraries.java',
+    'native_libraries_java_stamp': '<(intermediate_dir)/native_libraries_java.stamp',
+    'native_libraries_template_data_dir': '<(intermediate_dir)/native_libraries/',
+    'native_libraries_template_data_file': '<(native_libraries_template_data_dir)/native_libraries_array.h',
+    'native_libraries_template_version_file': '<(native_libraries_template_data_dir)/native_libraries_version.h',
+    'compile_stamp': '<(intermediate_dir)/compile.stamp',
+    'lint_stamp': '<(intermediate_dir)/lint.stamp',
+    'lint_result': '<(intermediate_dir)/lint_result.xml',
+    'lint_config': '<(intermediate_dir)/lint_config.xml',
+    'never_lint%': 0,
+    'findbugs_stamp': '<(intermediate_dir)/findbugs.stamp',
+    'run_findbugs%': 0,
+    'java_in_dir_suffix%': '/src',
+    'instr_stamp': '<(intermediate_dir)/instr.stamp',
+    'jar_stamp': '<(intermediate_dir)/jar.stamp',
+    'obfuscate_stamp': '<(intermediate_dir)/obfuscate.stamp',
+    'pack_relocations_stamp': '<(intermediate_dir)/pack_relocations.stamp',
+    'strip_stamp': '<(intermediate_dir)/strip.stamp',
+    'stripped_libraries_dir': '<(intermediate_dir)/stripped_libraries',
+    'strip_additional_stamp': '<(intermediate_dir)/strip_additional.stamp',
+    'version_stamp': '<(intermediate_dir)/version.stamp',
+    'javac_includes': [],
+    'jar_excluded_classes': [],
+    'javac_jar_path': '<(intermediate_dir)/<(_target_name).javac.jar',
+    'jar_path': '<(PRODUCT_DIR)/lib.java/<(jar_name)',
+    'obfuscated_jar_path': '<(intermediate_dir)/obfuscated.jar',
+    'test_jar_path': '<(PRODUCT_DIR)/test.lib.java/<(apk_name).jar',
+    'dex_path': '<(intermediate_dir)/classes.dex',
+    'emma_device_jar': '<(android_sdk_root)/tools/lib/emma_device.jar',
+    'android_manifest_path%': '<(java_in_dir)/AndroidManifest.xml',
+    'split_android_manifest_path': '<(intermediate_dir)/split-manifests/<(android_app_abi)/AndroidManifest.xml',
+    'push_stamp': '<(intermediate_dir)/push.stamp',
+    'link_stamp': '<(intermediate_dir)/link.stamp',
+    'resource_zip_path': '<(intermediate_dir)/<(_target_name).resources.zip',
+    'shared_resources%': 0,
+    'final_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name).apk',
+    'final_apk_path_no_extension%': '<(PRODUCT_DIR)/apks/<(apk_name)',
+    'final_abi_split_apk_path%': '<(PRODUCT_DIR)/apks/<(apk_name)-abi-<(android_app_abi).apk',
+    'incomplete_apk_path': '<(intermediate_dir)/<(apk_name)-incomplete.apk',
+    'apk_install_record': '<(intermediate_dir)/apk_install.record.stamp',
+    'device_intermediate_dir': '/data/data/org.chromium.gyp_managed_install/<(_target_name)/<(CONFIGURATION_NAME)',
+    'symlink_script_host_path': '<(intermediate_dir)/create_symlinks.sh',
+    'symlink_script_device_path': '<(device_intermediate_dir)/create_symlinks.sh',
+    'create_standalone_apk%': 1,
+    'res_v14_skip%': 0,
+    'variables': {
+      'variables': {
+        'native_lib_target%': '',
+        'native_lib_version_name%': '',
+        'use_chromium_linker%' : 0,
+        'use_relocation_packer%' : 0,
+        'enable_chromium_linker_tests%': 0,
+        'is_test_apk%': 0,
+        'unsigned_apk_path': '<(intermediate_dir)/<(apk_name)-unsigned.apk',
+        'unsigned_abi_split_apk_path': '<(intermediate_dir)/<(apk_name)-abi-<(android_app_abi)-unsigned.apk',
+        'create_abi_split%': 0,
+      },
+      'unsigned_apk_path': '<(unsigned_apk_path)',
+      'unsigned_abi_split_apk_path': '<(unsigned_abi_split_apk_path)',
+      'create_abi_split%': '<(create_abi_split)',
+      'conditions': [
+        ['gyp_managed_install == 1 and native_lib_target != ""', {
+          'conditions': [
+            ['create_abi_split == 0', {
+              'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-standalone-unsigned.apk',
+            }, {
+              'unsigned_standalone_apk_path': '<(intermediate_dir)/<(apk_name)-abi-<(android_app_abi)-standalone-unsigned.apk',
+            }],
+          ],
+        }, {
+          'unsigned_standalone_apk_path': '<(unsigned_apk_path)',
+        }],
+        ['gyp_managed_install == 1', {
+          'apk_package_native_libs_dir': '<(intermediate_dir)/libs.managed',
+        }, {
+          'apk_package_native_libs_dir': '<(intermediate_dir)/libs',
+        }],
+        ['is_test_apk == 0 and emma_coverage != 0', {
+          'emma_instrument%': 1,
+        },{
+          'emma_instrument%': 0,
+        }],
+        # When using abi splits, the abi split is modified by
+        # gyp_managed_install rather than the main .apk
+        ['create_abi_split == 1', {
+          'managed_input_apk_path': '<(unsigned_abi_split_apk_path)',
+        }, {
+          'managed_input_apk_path': '<(unsigned_apk_path)',
+        }],
+      ],
+    },
+    'native_lib_target%': '',
+    'native_lib_version_name%': '',
+    'use_chromium_linker%' : 0,
+    'load_library_from_zip%' : 0,
+    'use_relocation_packer%' : 0,
+    'enable_chromium_linker_tests%': 0,
+    'emma_instrument%': '<(emma_instrument)',
+    'apk_package_native_libs_dir': '<(apk_package_native_libs_dir)',
+    'unsigned_standalone_apk_path': '<(unsigned_standalone_apk_path)',
+    'unsigned_apk_path': '<(unsigned_apk_path)',
+    'unsigned_abi_split_apk_path': '<(unsigned_abi_split_apk_path)',
+    'create_abi_split%': '<(create_abi_split)',
+    'managed_input_apk_path': '<(managed_input_apk_path)',
+    'libchromium_android_linker': 'libchromium_android_linker.>(android_product_extension)',
+    'extra_native_libs': [],
+    'native_lib_placeholder_stamp': '<(apk_package_native_libs_dir)/<(android_app_abi)/native_lib_placeholder.stamp',
+    'native_lib_placeholders': [],
+    'main_apk_name': '<(apk_name)',
+    'enable_errorprone%': '0',
+    'errorprone_exe_path': '<(PRODUCT_DIR)/bin.java/chromium_errorprone',
+  },
+  # Pass the jar path to the apk's "fake" jar target.  This would be better as
+  # direct_dependent_settings, but a variable set by direct_dependent_settings
+  # cannot be lifted in a dependent to all_dependent_settings.
+  'all_dependent_settings': {
+    'conditions': [
+      ['proguard_enabled == "true"', {
+        'variables': {
+          'proguard_enabled': 'true',
+        }
+      }],
+    ],
+    'variables': {
+      'apk_output_jar_path': '<(jar_path)',
+      'tested_apk_obfuscated_jar_path': '<(obfuscated_jar_path)',
+      'tested_apk_dex_path': '<(dex_path)',
+    },
+  },
+  'conditions': [
+    ['resource_dir!=""', {
+      'variables': {
+        'resource_input_paths': [ '<!@(find <(resource_dir) -name "*")' ]
+      },
+    }],
+    ['R_package != ""', {
+      'variables': {
+        # We generate R.java in package R_package (in addition to the package
+        # listed in the AndroidManifest.xml, which is unavoidable).
+        'additional_res_packages': ['<(R_package)'],
+        'additional_R_text_files': ['<(intermediate_dir)/R.txt'],
+      },
+    }],
+    ['native_lib_target != "" and component == "shared_library"', {
+      'dependencies': [
+        '<(DEPTH)/build/android/setup.gyp:copy_system_libraries',
+      ],
+    }],
+    ['use_chromium_linker == 1', {
+      'dependencies': [
+        '<(DEPTH)/base/base.gyp:chromium_android_linker',
+      ],
+    }],
+    ['enable_errorprone == 1', {
+      'dependencies': [
+        '<(DEPTH)/third_party/errorprone/errorprone.gyp:chromium_errorprone',
+      ],
+    }],
+    ['native_lib_target != ""', {
+      'variables': {
+        'conditions': [
+          ['use_chromium_linker == 1', {
+            'variables': {
+              'chromium_linker_path': [
+                '<(SHARED_LIB_DIR)/<(libchromium_android_linker)',
+              ],
+            }
+          }, {
+            'variables': {
+              'chromium_linker_path': [],
+            },
+          }],
+        ],
+        'generated_src_dirs': [ '<(native_libraries_java_dir)' ],
+        'native_libs_paths': [
+          '<(SHARED_LIB_DIR)/<(native_lib_target).>(android_product_extension)',
+          '<@(chromium_linker_path)'
+        ],
+        'package_input_paths': [
+          '<(apk_package_native_libs_dir)/<(android_app_abi)/gdbserver',
+        ],
+      },
+      'copies': [
+        {
+          # gdbserver is always copied into the APK's native libs dir. The ant
+          # build scripts (apkbuilder task) will only include it in a debug
+          # build.
+          'destination': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+          'files': [
+            '<(android_gdbserver)',
+          ],
+        },
+      ],
+      'actions': [
+        {
+          'variables': {
+            'input_libraries': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'native_libraries_<(_target_name)',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1', {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_gcc_preprocess_defines': [],
+                },
+              }],
+              ['load_library_from_zip == 1', {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_LIBRARY_IN_ZIP_FILE',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_load_from_zip_file_preprocess_defines': [],
+                },
+              }],
+              ['enable_chromium_linker_tests == 1', {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [
+                    '--defines', 'ENABLE_CHROMIUM_LINKER_TESTS',
+                  ],
+                }
+              }, {
+                'variables': {
+                  'linker_tests_gcc_preprocess_defines': [],
+                },
+              }],
+            ],
+            'gcc_preprocess_defines': [
+              '<@(linker_load_from_zip_file_preprocess_defines)',
+              '<@(linker_gcc_preprocess_defines)',
+              '<@(linker_tests_gcc_preprocess_defines)',
+            ],
+          },
+          'message': 'Creating NativeLibraries.java for <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '<(ordered_libraries_file)',
+            '<(native_libraries_template)',
+          ],
+          'outputs': [
+            '<(native_libraries_java_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/gcc_preprocess.py',
+            '--include-path=',
+            '--output=<(native_libraries_java_file)',
+            '--template=<(native_libraries_template)',
+            '--stamp=<(native_libraries_java_stamp)',
+            '--defines', 'NATIVE_LIBRARIES_LIST=@FileArg(<(ordered_libraries_file):java_libraries_list)',
+            '--defines', 'NATIVE_LIBRARIES_VERSION_NUMBER="<(native_lib_version_name)"',
+            '<@(gcc_preprocess_defines)',
+          ],
+        },
+        {
+          'action_name': 'strip_native_libraries',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'input_paths': [
+              '<@(native_libs_paths)',
+              '<@(extra_native_libs)',
+            ],
+            'stamp': '<(strip_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'insert_chromium_version',
+          'variables': {
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'version_string': '<(native_lib_version_name)',
+            'input_paths': [
+              '<(strip_stamp)',
+            ],
+            'stamp': '<(version_stamp)'
+          },
+          'includes': ['../build/android/insert_chromium_version.gypi'],
+        },
+        {
+          'action_name': 'pack_relocations',
+          'variables': {
+            'conditions': [
+              ['use_chromium_linker == 1 and use_relocation_packer == 1 and profiling != 1', {
+                'enable_packing': 1,
+              }, {
+                'enable_packing': 0,
+              }],
+            ],
+            'exclude_packing_list': [
+              '<(libchromium_android_linker)',
+            ],
+            'ordered_libraries_file%': '<(ordered_libraries_file)',
+            'stripped_libraries_dir%': '<(stripped_libraries_dir)',
+            'packed_libraries_dir': '<(libraries_source_dir)',
+            'input_paths': [
+              '<(version_stamp)'
+            ],
+            'stamp': '<(pack_relocations_stamp)',
+          },
+          'includes': ['../build/android/pack_relocations.gypi'],
+        },
+        {
+          'variables': {
+            'input_libraries': [
+              '<@(additional_bundled_libs)',
+            ],
+            'ordered_libraries_file': '<(additional_ordered_libraries_file)',
+            'subtarget': '_additional_libraries',
+          },
+          'includes': ['../build/android/write_ordered_libraries.gypi'],
+        },
+        {
+          'action_name': 'strip_additional_libraries',
+          'variables': {
+            'ordered_libraries_file': '<(additional_ordered_libraries_file)',
+            'stripped_libraries_dir': '<(libraries_source_dir)',
+            'input_paths': [
+              '<@(additional_bundled_libs)',
+              '<(strip_stamp)',
+            ],
+            'stamp': '<(strip_additional_stamp)'
+          },
+          'includes': ['../build/android/strip_native_libraries.gypi'],
+        },
+        {
+          'action_name': 'Create native lib placeholder files for previous releases',
+          'variables': {
+            'placeholders': ['<@(native_lib_placeholders)'],
+            'conditions': [
+              ['gyp_managed_install == 1', {
+                # This "library" just needs to be put in the .apk. It is not loaded
+                # at runtime.
+                'placeholders': ['libfix.crbug.384638.so'],
+              }]
+            ],
+          },
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/create_placeholder_files.py',
+          ],
+          'outputs': [
+            '<(native_lib_placeholder_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/create_placeholder_files.py',
+            '--dest-lib-dir=<(apk_package_native_libs_dir)/<(android_app_abi)/',
+            '--stamp=<(native_lib_placeholder_stamp)',
+            '<@(placeholders)',
+          ],
+        },
+      ],
+      'conditions': [
+        ['gyp_managed_install == 1', {
+          'variables': {
+            'libraries_top_dir': '<(intermediate_dir)/lib.stripped',
+            'libraries_source_dir': '<(libraries_top_dir)/lib/<(android_app_abi)',
+            'device_library_dir': '<(device_intermediate_dir)/lib.stripped',
+            'configuration_name': '<(CONFIGURATION_NAME)',
+          },
+          'dependencies': [
+            '<(DEPTH)/build/android/setup.gyp:get_build_device_configurations',
+            '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+          ],
+          'actions': [
+            {
+              'includes': ['../build/android/push_libraries.gypi'],
+            },
+            {
+              'action_name': 'create device library symlinks',
+              'message': 'Creating links on device for <(_target_name)',
+              'inputs': [
+                '<(DEPTH)/build/android/gyp/util/build_utils.py',
+                '<(DEPTH)/build/android/gyp/create_device_library_links.py',
+                '<(apk_install_record)',
+                '<(build_device_config_path)',
+                '<(ordered_libraries_file)',
+              ],
+              'outputs': [
+                '<(link_stamp)'
+              ],
+              'action': [
+                'python', '<(DEPTH)/build/android/gyp/create_device_library_links.py',
+                '--build-device-configuration=<(build_device_config_path)',
+                '--libraries=@FileArg(<(ordered_libraries_file):libraries)',
+                '--script-host-path=<(symlink_script_host_path)',
+                '--script-device-path=<(symlink_script_device_path)',
+                '--target-dir=<(device_library_dir)',
+                '--apk=<(incomplete_apk_path)',
+                '--stamp=<(link_stamp)',
+                '--configuration-name=<(CONFIGURATION_NAME)',
+              ],
+            },
+          ],
+          'conditions': [
+            ['create_standalone_apk == 1', {
+              'actions': [
+                {
+                  'action_name': 'create standalone APK',
+                  'variables': {
+                    'inputs': [
+                      '<(ordered_libraries_file)',
+                      '<(strip_additional_stamp)',
+                      '<(pack_relocations_stamp)',
+                    ],
+                    'output_apk_path': '<(unsigned_standalone_apk_path)',
+                    'libraries_top_dir%': '<(libraries_top_dir)',
+                    'input_apk_path': '<(managed_input_apk_path)',
+                  },
+                  'includes': [ 'android/create_standalone_apk_action.gypi' ],
+                },
+              ],
+            }],
+          ],
+        }, {
+          # gyp_managed_install != 1
+          'variables': {
+            'libraries_source_dir': '<(apk_package_native_libs_dir)/<(android_app_abi)',
+            'package_input_paths': [
+              '<(strip_additional_stamp)',
+              '<(pack_relocations_stamp)',
+            ],
+          },
+        }],
+      ],
+    }], # native_lib_target != ''
+    ['gyp_managed_install == 0 or create_standalone_apk == 1 or create_abi_split == 1', {
+      'dependencies': [
+        '<(DEPTH)/build/android/rezip.gyp:rezip_apk_jar',
+      ],
+    }],
+    ['create_abi_split == 1 or gyp_managed_install == 0 or create_standalone_apk == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize_base',
+          'variables': {
+            'output_apk_path': '<(final_apk_path)',
+            'conditions': [
+              ['create_abi_split == 0', {
+                'input_apk_path': '<(unsigned_standalone_apk_path)',
+              }, {
+                'input_apk_path': '<(unsigned_apk_path)',
+                'load_library_from_zip': 0,
+              }]
+            ],
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+      ],
+    }],
+    ['create_abi_split == 1', {
+      'actions': [
+        {
+          'action_name': 'generate_split_manifest_<(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/generate_split_manifest.py',
+            '<(android_manifest_path)',
+          ],
+          'outputs': [
+            '<(split_android_manifest_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/generate_split_manifest.py',
+            '--main-manifest', '<(android_manifest_path)',
+            '--out-manifest', '<(split_android_manifest_path)',
+            '--split', 'abi_<(android_app_abi)',
+          ],
+        },
+        {
+          'variables': {
+            'apk_name': '<(main_apk_name)-abi-<(android_app_abi)',
+            'asset_location': '',
+            'android_manifest_path': '<(split_android_manifest_path)',
+            'create_density_splits': 0,
+            'language_splits=': [],
+          },
+          'includes': [ 'android/package_resources_action.gypi' ],
+        },
+        {
+          'variables': {
+            'apk_name': '<(main_apk_name)-abi-<(android_app_abi)',
+            'apk_path': '<(unsigned_abi_split_apk_path)',
+            'has_code': 0,
+            'native_libs_dir': '<(apk_package_native_libs_dir)',
+            'extra_inputs': ['<(native_lib_placeholder_stamp)'],
+          },
+          'includes': ['android/apkbuilder_action.gypi'],
+        },
+      ],
+    }],
+    ['create_abi_split == 1 and (gyp_managed_install == 0 or create_standalone_apk == 1)', {
+      'actions': [
+        {
+          'action_name': 'finalize_split',
+          'variables': {
+            'output_apk_path': '<(final_abi_split_apk_path)',
+            'conditions': [
+              ['gyp_managed_install == 1', {
+                'input_apk_path': '<(unsigned_standalone_apk_path)',
+              }, {
+                'input_apk_path': '<(unsigned_abi_split_apk_path)',
+              }],
+            ],
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+      ],
+    }],
+    ['gyp_managed_install == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize incomplete apk',
+          'variables': {
+            'load_library_from_zip': 0,
+            'input_apk_path': '<(managed_input_apk_path)',
+            'output_apk_path': '<(incomplete_apk_path)',
+          },
+          'includes': [ 'android/finalize_apk_action.gypi']
+        },
+        {
+          'action_name': 'apk_install_<(_target_name)',
+          'message': 'Installing <(apk_name).apk',
+          'inputs': [
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/apk_install.py',
+            '<(build_device_config_path)',
+            '<(incomplete_apk_path)',
+          ],
+          'outputs': [
+            '<(apk_install_record)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/apk_install.py',
+            '--build-device-configuration=<(build_device_config_path)',
+            '--install-record=<(apk_install_record)',
+            '--configuration-name=<(CONFIGURATION_NAME)',
+            '--android-sdk-tools', '<(android_sdk_tools)',
+          ],
+          'conditions': [
+            ['create_abi_split == 1', {
+              'inputs': [
+                '<(final_apk_path)',
+              ],
+              'action': [
+                '--apk-path=<(final_apk_path)',
+                '--split-apk-path=<(incomplete_apk_path)',
+              ],
+            }, {
+              'action': [
+                '--apk-path=<(incomplete_apk_path)',
+              ],
+            }],
+            ['create_density_splits == 1', {
+              'inputs': [
+                '<(final_apk_path_no_extension)-density-hdpi.apk',
+                '<(final_apk_path_no_extension)-density-xhdpi.apk',
+                '<(final_apk_path_no_extension)-density-xxhdpi.apk',
+                '<(final_apk_path_no_extension)-density-xxxhdpi.apk',
+                '<(final_apk_path_no_extension)-density-tvdpi.apk',
+              ],
+              'action': [
+                '--split-apk-path=<(final_apk_path_no_extension)-density-hdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xxhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-xxxhdpi.apk',
+                '--split-apk-path=<(final_apk_path_no_extension)-density-tvdpi.apk',
+              ],
+            }],
+            ['language_splits != []', {
+              'inputs': [
+                "<!@(python <(DEPTH)/build/apply_locales.py '<(final_apk_path_no_extension)-lang-ZZLOCALE.apk' <(language_splits))",
+              ],
+              'action': [
+                "<!@(python <(DEPTH)/build/apply_locales.py -- '--split-apk-path=<(final_apk_path_no_extension)-lang-ZZLOCALE.apk' <(language_splits))",
+              ],
+            }],
+          ],
+        },
+      ],
+    }],
+    ['create_density_splits == 1', {
+      'actions': [
+        {
+          'action_name': 'finalize_density_splits',
+          'variables': {
+            'density_splits': 1,
+          },
+          'includes': [ 'android/finalize_splits_action.gypi']
+        },
+      ],
+    }],
+    ['is_test_apk == 1', {
+      'dependencies': [
+        '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+        '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+      ]
+    }],
+    ['run_findbugs == 1', {
+      'actions': [
+        {
+          'action_name': 'findbugs_<(_target_name)',
+          'message': 'Running findbugs on <(_target_name)',
+          'inputs': [
+            '<(DEPTH)/build/android/findbugs_diff.py',
+            '<(DEPTH)/build/android/findbugs_filter/findbugs_exclude.xml',
+            '<(DEPTH)/build/android/pylib/utils/findbugs.py',
+            '>@(input_jars_paths)',
+            '<(jar_path)',
+            '<(compile_stamp)',
+          ],
+          'outputs': [
+            '<(findbugs_stamp)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/findbugs_diff.py',
+            '--auxclasspath-gyp', '>(input_jars_paths)',
+            '--stamp', '<(findbugs_stamp)',
+            '<(jar_path)',
+          ],
+        },
+      ],
+    },
+    ]
+  ],
+  'dependencies': [
+    '<(DEPTH)/tools/android/md5sum/md5sum.gyp:md5sum',
+  ],
+  'actions': [
+    {
+      'action_name': 'process_resources',
+      'message': 'processing resources for <(_target_name)',
+      'variables': {
+        # Write the inputs list to a file, so that its mtime is updated when
+        # the list of inputs changes.
+        'inputs_list_file': '>|(apk_codegen.<(_target_name).gypcmd >@(additional_input_paths) >@(resource_input_paths))',
+        'process_resources_options': [],
+        'conditions': [
+          ['is_test_apk == 1', {
+            'dependencies_res_zip_paths=': [],
+            'additional_res_packages=': [],
+          }],
+          ['res_v14_skip == 1', {
+            'process_resources_options+': ['--v14-skip']
+          }],
+          ['shared_resources == 1', {
+            'process_resources_options+': ['--shared-resources']
+          }],
+          ['R_package != ""', {
+            'process_resources_options+': ['--custom-package', '<(R_package)']
+          }],
+          ['include_all_resources == 1', {
+            'process_resources_options+': ['--include-all-resources']
+          }]
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/process_resources.py',
+        '<(android_manifest_path)',
+        '>@(additional_input_paths)',
+        '>@(resource_input_paths)',
+        '>@(dependencies_res_zip_paths)',
+        '>(inputs_list_file)',
+      ],
+      'outputs': [
+        '<(resource_zip_path)',
+        '<(generated_proguard_file)',
+        '<(codegen_stamp)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+        '--android-sdk', '<(android_sdk)',
+        '--aapt-path', '<(android_aapt_path)',
+
+        '--android-manifest', '<(android_manifest_path)',
+        '--dependencies-res-zips', '>(dependencies_res_zip_paths)',
+
+        '--extra-res-packages', '>(additional_res_packages)',
+        '--extra-r-text-files', '>(additional_R_text_files)',
+
+        '--proguard-file', '<(generated_proguard_file)',
+
+        '--resource-dirs', '<(resource_dir)',
+        '--resource-zip-out', '<(resource_zip_path)',
+
+        '--R-dir', '<(intermediate_dir)/gen',
+
+        '--stamp', '<(codegen_stamp)',
+
+        '<@(process_resources_options)',
+      ],
+    },
+    {
+      'action_name': 'javac_<(_target_name)',
+      'message': 'Compiling java for <(_target_name)',
+      'variables': {
+        'extra_args': [],
+        'extra_inputs': [],
+        'gen_src_dirs': [
+          '<(intermediate_dir)/gen',
+          '>@(generated_src_dirs)',
+        ],
+        # If there is a separate find for additional_src_dirs, it will find the
+        # wrong .java files when additional_src_dirs is empty.
+        # TODO(thakis): Gyp caches >! evaluation by command. Both java.gypi and
+        # java_apk.gypi evaluate the same command, and at the moment two targets
+        # set java_in_dir to "java". Add a dummy comment here to make sure
+        # that the two targets (one uses java.gypi, the other java_apk.gypi)
+        # get distinct source lists. Medium-term, make targets list all their
+        # Java files instead of using find. (As is, this will be broken if two
+        # targets use the same java_in_dir and both use java_apk.gypi or
+        # both use java.gypi.)
+        'java_sources': ['>!@(find >(java_in_dir)>(java_in_dir_suffix) >(additional_src_dirs) -name "*.java"  # apk)'],
+        'conditions': [
+          ['enable_errorprone == 1', {
+            'extra_inputs': [
+              '<(errorprone_exe_path)',
+            ],
+            'extra_args': [ '--use-errorprone-path=<(errorprone_exe_path)' ],
+          }],
+        ],
+      },
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '<(DEPTH)/build/android/gyp/javac.py',
+        '>@(java_sources)',
+        '>@(input_jars_paths)',
+        '<(codegen_stamp)',
+        '<@(extra_inputs)',
+      ],
+      'conditions': [
+        ['native_lib_target != ""', {
+          'inputs': [ '<(native_libraries_java_stamp)' ],
+        }],
+      ],
+      'outputs': [
+        '<(compile_stamp)',
+        '<(javac_jar_path)',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/javac.py',
+        '--bootclasspath=<(android_sdk_jar)',
+        '--classpath=>(input_jars_paths) <(android_sdk_jar)',
+        '--src-gendirs=>(gen_src_dirs)',
+        '--javac-includes=<(javac_includes)',
+        '--chromium-code=<(chromium_code)',
+        '--jar-path=<(javac_jar_path)',
+        '--jar-excluded-classes=<(jar_excluded_classes)',
+        '--stamp=<(compile_stamp)',
+        '<@(extra_args)',
+        '>@(java_sources)',
+      ],
+    },
+    {
+      'action_name': 'instr_jar_<(_target_name)',
+      'message': 'Instrumenting <(_target_name) jar',
+      'variables': {
+        'input_path': '<(javac_jar_path)',
+        'output_path': '<(jar_path)',
+        'stamp_path': '<(instr_stamp)',
+        'instr_type': 'jar',
+      },
+      'outputs': [
+        '<(instr_stamp)',
+        '<(jar_path)',
+      ],
+      'inputs': [
+        '<(javac_jar_path)',
+      ],
+      'includes': [ 'android/instr_action.gypi' ],
+    },
+    {
+      'variables': {
+        'src_dirs': [
+          '<(java_in_dir)<(java_in_dir_suffix)',
+          '>@(additional_src_dirs)',
+        ],
+        'lint_jar_path': '<(jar_path)',
+        'stamp_path': '<(lint_stamp)',
+        'result_path': '<(lint_result)',
+        'config_path': '<(lint_config)',
+      },
+      'outputs': [
+        '<(lint_stamp)',
+      ],
+      'includes': [ 'android/lint_action.gypi' ],
+    },
+    {
+      'action_name': 'obfuscate_<(_target_name)',
+      'message': 'Obfuscating <(_target_name)',
+      'variables': {
+        'additional_obfuscate_options': [],
+        'additional_obfuscate_input_paths': [],
+        'proguard_out_dir': '<(intermediate_dir)/proguard',
+        'proguard_input_jar_paths': [
+          '>@(input_jars_paths)',
+          '<(jar_path)',
+        ],
+        'target_conditions': [
+          ['is_test_apk == 1', {
+            'additional_obfuscate_options': [
+              '--testapp',
+            ],
+          }],
+          ['is_test_apk == 1 and tested_apk_obfuscated_jar_path != "/"', {
+            'additional_obfuscate_options': [
+              '--tested-apk-obfuscated-jar-path', '>(tested_apk_obfuscated_jar_path)',
+            ],
+            'additional_obfuscate_input_paths': [
+              '>(tested_apk_obfuscated_jar_path).info',
+            ],
+          }],
+          ['proguard_enabled == "true"', {
+            'additional_obfuscate_options': [
+              '--proguard-enabled',
+            ],
+          }],
+        ],
+        'obfuscate_input_jars_paths': [
+          '>@(input_jars_paths)',
+          '<(jar_path)',
+        ],
+      },
+      'conditions': [
+        ['is_test_apk == 1', {
+          'outputs': [
+            '<(test_jar_path)',
+          ],
+        }],
+      ],
+      'inputs': [
+        '<(DEPTH)/build/android/gyp/apk_obfuscate.py',
+        '<(DEPTH)/build/android/gyp/util/build_utils.py',
+        '>@(proguard_flags_paths)',
+        '>@(obfuscate_input_jars_paths)',
+        '>@(additional_obfuscate_input_paths)',
+        '<(instr_stamp)',
+      ],
+      'outputs': [
+        '<(obfuscate_stamp)',
+
+        # In non-Release builds, these paths will all be empty files.
+        '<(obfuscated_jar_path)',
+        '<(obfuscated_jar_path).info',
+        '<(obfuscated_jar_path).dump',
+        '<(obfuscated_jar_path).seeds',
+        '<(obfuscated_jar_path).mapping',
+        '<(obfuscated_jar_path).usage',
+      ],
+      'action': [
+        'python', '<(DEPTH)/build/android/gyp/apk_obfuscate.py',
+
+        '--configuration-name', '<(CONFIGURATION_NAME)',
+
+        '--android-sdk', '<(android_sdk)',
+        '--android-sdk-tools', '<(android_sdk_tools)',
+        '--android-sdk-jar', '<(android_sdk_jar)',
+
+        '--input-jars-paths=>(proguard_input_jar_paths)',
+        '--proguard-configs=>(proguard_flags_paths)',
+
+        '--test-jar-path', '<(test_jar_path)',
+        '--obfuscated-jar-path', '<(obfuscated_jar_path)',
+
+        '--proguard-jar-path', '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+
+        '--stamp', '<(obfuscate_stamp)',
+
+        '>@(additional_obfuscate_options)',
+      ],
+    },
+    {
+      'action_name': 'dex_<(_target_name)',
+      'variables': {
+        'dex_input_paths': [
+          '>@(library_dexed_jars_paths)',
+          '<(jar_path)',
+        ],
+        'output_path': '<(dex_path)',
+        'proguard_enabled_input_path': '<(obfuscated_jar_path)',
+      },
+      'target_conditions': [
+        ['emma_instrument != 0', {
+          'variables': {
+            'dex_no_locals': 1,
+            'dex_input_paths': [
+              '<(emma_device_jar)'
+            ],
+          },
+        }],
+        ['is_test_apk == 1 and tested_apk_dex_path != "/"', {
+          'variables': {
+            'dex_additional_options': [
+              '--excluded-paths', '@FileArg(>(tested_apk_dex_path).inputs)'
+            ],
+          },
+          'inputs': [
+            '>(tested_apk_dex_path).inputs',
+          ],
+        }],
+        ['proguard_enabled == "true"', {
+          'inputs': [ '<(obfuscate_stamp)' ]
+        }, {
+          'inputs': [ '<(instr_stamp)' ]
+        }],
+      ],
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+    {
+      'variables': {
+        'extra_inputs': ['<(codegen_stamp)'],
+        'resource_zips': [
+          '<(resource_zip_path)',
+        ],
+        'conditions': [
+          ['is_test_apk == 0', {
+            'resource_zips': [
+              '>@(dependencies_res_zip_paths)',
+            ],
+          }],
+        ],
+      },
+      'includes': [ 'android/package_resources_action.gypi' ],
+    },
+    {
+      'variables': {
+        'apk_path': '<(unsigned_apk_path)',
+        'conditions': [
+          ['native_lib_target != ""', {
+            'extra_inputs': ['<(native_lib_placeholder_stamp)'],
+          }],
+          ['create_abi_split == 0', {
+            'native_libs_dir': '<(apk_package_native_libs_dir)',
+          }, {
+            'native_libs_dir': '<(DEPTH)/build/android/ant/empty/res',
+          }],
+        ],
+      },
+      'includes': ['android/apkbuilder_action.gypi'],
+    },
+  ],
+}
diff --git a/build/java_prebuilt.gypi b/build/java_prebuilt.gypi
new file mode 100644
index 0000000..8efc4ef
--- /dev/null
+++ b/build/java_prebuilt.gypi
@@ -0,0 +1,102 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to package prebuilt Java JARs in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_java',
+#   'type': 'none',
+#   'variables': {
+#     'jar_path': 'path/to/your.jar',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  jar_path - The path to the prebuilt Java JAR file.
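+#
+# Optional variables (defaults come from the 'variables' block below and are
+# summarized here for convenience):
+#  neverlink - If set to 1, the jar is only exported to dependents' classpaths
+#    and is not dexed into them. Defaults to 0.
+#  proguard_preprocess - If set to 1, the jar is run through ProGuard before
+#    dexing, using proguard_config. Defaults to 0.
+#  proguard_config - Path to the ProGuard configuration file used when
+#    proguard_preprocess is 1.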
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+  ],
+  'variables': {
+    'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+    'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+    'android_jar': '<(android_sdk)/android.jar',
+    'input_jars_paths': [ '<(android_jar)' ],
+    'neverlink%': 0,
+    'proguard_config%': '',
+    'proguard_preprocess%': '0',
+    'variables': {
+      'variables': {
+        'proguard_preprocess%': 0,
+      },
+      'conditions': [
+        ['proguard_preprocess == 1', {
+          'dex_input_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+        }, {
+          'dex_input_jar_path': '<(jar_path)'
+        }],
+      ],
+    },
+    'dex_input_jar_path': '<(dex_input_jar_path)',
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'input_jars_paths': ['<(dex_input_jar_path)'],
+      'conditions': [
+        ['neverlink == 1', {
+          'library_dexed_jars_paths': [],
+        }, {
+          'library_dexed_jars_paths': ['<(dex_path)'],
+        }],
+      ],
+    },
+  },
+  'conditions' : [
+    ['proguard_preprocess == 1', {
+      'actions': [
+        {
+          'action_name': 'proguard_<(_target_name)',
+          'message': 'Proguard preprocessing <(_target_name) jar',
+          'inputs': [
+            '<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '<(DEPTH)/build/android/gyp/util/build_utils.py',
+            '<(DEPTH)/build/android/gyp/proguard.py',
+            '<(jar_path)',
+            '<(proguard_config)',
+          ],
+          'outputs': [
+            '<(dex_input_jar_path)',
+          ],
+          'action': [
+            'python', '<(DEPTH)/build/android/gyp/proguard.py',
+            '--proguard-path=<(android_sdk_root)/tools/proguard/lib/proguard.jar',
+            '--input-path=<(jar_path)',
+            '--output-path=<(dex_input_jar_path)',
+            '--proguard-config=<(proguard_config)',
+            '--classpath=>(input_jars_paths)',
+          ]
+        },
+      ],
+    }],
+    ['neverlink == 0', {
+      'actions': [
+        {
+          'action_name': 'dex_<(_target_name)',
+          'message': 'Dexing <(_target_name) jar',
+          'variables': {
+            'dex_input_paths': [
+              '<(dex_input_jar_path)',
+            ],
+            'output_path': '<(dex_path)',
+          },
+          'includes': [ 'android/dex_action.gypi' ],
+        },
+      ],
+    }],
+  ],
+}
diff --git a/build/java_strings_grd.gypi b/build/java_strings_grd.gypi
new file mode 100644
index 0000000..7534be5
--- /dev/null
+++ b/build/java_strings_grd.gypi
@@ -0,0 +1,62 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate localized strings.xml from a grd file.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my-package_strings_grd',
+#   'type': 'none',
+#   'variables': {
+#     'grd_file': 'path/to/grd/file',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+#  grd_file - The path to the grd file to use.
+{
+  'variables': {
+    'res_grit_dir': '<(INTERMEDIATE_DIR)/<(_target_name)/res_grit',
+    'grit_grd_file': '<(grd_file)',
+    'resource_zip_path': '<(PRODUCT_DIR)/res.java/<(_target_name).zip',
+    'grit_additional_defines': ['-E', 'ANDROID_JAVA_TAGGED_ONLY=false'],
+    'grit_out_dir': '<(res_grit_dir)',
+    # resource_ids is unneeded since we don't generate .h headers.
+    'grit_resource_ids': '',
+    'grit_outputs': [
+      '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+          '--outputs \'<(grit_out_dir)\' '
+          '<(grit_grd_file) -f "<(grit_resource_ids)")',
+          ]
+  },
+  'all_dependent_settings': {
+    'variables': {
+      'additional_input_paths': ['<(resource_zip_path)'],
+      'dependencies_res_zip_paths': ['<(resource_zip_path)'],
+    },
+  },
+  'actions': [
+    {
+      'action_name': 'generate_localized_strings_xml',
+      'includes': ['../build/grit_action.gypi'],
+    },
+    {
+      'action_name': 'create_resources_zip',
+      'inputs': [
+          '<(DEPTH)/build/android/gyp/zip.py',
+          '<@(grit_outputs)',
+      ],
+      'outputs': [
+          '<(resource_zip_path)',
+      ],
+      'action': [
+          'python', '<(DEPTH)/build/android/gyp/zip.py',
+          '--input-dir', '<(res_grit_dir)',
+          '--output', '<(resource_zip_path)',
+      ],
+    }
+  ],
+}
diff --git a/build/jni_generator.gypi b/build/jni_generator.gypi
new file mode 100644
index 0000000..7a9e333
--- /dev/null
+++ b/build/jni_generator.gypi
@@ -0,0 +1,87 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to generate jni bindings for Java-files in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+#  {
+#    'target_name': 'base_jni_headers',
+#    'type': 'none',
+#    'sources': [
+#      'android/java/src/org/chromium/base/BuildInfo.java',
+#      ...
+#      ...
+#      'android/java/src/org/chromium/base/SystemMessageHandler.java',
+#    ],
+#    'variables': {
+#      'jni_gen_package': 'base',
+#    },
+#    'includes': [ '../build/jni_generator.gypi' ],
+#  },
+#
+# The generated file name pattern can be seen on the "outputs" section below.
+# (note that RULE_INPUT_ROOT is the basename for the java file).
+#
+# See base/android/jni_generator/jni_generator.py for more info about the
+# format of generating JNI bindings.
+
+{
+  'variables': {
+    'jni_generator': '<(DEPTH)/base/android/jni_generator/jni_generator.py',
+    'jni_generator_jarjar_file%': '',
+    'jni_generator_ptr_type%': 'long',
+    # A comma separated string of include files.
+    'jni_generator_includes%': (
+        'base/android/jni_generator/jni_generator_helper.h'
+    ),
+    'native_exports%': '--native_exports_optional',
+  },
+  'rules': [
+    {
+      'rule_name': 'generate_jni_headers',
+      'extension': 'java',
+      'inputs': [
+        '<(jni_generator)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni/<(RULE_INPUT_ROOT)_jni.h',
+      ],
+      'action': [
+        '<(jni_generator)',
+        '--input_file',
+        '<(RULE_INPUT_PATH)',
+        '--output_dir',
+        '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)/jni',
+        '--includes',
+        '<(jni_generator_includes)',
+        '--optimize_generation',
+        '<(optimize_jni_generation)',
+        '--jarjar',
+        '<(jni_generator_jarjar_file)',
+        '--ptr_type',
+        '<(jni_generator_ptr_type)',
+        '<(native_exports)',
+      ],
+      'message': 'Generating JNI bindings from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+      'conditions': [
+        ['jni_generator_jarjar_file != ""', {
+          'inputs': [
+            '<(jni_generator_jarjar_file)',
+          ],
+        }]
+      ],
+    },
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/<(jni_gen_package)',
+    ],
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
+
diff --git a/build/json_schema_api.gni b/build/json_schema_api.gni
new file mode 100644
index 0000000..e1c2d33
--- /dev/null
+++ b/build/json_schema_api.gni
@@ -0,0 +1,242 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines a static library corresponding to the output of schema compiler tools
+# over a set of extensions API schemas (IDL or JSON format). The library target
+# has implicit hard dependencies on all schema files listed by the invoker and
+# is itself a hard dependency.
+#
+# Invocations of this template may use the following variables:
+#
+# sources [required] A list of schema files to be compiled.
+#
+# root_namespace [required]
+#     A Python string substitution pattern used to generate the C++
+#     namespace for each API. Use %(namespace)s to replace with the API
+#     namespace, like "toplevel::%(namespace)s_api".
+#
+# schema_include_rules [optional]
+#     A list of paths to include when searching for referenced objects,
+#     with the namespace separated by a :.
+#     Example:
+#       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+#
+# schemas [optional, default = false]
+#   Boolean indicating if C++ code for the individual schemas should be
+#   generated.
+#
+# bundle [optional, default = false]
+#   Boolean indicating if the schema bundle files should be generated.
+#
+# bundle_registration [optional, default = false]
+#   Boolean indicating if the API registration bundle files should be generated.
+#
+# impl_dir [required if bundle_registration = true, otherwise unused]
+#   The path containing C++ implementations of API functions. This path is
+#   used as the root path when looking for {schema}/{schema}_api.h headers
+#   when generating API registration bundles. Such headers, if found, are
+#   automatically included by the generated code.
+#
+# uncompiled_sources [optional, only used when bundle = true or
+#     bundle_registration = true]
+#   A list of schema files which should not be compiled, but which should still
+#   be processed for API bundle generation.
+#
+# deps [optional]
+#   If any deps are specified they will be inherited by the static library
+#   target.
+#
+# generate_static_library [optional, defaults to false]
+#   Produces a static library instead of a source_set.
+#
+# The generated library target also inherits the visibility and output_name
+# of its invoker.
+
+template("json_schema_api") {
+  assert(defined(invoker.sources),
+         "\"sources\" must be defined for the $target_name template.")
+  assert(defined(invoker.root_namespace),
+         "\"root_namespace\" must be defined for the $target_name template.")
+
+  schemas = defined(invoker.schemas) && invoker.schemas
+  bundle = defined(invoker.bundle) && invoker.bundle
+  bundle_registration =
+      defined(invoker.bundle_registration) && invoker.bundle_registration
+
+  schema_include_rules = ""
+  if (defined(invoker.schema_include_rules)) {
+    schema_include_rules = invoker.schema_include_rules
+  }
+
+  # Keep a copy of the target_name here since it will be trampled
+  # in nested targets.
+  target_visibility = [ ":$target_name" ]
+
+  generated_config_name = target_name + "_generated_config"
+  config(generated_config_name) {
+    include_dirs = [ root_gen_dir ]
+    visibility = target_visibility
+  }
+
+  root_namespace = invoker.root_namespace
+
+  compiler_root = "//tools/json_schema_compiler"
+  compiler_script = "$compiler_root/compiler.py"
+  compiler_sources = [
+    "$compiler_root/cc_generator.py",
+    "$compiler_root/code.py",
+    "$compiler_root/compiler.py",
+    "$compiler_root/cpp_generator.py",
+    "$compiler_root/cpp_type_generator.py",
+    "$compiler_root/cpp_util.py",
+    "$compiler_root/h_generator.py",
+    "$compiler_root/idl_schema.py",
+    "$compiler_root/model.py",
+    "$compiler_root/util_cc_helper.py",
+  ]
+
+  if (schemas) {
+    schema_generator_name = target_name + "_schema_generator"
+    action_foreach(schema_generator_name) {
+      script = compiler_script
+      sources = invoker.sources
+      inputs = compiler_sources
+      outputs = [
+        "$target_gen_dir/{{source_name_part}}.cc",
+        "$target_gen_dir/{{source_name_part}}.h",
+      ]
+      args = [
+        "{{source}}",
+        "--root=" + rebase_path("//", root_build_dir),
+        "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+        "--namespace=$root_namespace",
+        "--generator=cpp",
+        "--include-rules=$schema_include_rules",
+      ]
+
+      if (defined(invoker.visibility)) {
+        # If visibility is restricted, add our own target to it.
+        visibility = invoker.visibility + target_visibility
+      }
+    }
+  }
+
+  if (bundle) {
+    uncompiled_sources = []
+    if (defined(invoker.uncompiled_sources)) {
+      uncompiled_sources = invoker.uncompiled_sources
+    }
+
+    bundle_generator_schema_name = target_name + "_bundle_generator_schema"
+    action(bundle_generator_schema_name) {
+      script = compiler_script
+      inputs = compiler_sources + invoker.sources + uncompiled_sources
+      outputs = [
+        "$target_gen_dir/generated_schemas.cc",
+        "$target_gen_dir/generated_schemas.h",
+      ]
+      args = [
+               "--root=" + rebase_path("//", root_build_dir),
+               "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+               "--namespace=$root_namespace",
+               "--generator=cpp-bundle-schema",
+               "--include-rules=$schema_include_rules",
+             ] + rebase_path(invoker.sources, root_build_dir) +
+             rebase_path(uncompiled_sources, root_build_dir)
+    }
+  }
+
+  if (bundle_registration) {
+    uncompiled_sources = []
+    if (defined(invoker.uncompiled_sources)) {
+      uncompiled_sources = invoker.uncompiled_sources
+    }
+
+    assert(defined(invoker.impl_dir),
+           "\"impl_dir\" must be defined for the $target_name template.")
+
+    # Child directory inside the generated file tree.
+    gen_child_dir = rebase_path(invoker.impl_dir, "//")
+
+    bundle_generator_registration_name =
+        target_name + "_bundle_generator_registration"
+    action(bundle_generator_registration_name) {
+      script = compiler_script
+      inputs = compiler_sources + invoker.sources + uncompiled_sources
+      outputs = [
+        "$root_gen_dir/$gen_child_dir/generated_api_registration.cc",
+        "$root_gen_dir/$gen_child_dir/generated_api_registration.h",
+      ]
+      args = [
+               "--root=" + rebase_path("//", root_build_dir),
+               "--destdir=" + rebase_path(root_gen_dir, root_build_dir),
+               "--namespace=$root_namespace",
+               "--generator=cpp-bundle-registration",
+               "--impl-dir=$gen_child_dir",
+               "--include-rules=$schema_include_rules",
+             ] + rebase_path(invoker.sources, root_build_dir) +
+             rebase_path(uncompiled_sources, root_build_dir)
+    }
+  }
+
+  # Compute the contents of the library/source set.
+  lib_sources = invoker.sources
+  lib_deps = []
+  lib_public_deps = []
+  lib_extra_configs = []
+
+  if (schemas) {
+    lib_sources += get_target_outputs(":$schema_generator_name")
+    lib_public_deps += [ ":$schema_generator_name" ]
+    lib_deps += [ "//tools/json_schema_compiler:generated_api_util" ]
+    lib_extra_configs += [ "//build/config/compiler:no_size_t_to_int_warning" ]
+  }
+
+  if (bundle) {
+    lib_sources += get_target_outputs(":$bundle_generator_schema_name")
+    lib_deps += [ ":$bundle_generator_schema_name" ]
+  }
+
+  if (bundle_registration) {
+    lib_sources += get_target_outputs(":$bundle_generator_registration_name")
+    lib_deps += [ ":$bundle_generator_registration_name" ]
+  }
+
+  if (defined(invoker.deps)) {
+    lib_deps += invoker.deps
+  }
+
+  # Generate either a static library or a source set.
+  if (defined(invoker.generate_static_library) &&
+      invoker.generate_static_library) {
+    static_library(target_name) {
+      sources = lib_sources
+      deps = lib_deps
+      public_deps = lib_public_deps
+      configs += lib_extra_configs
+      public_configs = [ ":$generated_config_name" ]
+
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+    }
+  } else {
+    source_set(target_name) {
+      sources = lib_sources
+      deps = lib_deps
+      public_deps = lib_public_deps
+      configs += lib_extra_configs
+      public_configs = [ ":$generated_config_name" ]
+
+      if (defined(invoker.visibility)) {
+        visibility = invoker.visibility
+      }
+      if (defined(invoker.output_name)) {
+        output_name = invoker.output_name
+      }
+    }
+  }
+}
diff --git a/build/json_schema_bundle_compile.gypi b/build/json_schema_bundle_compile.gypi
new file mode 100644
index 0000000..a302013
--- /dev/null
+++ b/build/json_schema_bundle_compile.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   schema_include_rules (optional):
+    #     An array of paths to include when searching for referenced objects,
+    #     with the namespace separated by a :.
+    #     Example:
+    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+    #   cc_dir:
+    #     The directory to put the generated code in.
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
+    # The default root path of API implementation sources is
+    # chrome/browser/extensions/api and can be overridden by setting "impl_dir".
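+    #
+    # A hypothetical target that includes this file might look like the
+    # following (all names and paths are illustrative only):
+    #   {
+    #     'target_name': 'foo_api_bundle',
+    #     'type': 'static_library',
+    #     'variables': {
+    #       'schema_files': [ 'foo.json' ],
+    #       'non_compiled_schema_files': [],
+    #       'cc_dir': 'foo/api',
+    #       'root_namespace': 'foo::api::%(namespace)s',
+    #     },
+    #     'includes': [ '../build/json_schema_bundle_compile.gypi' ],
+    #   },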
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'generator_files': [
+      '<(api_gen_dir)/cc_generator.py',
+      '<(api_gen_dir)/code.py',
+      '<(api_gen_dir)/compiler.py',
+      '<(api_gen_dir)/cpp_bundle_generator.py',
+      '<(api_gen_dir)/cpp_type_generator.py',
+      '<(api_gen_dir)/cpp_util.py',
+      '<(api_gen_dir)/h_generator.py',
+      '<(api_gen_dir)/idl_schema.py',
+      '<(api_gen_dir)/json_schema.py',
+      '<(api_gen_dir)/model.py',
+      '<(api_gen_dir)/util_cc_helper.py',
+    ],
+    'schema_include_rules': [],
+  },
+  'actions': [
+    {
+      'action_name': 'genapi_bundle_schema',
+      'inputs': [
+        '<@(generator_files)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/generated_schemas.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp-bundle-schema',
+        '--include-rules=<(schema_include_rules)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code for schemas',
+      'process_outputs_as_sources': 1,
+      # Avoid running MIDL compiler on IDL input files.
+      'explicit_idl_action': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_schema_bundle_registration_compile.gypi b/build/json_schema_bundle_registration_compile.gypi
new file mode 100644
index 0000000..8c5af4e
--- /dev/null
+++ b/build/json_schema_bundle_registration_compile.gypi
@@ -0,0 +1,78 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   impl_dir_:
+    #     The root path of API implementations; also used for the
+    #     output location. (N.B. Named as such to prevent gyp from
+    #     expanding it as a relative path.)
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
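+    #
+    # Usage mirrors json_schema_bundle_compile.gypi, except that 'impl_dir_'
+    # is set instead of 'cc_dir'; a hypothetical variables block (names are
+    # illustrative only):
+    #     'variables': {
+    #       'schema_files': [ 'foo.json' ],
+    #       'non_compiled_schema_files': [],
+    #       'impl_dir_': 'foo/browser/api',
+    #       'root_namespace': 'foo::api::%(namespace)s',
+    #     },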
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'generator_files': [
+      '<(api_gen_dir)/cc_generator.py',
+      '<(api_gen_dir)/code.py',
+      '<(api_gen_dir)/compiler.py',
+      '<(api_gen_dir)/cpp_bundle_generator.py',
+      '<(api_gen_dir)/cpp_type_generator.py',
+      '<(api_gen_dir)/cpp_util.py',
+      '<(api_gen_dir)/h_generator.py',
+      '<(api_gen_dir)/idl_schema.py',
+      '<(api_gen_dir)/json_schema.py',
+      '<(api_gen_dir)/model.py',
+      '<(api_gen_dir)/util_cc_helper.py',
+    ],
+  },
+  'actions': [
+    {
+      # GN version: json_schema_api.gni
+      'action_name': 'genapi_bundle_registration',
+      'inputs': [
+        '<@(generator_files)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.h',
+        '<(SHARED_INTERMEDIATE_DIR)/<(impl_dir_)/generated_api_registration.cc',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp-bundle-registration',
+        '--impl-dir=<(impl_dir_)',
+        '<@(schema_files)',
+        '<@(non_compiled_schema_files)',
+      ],
+      'message': 'Generating C++ API bundle code for function registration',
+      'process_outputs_as_sources': 1,
+      # Avoid running MIDL compiler on IDL input files.
+      'explicit_idl_action': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_schema_compile.gypi b/build/json_schema_compile.gypi
new file mode 100644
index 0000000..6e5727a
--- /dev/null
+++ b/build/json_schema_compile.gypi
@@ -0,0 +1,123 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_files:
+    #     An array of json or idl files that comprise the api model.
+    #   schema_include_rules (optional):
+    #     An array of paths to include when searching for referenced objects,
+    #     with the namespace separated by a :.
+    #     Example:
+    #       [ '/foo/bar:Foo::Bar::%(namespace)s' ]
+    #   cc_dir:
+    #     The directory to put the generated code in.
+    #   root_namespace:
+    #     A Python string substitution pattern used to generate the C++
+    #     namespace for each API. Use %(namespace)s to replace with the API
+    #     namespace, like "toplevel::%(namespace)s_api".
+    #
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
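+    #
+    # A hypothetical target that includes this file might look like the
+    # following (all names and paths are illustrative only); note that the
+    # schemas appear both in 'sources' (so the per-extension rules below fire)
+    # and in 'schema_files':
+    #   {
+    #     'target_name': 'foo_api',
+    #     'type': 'static_library',
+    #     'sources': [ 'foo.json', 'bar.idl' ],
+    #     'variables': {
+    #       'schema_files': [ 'foo.json', 'bar.idl' ],
+    #       'cc_dir': 'foo/api',
+    #       'root_namespace': 'foo::api::%(namespace)s',
+    #     },
+    #     'includes': [ '../build/json_schema_compile.gypi' ],
+    #   },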
+    'api_gen_dir': '<(DEPTH)/tools/json_schema_compiler',
+    'api_gen': '<(api_gen_dir)/compiler.py',
+    'schema_include_rules': [],
+  },
+  'rules': [
+    {
+      # GN version: json_schema_api.gni
+      'rule_name': 'genapi',
+      'msvs_external_rule': 1,
+      'extension': 'json',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_generator.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/json_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp',
+        '--include-rules=<(schema_include_rules)'
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) json files',
+      'process_outputs_as_sources': 1,
+    },
+    {
+      'rule_name': 'genapi_idl',
+      'msvs_external_rule': 1,
+      'extension': 'idl',
+      'inputs': [
+        '<(api_gen_dir)/cc_generator.py',
+        '<(api_gen_dir)/code.py',
+        '<(api_gen_dir)/compiler.py',
+        '<(api_gen_dir)/cpp_generator.py',
+        '<(api_gen_dir)/cpp_type_generator.py',
+        '<(api_gen_dir)/cpp_util.py',
+        '<(api_gen_dir)/h_generator.py',
+        '<(api_gen_dir)/idl_schema.py',
+        '<(api_gen_dir)/model.py',
+        '<(api_gen_dir)/util.cc',
+        '<(api_gen_dir)/util.h',
+        '<(api_gen_dir)/util_cc_helper.py',
+        # TODO(calamity): uncomment this when gyp on windows behaves like other
+        # platforms. List expansions of filepaths in inputs expand to different
+        # things.
+        # '<@(schema_files)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(RULE_INPUT_DIRNAME)/<(RULE_INPUT_ROOT).h',
+      ],
+      'action': [
+        'python',
+        '<(api_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--root=<(DEPTH)',
+        '--destdir=<(SHARED_INTERMEDIATE_DIR)',
+        '--namespace=<(root_namespace)',
+        '--generator=cpp',
+        '--include-rules=<(schema_include_rules)'
+      ],
+      'message': 'Generating C++ code from <(RULE_INPUT_PATH) IDL files',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  'dependencies':[
+    '<(DEPTH)/tools/json_schema_compiler/api_gen_util.gyp:api_gen_util',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)',
+    ]
+  },
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/json_to_struct.gypi b/build/json_to_struct.gypi
new file mode 100644
index 0000000..09c8e3e
--- /dev/null
+++ b/build/json_to_struct.gypi
@@ -0,0 +1,53 @@
+# Copyright 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # When including this gypi, the following variables must be set:
+    #   schema_file: a json file that comprises the structure model.
+    #   namespace: the C++ namespace that all generated files go under
+    #   cc_dir: path to generated files
+    # Functions and namespaces can be excluded by setting "nocompile" to true.
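+    #
+    # A hypothetical target that includes this file might look like the
+    # following (all names and paths are illustrative only); the data file is
+    # listed in 'sources' so the 'genstaticinit' rule below fires on it:
+    #   {
+    #     'target_name': 'foo_prepopulated_data',
+    #     'type': 'static_library',
+    #     'sources': [ 'foo_data.json' ],
+    #     'variables': {
+    #       'schema_file': 'foo_schema.json',
+    #       'namespace': 'foo',
+    #       'cc_dir': 'foo/common',
+    #     },
+    #     'includes': [ '../build/json_to_struct.gypi' ],
+    #   },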
+    'struct_gen_dir': '<(DEPTH)/tools/json_to_struct',
+    'struct_gen%': '<(struct_gen_dir)/json_to_struct.py',
+    'output_filename%': '<(RULE_INPUT_ROOT)',
+  },
+  'rules': [
+    {
+      # GN version: //tools/json_to_struct/json_to_struct.gni
+      'rule_name': 'genstaticinit',
+      'extension': 'json',
+      'inputs': [
+        '<(struct_gen)',
+        '<(struct_gen_dir)/element_generator.py',
+        '<(struct_gen_dir)/json_to_struct.py',
+        '<(struct_gen_dir)/struct_generator.py',
+        '<(schema_file)',
+      ],
+      'outputs': [
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(output_filename).cc',
+        '<(SHARED_INTERMEDIATE_DIR)/<(cc_dir)/<(output_filename).h',
+      ],
+      'action': [
+        'python',
+        '<(struct_gen)',
+        '<(RULE_INPUT_PATH)',
+        '--destbase=<(SHARED_INTERMEDIATE_DIR)',
+        '--destdir=<(cc_dir)',
+        '--namespace=<(namespace)',
+        '--schema=<(schema_file)',
+        '--output=<(output_filename)',
+      ],
+      'message': 'Generating C++ static initializers from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)',
+    '<(DEPTH)',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/landmine_utils.py b/build/landmine_utils.py
new file mode 100644
index 0000000..6d18b6d
--- /dev/null
+++ b/build/landmine_utils.py
@@ -0,0 +1,120 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+  """This decorator caches the return value of a parameterless pure function"""
+  def memoizer(func):
+    val = []
+    @functools.wraps(func)
+    def inner():
+      if not val:
+        ret = func()
+        val.append(ret if ret is not None else default)
+        if logging.getLogger().isEnabledFor(logging.INFO):
+          print '%s -> %r' % (func.__name__, val[0])
+      return val[0]
+    return inner
+  return memoizer
+
+
+@memoize()
+def IsWindows():
+  return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+  return sys.platform.startswith(('linux', 'freebsd', 'openbsd'))
+
+
+@memoize()
+def IsMac():
+  return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+  """Parses and returns GYP_DEFINES env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+@memoize()
+def gyp_generator_flags():
+  """Parses and returns GYP_GENERATOR_FLAGS env var as a dictionary."""
+  return dict(arg.split('=', 1)
+      for arg in shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', '')))
+
+@memoize()
+def gyp_msvs_version():
+  return os.environ.get('GYP_MSVS_VERSION', '')
+
+@memoize()
+def distributor():
+  """
+  Returns a string which is the distributed build engine in use (if any).
+  Possible values: 'goma', 'ib', ''
+  """
+  if 'goma' in gyp_defines():
+    return 'goma'
+  elif IsWindows():
+    if 'CHROME_HEADLESS' in os.environ:
+      return 'ib' # use (win and !goma and headless) as approximation of ib
+
+
+@memoize()
+def platform():
+  """
+  Returns a string representing the platform this build is targeted for.
+  Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+  """
+  if 'OS' in gyp_defines():
+    if 'android' in gyp_defines()['OS']:
+      return 'android'
+    else:
+      return gyp_defines()['OS']
+  elif IsWindows():
+    return 'win'
+  elif IsLinux():
+    return 'linux'
+  else:
+    return 'mac'
+
+
+@memoize()
+def builder():
+  """
+  Returns a string representing the build engine (not compiler) to use.
+  Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
+  """
+  if 'GYP_GENERATORS' in os.environ:
+    # for simplicity, only support the first explicit generator
+    generator = os.environ['GYP_GENERATORS'].split(',')[0]
+    if generator.endswith('-android'):
+      return generator.split('-')[0]
+    elif generator.endswith('-ninja'):
+      return 'ninja'
+    else:
+      return generator
+  else:
+    if platform() == 'android':
+      # Good enough for now? Do any android bots use make?
+      return 'ninja'
+    elif platform() == 'ios':
+      return 'xcode'
+    elif IsWindows():
+      return 'ninja'
+    elif IsLinux():
+      return 'ninja'
+    elif IsMac():
+      return 'ninja'
+    else:
+      assert False, 'Don\'t know what builder we\'re using!'
diff --git a/build/landmines.py b/build/landmines.py
new file mode 100755
index 0000000..0ea2b64
--- /dev/null
+++ b/build/landmines.py
@@ -0,0 +1,134 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as the first hook (See DEPS). If it detects that
+the build should be clobbered, it will delete the contents of the build
+directory.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import errno
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import clobber
+import landmine_utils
+
+
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+
+def get_build_dir(build_tool, is_iphone=False):
+  """
+  Returns output directory absolute path dependent on build and targets.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out'
+    '/mnt/data/b/build/slave/linux/build/src/out'
+    '/b/build/slave/ios_rel_device/build/src/xcodebuild'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+  """
+  ret = None
+  if build_tool == 'xcode':
+    ret = os.path.join(SRC_DIR, 'xcodebuild')
+  elif build_tool in ['make', 'ninja', 'ninja-ios']:  # TODO: Remove ninja-ios.
+    if 'CHROMIUM_OUT_DIR' in os.environ:
+      output_dir = os.environ.get('CHROMIUM_OUT_DIR').strip()
+      if not output_dir:
+        raise RuntimeError(
+            'CHROMIUM_OUT_DIR environment variable is set but blank!')
+    else:
+      output_dir = landmine_utils.gyp_generator_flags().get('output_dir', 'out')
+    ret = os.path.join(SRC_DIR, output_dir)
+  else:
+    raise NotImplementedError('Unexpected GYP_GENERATORS (%s)' % build_tool)
+  return os.path.abspath(ret)
+
+
+def clobber_if_necessary(new_landmines):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_build_dir(landmine_utils.builder())
+  landmines_path = os.path.normpath(os.path.join(out_dir, '..', '.landmines'))
+  try:
+    os.makedirs(out_dir)
+  except OSError as e:
+    if e.errno == errno.EEXIST:
+      pass
+
+  if os.path.exists(landmines_path):
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+      sys.stdout.write('Clobbering due to:\n')
+      sys.stdout.writelines(diff)
+
+      clobber.clobber(out_dir)
+
+  # Save current set of landmines for next time.
+  with open(landmines_path, 'w') as f:
+    f.writelines(new_landmines)
+
+
+def process_options():
+  """Returns a list of landmine emitting scripts."""
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      default=[os.path.join(SRC_DIR, 'build', 'get_landmines.py')],
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+          'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+          'variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    return options.landmine_scripts + [extra_script]
+  else:
+    return options.landmine_scripts
+
+
+def main():
+  landmine_scripts = process_options()
+
+  if landmine_utils.builder() in ('dump_dependency_json', 'eclipse'):
+    return 0
+
+
+  landmines = []
+  for s in landmine_scripts:
+    proc = subprocess.Popen([sys.executable, s], stdout=subprocess.PIPE)
+    output, _ = proc.communicate()
+    landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+  clobber_if_necessary(landmines)
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/linux/OWNERS b/build/linux/OWNERS
new file mode 100644
index 0000000..4a60b79
--- /dev/null
+++ b/build/linux/OWNERS
@@ -0,0 +1,3 @@
+mmoss@chromium.org
+phajdan.jr@chromium.org
+thestig@chromium.org
diff --git a/build/linux/bin/eu-strip b/build/linux/bin/eu-strip
new file mode 100755
index 0000000..7f93eec
--- /dev/null
+++ b/build/linux/bin/eu-strip
Binary files differ
diff --git a/build/linux/bin/eu-strip.sha1 b/build/linux/bin/eu-strip.sha1
new file mode 100644
index 0000000..43f290a7
--- /dev/null
+++ b/build/linux/bin/eu-strip.sha1
@@ -0,0 +1 @@
+0a9b8f68615ce388b65201e6d22da7a9cf2e729c
\ No newline at end of file
diff --git a/build/linux/chrome_linux.croc b/build/linux/chrome_linux.croc
new file mode 100644
index 0000000..f400306
--- /dev/null
+++ b/build/linux/chrome_linux.croc
@@ -0,0 +1,29 @@
+# -*- python -*-
+# Crocodile config file for Chromium linux
+
+# TODO(jhawkins): We'll need to add a chromeos.croc once we get a coverage bot
+# for that platform.
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include non-Linux platform dirs
+    {
+      'regexp' : '.*/(chromeos|views)/',
+      'include' : 0,
+    },
+    # Don't include chromeos, windows, or mac specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|mac|win|views)(\\.|_)',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_linux\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/linux/dump_app_syms.py b/build/linux/dump_app_syms.py
new file mode 100644
index 0000000..c18bff7
--- /dev/null
+++ b/build/linux/dump_app_syms.py
@@ -0,0 +1,29 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Helper script to run dump_syms on Chrome Linux executables and strip
+# them if needed.
+
+import os
+import subprocess
+import sys
+
+if len(sys.argv) != 5:
+  print "dump_app_syms.py <dump_syms_exe> <strip_binary>"
+  print "                 <binary_with_symbols> <symbols_output>"
+  sys.exit(1)
+
+dumpsyms = sys.argv[1]
+strip_binary = sys.argv[2]
+infile = sys.argv[3]
+outfile = sys.argv[4]
+
+# Dump only when the output file is out-of-date.
+if not os.path.isfile(outfile) or \
+   os.stat(outfile).st_mtime < os.stat(infile).st_mtime:
+  with open(outfile, 'w') as outfileobj:
+    subprocess.check_call([dumpsyms, '-r', infile], stdout=outfileobj)
+
+if strip_binary != '0':
+  subprocess.check_call(['strip', infile])
diff --git a/build/linux/install-chromeos-fonts.py b/build/linux/install-chromeos-fonts.py
new file mode 100755
index 0000000..a24adc9
--- /dev/null
+++ b/build/linux/install-chromeos-fonts.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install the Chrome OS fonts on Linux.
+# This script can be run manually (as root), but is also run as part of
+# install-build-deps.sh.
+
+import os
+import shutil
+import subprocess
+import sys
+
+# Taken from the media-fonts/notofonts ebuild in chromiumos-overlay.
+VERSION = '20140815'
+URL = ('https://commondatastorage.googleapis.com/chromeos-localmirror/'
+       'distfiles/notofonts-%s.tar.bz2') % (VERSION)
+FONTS_DIR = '/usr/local/share/fonts'
+
+def main(args):
+  if not sys.platform.startswith('linux'):
+    print "Error: %s must be run on Linux." % __file__
+    return 1
+
+  if os.getuid() != 0:
+    print "Error: %s must be run as root." % __file__
+    return 1
+
+  if not os.path.isdir(FONTS_DIR):
+    print "Error: Destination directory does not exist: %s" % FONTS_DIR
+    return 1
+
+  dest_dir = os.path.join(FONTS_DIR, 'chromeos')
+
+  stamp = os.path.join(dest_dir, ".stamp02")
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == URL:
+        print "Chrome OS fonts already up-to-date in %s." % dest_dir
+        return 0
+
+  if os.path.isdir(dest_dir):
+    shutil.rmtree(dest_dir)
+  os.mkdir(dest_dir)
+  os.chmod(dest_dir, 0755)
+
+  print "Installing Chrome OS fonts to %s." % dest_dir
+  tarball = os.path.join(dest_dir, os.path.basename(URL))
+  subprocess.check_call(['curl', '-L', URL, '-o', tarball])
+  subprocess.check_call(['tar', '--no-same-owner', '--no-same-permissions',
+                         '-xf', tarball, '-C', dest_dir])
+  os.remove(tarball)
+
+  readme = os.path.join(dest_dir, "README")
+  with open(readme, 'w') as s:
+    s.write("This directory and its contents are auto-generated.\n")
+    s.write("It may be deleted and recreated. Do not modify.\n")
+    s.write("Script: %s\n" % __file__)
+
+  with open(stamp, 'w') as s:
+    s.write(URL)
+
+  for base, dirs, files in os.walk(dest_dir):
+    for dir in dirs:
+      os.chmod(os.path.join(base, dir), 0755)
+    for file in files:
+      os.chmod(os.path.join(base, file), 0644)
+
+  return 0
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
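
This installer and install-sysroot.py later in this patch share the same stamp-file idiom: after a successful unpack they record the download URL in a stamp file and skip all work on later runs while the recorded URL still matches. A generic sketch of that pattern follows; the function and parameter names are illustrative and not part of either script.

```python
# Generic sketch of the stamp-file idiom used by install-chromeos-fonts.py
# above and install-sysroot.py below. Names are illustrative.
import os


def is_up_to_date(stamp_path, current_url):
  """True when the stamp file exists and records current_url."""
  if not os.path.exists(stamp_path):
    return False
  with open(stamp_path) as f:
    return f.read() == current_url


def write_stamp(stamp_path, current_url):
  """Record current_url so the next run can skip the install."""
  with open(stamp_path, 'w') as f:
    f.write(current_url)
```

Bumping the URL (for the fonts, via the VERSION constant) invalidates the stamp, which is why the scripts rewrite it only after the download and unpack have succeeded.
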
diff --git a/build/linux/pkg-config-wrapper b/build/linux/pkg-config-wrapper
new file mode 100755
index 0000000..b759564
--- /dev/null
+++ b/build/linux/pkg-config-wrapper
@@ -0,0 +1,59 @@
+#!/bin/bash
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This program wraps around pkg-config to generate the correct include and
+# library paths when cross-compiling using a sysroot.
+# The assumption is that the sysroot contains the .pc files in usr/lib/pkgconfig
+# and usr/share/pkgconfig (relative to the sysroot) and that they output paths
+# relative to some parent path of the sysroot.
+# This assumption is valid for a range of sysroots, in particular: an
+# LSB-compliant root filesystem mounted at the sysroot, and a board build
+# directory of a Chromium OS chroot.
+# Additional directories containing .pc files may be specified by setting
+# the PKG_CONFIG_PATH environment variable; these will be prepended to the
+# generated paths.
+
+root="$1"
+shift
+target_arch="$1"
+shift
+libpath="$1"
+shift
+
+if [ -z "$root" -o -z "$target_arch" ]
+then
+  echo "usage: $0 /path/to/sysroot target_arch libdir [pkg-config-arguments] package" >&2
+  exit 1
+fi
+
+if [ "$target_arch" = "x64" ]
+then
+  : ${libpath:="lib64"}
+else
+  : ${libpath:="lib"}
+fi
+
+rewrite=`dirname $0`/rewrite_dirs.py
+package=${!#}
+
+config_path=$root/usr/$libpath/pkgconfig:$root/usr/share/pkgconfig
+
+# prepend any paths specified by the environment
+if [ -n "$PKG_CONFIG_PATH" ]
+then
+  config_path="$PKG_CONFIG_PATH:$config_path"
+fi
+
+set -e
+# Some sysroots, like the Chromium OS ones, may generate paths that are not
+# relative to the sysroot. For example,
+# /path/to/chroot/build/x86-generic/usr/lib/pkgconfig/pkg.pc may have all paths
+# relative to /path/to/chroot (i.e. prefix=/build/x86-generic/usr) instead of
+# relative to /path/to/chroot/build/x86-generic (i.e. prefix=/usr).
+# To support this correctly, it's necessary to extract the prefix to strip from
+# pkg-config's |prefix| variable.
+prefix=`PKG_CONFIG_PATH=$config_path pkg-config --variable=prefix "$package" | sed -e 's|/usr$||'`
+result=`PKG_CONFIG_PATH=$config_path pkg-config "$@"`
+echo "$result"| $rewrite --sysroot "$root" --strip-prefix "$prefix"
diff --git a/build/linux/rewrite_dirs.py b/build/linux/rewrite_dirs.py
new file mode 100755
index 0000000..30f22f0
--- /dev/null
+++ b/build/linux/rewrite_dirs.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Rewrites paths in -I, -L and other option to be relative to a sysroot."""
+
+import sys
+import os
+import optparse
+
+REWRITE_PREFIX = ['-I',
+                  '-idirafter',
+                  '-imacros',
+                  '-imultilib',
+                  '-include',
+                  '-iprefix',
+                  '-iquote',
+                  '-isystem',
+                  '-L']
+
+def RewritePath(path, opts):
+  """Rewrites a path by stripping the prefix and prepending the sysroot."""
+  sysroot = opts.sysroot
+  prefix = opts.strip_prefix
+  if os.path.isabs(path) and not path.startswith(sysroot):
+    if path.startswith(prefix):
+      path = path[len(prefix):]
+    path = path.lstrip('/')
+    return os.path.join(sysroot, path)
+  else:
+    return path
+
+
+def RewriteLine(line, opts):
+  """Rewrites all the paths in recognized options."""
+  args = line.split()
+  count = len(args)
+  i = 0
+  while i < count:
+    for prefix in REWRITE_PREFIX:
+      # The option can be either in the form "-I /path/to/dir" or
+      # "-I/path/to/dir" so handle both.
+      if args[i] == prefix:
+        i += 1
+        try:
+          args[i] = RewritePath(args[i], opts)
+        except IndexError:
+          sys.stderr.write('Missing argument following %s\n' % prefix)
+          break
+      elif args[i].startswith(prefix):
+        args[i] = prefix + RewritePath(args[i][len(prefix):], opts)
+    i += 1
+
+  return ' '.join(args)
+
+
+def main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-s', '--sysroot', default='/', help='sysroot to prepend')
+  parser.add_option('-p', '--strip-prefix', default='', help='prefix to strip')
+  opts, args = parser.parse_args(argv[1:])
+
+  for line in sys.stdin.readlines():
+    line = RewriteLine(line.strip(), opts)
+    print line
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
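
To illustrate the rewriting, here is a small, made-up example that feeds a typical pkg-config output line through `RewriteLine` with the same options `main()` defines. Paths already under the sysroot and non-path flags are left alone; all other absolute paths are re-rooted under the sysroot after the prefix is stripped.

```python
# Made-up example of RewriteLine from rewrite_dirs.py above; assumes
# rewrite_dirs.py is importable (i.e. on sys.path).
import optparse
from rewrite_dirs import RewriteLine

parser = optparse.OptionParser()
parser.add_option('-s', '--sysroot', default='/')
parser.add_option('-p', '--strip-prefix', default='')
opts, _ = parser.parse_args(
    ['--sysroot', '/sysroot', '--strip-prefix', '/build/x86-generic'])

line = '-I/build/x86-generic/usr/include/glib-2.0 -L/usr/lib -lglib-2.0'
print(RewriteLine(line, opts))
# -> -I/sysroot/usr/include/glib-2.0 -L/sysroot/usr/lib -lglib-2.0
```
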
diff --git a/build/linux/sysroot_ld_path.sh b/build/linux/sysroot_ld_path.sh
new file mode 100755
index 0000000..4b8bf73
--- /dev/null
+++ b/build/linux/sysroot_ld_path.sh
@@ -0,0 +1,100 @@
+#!/bin/sh
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Reads etc/ld.so.conf and/or etc/ld.so.conf.d/*.conf and returns the
+# appropriate linker flags.
+#
+#  sysroot_ld_path.sh /abspath/to/sysroot
+#
+
+log_error_and_exit() {
+  echo $0: $@
+  exit 1
+}
+
+process_entry() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_entry()"
+  fi
+  local root="$1"
+  local localpath="$2"
+
+  echo $localpath | grep -qs '^/'
+  if [ $? -ne 0 ]; then
+    log_error_and_exit $localpath does not start with /
+  fi
+  local entry="$root$localpath"
+  echo -L$entry
+  echo -Wl,-rpath-link=$entry
+}
+
+process_ld_so_conf() {
+  if [ -z "$1" ] || [ -z "$2" ]; then
+    log_error_and_exit "bad arguments to process_ld_so_conf()"
+  fi
+  local root="$1"
+  local ld_so_conf="$2"
+
+  # ld.so.conf may include relative include paths. pushd is a bashism.
+  local saved_pwd=$(pwd)
+  cd $(dirname "$ld_so_conf")
+
+  cat "$ld_so_conf" | \
+    while read ENTRY; do
+      echo "$ENTRY" | grep -qs ^include
+      if [ $? -eq 0 ]; then
+        local included_files=$(echo "$ENTRY" | sed 's/^include //')
+        echo "$included_files" | grep -qs ^/
+        if [ $? -eq 0 ]; then
+          if ls $root$included_files >/dev/null 2>&1 ; then
+            for inc_file in $root$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        else
+          if ls $(pwd)/$included_files >/dev/null 2>&1 ; then
+            for inc_file in $(pwd)/$included_files; do
+              process_ld_so_conf "$root" "$inc_file"
+            done
+          fi
+        fi
+        continue
+      fi
+
+      echo "$ENTRY" | grep -qs ^/
+      if [ $? -eq 0 ]; then
+        process_entry "$root" "$ENTRY"
+      fi
+    done
+
+  # popd is a bashism
+  cd "$saved_pwd"
+}
+
+# Main
+
+if [ $# -ne 1 ]; then
+  echo "Usage: $0 /abspath/to/sysroot"
+  exit 1
+fi
+
+echo $1 | grep -qs ' '
+if [ $? -eq 0 ]; then
+  log_error_and_exit $1 contains whitespace.
+fi
+
+LD_SO_CONF="$1/etc/ld.so.conf"
+LD_SO_CONF_D="$1/etc/ld.so.conf.d"
+
+if [ -e "$LD_SO_CONF" ]; then
+  process_ld_so_conf "$1" "$LD_SO_CONF" | xargs echo
+elif [ -e "$LD_SO_CONF_D" ]; then
+  find "$LD_SO_CONF_D" -maxdepth 1 -name '*.conf' -print -quit > /dev/null
+  if [ $? -eq 0 ]; then
+    for entry in $LD_SO_CONF_D/*.conf; do
+      process_ld_so_conf "$1" "$entry"
+    done | xargs echo
+  fi
+fi
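
The net effect of the script is one `-L<sysroot><dir>` plus one `-Wl,-rpath-link=<sysroot><dir>` pair for every absolute directory named in the sysroot's ld.so.conf. A compact Python sketch of the same idea follows; unlike the shell script it deliberately skips `include` directives, so it is an approximation rather than a drop-in replacement.

```python
# Approximate Python sketch of sysroot_ld_path.sh: emit -L and
# -Wl,-rpath-link flags for absolute directories listed in
# <sysroot>/etc/ld.so.conf. 'include' directives are ignored here.
import os
import sys


def sysroot_ld_flags(sysroot):
  flags = []
  conf = os.path.join(sysroot, 'etc', 'ld.so.conf')
  if not os.path.exists(conf):
    return flags
  with open(conf) as f:
    for entry in f:
      entry = entry.strip()
      if not entry.startswith('/'):
        continue  # skip comments, relative entries and 'include' lines
      path = sysroot + entry
      flags.append('-L' + path)
      flags.append('-Wl,-rpath-link=' + path)
  return flags


if __name__ == '__main__':
  print(' '.join(sysroot_ld_flags(sys.argv[1])))
```
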
diff --git a/build/linux/sysroot_scripts/install-sysroot.py b/build/linux/sysroot_scripts/install-sysroot.py
new file mode 100755
index 0000000..99fc2d6
--- /dev/null
+++ b/build/linux/sysroot_scripts/install-sysroot.py
@@ -0,0 +1,193 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Script to install a Debian Wheezy sysroot for making official Google Chrome
+# Linux builds.
+# The sysroot is needed to make Chrome work for Debian Wheezy.
+# This script can be run manually but is more often run as part of gclient
+# hooks. When run from hooks, this script should be a no-op on non-Linux
+# platforms.
+
+# The sysroot image could be constructed from scratch based on the current
+# state of Debian Wheezy, but for consistency we currently use a pre-built root
+# image. The image will normally need to be rebuilt every time Chrome's build
+# dependencies are changed.
+
+import hashlib
+import platform
+import optparse
+import os
+import re
+import shutil
+import subprocess
+import sys
+
+
+SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
+URL_PREFIX = 'http://storage.googleapis.com'
+URL_PATH = 'chrome-linux-sysroot/toolchain'
+REVISION_AMD64 = 'a2d45701cb21244b9514e420950ba6ba687fb655'
+REVISION_ARM = 'a2d45701cb21244b9514e420950ba6ba687fb655'
+REVISION_I386 = 'a2d45701cb21244b9514e420950ba6ba687fb655'
+REVISION_MIPS = '7749d2957387abf225b6d45154c3ddad142148dc'
+TARBALL_AMD64 = 'debian_wheezy_amd64_sysroot.tgz'
+TARBALL_ARM = 'debian_wheezy_arm_sysroot.tgz'
+TARBALL_I386 = 'debian_wheezy_i386_sysroot.tgz'
+TARBALL_MIPS = 'debian_wheezy_mips_sysroot.tgz'
+TARBALL_AMD64_SHA1SUM = '601216c0f980e798e7131635f3dd8171b3dcbcde'
+TARBALL_ARM_SHA1SUM = '6289593b36616526562a4d85ae9c92b694b8ce7e'
+TARBALL_I386_SHA1SUM = '0090e5a4b56ab9ffb5d557da6a520195ab59b446'
+TARBALL_MIPS_SHA1SUM = '3b4d782a237db4aac185a638572a7747c1a21825'
+SYSROOT_DIR_AMD64 = 'debian_wheezy_amd64-sysroot'
+SYSROOT_DIR_ARM = 'debian_wheezy_arm-sysroot'
+SYSROOT_DIR_I386 = 'debian_wheezy_i386-sysroot'
+SYSROOT_DIR_MIPS = 'debian_wheezy_mips-sysroot'
+
+valid_archs = ('arm', 'i386', 'amd64', 'mips')
+
+
+def GetSha1(filename):
+  sha1 = hashlib.sha1()
+  with open(filename, 'rb') as f:
+    while True:
+      # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
+      chunk = f.read(1024*1024)
+      if not chunk:
+        break
+      sha1.update(chunk)
+  return sha1.hexdigest()
+
+
+def DetectArch(gyp_defines):
+  # Check for optional target_arch and only install for that architecture.
+  # If target_arch is not specified, then only install for the host
+  # architecture.
+  if 'target_arch=x64' in gyp_defines:
+    return 'amd64'
+  elif 'target_arch=ia32' in gyp_defines:
+    return 'i386'
+  elif 'target_arch=arm' in gyp_defines:
+    return 'arm'
+  elif 'target_arch=mipsel' in gyp_defines:
+    return 'mips'
+
+  # Figure out host arch using build/detect_host_arch.py and
+  # set target_arch to host arch
+  build_dir = os.path.dirname(os.path.dirname(os.path.join(SCRIPT_DIR)))
+  sys.path.append(build_dir)
+  import detect_host_arch
+
+  detected_host_arch = detect_host_arch.HostArch()
+  if detected_host_arch == 'x64':
+    return 'amd64'
+  elif detected_host_arch == 'ia32':
+    return 'i386'
+  elif detected_host_arch == 'arm':
+    return 'arm'
+  elif detected_host_arch == 'mips':
+    return 'mips'
+  else:
+    print "Unknown host arch: %s" % detected_host_arch
+
+  return None
+
+
+def main():
+  if options.running_as_hook and not sys.platform.startswith('linux'):
+    return 0
+
+  gyp_defines = os.environ.get('GYP_DEFINES', '')
+
+  if options.arch:
+    target_arch = options.arch
+  else:
+    target_arch = DetectArch(gyp_defines)
+    if not target_arch:
+      print 'Unable to detect host architecture'
+      return 1
+
+  if options.running_as_hook and target_arch != 'arm' and target_arch != 'mips':
+    # When run from runhooks, only install the sysroot for an Official Chrome
+    # Linux build, except on ARM where we always use a sysroot.
+    skip_if_defined = ['branding=Chrome', 'buildtype=Official']
+    skip_if_undefined = ['chromeos=1']
+    for option in skip_if_defined:
+      if option not in gyp_defines:
+        return 0
+    for option in skip_if_undefined:
+      if option in gyp_defines:
+        return 0
+
+  # The sysroot directory should match the one specified in build/common.gypi.
+  # TODO(thestig) Consider putting this elsewhere to avoid having to recreate
+  # it on every build.
+  linux_dir = os.path.dirname(SCRIPT_DIR)
+  if target_arch == 'amd64':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_AMD64)
+    tarball_filename = TARBALL_AMD64
+    tarball_sha1sum = TARBALL_AMD64_SHA1SUM
+    revision = REVISION_AMD64
+  elif target_arch == 'arm':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_ARM)
+    tarball_filename = TARBALL_ARM
+    tarball_sha1sum = TARBALL_ARM_SHA1SUM
+    revision = REVISION_ARM
+  elif target_arch == 'i386':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_I386)
+    tarball_filename = TARBALL_I386
+    tarball_sha1sum = TARBALL_I386_SHA1SUM
+    revision = REVISION_I386
+  elif target_arch == 'mips':
+    sysroot = os.path.join(linux_dir, SYSROOT_DIR_MIPS)
+    tarball_filename = TARBALL_MIPS
+    tarball_sha1sum = TARBALL_MIPS_SHA1SUM
+    revision = REVISION_MIPS
+  else:
+    print 'Unknown architecture: %s' % target_arch
+    assert(False)
+
+  url = '%s/%s/%s/%s' % (URL_PREFIX, URL_PATH, revision, tarball_filename)
+
+  stamp = os.path.join(sysroot, '.stamp')
+  if os.path.exists(stamp):
+    with open(stamp) as s:
+      if s.read() == url:
+        print 'Debian Wheezy %s root image already up-to-date: %s' % \
+            (target_arch, sysroot)
+        return 0
+
+  print 'Installing Debian Wheezy %s root image: %s' % (target_arch, sysroot)
+  if os.path.isdir(sysroot):
+    shutil.rmtree(sysroot)
+  os.mkdir(sysroot)
+  tarball = os.path.join(sysroot, tarball_filename)
+  print 'Downloading %s' % url
+  sys.stdout.flush()
+  sys.stderr.flush()
+  subprocess.check_call(['curl', '--fail', '-L', url, '-o', tarball])
+  sha1sum = GetSha1(tarball)
+  if sha1sum != tarball_sha1sum:
+    print 'Tarball sha1sum is wrong.'
+    print 'Expected %s, actual: %s' % (tarball_sha1sum, sha1sum)
+    return 1
+  subprocess.check_call(['tar', 'xf', tarball, '-C', sysroot])
+  os.remove(tarball)
+
+  with open(stamp, 'w') as s:
+    s.write(url)
+  return 0
+
+
+if __name__ == '__main__':
+  parser = optparse.OptionParser('usage: %prog [OPTIONS]')
+  parser.add_option('--running-as-hook', action='store_true',
+                    default=False, help='Used when running from gclient hooks.'
+                                        ' In this mode the sysroot will only '
+                                        'be installed for official Linux '
+                                        'builds or ARM Linux builds')
+  parser.add_option('--arch', type='choice', choices=valid_archs,
+                    help='Sysroot architecture: %s' % ', '.join(valid_archs))
+  options, _ = parser.parse_args()
+  sys.exit(main())
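
The installer's integrity check boils down to: download the tarball with curl, hash it in 1 MB chunks, and compare the digest against the pinned SHA-1 constant for that architecture. An isolated sketch of that flow follows; the helper names are illustrative rather than part of the script above.

```python
# Isolated sketch of the download-and-verify step performed by
# install-sysroot.py above. Function names are illustrative.
import hashlib
import subprocess
import sys


def sha1_of(path, chunk_size=1024 * 1024):
  """Hash a file in chunks so large tarballs are not read into memory."""
  digest = hashlib.sha1()
  with open(path, 'rb') as f:
    for chunk in iter(lambda: f.read(chunk_size), b''):
      digest.update(chunk)
  return digest.hexdigest()


def fetch_and_verify(url, dest, expected_sha1):
  """Download url to dest and abort if its SHA-1 does not match."""
  subprocess.check_call(['curl', '--fail', '-L', url, '-o', dest])
  actual = sha1_of(dest)
  if actual != expected_sha1:
    sys.exit('SHA-1 mismatch for %s: expected %s, got %s'
             % (dest, expected_sha1, actual))
```
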
diff --git a/build/linux/sysroot_scripts/packagelist.trusty.arm b/build/linux/sysroot_scripts/packagelist.trusty.arm
new file mode 100644
index 0000000..cd4b671
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.trusty.arm
@@ -0,0 +1,158 @@
+main/a/alsa-lib/libasound2_1.0.27.2-3ubuntu7_armhf.deb
+main/a/alsa-lib/libasound2-dev_1.0.27.2-3ubuntu7_armhf.deb
+main/a/atk1.0/libatk1.0-0_2.10.0-2ubuntu2_armhf.deb
+main/a/atk1.0/libatk1.0-dev_2.10.0-2ubuntu2_armhf.deb
+main/a/avahi/libavahi-client3_0.6.31-4ubuntu1_armhf.deb
+main/a/avahi/libavahi-common3_0.6.31-4ubuntu1_armhf.deb
+main/c/cairo/libcairo2_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cairo/libcairo2-dev_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cairo/libcairo-gobject2_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cairo/libcairo-script-interpreter2_1.13.0~20140204-0ubuntu1_armhf.deb
+main/c/cups/libcups2_1.7.2-0ubuntu1_armhf.deb
+main/c/cups/libcups2-dev_1.7.2-0ubuntu1_armhf.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb
+main/d/dbus/libdbus-1-3_1.6.18-0ubuntu4_armhf.deb
+main/d/dbus/libdbus-1-dev_1.6.18-0ubuntu4_armhf.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.9-3ubuntu1_armhf.deb
+main/e/e2fsprogs/libcomerr2_1.42.9-3ubuntu1_armhf.deb
+main/e/eglibc/libc6_2.19-0ubuntu6_armhf.deb
+main/e/eglibc/libc6-dev_2.19-0ubuntu6_armhf.deb
+main/e/elfutils/libelf1_0.158-0ubuntu5_armhf.deb
+main/e/elfutils/libelf-dev_0.158-0ubuntu5_armhf.deb
+main/e/expat/libexpat1_2.1.0-4ubuntu1_armhf.deb
+main/e/expat/libexpat1-dev_2.1.0-4ubuntu1_armhf.deb
+main/f/fontconfig/libfontconfig1_2.11.0-0ubuntu4_armhf.deb
+main/f/fontconfig/libfontconfig1-dev_2.11.0-0ubuntu4_armhf.deb
+main/f/freetype/libfreetype6_2.5.2-1ubuntu2_armhf.deb
+main/f/freetype/libfreetype6-dev_2.5.2-1ubuntu2_armhf.deb
+main/g/gcc-4.8/gcc-4.8_4.8.2-19ubuntu1_armhf.deb
+main/g/gcc-4.8/libgomp1_4.8.2-19ubuntu1_armhf.deb
+main/g/gcc-4.8/libstdc++-4.8-dev_4.8.2-19ubuntu1_armhf.deb
+main/g/gcc-4.8/libstdc++6_4.8.2-19ubuntu1_armhf.deb
+main/g/gccgo-4.9/libgcc1_4.9-20140406-0ubuntu1_armhf.deb
+main/g/gconf/libgconf2-4_3.2.6-0ubuntu2_armhf.deb
+main/g/gconf/libgconf-2-4_3.2.6-0ubuntu2_armhf.deb
+main/g/gconf/libgconf2-dev_3.2.6-0ubuntu2_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.30.7-0ubuntu1_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.30.7-0ubuntu1_armhf.deb
+main/g/glib2.0/libglib2.0-0_2.40.0-2_armhf.deb
+main/g/glib2.0/libglib2.0-dev_2.40.0-2_armhf.deb
+main/g/gnutls26/libgnutls26_2.12.23-12ubuntu2_armhf.deb
+main/g/gnutls26/libgnutls-dev_2.12.23-12ubuntu2_armhf.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.23-12ubuntu2_armhf.deb
+main/g/gnutls26/libgnutlsxx27_2.12.23-12ubuntu2_armhf.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.23-0ubuntu1_armhf.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.23-0ubuntu1_armhf.deb
+main/k/keyutils/libkeyutils1_1.5.6-1_armhf.deb
+main/k/krb5/krb5-multidev_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libgssapi-krb5-2_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libgssrpc4_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libk5crypto3_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkadm5clnt-mit9_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkadm5srv-mit9_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkdb5-7_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkrb5-3_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkrb5-dev_1.12+dfsg-2ubuntu4_armhf.deb
+main/k/krb5/libkrb5support0_1.12+dfsg-2ubuntu4_armhf.deb
+main/libc/libcap2/libcap2_2.24-0ubuntu2_armhf.deb
+main/libc/libcap2/libcap-dev_2.24-0ubuntu2_armhf.deb
+main/libd/libdrm/libdrm2_2.4.52-1_armhf.deb
+main/libe/libexif/libexif12_0.6.21-1ubuntu1_armhf.deb
+main/libe/libexif/libexif-dev_0.6.21-1ubuntu1_armhf.deb
+main/libf/libffi/libffi6_3.1~rc1+r3.0.13-12_armhf.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.3-2ubuntu4_armhf.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.3-2ubuntu4_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.8.0-2_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.8.0-2_armhf.deb
+main/libg/libgpg-error/libgpg-error0_1.12-0.2ubuntu1_armhf.deb
+main/libg/libgpg-error/libgpg-error-dev_1.12-0.2ubuntu1_armhf.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-5build3_armhf.deb
+main/libp/libp11/libp11-2_0.2.8-3ubuntu1_armhf.deb
+main/libp/libpng/libpng12-0_1.2.50-1ubuntu2_armhf.deb
+main/libp/libpng/libpng12-dev_1.2.50-1ubuntu2_armhf.deb
+main/libs/libselinux/libselinux1_2.2.2-1_armhf.deb
+main/libt/libtasn1-6/libtasn1-6_3.4-3_armhf.deb
+main/libx/libx11/libx11-6_1.6.2-1ubuntu2_armhf.deb
+main/libx/libx11/libx11-dev_1.6.2-1ubuntu2_armhf.deb
+main/libx/libx11/libx11-xcb1_1.6.2-1ubuntu2_armhf.deb
+main/libx/libxau/libxau6_1.0.8-1_armhf.deb
+main/libx/libxau/libxau-dev_1.0.8-1_armhf.deb
+main/libx/libxcb/libxcb1_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb1-dev_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-glx0_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-render0_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-render0-dev_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-shm0_1.10-2ubuntu1_armhf.deb
+main/libx/libxcb/libxcb-shm0-dev_1.10-2ubuntu1_armhf.deb
+main/libx/libxcomposite/libxcomposite1_0.4.4-1_armhf.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.4-1_armhf.deb
+main/libx/libxcursor/libxcursor1_1.1.14-1_armhf.deb
+main/libx/libxcursor/libxcursor-dev_1.1.14-1_armhf.deb
+main/libx/libxdamage/libxdamage1_1.1.4-1ubuntu1_armhf.deb
+main/libx/libxdamage/libxdamage-dev_1.1.4-1ubuntu1_armhf.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb
+main/libx/libxext/libxext6_1.3.2-1_armhf.deb
+main/libx/libxext/libxext-dev_1.3.2-1_armhf.deb
+main/libx/libxfixes/libxfixes3_5.0.1-1ubuntu1_armhf.deb
+main/libx/libxfixes/libxfixes-dev_5.0.1-1ubuntu1_armhf.deb
+main/libx/libxi/libxi6_1.7.1.901-1ubuntu1_armhf.deb
+main/libx/libxi/libxi-dev_1.7.1.901-1ubuntu1_armhf.deb
+main/libx/libxinerama/libxinerama1_1.1.3-1_armhf.deb
+main/libx/libxinerama/libxinerama-dev_1.1.3-1_armhf.deb
+main/libx/libxrandr/libxrandr2_1.4.2-1_armhf.deb
+main/libx/libxrandr/libxrandr-dev_1.4.2-1_armhf.deb
+main/libx/libxrender/libxrender1_0.9.8-1_armhf.deb
+main/libx/libxrender/libxrender-dev_0.9.8-1_armhf.deb
+main/libx/libxss/libxss1_1.2.2-1_armhf.deb
+main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb
+main/libx/libxt/libxt6_1.1.4-1_armhf.deb
+main/libx/libxt/libxt-dev_1.1.4-1_armhf.deb
+main/libx/libxtst/libxtst6_1.2.2-1_armhf.deb
+main/libx/libxtst/libxtst-dev_1.2.2-1_armhf.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.3-1_armhf.deb
+main/l/linux/linux-libc-dev_3.13.0-24.46_armhf.deb
+main/m/mesa/libgl1-mesa-dev_10.1.0-4ubuntu5_armhf.deb
+main/m/mesa/libgl1-mesa-glx_10.1.0-4ubuntu5_armhf.deb
+main/m/mesa/libglapi-mesa_10.1.0-4ubuntu5_armhf.deb
+main/m/mesa/mesa-common-dev_10.1.0-4ubuntu5_armhf.deb
+main/n/nspr/libnspr4_4.10.2-1ubuntu1_armhf.deb
+main/n/nspr/libnspr4-dev_4.10.2-1ubuntu1_armhf.deb
+main/n/nss/libnss3_3.15.4-1ubuntu7_armhf.deb
+main/n/nss/libnss3-dev_3.15.4-1ubuntu7_armhf.deb
+main/o/openssl/libssl1.0.0_1.0.1f-1ubuntu2_armhf.deb
+main/o/openssl/libssl-dev_1.0.1f-1ubuntu2_armhf.deb
+main/o/orbit2/liborbit2_2.14.19-0.3_armhf.deb
+main/p/p11-kit/libp11-kit0_0.20.2-2ubuntu2_armhf.deb
+main/p/pam/libpam0g_1.1.8-1ubuntu2_armhf.deb
+main/p/pam/libpam0g-dev_1.1.8-1ubuntu2_armhf.deb
+main/p/pango1.0/libpango-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpango1.0-dev_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpangocairo-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpangoft2-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pango1.0/libpangoxft-1.0-0_1.36.3-1ubuntu1_armhf.deb
+main/p/pciutils/libpci3_3.2.1-1ubuntu5_armhf.deb
+main/p/pciutils/libpci-dev_3.2.1-1ubuntu5_armhf.deb
+main/p/pcre3/libpcre3_8.31-2ubuntu2_armhf.deb
+main/p/pcre3/libpcre3-dev_8.31-2ubuntu2_armhf.deb
+main/p/pcre3/libpcrecpp0_8.31-2ubuntu2_armhf.deb
+main/p/pixman/libpixman-1-0_0.30.2-2ubuntu1_armhf.deb
+main/p/pixman/libpixman-1-dev_0.30.2-2ubuntu1_armhf.deb
+main/p/pulseaudio/libpulse0_4.0-0ubuntu11_armhf.deb
+main/p/pulseaudio/libpulse-dev_4.0-0ubuntu11_armhf.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_4.0-0ubuntu11_armhf.deb
+main/s/speech-dispatcher/libspeechd2_0.8-5ubuntu1_armhf.deb
+main/s/speech-dispatcher/libspeechd-dev_0.8-5ubuntu1_armhf.deb
+main/s/speech-dispatcher/speech-dispatcher_0.8-5ubuntu1_armhf.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.24-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2ubuntu2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.3-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.4.0+git20120101.is.really.1.4.0-0ubuntu1_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.3.0-1_all.deb
+main/z/zlib/zlib1g_1.2.8.dfsg-1ubuntu1_armhf.deb
+main/z/zlib/zlib1g-dev_1.2.8.dfsg-1ubuntu1_armhf.deb
diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.amd64 b/build/linux/sysroot_scripts/packagelist.wheezy.amd64
new file mode 100644
index 0000000..ced7dfd
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.wheezy.amd64
@@ -0,0 +1,157 @@
+main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb
+main/a/alsa-lib/libasound2-dev_1.0.25-4_amd64.deb
+main/a/atk1.0/libatk1.0-0_2.4.0-2_amd64.deb
+main/a/atk1.0/libatk1.0-dev_2.4.0-2_amd64.deb
+main/a/attr/libattr1_2.4.46-8_amd64.deb
+main/a/avahi/libavahi-client3_0.6.31-2_amd64.deb
+main/a/avahi/libavahi-common3_0.6.31-2_amd64.deb
+main/c/cairo/libcairo2_1.12.2-3_amd64.deb
+main/c/cairo/libcairo2-dev_1.12.2-3_amd64.deb
+main/c/cairo/libcairo-gobject2_1.12.2-3_amd64.deb
+main/c/cairo/libcairo-script-interpreter2_1.12.2-3_amd64.deb
+main/c/cups/libcups2_1.5.3-5+deb7u4_amd64.deb
+main/c/cups/libcups2-dev_1.5.3-5+deb7u4_amd64.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_amd64.deb
+main/d/dbus/libdbus-1-3_1.6.8-1+deb7u5_amd64.deb
+main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u5_amd64.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1_amd64.deb
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1_amd64.deb
+main/e/eglibc/libc6_2.13-38+deb7u6_amd64.deb
+main/e/eglibc/libc6-dev_2.13-38+deb7u6_amd64.deb
+main/e/elfutils/libelf1_0.152-1+wheezy1_amd64.deb
+main/e/elfutils/libelf-dev_0.152-1+wheezy1_amd64.deb
+main/e/expat/libexpat1_2.1.0-1+deb7u1_amd64.deb
+main/e/expat/libexpat1-dev_2.1.0-1+deb7u1_amd64.deb
+main/f/fontconfig/libfontconfig1_2.9.0-7.1_amd64.deb
+main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_amd64.deb
+main/f/freetype/libfreetype6_2.4.9-1.1_amd64.deb
+main/f/freetype/libfreetype6-dev_2.4.9-1.1_amd64.deb
+main/g/gcc-4.6/gcc-4.6_4.6.3-14_amd64.deb
+main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_amd64.deb
+main/g/gcc-4.7/libgcc1_4.7.2-5_amd64.deb
+main/g/gcc-4.7/libgomp1_4.7.2-5_amd64.deb
+main/g/gcc-4.7/libquadmath0_4.7.2-5_amd64.deb
+main/g/gcc-4.7/libstdc++6_4.7.2-5_amd64.deb
+main/g/gconf/libgconf-2-4_3.2.5-1+build1_amd64.deb
+main/g/gconf/libgconf2-4_3.2.5-1+build1_amd64.deb
+main/g/gconf/libgconf2-dev_3.2.5-1+build1_amd64.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1_amd64.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1_amd64.deb
+main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_amd64.deb
+main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_amd64.deb
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u2_amd64.deb
+main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u2_amd64.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u2_amd64.deb
+main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u2_amd64.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_amd64.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_amd64.deb
+main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_amd64.deb
+main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u2_amd64.deb
+main/libc/libcap2/libcap2_2.22-1.2_amd64.deb
+main/libc/libcap2/libcap-dev_2.22-1.2_amd64.deb
+main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_amd64.deb
+main/libe/libexif/libexif12_0.6.20-3_amd64.deb
+main/libe/libexif/libexif-dev_0.6.20-3_amd64.deb
+main/libf/libffi/libffi5_3.0.10-3_amd64.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u2_amd64.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u2_amd64.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_amd64.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_amd64.deb
+main/libg/libgpg-error/libgpg-error0_1.10-3.1_amd64.deb
+main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_amd64.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-4_amd64.deb
+main/libp/libp11/libp11-2_0.2.8-2_amd64.deb
+main/libp/libpng/libpng12-0_1.2.49-1_amd64.deb
+main/libp/libpng/libpng12-dev_1.2.49-1_amd64.deb
+main/libs/libselinux/libselinux1_2.1.9-5_amd64.deb
+main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u1_amd64.deb
+main/libx/libx11/libx11-6_1.5.0-1+deb7u1_amd64.deb
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u1_amd64.deb
+main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u1_amd64.deb
+main/libx/libxau/libxau6_1.0.7-1_amd64.deb
+main/libx/libxau/libxau-dev_1.0.7-1_amd64.deb
+main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_amd64.deb
+main/libx/libxcomposite/libxcomposite1_0.4.3-2_amd64.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_amd64.deb
+main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_amd64.deb
+main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_amd64.deb
+main/libx/libxdamage/libxdamage1_1.1.3-2_amd64.deb
+main/libx/libxdamage/libxdamage-dev_1.1.3-2_amd64.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_amd64.deb
+main/libx/libxext/libxext6_1.3.1-2+deb7u1_amd64.deb
+main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_amd64.deb
+main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_amd64.deb
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_amd64.deb
+main/libx/libxi/libxi6_1.6.1-1+deb7u1_amd64.deb
+main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_amd64.deb
+main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_amd64.deb
+main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_amd64.deb
+main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_amd64.deb
+main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_amd64.deb
+main/libx/libxrender/libxrender1_0.9.7-1+deb7u1_amd64.deb
+main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u1_amd64.deb
+main/libx/libxss/libxss1_1.2.2-1_amd64.deb
+main/libx/libxss/libxss-dev_1.2.2-1_amd64.deb
+main/libx/libxt/libxt6_1.1.3-1+deb7u1_amd64.deb
+main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_amd64.deb
+main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_amd64.deb
+main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_amd64.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_amd64.deb
+main/l/linux/linux-libc-dev_3.2.65-1_amd64.deb
+main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_amd64.deb
+main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_amd64.deb
+main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_amd64.deb
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_amd64.deb
+main/n/nspr/libnspr4_4.9.2-1+deb7u2_amd64.deb
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u2_amd64.deb
+main/n/nss/libnss3_3.14.5-1+deb7u3_amd64.deb
+main/n/nss/libnss3-dev_3.14.5-1+deb7u3_amd64.deb
+main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u13_amd64.deb
+main/o/openssl/libssl-dev_1.0.1e-2+deb7u13_amd64.deb
+main/o/orbit2/liborbit2_2.14.19-0.1_amd64.deb
+main/p/p11-kit/libp11-kit0_0.12-3_amd64.deb
+main/p/pam/libpam0g_1.1.3-7.1_amd64.deb
+main/p/pam/libpam0g-dev_1.1.3-7.1_amd64.deb
+main/p/pango1.0/libpango1.0-0_1.30.0-1_amd64.deb
+main/p/pango1.0/libpango1.0-dev_1.30.0-1_amd64.deb
+main/p/pciutils/libpci3_3.1.9-6_amd64.deb
+main/p/pciutils/libpci-dev_3.1.9-6_amd64.deb
+main/p/pcre3/libpcre3_8.30-5_amd64.deb
+main/p/pcre3/libpcre3-dev_8.30-5_amd64.deb
+main/p/pcre3/libpcrecpp0_8.30-5_amd64.deb
+main/p/pixman/libpixman-1-0_0.26.0-4+deb7u1_amd64.deb
+main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u1_amd64.deb
+main/p/pulseaudio/libpulse0_2.0-6.1_amd64.deb
+main/p/pulseaudio/libpulse-dev_2.0-6.1_amd64.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_amd64.deb
+main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_amd64.deb
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_amd64.deb
+main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_amd64.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
+main/z/zlib/zlib1g_1.2.7.dfsg-13_amd64.deb
+main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_amd64.deb
diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.arm b/build/linux/sysroot_scripts/packagelist.wheezy.arm
new file mode 100644
index 0000000..3d79cb3
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.wheezy.arm
@@ -0,0 +1,156 @@
+main/a/alsa-lib/libasound2_1.0.25-4_armhf.deb
+main/a/alsa-lib/libasound2-dev_1.0.25-4_armhf.deb
+main/a/atk1.0/libatk1.0-0_2.4.0-2_armhf.deb
+main/a/atk1.0/libatk1.0-dev_2.4.0-2_armhf.deb
+main/a/attr/libattr1_2.4.46-8_armhf.deb
+main/a/avahi/libavahi-client3_0.6.31-2_armhf.deb
+main/a/avahi/libavahi-common3_0.6.31-2_armhf.deb
+main/c/cairo/libcairo2_1.12.2-3_armhf.deb
+main/c/cairo/libcairo2-dev_1.12.2-3_armhf.deb
+main/c/cairo/libcairo-gobject2_1.12.2-3_armhf.deb
+main/c/cairo/libcairo-script-interpreter2_1.12.2-3_armhf.deb
+main/c/cups/libcups2_1.5.3-5+deb7u4_armhf.deb
+main/c/cups/libcups2-dev_1.5.3-5+deb7u4_armhf.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_armhf.deb
+main/d/dbus/libdbus-1-3_1.6.8-1+deb7u5_armhf.deb
+main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u5_armhf.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1_armhf.deb
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1_armhf.deb
+main/e/eglibc/libc6_2.13-38+deb7u6_armhf.deb
+main/e/eglibc/libc6-dev_2.13-38+deb7u6_armhf.deb
+main/e/elfutils/libelf1_0.152-1+wheezy1_armhf.deb
+main/e/elfutils/libelf-dev_0.152-1+wheezy1_armhf.deb
+main/e/expat/libexpat1_2.1.0-1+deb7u1_armhf.deb
+main/e/expat/libexpat1-dev_2.1.0-1+deb7u1_armhf.deb
+main/f/fontconfig/libfontconfig1_2.9.0-7.1_armhf.deb
+main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_armhf.deb
+main/f/freetype/libfreetype6_2.4.9-1.1_armhf.deb
+main/f/freetype/libfreetype6-dev_2.4.9-1.1_armhf.deb
+main/g/gcc-4.6/gcc-4.6_4.6.3-14_armhf.deb
+main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_armhf.deb
+main/g/gcc-4.7/libgcc1_4.7.2-5_armhf.deb
+main/g/gcc-4.7/libgomp1_4.7.2-5_armhf.deb
+main/g/gcc-4.7/libstdc++6_4.7.2-5_armhf.deb
+main/g/gconf/libgconf2-4_3.2.5-1+build1_armhf.deb
+main/g/gconf/libgconf-2-4_3.2.5-1+build1_armhf.deb
+main/g/gconf/libgconf2-dev_3.2.5-1+build1_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1_armhf.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1_armhf.deb
+main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_armhf.deb
+main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_armhf.deb
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u2_armhf.deb
+main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u2_armhf.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u2_armhf.deb
+main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u2_armhf.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_armhf.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_armhf.deb
+main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_armhf.deb
+main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u2_armhf.deb
+main/libc/libcap2/libcap2_2.22-1.2_armhf.deb
+main/libc/libcap2/libcap-dev_2.22-1.2_armhf.deb
+main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_armhf.deb
+main/libe/libexif/libexif12_0.6.20-3_armhf.deb
+main/libe/libexif/libexif-dev_0.6.20-3_armhf.deb
+main/libf/libffi/libffi5_3.0.10-3+b1_armhf.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u2_armhf.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u2_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_armhf.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_armhf.deb
+main/libg/libgpg-error/libgpg-error0_1.10-3.1_armhf.deb
+main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_armhf.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-4_armhf.deb
+main/libp/libp11/libp11-2_0.2.8-2_armhf.deb
+main/libp/libpng/libpng12-0_1.2.49-1_armhf.deb
+main/libp/libpng/libpng12-dev_1.2.49-1_armhf.deb
+main/libs/libselinux/libselinux1_2.1.9-5_armhf.deb
+main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u1_armhf.deb
+main/libx/libx11/libx11-6_1.5.0-1+deb7u1_armhf.deb
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u1_armhf.deb
+main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u1_armhf.deb
+main/libx/libxau/libxau6_1.0.7-1_armhf.deb
+main/libx/libxau/libxau-dev_1.0.7-1_armhf.deb
+main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_armhf.deb
+main/libx/libxcomposite/libxcomposite1_0.4.3-2+b1_armhf.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.3-2+b1_armhf.deb
+main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_armhf.deb
+main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_armhf.deb
+main/libx/libxdamage/libxdamage1_1.1.3-2+b1_armhf.deb
+main/libx/libxdamage/libxdamage-dev_1.1.3-2+b1_armhf.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_armhf.deb
+main/libx/libxext/libxext6_1.3.1-2+deb7u1_armhf.deb
+main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_armhf.deb
+main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_armhf.deb
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_armhf.deb
+main/libx/libxi/libxi6_1.6.1-1+deb7u1_armhf.deb
+main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_armhf.deb
+main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_armhf.deb
+main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_armhf.deb
+main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_armhf.deb
+main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_armhf.deb
+main/libx/libxrender/libxrender1_0.9.7-1+deb7u1_armhf.deb
+main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u1_armhf.deb
+main/libx/libxss/libxss1_1.2.2-1_armhf.deb
+main/libx/libxss/libxss-dev_1.2.2-1_armhf.deb
+main/libx/libxt/libxt6_1.1.3-1+deb7u1_armhf.deb
+main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_armhf.deb
+main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_armhf.deb
+main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_armhf.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_armhf.deb
+main/l/linux/linux-libc-dev_3.2.65-1_armhf.deb
+main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_armhf.deb
+main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_armhf.deb
+main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_armhf.deb
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_armhf.deb
+main/n/nspr/libnspr4_4.9.2-1+deb7u2_armhf.deb
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u2_armhf.deb
+main/n/nss/libnss3_3.14.5-1+deb7u3_armhf.deb
+main/n/nss/libnss3-dev_3.14.5-1+deb7u3_armhf.deb
+main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u13_armhf.deb
+main/o/openssl/libssl-dev_1.0.1e-2+deb7u13_armhf.deb
+main/o/orbit2/liborbit2_2.14.19-0.1_armhf.deb
+main/p/p11-kit/libp11-kit0_0.12-3_armhf.deb
+main/p/pam/libpam0g_1.1.3-7.1_armhf.deb
+main/p/pam/libpam0g-dev_1.1.3-7.1_armhf.deb
+main/p/pango1.0/libpango1.0-0_1.30.0-1_armhf.deb
+main/p/pango1.0/libpango1.0-dev_1.30.0-1_armhf.deb
+main/p/pciutils/libpci3_3.1.9-6_armhf.deb
+main/p/pciutils/libpci-dev_3.1.9-6_armhf.deb
+main/p/pcre3/libpcre3_8.30-5_armhf.deb
+main/p/pcre3/libpcre3-dev_8.30-5_armhf.deb
+main/p/pcre3/libpcrecpp0_8.30-5_armhf.deb
+main/p/pixman/libpixman-1-0_0.26.0-4+deb7u1_armhf.deb
+main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u1_armhf.deb
+main/p/pulseaudio/libpulse0_2.0-6.1_armhf.deb
+main/p/pulseaudio/libpulse-dev_2.0-6.1_armhf.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_armhf.deb
+main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_armhf.deb
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_armhf.deb
+main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_armhf.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
+main/z/zlib/zlib1g_1.2.7.dfsg-13_armhf.deb
+main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_armhf.deb
diff --git a/build/linux/sysroot_scripts/packagelist.wheezy.i386 b/build/linux/sysroot_scripts/packagelist.wheezy.i386
new file mode 100644
index 0000000..1379fee
--- /dev/null
+++ b/build/linux/sysroot_scripts/packagelist.wheezy.i386
@@ -0,0 +1,157 @@
+main/a/alsa-lib/libasound2_1.0.25-4_i386.deb
+main/a/alsa-lib/libasound2-dev_1.0.25-4_i386.deb
+main/a/atk1.0/libatk1.0-0_2.4.0-2_i386.deb
+main/a/atk1.0/libatk1.0-dev_2.4.0-2_i386.deb
+main/a/attr/libattr1_2.4.46-8_i386.deb
+main/a/avahi/libavahi-client3_0.6.31-2_i386.deb
+main/a/avahi/libavahi-common3_0.6.31-2_i386.deb
+main/c/cairo/libcairo2_1.12.2-3_i386.deb
+main/c/cairo/libcairo2-dev_1.12.2-3_i386.deb
+main/c/cairo/libcairo-gobject2_1.12.2-3_i386.deb
+main/c/cairo/libcairo-script-interpreter2_1.12.2-3_i386.deb
+main/c/cups/libcups2_1.5.3-5+deb7u4_i386.deb
+main/c/cups/libcups2-dev_1.5.3-5+deb7u4_i386.deb
+main/d/dbus-glib/libdbus-glib-1-2_0.100.2-1_i386.deb
+main/d/dbus/libdbus-1-3_1.6.8-1+deb7u5_i386.deb
+main/d/dbus/libdbus-1-dev_1.6.8-1+deb7u5_i386.deb
+main/e/e2fsprogs/comerr-dev_2.1-1.42.5-1.1_i386.deb
+main/e/e2fsprogs/libcomerr2_1.42.5-1.1_i386.deb
+main/e/eglibc/libc6_2.13-38+deb7u6_i386.deb
+main/e/eglibc/libc6-dev_2.13-38+deb7u6_i386.deb
+main/e/elfutils/libelf1_0.152-1+wheezy1_i386.deb
+main/e/elfutils/libelf-dev_0.152-1+wheezy1_i386.deb
+main/e/expat/libexpat1_2.1.0-1+deb7u1_i386.deb
+main/e/expat/libexpat1-dev_2.1.0-1+deb7u1_i386.deb
+main/f/fontconfig/libfontconfig1_2.9.0-7.1_i386.deb
+main/f/fontconfig/libfontconfig1-dev_2.9.0-7.1_i386.deb
+main/f/freetype/libfreetype6_2.4.9-1.1_i386.deb
+main/f/freetype/libfreetype6-dev_2.4.9-1.1_i386.deb
+main/g/gcc-4.6/gcc-4.6_4.6.3-14_i386.deb
+main/g/gcc-4.6/libstdc++6-4.6-dev_4.6.3-14_i386.deb
+main/g/gcc-4.7/libgcc1_4.7.2-5_i386.deb
+main/g/gcc-4.7/libgomp1_4.7.2-5_i386.deb
+main/g/gcc-4.7/libquadmath0_4.7.2-5_i386.deb
+main/g/gcc-4.7/libstdc++6_4.7.2-5_i386.deb
+main/g/gconf/libgconf-2-4_3.2.5-1+build1_i386.deb
+main/g/gconf/libgconf2-4_3.2.5-1+build1_i386.deb
+main/g/gconf/libgconf2-dev_3.2.5-1+build1_i386.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-0_2.26.1-1_i386.deb
+main/g/gdk-pixbuf/libgdk-pixbuf2.0-dev_2.26.1-1_i386.deb
+main/g/glib2.0/libglib2.0-0_2.33.12+really2.32.4-5_i386.deb
+main/g/glib2.0/libglib2.0-dev_2.33.12+really2.32.4-5_i386.deb
+main/g/gnutls26/libgnutls26_2.12.20-8+deb7u2_i386.deb
+main/g/gnutls26/libgnutls-dev_2.12.20-8+deb7u2_i386.deb
+main/g/gnutls26/libgnutls-openssl27_2.12.20-8+deb7u2_i386.deb
+main/g/gnutls26/libgnutlsxx27_2.12.20-8+deb7u2_i386.deb
+main/g/gtk+2.0/libgtk2.0-0_2.24.10-2_i386.deb
+main/g/gtk+2.0/libgtk2.0-dev_2.24.10-2_i386.deb
+main/k/keyutils/libkeyutils1_1.5.5-3+deb7u1_i386.deb
+main/k/krb5/krb5-multidev_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libgssapi-krb5-2_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libgssrpc4_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libk5crypto3_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkadm5clnt-mit8_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkadm5srv-mit8_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkdb5-6_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkrb5-3_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkrb5-dev_1.10.1+dfsg-5+deb7u2_i386.deb
+main/k/krb5/libkrb5support0_1.10.1+dfsg-5+deb7u2_i386.deb
+main/libc/libcap2/libcap2_2.22-1.2_i386.deb
+main/libc/libcap2/libcap-dev_2.22-1.2_i386.deb
+main/libd/libdrm/libdrm2_2.4.40-1~deb7u2_i386.deb
+main/libe/libexif/libexif12_0.6.20-3_i386.deb
+main/libe/libexif/libexif-dev_0.6.20-3_i386.deb
+main/libf/libffi/libffi5_3.0.10-3_i386.deb
+main/libg/libgcrypt11/libgcrypt11_1.5.0-5+deb7u2_i386.deb
+main/libg/libgcrypt11/libgcrypt11-dev_1.5.0-5+deb7u2_i386.deb
+main/libg/libgnome-keyring/libgnome-keyring0_3.4.1-1_i386.deb
+main/libg/libgnome-keyring/libgnome-keyring-dev_3.4.1-1_i386.deb
+main/libg/libgpg-error/libgpg-error0_1.10-3.1_i386.deb
+main/libg/libgpg-error/libgpg-error-dev_1.10-3.1_i386.deb
+main/libn/libnss-db/libnss-db_2.2.3pre1-4_i386.deb
+main/libp/libp11/libp11-2_0.2.8-2_i386.deb
+main/libp/libpng/libpng12-0_1.2.49-1_i386.deb
+main/libp/libpng/libpng12-dev_1.2.49-1_i386.deb
+main/libs/libselinux/libselinux1_2.1.9-5_i386.deb
+main/libt/libtasn1-3/libtasn1-3_2.13-2+deb7u1_i386.deb
+main/libx/libx11/libx11-6_1.5.0-1+deb7u1_i386.deb
+main/libx/libx11/libx11-dev_1.5.0-1+deb7u1_i386.deb
+main/libx/libx11/libx11-xcb1_1.5.0-1+deb7u1_i386.deb
+main/libx/libxau/libxau6_1.0.7-1_i386.deb
+main/libx/libxau/libxau-dev_1.0.7-1_i386.deb
+main/libx/libxcb/libxcb1_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb1-dev_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-glx0_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-render0_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-render0-dev_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-shm0_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcb/libxcb-shm0-dev_1.8.1-2+deb7u1_i386.deb
+main/libx/libxcomposite/libxcomposite1_0.4.3-2_i386.deb
+main/libx/libxcomposite/libxcomposite-dev_0.4.3-2_i386.deb
+main/libx/libxcursor/libxcursor1_1.1.13-1+deb7u1_i386.deb
+main/libx/libxcursor/libxcursor-dev_1.1.13-1+deb7u1_i386.deb
+main/libx/libxdamage/libxdamage1_1.1.3-2_i386.deb
+main/libx/libxdamage/libxdamage-dev_1.1.3-2_i386.deb
+main/libx/libxdmcp/libxdmcp6_1.1.1-1_i386.deb
+main/libx/libxext/libxext6_1.3.1-2+deb7u1_i386.deb
+main/libx/libxext/libxext-dev_1.3.1-2+deb7u1_i386.deb
+main/libx/libxfixes/libxfixes3_5.0-4+deb7u1_i386.deb
+main/libx/libxfixes/libxfixes-dev_5.0-4+deb7u1_i386.deb
+main/libx/libxi/libxi6_1.6.1-1+deb7u1_i386.deb
+main/libx/libxi/libxi-dev_1.6.1-1+deb7u1_i386.deb
+main/libx/libxinerama/libxinerama1_1.1.2-1+deb7u1_i386.deb
+main/libx/libxinerama/libxinerama-dev_1.1.2-1+deb7u1_i386.deb
+main/libx/libxrandr/libxrandr2_1.3.2-2+deb7u1_i386.deb
+main/libx/libxrandr/libxrandr-dev_1.3.2-2+deb7u1_i386.deb
+main/libx/libxrender/libxrender1_0.9.7-1+deb7u1_i386.deb
+main/libx/libxrender/libxrender-dev_0.9.7-1+deb7u1_i386.deb
+main/libx/libxss/libxss1_1.2.2-1_i386.deb
+main/libx/libxss/libxss-dev_1.2.2-1_i386.deb
+main/libx/libxt/libxt6_1.1.3-1+deb7u1_i386.deb
+main/libx/libxt/libxt-dev_1.1.3-1+deb7u1_i386.deb
+main/libx/libxtst/libxtst6_1.2.1-1+deb7u1_i386.deb
+main/libx/libxtst/libxtst-dev_1.2.1-1+deb7u1_i386.deb
+main/libx/libxxf86vm/libxxf86vm1_1.1.2-1+deb7u1_i386.deb
+main/l/linux/linux-libc-dev_3.2.65-1_i386.deb
+main/m/mesa/libgl1-mesa-dev_8.0.5-4+deb7u2_i386.deb
+main/m/mesa/libgl1-mesa-glx_8.0.5-4+deb7u2_i386.deb
+main/m/mesa/libglapi-mesa_8.0.5-4+deb7u2_i386.deb
+main/m/mesa/mesa-common-dev_8.0.5-4+deb7u2_i386.deb
+main/n/nspr/libnspr4_4.9.2-1+deb7u2_i386.deb
+main/n/nspr/libnspr4-dev_4.9.2-1+deb7u2_i386.deb
+main/n/nss/libnss3_3.14.5-1+deb7u3_i386.deb
+main/n/nss/libnss3-dev_3.14.5-1+deb7u3_i386.deb
+main/o/openssl/libssl1.0.0_1.0.1e-2+deb7u13_i386.deb
+main/o/openssl/libssl-dev_1.0.1e-2+deb7u13_i386.deb
+main/o/orbit2/liborbit2_2.14.19-0.1_i386.deb
+main/p/p11-kit/libp11-kit0_0.12-3_i386.deb
+main/p/pam/libpam0g_1.1.3-7.1_i386.deb
+main/p/pam/libpam0g-dev_1.1.3-7.1_i386.deb
+main/p/pango1.0/libpango1.0-0_1.30.0-1_i386.deb
+main/p/pango1.0/libpango1.0-dev_1.30.0-1_i386.deb
+main/p/pciutils/libpci3_3.1.9-6_i386.deb
+main/p/pciutils/libpci-dev_3.1.9-6_i386.deb
+main/p/pcre3/libpcre3_8.30-5_i386.deb
+main/p/pcre3/libpcre3-dev_8.30-5_i386.deb
+main/p/pcre3/libpcrecpp0_8.30-5_i386.deb
+main/p/pixman/libpixman-1-0_0.26.0-4+deb7u1_i386.deb
+main/p/pixman/libpixman-1-dev_0.26.0-4+deb7u1_i386.deb
+main/p/pulseaudio/libpulse0_2.0-6.1_i386.deb
+main/p/pulseaudio/libpulse-dev_2.0-6.1_i386.deb
+main/p/pulseaudio/libpulse-mainloop-glib0_2.0-6.1_i386.deb
+main/s/speech-dispatcher/libspeechd2_0.7.1-6.2_i386.deb
+main/s/speech-dispatcher/libspeechd-dev_0.7.1-6.2_i386.deb
+main/s/speech-dispatcher/speech-dispatcher_0.7.1-6.2_i386.deb
+main/x/x11proto-composite/x11proto-composite-dev_0.4.2-2_all.deb
+main/x/x11proto-core/x11proto-core-dev_7.0.23-1_all.deb
+main/x/x11proto-damage/x11proto-damage-dev_1.2.1-2_all.deb
+main/x/x11proto-fixes/x11proto-fixes-dev_5.0-2_all.deb
+main/x/x11proto-input/x11proto-input-dev_2.2-1_all.deb
+main/x/x11proto-kb/x11proto-kb-dev_1.0.6-2_all.deb
+main/x/x11proto-randr/x11proto-randr-dev_1.3.2-2_all.deb
+main/x/x11proto-record/x11proto-record-dev_1.14.2-1_all.deb
+main/x/x11proto-render/x11proto-render-dev_0.11.1-2_all.deb
+main/x/x11proto-scrnsaver/x11proto-scrnsaver-dev_1.2.2-1_all.deb
+main/x/x11proto-xext/x11proto-xext-dev_7.2.1-1_all.deb
+main/z/zlib/zlib1g_1.2.7.dfsg-13_i386.deb
+main/z/zlib/zlib1g-dev_1.2.7.dfsg-13_i386.deb
diff --git a/build/linux/sysroot_scripts/sysroot-creator-test.sh b/build/linux/sysroot_scripts/sysroot-creator-test.sh
new file mode 100755
index 0000000..b346bb7
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-test.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Rudimentary test suite for sysroot-creator.
+
+SCRIPT_DIR=$(dirname $0)
+
+set -o errexit
+
+TestUpdateAllLists() {
+  echo "[ RUN      ] TestUpdateAllLists"
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsAmd64
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsI386
+  "$SCRIPT_DIR/sysroot-creator-trusty.sh" UpdatePackageListsARM
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsAmd64
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsI386
+  "$SCRIPT_DIR/sysroot-creator-wheezy.sh" UpdatePackageListsARM
+  echo "[      OK  ]"
+}
+
+TestUpdateAllLists
diff --git a/build/linux/sysroot_scripts/sysroot-creator-trusty.sh b/build/linux/sysroot_scripts/sysroot-creator-trusty.sh
new file mode 100755
index 0000000..c0d82ec
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-trusty.sh
@@ -0,0 +1,182 @@
+#!/bin/sh
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR=$(dirname $0)
+
+DISTRO=ubuntu
+DIST=trusty
+
+# This is where we get all the debian packages from.
+APT_REPO=http://archive.ubuntu.com/ubuntu
+APT_REPO_ARM=http://ports.ubuntu.com
+REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
+KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
+
+# Sysroot packages: these are the packages needed to build chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in GeneratePackageList mode.
+DEBIAN_PACKAGES="\
+  comerr-dev \
+  gcc-4.8 \
+  krb5-multidev \
+  libasound2 \
+  libasound2-dev \
+  libatk1.0-0 \
+  libatk1.0-dev \
+  libavahi-client3 \
+  libavahi-common3 \
+  libc6 \
+  libc6-dev \
+  libcairo2 \
+  libcairo2-dev \
+  libcairo-gobject2 \
+  libcairo-script-interpreter2 \
+  libcap-dev \
+  libcap2 \
+  libcomerr2 \
+  libcups2 \
+  libcups2-dev \
+  libdbus-1-3 \
+  libdbus-1-dev \
+  libdbus-glib-1-2 \
+  libdrm2 \
+  libelf1 \
+  libelf-dev \
+  libexif12 \
+  libexif-dev \
+  libexpat1 \
+  libexpat1-dev \
+  libffi6 \
+  libfontconfig1 \
+  libfontconfig1-dev \
+  libfreetype6 \
+  libfreetype6-dev \
+  libgcc1 \
+  libgconf-2-4 \
+  libgconf2-4 \
+  libgconf2-dev \
+  libgcrypt11 \
+  libgcrypt11-dev \
+  libgdk-pixbuf2.0-0 \
+  libgdk-pixbuf2.0-dev \
+  libgl1-mesa-dev \
+  libgl1-mesa-glx \
+  libglapi-mesa \
+  libglib2.0-0 \
+  libglib2.0-dev \
+  libgnome-keyring0 \
+  libgnome-keyring-dev \
+  libgnutls26 \
+  libgnutls-dev \
+  libgnutls-openssl27 \
+  libgnutlsxx27 \
+  libgomp1 \
+  libgpg-error0 \
+  libgpg-error-dev \
+  libgssapi-krb5-2 \
+  libgssrpc4 \
+  libgtk2.0-0 \
+  libgtk2.0-dev \
+  libk5crypto3 \
+  libkadm5clnt-mit9 \
+  libkadm5srv-mit9 \
+  libkdb5-7 \
+  libkeyutils1 \
+  libkrb5-3 \
+  libkrb5-dev \
+  libkrb5support0 \
+  libnspr4 \
+  libnspr4-dev \
+  libnss3 \
+  libnss3-dev \
+  libnss-db \
+  liborbit2 \
+  libp11-2 \
+  libp11-kit0 \
+  libpam0g \
+  libpam0g-dev \
+  libpango-1.0-0 \
+  libpango1.0-dev \
+  libpangocairo-1.0-0 \
+  libpangoft2-1.0-0 \
+  libpangoxft-1.0-0 \
+  libpci3 \
+  libpci-dev \
+  libpcre3 \
+  libpcre3-dev \
+  libpcrecpp0 \
+  libpixman-1-0 \
+  libpixman-1-dev \
+  libpng12-0 \
+  libpng12-dev \
+  libpulse0 \
+  libpulse-dev \
+  libpulse-mainloop-glib0 \
+  libselinux1 \
+  libspeechd2 \
+  libspeechd-dev \
+  libssl1.0.0 \
+  libssl-dev \
+  libstdc++6 \
+  libstdc++-4.8-dev \
+  libtasn1-6 \
+  libx11-6 \
+  libx11-dev \
+  libx11-xcb1 \
+  libxau6 \
+  libxau-dev \
+  libxcb1 \
+  libxcb1-dev \
+  libxcb-glx0 \
+  libxcb-render0 \
+  libxcb-render0-dev \
+  libxcb-shm0 \
+  libxcb-shm0-dev \
+  libxcomposite1 \
+  libxcomposite-dev \
+  libxcursor1 \
+  libxcursor-dev \
+  libxdamage1 \
+  libxdamage-dev \
+  libxdmcp6 \
+  libxext6 \
+  libxext-dev \
+  libxfixes3 \
+  libxfixes-dev \
+  libxi6 \
+  libxi-dev \
+  libxinerama1 \
+  libxinerama-dev \
+  libxrandr2 \
+  libxrandr-dev \
+  libxrender1 \
+  libxrender-dev \
+  libxss1 \
+  libxss-dev \
+  libxt6 \
+  libxt-dev \
+  libxtst6 \
+  libxtst-dev \
+  libxxf86vm1 \
+  linux-libc-dev \
+  mesa-common-dev \
+  speech-dispatcher \
+  x11proto-composite-dev \
+  x11proto-core-dev \
+  x11proto-damage-dev \
+  x11proto-fixes-dev \
+  x11proto-input-dev \
+  x11proto-kb-dev \
+  x11proto-randr-dev \
+  x11proto-record-dev \
+  x11proto-render-dev \
+  x11proto-scrnsaver-dev \
+  x11proto-xext-dev \
+  zlib1g \
+  zlib1g-dev"
+
+DEBIAN_PACKAGES_X86="libquadmath0"
+
+. ${SCRIPT_DIR}/sysroot-creator.sh
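
As the NOTE above says, the checked-in packagelist files must be regenerated whenever DEBIAN_PACKAGES changes, by running the creator script in GeneratePackageList mode. The following is a hypothetical cross-check, not part of the build: it assumes the quoted, backslash-continued DEBIAN_PACKAGES layout and the `<pool path>/<name>_<version>_<arch>.deb` packagelist format used in this directory.

```python
# Hypothetical consistency check between a sysroot-creator-*.sh script and a
# generated packagelist file. Layout assumptions are described above; this
# helper is not part of the Chromium build.
import re


def packages_in_creator_script(path):
  with open(path) as f:
    text = f.read()
  match = re.search(r'DEBIAN_PACKAGES="([^"]*)"', text)
  if not match:
    return set()
  return set(match.group(1).replace('\\', ' ').split())


def packages_in_packagelist(path):
  names = set()
  with open(path) as f:
    for line in f:
      deb = line.rsplit('/', 1)[-1]      # e.g. libasound2_1.0.25-4_i386.deb
      names.add(deb.split('_', 1)[0])    # -> libasound2
  return names


def missing_from_packagelist(creator_script, packagelist):
  """Packages named in the script but absent from the packagelist."""
  return (packages_in_creator_script(creator_script)
          - packages_in_packagelist(packagelist))
```
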
diff --git a/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh b/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh
new file mode 100755
index 0000000..9a4d1bf
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator-wheezy.sh
@@ -0,0 +1,177 @@
+#!/bin/sh
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+SCRIPT_DIR=$(dirname $0)
+
+DISTRO=debian
+DIST=wheezy
+APT_REPO=http://http.us.debian.org/debian
+REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
+KEYRING_FILE=/usr/share/keyrings/debian-archive-keyring.gpg
+
+# Sysroot packages: these are the packages needed to build chrome.
+# NOTE: When DEBIAN_PACKAGES is modified, the packagelist files must be updated
+# by running this script in UpdatePackageLists mode.
+DEBIAN_PACKAGES="\
+  comerr-dev \
+  gcc-4.6 \
+  krb5-multidev \
+  libasound2 \
+  libasound2-dev \
+  libatk1.0-0 \
+  libatk1.0-dev \
+  libattr1 \
+  libavahi-client3 \
+  libavahi-common3 \
+  libc6 \
+  libc6-dev \
+  libcairo2 \
+  libcairo2-dev \
+  libcairo-gobject2 \
+  libcairo-script-interpreter2 \
+  libcap-dev \
+  libcap2 \
+  libcomerr2 \
+  libcups2 \
+  libcups2-dev \
+  libdbus-1-3 \
+  libdbus-1-dev \
+  libdbus-glib-1-2 \
+  libdrm2 \
+  libelf1 \
+  libelf-dev \
+  libexif12 \
+  libexif-dev \
+  libexpat1 \
+  libexpat1-dev \
+  libffi5 \
+  libfontconfig1 \
+  libfontconfig1-dev \
+  libfreetype6 \
+  libfreetype6-dev \
+  libgcc1 \
+  libgconf-2-4 \
+  libgconf2-4 \
+  libgconf2-dev \
+  libgcrypt11 \
+  libgcrypt11-dev \
+  libgdk-pixbuf2.0-0 \
+  libgdk-pixbuf2.0-dev \
+  libgl1-mesa-dev \
+  libgl1-mesa-glx \
+  libglapi-mesa \
+  libglib2.0-0 \
+  libglib2.0-dev \
+  libgnome-keyring0 \
+  libgnome-keyring-dev \
+  libgnutls26 \
+  libgnutls-dev \
+  libgnutls-openssl27 \
+  libgnutlsxx27 \
+  libgomp1 \
+  libgpg-error0 \
+  libgpg-error-dev \
+  libgssapi-krb5-2 \
+  libgssrpc4 \
+  libgtk2.0-0 \
+  libgtk2.0-dev \
+  libk5crypto3 \
+  libkadm5clnt-mit8 \
+  libkadm5srv-mit8 \
+  libkdb5-6 \
+  libkeyutils1 \
+  libkrb5-3 \
+  libkrb5-dev \
+  libkrb5support0 \
+  libnspr4 \
+  libnspr4-dev \
+  libnss3 \
+  libnss3-dev \
+  libnss-db \
+  liborbit2 \
+  libp11-2 \
+  libp11-kit0 \
+  libpam0g \
+  libpam0g-dev \
+  libpango1.0-0 \
+  libpango1.0-dev \
+  libpci3 \
+  libpci-dev \
+  libpcre3 \
+  libpcre3-dev \
+  libpcrecpp0 \
+  libpixman-1-0 \
+  libpixman-1-dev \
+  libpng12-0 \
+  libpng12-dev \
+  libpulse0 \
+  libpulse-dev \
+  libpulse-mainloop-glib0 \
+  libselinux1 \
+  libspeechd2 \
+  libspeechd-dev \
+  libssl1.0.0 \
+  libssl-dev \
+  libstdc++6 \
+  libstdc++6-4.6-dev \
+  libtasn1-3 \
+  libx11-6 \
+  libx11-dev \
+  libx11-xcb1 \
+  libxau6 \
+  libxau-dev \
+  libxcb1 \
+  libxcb1-dev \
+  libxcb-glx0 \
+  libxcb-render0 \
+  libxcb-render0-dev \
+  libxcb-shm0 \
+  libxcb-shm0-dev \
+  libxcomposite1 \
+  libxcomposite-dev \
+  libxcursor1 \
+  libxcursor-dev \
+  libxdamage1 \
+  libxdamage-dev \
+  libxdmcp6 \
+  libxext6 \
+  libxext-dev \
+  libxfixes3 \
+  libxfixes-dev \
+  libxi6 \
+  libxi-dev \
+  libxinerama1 \
+  libxinerama-dev \
+  libxrandr2 \
+  libxrandr-dev \
+  libxrender1 \
+  libxrender-dev \
+  libxss1 \
+  libxss-dev \
+  libxt6 \
+  libxt-dev \
+  libxtst6 \
+  libxtst-dev \
+  libxxf86vm1 \
+  linux-libc-dev \
+  mesa-common-dev \
+  speech-dispatcher \
+  x11proto-composite-dev \
+  x11proto-core-dev \
+  x11proto-damage-dev \
+  x11proto-fixes-dev \
+  x11proto-input-dev \
+  x11proto-kb-dev \
+  x11proto-randr-dev \
+  x11proto-record-dev \
+  x11proto-render-dev \
+  x11proto-scrnsaver-dev \
+  x11proto-xext-dev \
+  zlib1g \
+  zlib1g-dev"
+
+DEBIAN_PACKAGES_X86="libquadmath0"
+
+. ${SCRIPT_DIR}/sysroot-creator.sh
diff --git a/build/linux/sysroot_scripts/sysroot-creator.sh b/build/linux/sysroot_scripts/sysroot-creator.sh
new file mode 100644
index 0000000..822a5e8
--- /dev/null
+++ b/build/linux/sysroot_scripts/sysroot-creator.sh
@@ -0,0 +1,700 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This script should not be run directly but sourced by the other
+# scripts (e.g. sysroot-creator-trusty.sh).  It's up to the parent scripts
+# to define certain environment variables, e.g.:
+#  DISTRO=ubuntu
+#  DIST=trusty
+#  APT_REPO=http://archive.ubuntu.com/ubuntu
+#  KEYRING_FILE=/usr/share/keyrings/ubuntu-archive-keyring.gpg
+#  DEBIAN_PACKAGES="gcc libz libssl"
+
+#@ This script builds Debian sysroot images for building Google Chrome.
+#@
+#@  Generally this script is invoked as:
+#@  sysroot-creator-<flavour>.sh <mode> <args>*
+#@  Available modes are shown below.
+#@
+#@ List of modes:
+
+######################################################################
+# Config
+######################################################################
+
+set -o nounset
+set -o errexit
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+if [ -z "${DIST:-}" ]; then
+  echo "error: DIST not defined"
+  exit 1
+fi
+
+if [ -z "${APT_REPO:-}" ]; then
+  echo "error: APT_REPO not defined"
+  exit 1
+fi
+
+if [ -z "${KEYRING_FILE:-}" ]; then
+  echo "error: KEYRING_FILE not defined"
+  exit 1
+fi
+
+if [ -z "${DEBIAN_PACKAGES:-}" ]; then
+  echo "error: DEBIAN_PACKAGES not defined"
+  exit 1
+fi
+
+readonly REPO_BASEDIR="${APT_REPO}/dists/${DIST}"
+
+readonly REQUIRED_TOOLS="wget"
+
+######################################################################
+# Package Config
+######################################################################
+
+readonly RELEASE_FILE="Release"
+readonly RELEASE_FILE_GPG="Release.gpg"
+readonly RELEASE_LIST="${REPO_BASEDIR}/${RELEASE_FILE}"
+readonly RELEASE_LIST_GPG="${REPO_BASEDIR}/${RELEASE_FILE_GPG}"
+readonly PACKAGE_FILE_AMD64="main/binary-amd64/Packages.bz2"
+readonly PACKAGE_FILE_I386="main/binary-i386/Packages.bz2"
+readonly PACKAGE_FILE_ARM="main/binary-armhf/Packages.bz2"
+readonly PACKAGE_FILE_MIPS="main/binary-mipsel/Packages.bz2"
+readonly PACKAGE_LIST_AMD64="${REPO_BASEDIR}/${PACKAGE_FILE_AMD64}"
+readonly PACKAGE_LIST_I386="${REPO_BASEDIR}/${PACKAGE_FILE_I386}"
+readonly PACKAGE_LIST_ARM="${REPO_BASEDIR}/${PACKAGE_FILE_ARM}"
+readonly PACKAGE_LIST_MIPS="${REPO_BASEDIR}/${PACKAGE_FILE_MIPS}"
+
+readonly DEBIAN_DEP_LIST_AMD64="packagelist.${DIST}.amd64"
+readonly DEBIAN_DEP_LIST_I386="packagelist.${DIST}.i386"
+readonly DEBIAN_DEP_LIST_ARM="packagelist.${DIST}.arm"
+readonly DEBIAN_DEP_LIST_MIPS="packagelist.${DIST}.mipsel"
+
+######################################################################
+# Helper
+######################################################################
+
+Banner() {
+  echo "######################################################################"
+  echo $*
+  echo "######################################################################"
+}
+
+
+SubBanner() {
+  echo "----------------------------------------------------------------------"
+  echo $*
+  echo "----------------------------------------------------------------------"
+}
+
+
+Usage() {
+  egrep "^#@" "${BASH_SOURCE[0]}" | cut --bytes=3-
+}
+
+
+DownloadOrCopy() {
+  if [ -f "$2" ] ; then
+    echo "$2 already in place"
+    return
+  fi
+
+  HTTP=0
+  echo "$1" | grep -qs ^http:// && HTTP=1
+  if [ "$HTTP" = "1" ]; then
+    SubBanner "downloading from $1 -> $2"
+    wget "$1" -O "${2}.partial"
+    mv "${2}.partial" $2
+  else
+    SubBanner "copying from $1"
+    cp "$1" "$2"
+  fi
+}
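+
+# Illustrative usage (not part of the upstream script): the source may be a
+# local path or an http:// URL, e.g.
+#   DownloadOrCopy "${APT_REPO}/dists/${DIST}/Release" "${BUILD_DIR}/Release"
+#   DownloadOrCopy /tmp/Release "${BUILD_DIR}/Release"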
+
+
+SetEnvironmentVariables() {
+  ARCH=""
+  echo $1 | grep -qs Amd64$ && ARCH=AMD64
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs I386$ && ARCH=I386
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs Mips$ && ARCH=MIPS
+  fi
+  if [ -z "$ARCH" ]; then
+    echo $1 | grep -qs ARM$ && ARCH=ARM
+  fi
+  if [ -z "${ARCH}" ]; then
+    echo "ERROR: Unable to determine architecture based on: $1"
+    exit 1
+  fi
+  ARCH_LOWER=$(echo $ARCH | tr '[:upper:]' '[:lower:]')
+}
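+
+# Illustrative examples (not part of the upstream script): the trailing part of
+# the mode name selects the architecture, e.g.
+#   SetEnvironmentVariables BuildSysrootAmd64   # ARCH=AMD64, ARCH_LOWER=amd64
+#   SetEnvironmentVariables UploadSysrootARM    # ARCH=ARM,   ARCH_LOWER=arm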
+
+
+# some sanity checks to make sure this script is run from the right place
+# with the right tools
+SanityCheck() {
+  Banner "Sanity Checks"
+
+  local chrome_dir=$(cd "${SCRIPT_DIR}/../../../.." && pwd)
+  BUILD_DIR="${chrome_dir}/out/sysroot-build/${DIST}"
+  mkdir -p ${BUILD_DIR}
+  echo "Using build directory: ${BUILD_DIR}"
+
+  for tool in ${REQUIRED_TOOLS} ; do
+    if ! which ${tool} > /dev/null ; then
+      echo "Required binary $tool not found."
+      echo "Exiting."
+      exit 1
+    fi
+  done
+
+  # This is where the staging sysroot is.
+  INSTALL_ROOT="${BUILD_DIR}/${DIST}_${ARCH_LOWER}_staging"
+  TARBALL="${BUILD_DIR}/${DISTRO}_${DIST}_${ARCH_LOWER}_sysroot.tgz"
+
+  if ! mkdir -p "${INSTALL_ROOT}" ; then
+    echo "ERROR: ${INSTALL_ROOT} can't be created."
+    exit 1
+  fi
+}
+
+
+ChangeDirectory() {
+  # Change directory to where this script is.
+  cd ${SCRIPT_DIR}
+}
+
+
+ClearInstallDir() {
+  Banner "Clearing dirs in ${INSTALL_ROOT}"
+  rm -rf ${INSTALL_ROOT}/*
+}
+
+
+CreateTarBall() {
+  Banner "Creating tarball ${TARBALL}"
+  tar zcf ${TARBALL} -C ${INSTALL_ROOT} .
+}
+
+ExtractPackageBz2() {
+  bzcat "$1" | egrep '^(Package:|Filename:|SHA256:) ' > "$2"
+}
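+
+# The extracted file keeps only the fields needed later; an entry looks roughly
+# like this (values illustrative, not taken from a real Packages index):
+#   Package: libasound2
+#   Filename: pool/main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb
+#   SHA256: <64-character hex checksum>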
+
+GeneratePackageListAmd64() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_amd64.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_amd64"
+  DownloadOrCopy "${PACKAGE_LIST_AMD64}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_AMD64}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86}"
+}
+
+GeneratePackageListI386() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_i386.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_i386"
+  DownloadOrCopy "${PACKAGE_LIST_I386}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_I386}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}
+    ${DEBIAN_PACKAGES_X86}"
+}
+
+GeneratePackageListARM() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_arm.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_arm"
+  DownloadOrCopy "${PACKAGE_LIST_ARM}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_ARM}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}"
+}
+
+GeneratePackageListMips() {
+  local output_file="$1"
+  local package_list="${BUILD_DIR}/Packages.${DIST}_mips.bz2"
+  local tmp_package_list="${BUILD_DIR}/Packages.${DIST}_mips"
+  DownloadOrCopy "${PACKAGE_LIST_MIPS}" "${package_list}"
+  VerifyPackageListing "${PACKAGE_FILE_MIPS}" "${package_list}"
+  ExtractPackageBz2 "$package_list" "$tmp_package_list"
+  GeneratePackageList "$tmp_package_list" "$output_file" "${DEBIAN_PACKAGES}"
+}
+
+StripChecksumsFromPackageList() {
+  local package_file="$1"
+  sed -i 's/ [a-f0-9]\{64\}$//' "$package_file"
+}
+
+VerifyPackageFilesMatch() {
+  local downloaded_package_file="$1"
+  local stored_package_file="$2"
+  diff -u "$downloaded_package_file" "$stored_package_file"
+  if [ "$?" -ne "0" ]; then
+    echo "ERROR: downloaded package files does not match $2."
+    echo "You may need to run UpdatePackageLists."
+    exit 1
+  fi
+}
+
+######################################################################
+#
+######################################################################
+
+HacksAndPatchesAmd64() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/x86_64-linux-gnu/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/x86_64-linux-gnu/||g'  ${lscripts}
+  sed -i -e 's|/lib/x86_64-linux-gnu/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/x86_64-linux-gnu/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+
+  SubBanner "Adding an additional ld.conf include"
+  LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf"
+  echo /usr/lib/gcc/x86_64-linux-gnu/4.6 > "$LD_SO_HACK_CONF"
+  echo /usr/lib >> "$LD_SO_HACK_CONF"
+}
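+
+# Illustrative effect of the linker-script rewrite above (paths assumed): a
+# libc.so entry such as
+#   GROUP ( /lib/x86_64-linux-gnu/libc.so.6 /usr/lib/x86_64-linux-gnu/libc_nonshared.a )
+# becomes
+#   GROUP ( libc.so.6 libc_nonshared.a )
+# so the linker resolves the members via its search paths inside the sysroot.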
+
+
+HacksAndPatchesI386() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/i386-linux-gnu/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/i386-linux-gnu/||g'  ${lscripts}
+  sed -i -e 's|/lib/i386-linux-gnu/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/i386-linux-gnu/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+
+  SubBanner "Adding an additional ld.conf include"
+  LD_SO_HACK_CONF="${INSTALL_ROOT}/etc/ld.so.conf.d/zz_hack.conf"
+  echo /usr/lib/gcc/i486-linux-gnu/4.6 > "$LD_SO_HACK_CONF"
+  echo /usr/lib >> "$LD_SO_HACK_CONF"
+}
+
+
+HacksAndPatchesARM() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/arm-linux-gnueabihf/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/arm-linux-gnueabihf/||g' ${lscripts}
+  sed -i -e 's|/lib/arm-linux-gnueabihf/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/arm-linux-gnueabihf/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+}
+
+
+HacksAndPatchesMips() {
+  Banner "Misc Hacks & Patches"
+  # these are linker scripts with absolute pathnames in them
+  # which we rewrite here
+  lscripts="${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libpthread.so \
+            ${INSTALL_ROOT}/usr/lib/mipsel-linux-gnu/libc.so"
+
+  # Rewrite linker scripts
+  sed -i -e 's|/usr/lib/mipsel-linux-gnu/||g' ${lscripts}
+  sed -i -e 's|/lib/mipsel-linux-gnu/||g' ${lscripts}
+
+  # This is for chrome's ./build/linux/pkg-config-wrapper
+  # which overwrites PKG_CONFIG_PATH internally
+  SubBanner "Package Configs Symlink"
+  mkdir -p ${INSTALL_ROOT}/usr/share
+  ln -s ../lib/mipsel-linux-gnu/pkgconfig ${INSTALL_ROOT}/usr/share/pkgconfig
+}
+
+
+InstallIntoSysroot() {
+  Banner "Install Libs And Headers Into Jail"
+
+  mkdir -p ${BUILD_DIR}/debian-packages
+  mkdir -p ${INSTALL_ROOT}
+  while (( "$#" )); do
+    local file="$1"
+    local package="${BUILD_DIR}/debian-packages/${file##*/}"
+    shift
+    local sha256sum="$1"
+    shift
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from package list"
+      exit 1
+    fi
+
+    Banner "Installing ${file}"
+    DownloadOrCopy ${APT_REPO}/pool/${file} ${package}
+    if [ ! -s "${package}" ] ; then
+      echo
+      echo "ERROR: bad package ${package}"
+      exit 1
+    fi
+    echo "${sha256sum}  ${package}" | sha256sum --quiet -c
+
+    SubBanner "Extracting to ${INSTALL_ROOT}"
+    dpkg --fsys-tarfile ${package}\
+      | tar -xf - --exclude=./usr/share -C ${INSTALL_ROOT}
+  done
+}
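+
+# Arguments to InstallIntoSysroot alternate <pool-relative .deb path> <sha256>,
+# as produced by GeneratePackageList; an illustrative (made-up) pair would be:
+#   main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb <64-character hex checksum>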
+
+
+CleanupJailSymlinks() {
+  Banner "Jail symlink cleanup"
+
+  SAVEDPWD=$(pwd)
+  cd ${INSTALL_ROOT}
+  local libdirs="lib usr/lib"
+  if [ "${ARCH}" != "MIPS" ]; then
+    libdirs+=" lib64"
+  fi
+  find $libdirs -type l -printf '%p %l\n' | while read link target; do
+    # skip links with non-absolute paths
+    echo "${target}" | grep -qs ^/ || continue
+    echo "${link}: ${target}"
+    case "${link}" in
+      usr/lib/gcc/x86_64-linux-gnu/4.*/* | usr/lib/gcc/i486-linux-gnu/4.*/* | \
+      usr/lib/gcc/arm-linux-gnueabihf/4.*/* | \
+      usr/lib/gcc/mipsel-linux-gnu/4.*/*)
+        # Relativize the symlink.
+        ln -snfv "../../../../..${target}" "${link}"
+        ;;
+      usr/lib/x86_64-linux-gnu/* | usr/lib/i386-linux-gnu/* | \
+      usr/lib/arm-linux-gnueabihf/* | usr/lib/mipsel-linux-gnu/* )
+        # Relativize the symlink.
+        ln -snfv "../../..${target}" "${link}"
+        ;;
+      usr/lib/*)
+        # Relativize the symlink.
+        ln -snfv "../..${target}" "${link}"
+        ;;
+      lib64/* | lib/*)
+        # Relativize the symlink.
+        ln -snfv "..${target}" "${link}"
+        ;;
+    esac
+  done
+
+  find $libdirs -type l -printf '%p %l\n' | while read link target; do
+    # Make sure we catch new bad links.
+    if [ ! -r "${link}" ]; then
+      echo "ERROR: FOUND BAD LINK ${link}"
+      ls -l ${link}
+      exit 1
+    fi
+  done
+  cd "$SAVEDPWD"
+}
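+
+# Illustrative example of the relativization above: a symlink extracted as
+#   usr/lib/x86_64-linux-gnu/libz.so -> /lib/x86_64-linux-gnu/libz.so.1
+# is rewritten to
+#   usr/lib/x86_64-linux-gnu/libz.so -> ../../../lib/x86_64-linux-gnu/libz.so.1
+# so that it remains valid inside the relocated sysroot.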
+
+#@
+#@ BuildSysrootAmd64
+#@
+#@    Build everything and package it
+BuildSysrootAmd64() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_amd64"
+  GeneratePackageListAmd64 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_AMD64"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesAmd64
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootI386
+#@
+#@    Build everything and package it
+BuildSysrootI386() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_i386"
+  GeneratePackageListI386 "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_I386"
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesI386
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootARM
+#@
+#@    Build everything and package it
+BuildSysrootARM() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_arm"
+  GeneratePackageListARM "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_ARM"
+  APT_REPO=${APT_REPO_ARM:=$APT_REPO}
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesARM
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootMips
+#@
+#@    Build everything and package it
+BuildSysrootMips() {
+  ClearInstallDir
+  local package_file="$BUILD_DIR/package_with_sha256sum_arm"
+  GeneratePackageListMips "$package_file"
+  local files_and_sha256sums="$(cat ${package_file})"
+  StripChecksumsFromPackageList "$package_file"
+  VerifyPackageFilesMatch "$package_file" "$DEBIAN_DEP_LIST_MIPS"
+  APT_REPO=${APT_REPO_MIPS:=$APT_REPO}
+  InstallIntoSysroot ${files_and_sha256sums}
+  CleanupJailSymlinks
+  HacksAndPatchesMips
+  CreateTarBall
+}
+
+#@
+#@ BuildSysrootAll
+#@
+#@    Build sysroot images for all architectures
+BuildSysrootAll() {
+  RunCommand BuildSysrootAmd64
+  RunCommand BuildSysrootI386
+  RunCommand BuildSysrootARM
+  RunCommand BuildSysrootMips
+}
+
+UploadSysroot() {
+  local rev=$1
+  if [ -z "${rev}" ]; then
+    echo "Please specify a revision to upload at."
+    exit 1
+  fi
+  set -x
+  gsutil cp -a public-read "${TARBALL}" \
+      "gs://chrome-linux-sysroot/toolchain/$rev/"
+  set +x
+}
+
+#@
+#@ UploadSysrootAmd64 <revision>
+#@
+UploadSysrootAmd64() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootI386 <revision>
+#@
+UploadSysrootI386() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootARM <revision>
+#@
+UploadSysrootARM() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootMips <revision>
+#@
+UploadSysrootMips() {
+  UploadSysroot "$@"
+}
+
+#@
+#@ UploadSysrootAll <revision>
+#@
+#@    Upload sysroot images for all architectures
+UploadSysrootAll() {
+  RunCommand UploadSysrootAmd64 "$@"
+  RunCommand UploadSysrootI386 "$@"
+  RunCommand UploadSysrootARM "$@"
+  RunCommand UploadSysrootMips "$@"
+}
+
+#
+# CheckForDebianGPGKeyring
+#
+#     Make sure the Debian GPG keys exist. Otherwise print a helpful message.
+#
+CheckForDebianGPGKeyring() {
+  if [ ! -e "$KEYRING_FILE" ]; then
+    echo "Debian GPG keys missing. Install the debian-archive-keyring package."
+    exit 1
+  fi
+}
+
+#
+# VerifyPackageListing
+#
+#     Verifies the downloaded Packages.bz2 file has the right checksums.
+#
+VerifyPackageListing() {
+  local file_path=$1
+  local output_file=$2
+  local release_file="${BUILD_DIR}/${RELEASE_FILE}"
+  local release_file_gpg="${BUILD_DIR}/${RELEASE_FILE_GPG}"
+  local tmp_keyring_file="${BUILD_DIR}/keyring.gpg"
+
+  CheckForDebianGPGKeyring
+
+  DownloadOrCopy ${RELEASE_LIST} ${release_file}
+  DownloadOrCopy ${RELEASE_LIST_GPG} ${release_file_gpg}
+  echo "Verifying: ${release_file} with ${release_file_gpg}"
+  cp "${KEYRING_FILE}" "${tmp_keyring_file}"
+  gpg --primary-keyring "${tmp_keyring_file}" --recv-keys 2B90D010
+  gpgv --keyring "${tmp_keyring_file}" "${release_file_gpg}" "${release_file}"
+
+  echo "Verifying: ${output_file}"
+  local checksums=$(grep ${file_path} ${release_file} | cut -d " " -f 2)
+  local sha256sum=$(echo ${checksums} | cut -d " " -f 3)
+
+  if [ "${#sha256sum}" -ne "64" ]; then
+    echo "Bad sha256sum from ${RELEASE_LIST}"
+    exit 1
+  fi
+
+  echo "${sha256sum}  ${output_file}" | sha256sum --quiet -c
+}
+
+#
+# GeneratePackageList
+#
+#     Looks up package names in ${BUILD_DIR}/Packages and writes a list of URLs
+#     to the output file.
+#
+GeneratePackageList() {
+  local input_file="$1"
+  local output_file="$2"
+  echo "Updating: ${output_file} from ${input_file}"
+  /bin/rm -f "${output_file}"
+  shift
+  shift
+  for pkg in $@ ; do
+    local pkg_full=$(grep -A 1 " ${pkg}\$" "$input_file" | \
+      egrep -o "pool/.*")
+    if [ -z "${pkg_full}" ]; then
+        echo "ERROR: missing package: $pkg"
+        exit 1
+    fi
+    local pkg_nopool=$(echo "$pkg_full" | sed "s/^pool\///")
+    local sha256sum=$(grep -A 4 " ${pkg}\$" "$input_file" | \
+      grep ^SHA256: | sed 's/^SHA256: //')
+    if [ "${#sha256sum}" -ne "64" ]; then
+      echo "Bad sha256sum from Packages"
+      exit 1
+    fi
+    echo $pkg_nopool $sha256sum >> "$output_file"
+  done
+  # sort -o does an in-place sort of this file
+  sort "$output_file" -o "$output_file"
+}
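+
+# Each line written by GeneratePackageList has the form (values illustrative):
+#   main/a/alsa-lib/libasound2_1.0.25-4_amd64.deb <sha256 checksum>
+# i.e. the pool-relative .deb path followed by its SHA256 checksum.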
+
+#@
+#@ UpdatePackageListsAmd64
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For amd64)
+UpdatePackageListsAmd64() {
+  GeneratePackageListAmd64 "$DEBIAN_DEP_LIST_AMD64"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_AMD64"
+}
+
+#@
+#@ UpdatePackageListsI386
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For i386)
+UpdatePackageListsI386() {
+  GeneratePackageListI386 "$DEBIAN_DEP_LIST_I386"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_I386"
+}
+
+#@
+#@ UpdatePackageListsARM
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For arm)
+UpdatePackageListsARM() {
+  GeneratePackageListARM "$DEBIAN_DEP_LIST_ARM"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_ARM"
+}
+
+#@
+#@ UpdatePackageListsMips
+#@
+#@     Regenerate the package lists such that they contain an up-to-date
+#@     list of URLs within the Debian archive. (For mipsel)
+UpdatePackageListsMips() {
+  GeneratePackageListMips "$DEBIAN_DEP_LIST_MIPS"
+  StripChecksumsFromPackageList "$DEBIAN_DEP_LIST_MIPS"
+}
+
+#@
+#@ UpdatePackageListsAll
+#@
+#@    Regenerate the package lists for all architectures.
+UpdatePackageListsAll() {
+  RunCommand UpdatePackageListsAmd64
+  RunCommand UpdatePackageListsI386
+  RunCommand UpdatePackageListsARM
+  RunCommand UpdatePackageListsMips
+}
+
+RunCommand() {
+  SetEnvironmentVariables "$1"
+  SanityCheck
+  "$@"
+}
+
+if [ $# -eq 0 ] ; then
+  echo "ERROR: you must specify a mode on the commandline"
+  echo
+  Usage
+  exit 1
+elif [ "$(type -t $1)" != "function" ]; then
+  echo "ERROR: unknown function '$1'." >&2
+  echo "For help, try:"
+  echo "    $0 help"
+  exit 1
+else
+  ChangeDirectory
+  if echo $1 | grep -qs "All$"; then
+    "$@"
+  else
+    RunCommand "$@"
+  fi
+fi
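+
+# Typical invocations (illustrative only; run one of the flavour scripts that
+# source this file):
+#   ./sysroot-creator-wheezy.sh UpdatePackageListsAmd64
+#   ./sysroot-creator-wheezy.sh BuildSysrootAmd64
+#   ./sysroot-creator-wheezy.sh UploadSysrootAmd64 <revision>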
diff --git a/build/linux/system.gyp b/build/linux/system.gyp
new file mode 100644
index 0000000..ab20951
--- /dev/null
+++ b/build/linux/system.gyp
@@ -0,0 +1,1222 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # If any of the linux_link_FOO below are set to 1, then the corresponding
+    # target will be linked against the FOO library (either dynamically or
+    # statically, depending on the pkg-config files), as opposed to loading the
+    # FOO library dynamically with dlopen.
+    'linux_link_libgps%': 0,
+    'linux_link_libpci%': 0,
+    'linux_link_libspeechd%': 0,
+    'linux_link_libbrlapi%': 0,
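+
+    # Illustrative note (an assumption, not part of the upstream comment): to
+    # link one of these libraries directly, a developer could, for example, add
+    # linux_link_libpci=1 to GYP_DEFINES before regenerating the build files;
+    # the matching loader below is then generated with --link-directly=1.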
+
+    # Used below for the various libraries. In this scope for sharing with GN.
+    'libbrlapi_functions': [
+      'brlapi_getHandleSize',
+      'brlapi_error_location',
+      'brlapi_strerror',
+      'brlapi__acceptKeys',
+      'brlapi__openConnection',
+      'brlapi__closeConnection',
+      'brlapi__getDisplaySize',
+      'brlapi__enterTtyModeWithPath',
+      'brlapi__leaveTtyMode',
+      'brlapi__writeDots',
+      'brlapi__readKey',
+    ],
+    'libgio_functions': [
+      'g_settings_new',
+      'g_settings_get_child',
+      'g_settings_get_string',
+      'g_settings_get_boolean',
+      'g_settings_get_int',
+      'g_settings_get_strv',
+      'g_settings_list_schemas',
+    ],
+    'libpci_functions': [
+      'pci_alloc',
+      'pci_init',
+      'pci_cleanup',
+      'pci_scan_bus',
+      'pci_fill_info',
+      'pci_lookup_name',
+    ],
+    'libudev_functions': [
+      'udev_device_get_action',
+      'udev_device_get_devnode',
+      'udev_device_get_parent',
+      'udev_device_get_parent_with_subsystem_devtype',
+      'udev_device_get_property_value',
+      'udev_device_get_subsystem',
+      'udev_device_get_sysattr_value',
+      'udev_device_get_sysname',
+      'udev_device_get_syspath',
+      'udev_device_new_from_devnum',
+      'udev_device_new_from_subsystem_sysname',
+      'udev_device_new_from_syspath',
+      'udev_device_unref',
+      'udev_enumerate_add_match_subsystem',
+      'udev_enumerate_get_list_entry',
+      'udev_enumerate_new',
+      'udev_enumerate_scan_devices',
+      'udev_enumerate_unref',
+      'udev_list_entry_get_next',
+      'udev_list_entry_get_name',
+      'udev_monitor_enable_receiving',
+      'udev_monitor_filter_add_match_subsystem_devtype',
+      'udev_monitor_get_fd',
+      'udev_monitor_new_from_netlink',
+      'udev_monitor_receive_device',
+      'udev_monitor_unref',
+      'udev_new',
+      'udev_set_log_fn',
+      'udev_set_log_priority',
+      'udev_unref',
+    ],
+  },
+  'conditions': [
+    [ 'chromeos==0 and use_ozone==0', {
+      # Hide GTK and related dependencies on Chrome OS and Ozone so they do
+      # not get pulled back in; GTK must not be used on those platforms.
+      'targets': [
+        {
+          'target_name': 'atk',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags atk)',
+                ],
+                'defines': [
+                  'ATK_LIB_DIR="<!@(<(pkg-config) --variable=libdir atk)"',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other atk)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l atk)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gdk',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gdk-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gdk-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gdk-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtk',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'variables': {
+            # gtk requires gmodule, but it does not list it as a dependency
+            # in some misconfigured systems.
+            'gtk_packages': 'gmodule-2.0 gtk+-2.0 gthread-2.0',
+          },
+          'conditions': [
+            ['_toolset=="target"', {
+              'all_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }, {
+              'all_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags <(gtk_packages))',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other <(gtk_packages))',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l <(gtk_packages))',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'gtkprint',
+          'type': 'none',
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags gtk+-unix-print-2.0)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gtk+-unix-print-2.0)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gtk+-unix-print-2.0)',
+                ],
+              },
+            }],
+          ],
+        },
+      ],  # targets
+    }],
+    [ 'use_x11==1 or ozone_platform_ozonex==1', {
+      # Hide X11 and related dependencies when use_x11=0
+      'targets': [
+        {
+          'target_name': 'x11',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags x11)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other x11 xi)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l x11 xi)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags x11)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other x11 xi)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l x11 xi)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'xcursor',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xcursor)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xcursor)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xcursor)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xcomposite',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xcomposite)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xcomposite)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xcomposite)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xdamage',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xdamage)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xdamage)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xdamage)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xext',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xext)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xext)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xext)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xfixes',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xfixes)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xfixes)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xfixes)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xi',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xi)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xi)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xi)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xrandr',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags xrandr)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other xrandr)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l xrandr)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags xrandr)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other xrandr)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l xrandr)',
+                ],
+              },
+            }],
+          ],
+        },
+        {
+          'target_name': 'xrender',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xrender)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xrender)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xrender)',
+            ],
+          },
+        },
+        {
+          'target_name': 'xtst',
+          'type': 'none',
+          'toolsets': ['host', 'target'],
+          'conditions': [
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags xtst)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other xtst)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l xtst)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags xtst)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other xtst)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l xtst)',
+                ],
+              },
+            }]
+          ]
+        }
+      ],  # targets
+    }],
+    ['use_x11==1 and chromeos==0', {
+      'targets': [
+        {
+          'target_name': 'xscrnsaver',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xscrnsaver)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xscrnsaver)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xscrnsaver)',
+            ],
+          },
+        },
+      ],  # targets
+    }],
+    ['use_evdev_gestures==1', {
+      'targets': [
+        {
+          'target_name': 'libevdev-cros',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libevdev-cros)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libevdev-cros)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libevdev-cros)',
+            ],
+          },
+        },
+        {
+          'target_name': 'libgestures',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libgestures)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other libgestures)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libgestures)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['use_xkbcommon==1', {
+      'targets': [
+        {
+          'target_name': 'xkbcommon',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags xkbcommon)'
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other xkbcommon)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l xkbcommon)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['ozone_platform_gbm==1', {
+      'targets': [
+        {
+          'target_name': 'gbm',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gbm)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gbm)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gbm)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['ozone_platform_drm==1 or ozone_platform_gbm==1', {
+      'targets': [
+        {
+          'target_name': 'libdrm',
+          'type': 'none',
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags libdrm)',
+            ],
+          },
+          'link_settings': {
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l libdrm)',
+            ],
+          },
+        },
+      ],
+    }],
+    ['use_udev==1', {
+      'targets': [
+        {
+          'target_name': 'udev',
+          'type': 'static_library',
+          'conditions': [
+            ['_toolset=="target"', {
+              'include_dirs': [
+                '../..',
+              ],
+              'hard_dependency': 1,
+              'actions': [
+                {
+                  'variables': {
+                    'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libudev0.h',
+                    'output_cc': '<(INTERMEDIATE_DIR)/libudev0_loader.cc',
+                    'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+                  },
+                  'action_name': 'generate_libudev0_loader',
+                  'inputs': [
+                    '<(generator)',
+                  ],
+                  'outputs': [
+                    '<(output_h)',
+                    '<(output_cc)',
+                  ],
+                  'action': ['python',
+                             '<(generator)',
+                             '--name', 'LibUdev0Loader',
+                             '--output-h', '<(output_h)',
+                             '--output-cc', '<(output_cc)',
+                             '--header', '"third_party/libudev/libudev0.h"',
+                             '--link-directly=0',
+                             '<@(libudev_functions)',
+                  ],
+                  'message': 'Generating libudev0 library loader',
+                  'process_outputs_as_sources': 1,
+                },
+                {
+                  'variables': {
+                    'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libudev1.h',
+                    'output_cc': '<(INTERMEDIATE_DIR)/libudev1_loader.cc',
+                    'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+                  },
+                  'action_name': 'generate_libudev1_loader',
+                  'inputs': [
+                    '<(generator)',
+                  ],
+                  'outputs': [
+                    '<(output_h)',
+                    '<(output_cc)',
+                  ],
+                  'action': ['python',
+                             '<(generator)',
+                             '--name', 'LibUdev1Loader',
+                             '--output-h', '<(output_h)',
+                             '--output-cc', '<(output_cc)',
+                             '--header', '"third_party/libudev/libudev1.h"',
+                             '--link-directly=0',
+                             '<@(libudev_functions)',
+                  ],
+                  'message': 'Generating libudev1 library loader',
+                  'process_outputs_as_sources': 1,
+                },
+              ],
+            }],
+          ],
+        },
+      ],
+    }],
+    ['use_libpci==1', {
+      'targets': [
+        {
+          'target_name': 'libpci',
+          'type': 'static_library',
+          'cflags': [
+            '<!@(<(pkg-config) --cflags libpci)',
+          ],
+          'direct_dependent_settings': {
+            'include_dirs': [
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+            'conditions': [
+              ['linux_link_libpci==1', {
+                'link_settings': {
+                  'ldflags': [
+                    '<!@(<(pkg-config) --libs-only-L --libs-only-other libpci)',
+                  ],
+                  'libraries': [
+                    '<!@(<(pkg-config) --libs-only-l libpci)',
+                  ],
+                }
+              }],
+            ],
+          },
+          'include_dirs': [
+            '../..',
+          ],
+          'hard_dependency': 1,
+          'actions': [
+            {
+              'variables': {
+                'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libpci.h',
+                'output_cc': '<(INTERMEDIATE_DIR)/libpci_loader.cc',
+                'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+              },
+              'action_name': 'generate_libpci_loader',
+              'inputs': [
+                '<(generator)',
+              ],
+              'outputs': [
+                '<(output_h)',
+                '<(output_cc)',
+              ],
+              'action': ['python',
+                         '<(generator)',
+                         '--name', 'LibPciLoader',
+                         '--output-h', '<(output_h)',
+                         '--output-cc', '<(output_cc)',
+                         '--header', '<pci/pci.h>',
+                         # TODO(phajdan.jr): Report problem to pciutils project
+                         # and get it fixed so that we don't need --use-extern-c.
+                         '--use-extern-c',
+                         '--link-directly=<(linux_link_libpci)',
+                         '<@(libpci_functions)',
+              ],
+              'message': 'Generating libpci library loader',
+              'process_outputs_as_sources': 1,
+            },
+          ],
+        },
+      ],
+    }],
+  ],  # conditions
+  'targets': [
+    {
+      'target_name': 'dbus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(<(pkg-config) --cflags dbus-1)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(<(pkg-config) --libs-only-L --libs-only-other dbus-1)',
+        ],
+        'libraries': [
+          '<!@(<(pkg-config) --libs-only-l dbus-1)',
+        ],
+      },
+    },
+    {
+      'target_name': 'fontconfig',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_system_fontconfig==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags fontconfig)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other fontconfig)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l fontconfig)',
+                ],
+              },
+            }, {  # use_system_fontconfig==0
+              'dependencies': [
+                '../../third_party/fontconfig/fontconfig.gyp:fontconfig',
+              ],
+              'export_dependent_settings' : [
+                '../../third_party/fontconfig/fontconfig.gyp:fontconfig',
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'freetype2',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags freetype2)',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other freetype2)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l freetype2)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gconf',
+      'type': 'none',
+      'conditions': [
+        ['use_gconf==1 and _toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gconf-2.0)',
+            ],
+            'defines': [
+              'USE_GCONF',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gconf-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gconf-2.0)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gio',
+      'type': 'static_library',
+      'conditions': [
+        ['use_gio==1 and _toolset=="target"', {
+          'cflags': [
+            '<!@(<(pkg-config) --cflags gio-2.0)',
+          ],
+          'variables': {
+            'gio_warning_define': [
+              # glib >=2.40 deprecates g_settings_list_schemas in favor of
+              # g_settings_schema_source_list_schemas. This function is not
+              # available on earlier versions that we still need to support
+              # (specifically, 2.32), so disable the warning.
+              # TODO(mgiuca): Remove this suppression (and variable) when we
+              # drop support for Ubuntu 13.10 (saucy) and earlier. Update the
+              # code to use g_settings_schema_source_list_schemas instead.
+              'GLIB_DISABLE_DEPRECATION_WARNINGS',
+            ],
+          },
+          'defines': [
+            '<(gio_warning_define)',
+          ],
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gio-2.0)',
+            ],
+            'defines': [
+              'USE_GIO',
+              '<(gio_warning_define)',
+            ],
+            'include_dirs': [
+              '<(SHARED_INTERMEDIATE_DIR)',
+            ],
+          },
+          'include_dirs': [
+            '../..',
+          ],
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gio-2.0)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gio-2.0)',
+            ],
+            'conditions': [
+              ['linux_link_gsettings==0 and OS=="linux"', {
+                'libraries': [
+                  '-ldl',
+                ],
+              }],
+            ],
+          },
+          'hard_dependency': 1,
+          'actions': [
+            {
+              'variables': {
+                'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libgio.h',
+                'output_cc': '<(INTERMEDIATE_DIR)/libgio_loader.cc',
+                'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+              },
+              'action_name': 'generate_libgio_loader',
+              'inputs': [
+                '<(generator)',
+              ],
+              'outputs': [
+                '<(output_h)',
+                '<(output_cc)',
+              ],
+              'action': ['python',
+                         '<(generator)',
+                         '--name', 'LibGioLoader',
+                         '--output-h', '<(output_h)',
+                         '--output-cc', '<(output_cc)',
+                         '--header', '<gio/gio.h>',
+                         '--link-directly=<(linux_link_gsettings)',
+                         '<@(libgio_functions)',
+              ],
+              'message': 'Generating libgio library loader',
+              'process_outputs_as_sources': 1,
+            },
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'glib',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'glib_packages': 'glib-2.0 gmodule-2.0 gobject-2.0 gthread-2.0',
+      },
+      'conditions': [
+        ['_toolset=="target"', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l <(glib_packages))',
+            ],
+          },
+        }, {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(pkg-config --cflags <(glib_packages))',
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(pkg-config --libs-only-L --libs-only-other <(glib_packages))',
+            ],
+            'libraries': [
+              '<!@(pkg-config --libs-only-l <(glib_packages))',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'gnome_keyring',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'conditions': [
+            ['linux_link_gnome_keyring!=0', {
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+                ],
+              },
+            }, {
+              'conditions': [
+                ['OS=="linux"', {
+                 'link_settings': {
+                   'libraries': [
+                     '-ldl',
+                   ],
+                 },
+                }],
+              ],
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      # The unit tests use a few convenience functions from the GNOME
+      # Keyring library directly. We ignore linux_link_gnome_keyring and
+      # link directly in this version of the target to allow this.
+      # *** Do not use this target in the main binary! ***
+      'target_name': 'gnome_keyring_direct',
+      'type': 'none',
+      'conditions': [
+        ['use_gnome_keyring==1', {
+          'direct_dependent_settings': {
+            'cflags': [
+              '<!@(<(pkg-config) --cflags gnome-keyring-1)',
+            ],
+            'defines': [
+              'USE_GNOME_KEYRING',
+            ],
+            'conditions': [
+              ['linux_link_gnome_keyring==0', {
+                'defines': ['DLOPEN_GNOME_KEYRING'],
+              }],
+            ],
+          },
+          'link_settings': {
+            'ldflags': [
+              '<!@(<(pkg-config) --libs-only-L --libs-only-other gnome-keyring-1)',
+            ],
+            'libraries': [
+              '<!@(<(pkg-config) --libs-only-l gnome-keyring-1)',
+            ],
+          },
+        }],
+      ],
+    },
+    {
+      'target_name': 'libbrlapi',
+      'type': 'static_library',
+      'all_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'defines': [
+          'USE_BRLAPI',
+        ],
+        'conditions': [
+          ['linux_link_libbrlapi==1', {
+            'link_settings': {
+              'libraries': [
+                '-lbrlapi',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libbrlapi.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libbrlapi_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+          },
+          'action_name': 'generate_brlapi_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibBrlapiLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<brlapi.h>',
+                     '--link-directly=<(linux_link_libbrlapi)',
+                     '<@(libbrlapi_functions)',
+          ],
+          'message': 'Generating libbrlapi library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'libcap',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lcap',
+        ],
+      },
+    },
+    {
+      'target_name': 'libresolv',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lresolv',
+        ],
+      },
+    },
+    {
+      # GN version: //third_party/speech-dispatcher
+      'target_name': 'libspeechd',
+      'type': 'static_library',
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '<(SHARED_INTERMEDIATE_DIR)',
+        ],
+        'conditions': [
+          ['linux_link_libspeechd==1', {
+            'link_settings': {
+              'libraries': [
+                '-lspeechd',
+              ],
+            }
+          }],
+        ],
+      },
+      'include_dirs': [
+        '../..',
+      ],
+      'hard_dependency': 1,
+      'actions': [
+        {
+          'variables': {
+            'output_h': '<(SHARED_INTERMEDIATE_DIR)/library_loaders/libspeechd.h',
+            'output_cc': '<(INTERMEDIATE_DIR)/libspeechd_loader.cc',
+            'generator': '../../tools/generate_library_loader/generate_library_loader.py',
+
+            # speech-dispatcher >= 0.8 installs libspeechd.h into
+            # speech-dispatcher/libspeechd.h, whereas speech-dispatcher < 0.8
+            # puts libspeechd.h in the top-level include directory.
+            # Since we need to support both cases for now, we ship a copy of
+            # libspeechd.h in third_party/speech-dispatcher. If the user
+            # prefers to use the system speech-dispatcher headers directly, the
+            # `libspeechd_h_prefix' variable can be passed to gyp with a value
+            # such as "speech-dispatcher/" that will be prepended to
+            # "libspeechd.h" in the #include directive.
+            # TODO(phaldan.jr): Once we do not need to support
+            # speech-dispatcher < 0.8 we can get rid of all this (including
+            # third_party/speech-dispatcher) and just include
+            # speech-dispatcher/libspeechd.h unconditionally.
+            'libspeechd_h_prefix%': '',
+          },
+          'action_name': 'generate_libspeechd_loader',
+          'inputs': [
+            '<(generator)',
+          ],
+          'outputs': [
+            '<(output_h)',
+            '<(output_cc)',
+          ],
+          'action': ['python',
+                     '<(generator)',
+                     '--name', 'LibSpeechdLoader',
+                     '--output-h', '<(output_h)',
+                     '--output-cc', '<(output_cc)',
+                     '--header', '<<(libspeechd_h_prefix)libspeechd.h>',
+                     '--bundled-header',
+                     '"third_party/speech-dispatcher/libspeechd.h"',
+                     '--link-directly=<(linux_link_libspeechd)',
+                     'spd_open',
+                     'spd_say',
+                     'spd_stop',
+                     'spd_close',
+                     'spd_pause',
+                     'spd_resume',
+                     'spd_set_notification_on',
+                     'spd_set_voice_rate',
+                     'spd_set_voice_pitch',
+                     'spd_list_synthesis_voices',
+                     'spd_set_synthesis_voice',
+                     'spd_list_modules',
+                     'spd_set_output_module',
+                     'spd_set_language',
+          ],
+          'message': 'Generating libspeechd library loader',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    },
+    {
+      'target_name': 'pangocairo',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'conditions': [
+        ['use_pango==1 and use_cairo==1', {
+          'conditions': [
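+            # For the target toolset <(pkg-config) is used, which may be a
+            # sysroot-aware wrapper when cross-compiling; the host toolset
+            # always calls the plain system pkg-config.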
+            ['_toolset=="target"', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags pangocairo pangoft2)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other pangocairo pangoft2)',
+                ],
+                'libraries': [
+                  '<!@(<(pkg-config) --libs-only-l pangocairo pangoft2)',
+                ],
+              },
+            }, {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(pkg-config --cflags pangocairo pangoft2)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(pkg-config --libs-only-L --libs-only-other pangocairo pangoft2)',
+                ],
+                'libraries': [
+                  '<!@(pkg-config --libs-only-l pangocairo pangoft2)',
+                ],
+              },
+            }],
+          ],
+        }],
+      ],
+    },
+    {
+      'target_name': 'ssl',
+      'type': 'none',
+      'conditions': [
+        ['_toolset=="target"', {
+          'conditions': [
+            ['use_openssl==1', {
+              'dependencies': [
+                '../../third_party/boringssl/boringssl.gyp:boringssl',
+              ],
+            }, {
+              'dependencies': [
+                '../../net/third_party/nss/ssl.gyp:libssl',
+              ],
+              'direct_dependent_settings': {
+                'include_dirs+': [
+                  # We need our local copies of the libssl3 headers to come
+                  # before other includes, as we are shadowing system headers.
+                  '<(DEPTH)/net/third_party/nss/ssl',
+                ],
+              },
+            }],
+            # Link in the system NSS if it is used for either the internal
+            # crypto library (use_openssl==0) or platform certificate
+            # library (use_nss_certs==1).
+            ['use_openssl==0 or use_nss_certs==1', {
+              'direct_dependent_settings': {
+                'cflags': [
+                  '<!@(<(pkg-config) --cflags nss)',
+                ],
+              },
+              'link_settings': {
+                'ldflags': [
+                  '<!@(<(pkg-config) --libs-only-L --libs-only-other nss)',
+                ],
+                'libraries': [
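+                  # The sed invocation strips -lssl3: Chromium links its own
+                  # SSL implementation (see the dependencies above) instead of
+                  # the system libssl3.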
+                  '<!@(<(pkg-config) --libs-only-l nss | sed -e "s/-lssl3//")',
+                ],
+              },
+              'conditions': [
+                ['clang==1', {
+                  'direct_dependent_settings': {
+                    'cflags': [
+                      # There is a broken header guard in /usr/include/nss/secmod.h:
+                      # https://bugzilla.mozilla.org/show_bug.cgi?id=884072
+                      '-Wno-header-guard',
+                    ],
+                  },
+                }],
+              ],
+            }],
+          ]
+        }],
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/README b/build/linux/unbundle/README
new file mode 100644
index 0000000..d1b2a96
--- /dev/null
+++ b/build/linux/unbundle/README
@@ -0,0 +1,44 @@
+This directory contains files that make it possible to use system libraries.
+
+For more info please read the following:
+
+ - https://fedoraproject.org/wiki/Packaging:No_Bundled_Libraries
+ - https://wiki.gentoo.org/wiki/Why_not_bundle_dependencies
+ - http://www.debian.org/doc/debian-policy/ch-source.html#s-embeddedfiles
+
+For more Chromium-specific context please read
+http://spot.livejournal.com/312320.html .
+
+This directory is provided in the source tree to follow the above guidelines.
+It is a compromise: it takes into account Chromium developers, who want to
+avoid the perceived burden of more conditional code in gyp, and the
+expectations of the Open Source community, where using system-provided
+libraries is the norm.
+
+Usage:
+
+1. remove_bundled_libraries.py <preserved-directories>
+
+   For example: remove_bundled_libraries.py third_party/mesa
+
+   The script scans the sources looking for third_party directories.
+   Everything that is not explicitly preserved is removed (except for
+   gyp files), and the script fails if any directory passed on the
+   command line does not exist (to ensure the list is kept up to date).
+
+   This is intended to be used on sources extracted from a tarball,
+   not a repository.
+
+   NOTE: by default this will not remove anything (for safety). Pass the
+   --do-remove flag to actually remove files.
+
+2. replace_gyp_files.py <gyp-flags>
+
+   For example: replace_gyp_files.py -Duse_system_harfbuzz=1
+
+   The script ignores flags other than -D for convenience. This makes it
+   possible to put all the options in a variable, e.g. ${myconf}, and
+   execute:
+
+   build/linux/unbundle/replace_gyp_files.py ${myconf}
+   build/gyp_chromium ${myconf}
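+
+   As an end-to-end illustration (the flags and directories below are only
+   examples; a real distro build preserves many more directories), a
+   tarball-based build could run:
+
+   myconf="-Duse_system_harfbuzz=1 -Duse_system_zlib=1"
+   build/linux/unbundle/remove_bundled_libraries.py --do-remove \
+       third_party/mesa
+   build/linux/unbundle/replace_gyp_files.py ${myconf}
+   build/gyp_chromium ${myconf}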
diff --git a/build/linux/unbundle/expat.gyp b/build/linux/unbundle/expat.gyp
new file mode 100644
index 0000000..030fb85
--- /dev/null
+++ b/build/linux/unbundle/expat.gyp
@@ -0,0 +1,17 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'expat',
+      'type': 'none',
+      'link_settings': {
+        'libraries': [
+          '-lexpat',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/ffmpeg.gyp b/build/linux/unbundle/ffmpeg.gyp
new file mode 100644
index 0000000..e3c3723
--- /dev/null
+++ b/build/linux/unbundle/ffmpeg.gyp
@@ -0,0 +1,54 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'ffmpeg',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libavcodec libavformat libavutil)',
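+          # The probes below use tools/compile_test/compile_test.py to build
+          # tiny snippets against the system libavcodec; when a symbol is
+          # missing, the corresponding CHROMIUM_OMIT_*/CHROMIUM_NO_* define is
+          # emitted so the Chromium sources can avoid that API.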
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_CODEC_ID_OPUS; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_OPUS=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_CODEC_ID_VP9; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_CODEC_ID_VP9=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { return AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL; }" '
+              '--on-failure -DCHROMIUM_OMIT_AV_PKT_DATA_MATROSKA_BLOCKADDITIONAL=1)',
+
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+              '--code "#define __STDC_CONSTANT_MACROS\n'
+              '#include <libavcodec/avcodec.h>\n'
+              'int test() { struct AVFrame frame;\n'
+              'return av_frame_get_channels(&frame); }" '
+              '--on-failure -DCHROMIUM_NO_AVFRAME_CHANNELS=1)',
+        ],
+        'defines': [
+          '__STDC_CONSTANT_MACROS',
+          'USE_SYSTEM_FFMPEG',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libavcodec libavformat libavutil)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libavcodec libavformat libavutil)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/flac.gyp b/build/linux/unbundle/flac.gyp
new file mode 100644
index 0000000..9e4a664
--- /dev/null
+++ b/build/linux/unbundle/flac.gyp
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libflac',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'include',
+        'header_filenames': [
+          'FLAC/callback.h',
+          'FLAC/metadata.h',
+          'FLAC/assert.h',
+          'FLAC/export.h',
+          'FLAC/format.h',
+          'FLAC/stream_decoder.h',
+          'FLAC/stream_encoder.h',
+          'FLAC/ordinals.h',
+          'FLAC/all.h',
+        ],
+      },
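+      # shim_headers.gypi shadows the bundled FLAC headers listed above so
+      # that includes resolve to the system copies instead.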
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other flac)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l flac)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/harfbuzz.gyp b/build/linux/unbundle/harfbuzz.gyp
new file mode 100644
index 0000000..3bc1744
--- /dev/null
+++ b/build/linux/unbundle/harfbuzz.gyp
@@ -0,0 +1,47 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    # Check for presence of harfbuzz-icu library, use it if present.
+    'harfbuzz_libraries':
+        '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+        '--code "int main() { return 0; }" '
+        '--run-linker '
+        '--on-success "harfbuzz harfbuzz-icu" '
+        '--on-failure "harfbuzz" '
+        '-- -lharfbuzz-icu)',
+  },
+  'targets': [
+    {
+      'target_name': 'harfbuzz-ng',
+      'type': 'none',
+      'cflags': [
+        '<!@(pkg-config --cflags <(harfbuzz_libraries))',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags <(harfbuzz_libraries))',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other <(harfbuzz_libraries))',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l <(harfbuzz_libraries))',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          'hb.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/icu.gyp b/build/linux/unbundle/icu.gyp
new file mode 100644
index 0000000..16c36df
--- /dev/null
+++ b/build/linux/unbundle/icu.gyp
@@ -0,0 +1,248 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'icudata',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-uc)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-uc)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-uc)',
+        ],
+      },
+    },
+    {
+      'target_name': 'icui18n',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-i18n)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-i18n)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-i18n)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/i18n',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/icu/source/i18n/unicode -iname '*.h' \
+          # -printf "'%p',\n" | \
+          # sed -e 's|third_party/icu/source/i18n/||' | sort -u
+          'unicode/basictz.h',
+          'unicode/bmsearch.h',
+          'unicode/bms.h',
+          'unicode/calendar.h',
+          'unicode/choicfmt.h',
+          'unicode/coleitr.h',
+          'unicode/colldata.h',
+          'unicode/coll.h',
+          'unicode/curramt.h',
+          'unicode/currpinf.h',
+          'unicode/currunit.h',
+          'unicode/datefmt.h',
+          'unicode/dcfmtsym.h',
+          'unicode/decimfmt.h',
+          'unicode/dtfmtsym.h',
+          'unicode/dtitvfmt.h',
+          'unicode/dtitvinf.h',
+          'unicode/dtptngen.h',
+          'unicode/dtrule.h',
+          'unicode/fieldpos.h',
+          'unicode/fmtable.h',
+          'unicode/format.h',
+          'unicode/fpositer.h',
+          'unicode/gregocal.h',
+          'unicode/locdspnm.h',
+          'unicode/measfmt.h',
+          'unicode/measunit.h',
+          'unicode/measure.h',
+          'unicode/msgfmt.h',
+          'unicode/numfmt.h',
+          'unicode/numsys.h',
+          'unicode/plurfmt.h',
+          'unicode/plurrule.h',
+          'unicode/rbnf.h',
+          'unicode/rbtz.h',
+          'unicode/regex.h',
+          'unicode/search.h',
+          'unicode/selfmt.h',
+          'unicode/simpletz.h',
+          'unicode/smpdtfmt.h',
+          'unicode/sortkey.h',
+          'unicode/stsearch.h',
+          'unicode/tblcoll.h',
+          'unicode/timezone.h',
+          'unicode/tmunit.h',
+          'unicode/tmutamt.h',
+          'unicode/tmutfmt.h',
+          'unicode/translit.h',
+          'unicode/tzrule.h',
+          'unicode/tztrans.h',
+          'unicode/ucal.h',
+          'unicode/ucoleitr.h',
+          'unicode/ucol.h',
+          'unicode/ucsdet.h',
+          'unicode/ucurr.h',
+          'unicode/udat.h',
+          'unicode/udatpg.h',
+          'unicode/uldnames.h',
+          'unicode/ulocdata.h',
+          'unicode/umsg.h',
+          'unicode/unirepl.h',
+          'unicode/unum.h',
+          'unicode/uregex.h',
+          'unicode/usearch.h',
+          'unicode/uspoof.h',
+          'unicode/utmscale.h',
+          'unicode/utrans.h',
+          'unicode/vtzone.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+    {
+      'target_name': 'icuuc',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags icu-uc)',
+        ],
+        'defines': [
+          'U_USING_ICU_NAMESPACE=0',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other icu-uc)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l icu-uc)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/common',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/icu/source/common/unicode -iname '*.h' \
+          # -printf "'%p',\n" | \
+          # sed -e 's|third_party/icu/source/common/||' | sort -u
+          'unicode/brkiter.h',
+          'unicode/bytestream.h',
+          'unicode/caniter.h',
+          'unicode/chariter.h',
+          'unicode/dbbi.h',
+          'unicode/docmain.h',
+          'unicode/dtintrv.h',
+          'unicode/errorcode.h',
+          'unicode/icudataver.h',
+          'unicode/icuplug.h',
+          'unicode/idna.h',
+          'unicode/localpointer.h',
+          'unicode/locid.h',
+          'unicode/normalizer2.h',
+          'unicode/normlzr.h',
+          'unicode/pandroid.h',
+          'unicode/parseerr.h',
+          'unicode/parsepos.h',
+          'unicode/pfreebsd.h',
+          'unicode/plinux.h',
+          'unicode/pmac.h',
+          'unicode/popenbsd.h',
+          'unicode/ppalmos.h',
+          'unicode/ptypes.h',
+          'unicode/putil.h',
+          'unicode/pwin32.h',
+          'unicode/rbbi.h',
+          'unicode/rep.h',
+          'unicode/resbund.h',
+          'unicode/schriter.h',
+          'unicode/std_string.h',
+          'unicode/strenum.h',
+          'unicode/stringpiece.h',
+          'unicode/symtable.h',
+          'unicode/ubidi.h',
+          'unicode/ubrk.h',
+          'unicode/ucasemap.h',
+          'unicode/ucat.h',
+          'unicode/uchar.h',
+          'unicode/uchriter.h',
+          'unicode/uclean.h',
+          'unicode/ucnv_cb.h',
+          'unicode/ucnv_err.h',
+          'unicode/ucnv.h',
+          'unicode/ucnvsel.h',
+          'unicode/uconfig.h',
+          'unicode/udata.h',
+          'unicode/udeprctd.h',
+          'unicode/udraft.h',
+          'unicode/uenum.h',
+          'unicode/uidna.h',
+          'unicode/uintrnal.h',
+          'unicode/uiter.h',
+          'unicode/uloc.h',
+          'unicode/umachine.h',
+          'unicode/umisc.h',
+          'unicode/unifilt.h',
+          'unicode/unifunct.h',
+          'unicode/unimatch.h',
+          'unicode/uniset.h',
+          'unicode/unistr.h',
+          'unicode/unorm2.h',
+          'unicode/unorm.h',
+          'unicode/uobject.h',
+          'unicode/uobslete.h',
+          'unicode/urename.h',
+          'unicode/urep.h',
+          'unicode/ures.h',
+          'unicode/uscript.h',
+          'unicode/uset.h',
+          'unicode/usetiter.h',
+          'unicode/ushape.h',
+          'unicode/usprep.h',
+          'unicode/ustring.h',
+          'unicode/usystem.h',
+          'unicode/utext.h',
+          'unicode/utf16.h',
+          'unicode/utf32.h',
+          'unicode/utf8.h',
+          'unicode/utf.h',
+          'unicode/utf_old.h',
+          'unicode/utrace.h',
+          'unicode/utypeinfo.h',
+          'unicode/utypes.h',
+          'unicode/uvernum.h',
+          'unicode/uversion.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/jsoncpp.gyp b/build/linux/unbundle/jsoncpp.gyp
new file mode 100644
index 0000000..c397f64
--- /dev/null
+++ b/build/linux/unbundle/jsoncpp.gyp
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'jsoncpp',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'source/include',
+        'header_filenames': [
+          'json/assertions.h',
+          'json/autolink.h',
+          'json/config.h',
+          'json/features.h',
+          'json/forwards.h',
+          'json/json.h',
+          'json/reader.h',
+          'json/value.h',
+          'json/writer.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'include_dirs': [
+          '/usr/include/jsoncpp',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-ljsoncpp',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libXNVCtrl.gyp b/build/linux/unbundle/libXNVCtrl.gyp
new file mode 100644
index 0000000..f076bdb
--- /dev/null
+++ b/build/linux/unbundle/libXNVCtrl.gyp
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libXNVCtrl',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'NVCtrlLib.h',
+          'NVCtrl.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+            '<!@(pkg-config --cflags libXNVCtrl)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libXNVCtrl)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libXNVCtrl)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libevent.gyp b/build/linux/unbundle/libevent.gyp
new file mode 100644
index 0000000..99d7435
--- /dev/null
+++ b/build/linux/unbundle/libevent.gyp
@@ -0,0 +1,27 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libevent',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'event.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-levent',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libjpeg.gyp b/build/linux/unbundle/libjpeg.gyp
new file mode 100644
index 0000000..f56e7aa
--- /dev/null
+++ b/build/linux/unbundle/libjpeg.gyp
@@ -0,0 +1,29 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libjpeg',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_LIBJPEG',
+        ],
+        'conditions': [
+          ['os_bsd==1', {
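+            # On the BSDs, packages install their headers under
+            # /usr/local/include rather than /usr/include.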
+            'include_dirs': [
+              '/usr/local/include',
+            ],
+          }],
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-ljpeg',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libpng.gyp b/build/linux/unbundle/libpng.gyp
new file mode 100644
index 0000000..d6933fc
--- /dev/null
+++ b/build/linux/unbundle/libpng.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libpng',
+      'type': 'none',
+      'dependencies': [
+        '../zlib/zlib.gyp:zlib',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libpng)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libpng)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libpng)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'png.h',
+          'pngconf.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libusb.gyp b/build/linux/unbundle/libusb.gyp
new file mode 100644
index 0000000..1c18033
--- /dev/null
+++ b/build/linux/unbundle/libusb.gyp
@@ -0,0 +1,34 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libusb',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'src/libusb',
+        'header_filenames': [
+          'libusb.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libusb-1.0)',
+        ],
+        'link_settings': {
+          'ldflags': [
+            '<!@(pkg-config --libs-only-L --libs-only-other libusb-1.0)',
+          ],
+          'libraries': [
+            '<!@(pkg-config --libs-only-l libusb-1.0)',
+          ],
+        },
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libvpx.gyp b/build/linux/unbundle/libvpx.gyp
new file mode 100644
index 0000000..75671c5
--- /dev/null
+++ b/build/linux/unbundle/libvpx.gyp
@@ -0,0 +1,43 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'libvpx',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags vpx)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'source/libvpx',
+        'header_filenames': [
+          'vpx/vp8.h',
+          'vpx/vp8cx.h',
+          'vpx/vp8dx.h',
+          'vpx/vpx_codec.h',
+          'vpx/vpx_codec_impl_bottom.h',
+          'vpx/vpx_codec_impl_top.h',
+          'vpx/vpx_decoder.h',
+          'vpx/vpx_encoder.h',
+          'vpx/vpx_frame_buffer.h',
+          'vpx/vpx_image.h',
+          'vpx/vpx_integer.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other vpx)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l vpx)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libwebp.gyp b/build/linux/unbundle/libwebp.gyp
new file mode 100644
index 0000000..6dbce2e
--- /dev/null
+++ b/build/linux/unbundle/libwebp.gyp
@@ -0,0 +1,28 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libwebp',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'defines': [
+          'ENABLE_WEBP',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          # Check for presence of webpdemux library, use it if present.
+          '<!(python <(DEPTH)/tools/compile_test/compile_test.py '
+          '--code "int main() { return 0; }" '
+          '--run-linker '
+          '--on-success "-lwebp -lwebpdemux" '
+          '--on-failure "-lwebp" '
+          '-- -lwebpdemux)',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/libxml.gyp b/build/linux/unbundle/libxml.gyp
new file mode 100644
index 0000000..bc4f9fc
--- /dev/null
+++ b/build/linux/unbundle/libxml.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libxml',
+      'type': 'static_library',
+      'sources': [
+        'chromium/libxml_utils.h',
+        'chromium/libxml_utils.cc',
+      ],
+      'cflags': [
+        '<!@(pkg-config --cflags libxml-2.0)',
+      ],
+      'defines': [
+        'USE_SYSTEM_LIBXML',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libxml-2.0)',
+        ],
+        'defines': [
+          'USE_SYSTEM_LIBXML',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libxml-2.0)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libxml-2.0)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/libxslt.gyp b/build/linux/unbundle/libxslt.gyp
new file mode 100644
index 0000000..f7f6bb9
--- /dev/null
+++ b/build/linux/unbundle/libxslt.gyp
@@ -0,0 +1,25 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libxslt',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags libxslt)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other libxslt)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l libxslt)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/opus.gyp b/build/linux/unbundle/opus.gyp
new file mode 100644
index 0000000..e8c30ba
--- /dev/null
+++ b/build/linux/unbundle/opus.gyp
@@ -0,0 +1,38 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'opus',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags opus)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src/include',
+        'header_filenames': [
+          'opus_custom.h',
+          'opus_defines.h',
+          'opus_multistream.h',
+          'opus_types.h',
+          'opus.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other opus)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l opus)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/protobuf.gyp b/build/linux/unbundle/protobuf.gyp
new file mode 100644
index 0000000..7bcd992
--- /dev/null
+++ b/build/linux/unbundle/protobuf.gyp
@@ -0,0 +1,149 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'protobuf_lite',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          # Use full protobuf, because vanilla protobuf doesn't have
+          # our custom patch to retain unknown fields in lite mode.
+          '<!@(pkg-config --cflags protobuf)',
+        ],
+        'defines': [
+          'USE_SYSTEM_PROTOBUF',
+
+          # This macro must be defined to suppress the use
+          # of dynamic_cast<>, which requires RTTI.
+          'GOOGLE_PROTOBUF_NO_RTTI',
+          'GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER',
+        ],
+      },
+      'link_settings': {
+        # Use full protobuf, because vanilla protobuf doesn't have
+        # our custom patch to retain unknown fields in lite mode.
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other protobuf)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l protobuf)',
+        ],
+      },
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          # This list can easily be updated using the command below:
+          # find third_party/protobuf/src -iname '*.h' -printf "'%p',\n" | \
+          # sed -e 's|third_party/protobuf/src/||' | sort -u
+          'google/protobuf/compiler/code_generator.h',
+          'google/protobuf/compiler/command_line_interface.h',
+          'google/protobuf/compiler/cpp/cpp_enum_field.h',
+          'google/protobuf/compiler/cpp/cpp_enum.h',
+          'google/protobuf/compiler/cpp/cpp_extension.h',
+          'google/protobuf/compiler/cpp/cpp_field.h',
+          'google/protobuf/compiler/cpp/cpp_file.h',
+          'google/protobuf/compiler/cpp/cpp_generator.h',
+          'google/protobuf/compiler/cpp/cpp_helpers.h',
+          'google/protobuf/compiler/cpp/cpp_message_field.h',
+          'google/protobuf/compiler/cpp/cpp_message.h',
+          'google/protobuf/compiler/cpp/cpp_options.h',
+          'google/protobuf/compiler/cpp/cpp_primitive_field.h',
+          'google/protobuf/compiler/cpp/cpp_service.h',
+          'google/protobuf/compiler/cpp/cpp_string_field.h',
+          'google/protobuf/compiler/cpp/cpp_unittest.h',
+          'google/protobuf/compiler/importer.h',
+          'google/protobuf/compiler/java/java_doc_comment.h',
+          'google/protobuf/compiler/java/java_enum_field.h',
+          'google/protobuf/compiler/java/java_enum.h',
+          'google/protobuf/compiler/java/java_extension.h',
+          'google/protobuf/compiler/java/java_field.h',
+          'google/protobuf/compiler/java/java_file.h',
+          'google/protobuf/compiler/java/java_generator.h',
+          'google/protobuf/compiler/java/java_helpers.h',
+          'google/protobuf/compiler/java/java_message_field.h',
+          'google/protobuf/compiler/java/java_message.h',
+          'google/protobuf/compiler/java/java_primitive_field.h',
+          'google/protobuf/compiler/java/java_service.h',
+          'google/protobuf/compiler/java/java_string_field.h',
+          'google/protobuf/compiler/mock_code_generator.h',
+          'google/protobuf/compiler/package_info.h',
+          'google/protobuf/compiler/parser.h',
+          'google/protobuf/compiler/plugin.h',
+          'google/protobuf/compiler/plugin.pb.h',
+          'google/protobuf/compiler/python/python_generator.h',
+          'google/protobuf/compiler/subprocess.h',
+          'google/protobuf/compiler/zip_writer.h',
+          'google/protobuf/descriptor_database.h',
+          'google/protobuf/descriptor.h',
+          'google/protobuf/descriptor.pb.h',
+          'google/protobuf/dynamic_message.h',
+          'google/protobuf/extension_set.h',
+          'google/protobuf/generated_enum_reflection.h',
+          'google/protobuf/generated_message_reflection.h',
+          'google/protobuf/generated_message_util.h',
+          'google/protobuf/io/coded_stream.h',
+          'google/protobuf/io/coded_stream_inl.h',
+          'google/protobuf/io/gzip_stream.h',
+          'google/protobuf/io/package_info.h',
+          'google/protobuf/io/printer.h',
+          'google/protobuf/io/tokenizer.h',
+          'google/protobuf/io/zero_copy_stream.h',
+          'google/protobuf/io/zero_copy_stream_impl.h',
+          'google/protobuf/io/zero_copy_stream_impl_lite.h',
+          'google/protobuf/message.h',
+          'google/protobuf/message_lite.h',
+          'google/protobuf/package_info.h',
+          'google/protobuf/reflection_ops.h',
+          'google/protobuf/repeated_field.h',
+          'google/protobuf/service.h',
+          'google/protobuf/stubs/atomicops.h',
+          'google/protobuf/stubs/atomicops_internals_arm64_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_arm_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_arm_qnx.h',
+          'google/protobuf/stubs/atomicops_internals_atomicword_compat.h',
+          'google/protobuf/stubs/atomicops_internals_macosx.h',
+          'google/protobuf/stubs/atomicops_internals_mips_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_pnacl.h',
+          'google/protobuf/stubs/atomicops_internals_tsan.h',
+          'google/protobuf/stubs/atomicops_internals_x86_gcc.h',
+          'google/protobuf/stubs/atomicops_internals_x86_msvc.h',
+          'google/protobuf/stubs/common.h',
+          'google/protobuf/stubs/hash.h',
+          'google/protobuf/stubs/map-util.h',
+          'google/protobuf/stubs/once.h',
+          'google/protobuf/stubs/platform_macros.h',
+          'google/protobuf/stubs/stl_util.h',
+          'google/protobuf/stubs/stringprintf.h',
+          'google/protobuf/stubs/strutil.h',
+          'google/protobuf/stubs/substitute.h',
+          'google/protobuf/stubs/template_util.h',
+          'google/protobuf/stubs/type_traits.h',
+          'google/protobuf/testing/file.h',
+          'google/protobuf/testing/googletest.h',
+          'google/protobuf/test_util.h',
+          'google/protobuf/test_util_lite.h',
+          'google/protobuf/text_format.h',
+          'google/protobuf/unknown_field_set.h',
+          'google/protobuf/wire_format.h',
+          'google/protobuf/wire_format_lite.h',
+          'google/protobuf/wire_format_lite_inl.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+    },
+    {
+      'target_name': 'protoc',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+    },
+    {
+      'target_name': 'py_proto',
+      'type': 'none',
+    },
+  ],
+}
diff --git a/build/linux/unbundle/re2.gyp b/build/linux/unbundle/re2.gyp
new file mode 100644
index 0000000..e2e567a
--- /dev/null
+++ b/build/linux/unbundle/re2.gyp
@@ -0,0 +1,37 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 're2',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          're2/filtered_re2.h',
+          're2/re2.h',
+          're2/set.h',
+          're2/stringpiece.h',
+          're2/variadic_function.h',
+        ],
+        'shim_generator_additional_args': [
+          # Chromium copy of re2 is patched to rename POSIX to POSIX_SYNTAX
+          # because of collision issues that break the build.
+          # Upstream refuses to make changes:
+          # http://code.google.com/p/re2/issues/detail?id=73 .
+          '--define', 'POSIX=POSIX_SYNTAX',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lre2',
+        ],
+      },
+    }
+  ],
+}
diff --git a/build/linux/unbundle/remove_bundled_libraries.py b/build/linux/unbundle/remove_bundled_libraries.py
new file mode 100755
index 0000000..69e76f5
--- /dev/null
+++ b/build/linux/unbundle/remove_bundled_libraries.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Removes bundled libraries to make sure they are not used.
+
+See README for more details.
+"""
+
+
+import optparse
+import os.path
+import sys
+
+
+def DoMain(argv):
+  my_dirname = os.path.abspath(os.path.dirname(__file__))
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  if os.path.join(source_tree_root, 'build', 'linux', 'unbundle') != my_dirname:
+    print ('Sanity check failed: please run this script from ' +
+           'build/linux/unbundle directory.')
+    return 1
+
+  parser = optparse.OptionParser()
+  parser.add_option('--do-remove', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  exclusion_used = {}
+  for exclusion in args:
+    exclusion_used[exclusion] = False
+
+  for root, dirs, files in os.walk(source_tree_root, topdown=False):
+    # Only look at paths which contain a "third_party" component
+    # (note that e.g. third_party.png doesn't count).
+    root_relpath = os.path.relpath(root, source_tree_root)
+    if 'third_party' not in root_relpath.split(os.sep):
+      continue
+
+    for f in files:
+      path = os.path.join(root, f)
+      relpath = os.path.relpath(path, source_tree_root)
+
+      excluded = False
+      for exclusion in args:
+        # Require precise exclusions. Find the right-most third_party
+        # component in the relative path and, if there is more than one,
+        # ignore the exclusion when it is completely contained within the
+        # part before that right-most third_party component.
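+        # For example, with the exclusion "third_party/foo",
+        # third_party/foo/bar.c is preserved, while
+        # third_party/foo/third_party/baz/x.c is still removed.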
+        split = relpath.rsplit(os.sep + 'third_party' + os.sep, 1)
+        if len(split) > 1 and split[0].startswith(exclusion):
+          continue
+
+        if relpath.startswith(exclusion):
+          # Multiple exclusions can match the same path. Go through all of them
+          # and mark each one as used.
+          exclusion_used[exclusion] = True
+          excluded = True
+      if excluded:
+        continue
+
+      # Deleting gyp files almost always leads to gyp failures.
+      # These files come from Chromium project, and can be replaced if needed.
+      if f.endswith('.gyp') or f.endswith('.gypi'):
+        continue
+
+      # Deleting .isolate files leads to gyp failures. They are usually
+      # not used by a distro build anyway.
+      # See http://www.chromium.org/developers/testing/isolated-testing
+      # for more info.
+      if f.endswith('.isolate'):
+        continue
+
+      if options.do_remove:
+        # Delete the file - best way to ensure it's not used during build.
+        os.remove(path)
+      else:
+        # By default just print paths that would be removed.
+        print path
+
+  exit_code = 0
+
+  # Fail if exclusion list contains stale entries - this helps keep it
+  # up to date.
+  for exclusion, used in exclusion_used.iteritems():
+    if not used:
+      print '%s does not exist' % exclusion
+      exit_code = 1
+
+  if not options.do_remove:
+    print ('To actually remove files printed above, please pass ' +
+           '--do-remove flag.')
+
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv[1:]))
diff --git a/build/linux/unbundle/replace_gyp_files.py b/build/linux/unbundle/replace_gyp_files.py
new file mode 100755
index 0000000..d06ae41
--- /dev/null
+++ b/build/linux/unbundle/replace_gyp_files.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+Replaces gyp files in tree with files from here that
+make the build use system libraries.
+"""
+
+
+import optparse
+import os.path
+import shutil
+import sys
+
+
+REPLACEMENTS = {
+  'use_system_expat': 'third_party/expat/expat.gyp',
+  'use_system_ffmpeg': 'third_party/ffmpeg/ffmpeg.gyp',
+  'use_system_flac': 'third_party/flac/flac.gyp',
+  'use_system_harfbuzz': 'third_party/harfbuzz-ng/harfbuzz.gyp',
+  'use_system_icu': 'third_party/icu/icu.gyp',
+  'use_system_jsoncpp': 'third_party/jsoncpp/jsoncpp.gyp',
+  'use_system_libevent': 'third_party/libevent/libevent.gyp',
+  'use_system_libjpeg': 'third_party/libjpeg/libjpeg.gyp',
+  'use_system_libpng': 'third_party/libpng/libpng.gyp',
+  'use_system_libusb': 'third_party/libusb/libusb.gyp',
+  'use_system_libvpx': 'third_party/libvpx/libvpx.gyp',
+  'use_system_libwebp': 'third_party/libwebp/libwebp.gyp',
+  'use_system_libxml': 'third_party/libxml/libxml.gyp',
+  'use_system_libxnvctrl' : 'third_party/libXNVCtrl/libXNVCtrl.gyp',
+  'use_system_libxslt': 'third_party/libxslt/libxslt.gyp',
+  'use_system_opus': 'third_party/opus/opus.gyp',
+  'use_system_protobuf': 'third_party/protobuf/protobuf.gyp',
+  'use_system_re2': 'third_party/re2/re2.gyp',
+  'use_system_snappy': 'third_party/snappy/snappy.gyp',
+  'use_system_speex': 'third_party/speex/speex.gyp',
+  'use_system_sqlite': 'third_party/sqlite/sqlite.gyp',
+  'use_system_v8': 'v8/tools/gyp/v8.gyp',
+  'use_system_zlib': 'third_party/zlib/zlib.gyp',
+}
+
+
+def DoMain(argv):
+  my_dirname = os.path.dirname(__file__)
+  source_tree_root = os.path.abspath(
+    os.path.join(my_dirname, '..', '..', '..'))
+
+  parser = optparse.OptionParser()
+
+  # Accept arguments in gyp command-line syntax, so that the caller can re-use
+  # command-line for this script and gyp.
+  parser.add_option('-D', dest='defines', action='append', default=[])
+
+  parser.add_option('--undo', action='store_true')
+
+  options, args = parser.parse_args(argv)
+
+  for flag, path in REPLACEMENTS.items():
+    if '%s=1' % flag not in options.defines:
+      continue
+
+    if options.undo:
+      # Restore original file, and also remove the backup.
+      # This is meant to restore the source tree to its original state.
+      os.rename(os.path.join(source_tree_root, path + '.orig'),
+                os.path.join(source_tree_root, path))
+    else:
+      # Create a backup copy for --undo.
+      shutil.copyfile(os.path.join(source_tree_root, path),
+                      os.path.join(source_tree_root, path + '.orig'))
+
+      # Copy the gyp file from directory of this script to target path.
+      shutil.copyfile(os.path.join(my_dirname, os.path.basename(path)),
+                      os.path.join(source_tree_root, path))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(DoMain(sys.argv))
diff --git a/build/linux/unbundle/snappy.gyp b/build/linux/unbundle/snappy.gyp
new file mode 100644
index 0000000..ab856ed
--- /dev/null
+++ b/build/linux/unbundle/snappy.gyp
@@ -0,0 +1,29 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'snappy',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'src',
+        'header_filenames': [
+          'snappy-c.h',
+          'snappy-sinksource.h',
+          'snappy-stubs-public.h',
+          'snappy.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lsnappy',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/speex.gyp b/build/linux/unbundle/speex.gyp
new file mode 100644
index 0000000..75376c8
--- /dev/null
+++ b/build/linux/unbundle/speex.gyp
@@ -0,0 +1,45 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'libspeex',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': 'include',
+        'header_filenames': [
+          'speex/speex_types.h',
+          'speex/speex_callbacks.h',
+          'speex/speex_config_types.h',
+          'speex/speex_stereo.h',
+          'speex/speex_echo.h',
+          'speex/speex_preprocess.h',
+          'speex/speex_jitter.h',
+          'speex/speex.h',
+          'speex/speex_resampler.h',
+          'speex/speex_buffer.h',
+          'speex/speex_header.h',
+          'speex/speex_bits.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags speex)',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other speex)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l speex)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/sqlite.gyp b/build/linux/unbundle/sqlite.gyp
new file mode 100644
index 0000000..918da928
--- /dev/null
+++ b/build/linux/unbundle/sqlite.gyp
@@ -0,0 +1,28 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sqlite',
+      'type': 'none',
+      'direct_dependent_settings': {
+        'cflags': [
+          '<!@(pkg-config --cflags sqlite3)',
+        ],
+        'defines': [
+          'USE_SYSTEM_SQLITE',
+        ],
+      },
+      'link_settings': {
+        'ldflags': [
+          '<!@(pkg-config --libs-only-L --libs-only-other sqlite3)',
+        ],
+        'libraries': [
+          '<!@(pkg-config --libs-only-l sqlite3)',
+        ],
+      },
+    },
+  ],
+}
diff --git a/build/linux/unbundle/v8.gyp b/build/linux/unbundle/v8.gyp
new file mode 100644
index 0000000..9b06347
--- /dev/null
+++ b/build/linux/unbundle/v8.gyp
@@ -0,0 +1,64 @@
+# Copyright 2013 the V8 project authors. All rights reserved.
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+#       notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+#       copyright notice, this list of conditions and the following
+#       disclaimer in the documentation and/or other materials provided
+#       with the distribution.
+#     * Neither the name of Google Inc. nor the names of its
+#       contributors may be used to endorse or promote products derived
+#       from this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+  'includes': ['../../build/toolchain.gypi', '../../build/features.gypi'],
+  'targets': [
+    {
+      'target_name': 'v8',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'variables': {
+        'headers_root_path': '../../include',
+        'header_filenames': [
+          'v8-debug.h',
+          'v8-preparser.h',
+          'v8-profiler.h',
+          'v8-testing.h',
+          'v8.h',
+          'v8stdint.h',
+        ],
+      },
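+      # Note the extra '../': this file gets copied to v8/tools/gyp/v8.gyp,
+      # one directory deeper than the third_party/*/ replacements, so paths
+      # here are relative to that location.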
+      'includes': [
+        '../../../build/shim_headers.gypi',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lv8',
+        ],
+      },
+    },
+    {
+      'target_name': 'v8_shell',
+      'type': 'none',
+      'toolsets': ['host', 'target'],
+      'dependencies': [
+        'v8'
+      ],
+    },
+  ],
+}
diff --git a/build/linux/unbundle/zlib.gyp b/build/linux/unbundle/zlib.gyp
new file mode 100644
index 0000000..0a85ff0
--- /dev/null
+++ b/build/linux/unbundle/zlib.gyp
@@ -0,0 +1,67 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'zlib',
+      'type': 'none',
+      'variables': {
+        'headers_root_path': '.',
+        'header_filenames': [
+          'zlib.h',
+        ],
+      },
+      'includes': [
+        '../../build/shim_headers.gypi',
+      ],
+      'direct_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_ZLIB',
+        ],
+      },
+      'link_settings': {
+        'libraries': [
+          '-lz',
+        ],
+      },
+    },
+    {
+      'target_name': 'minizip',
+      'type': 'static_library',
+      'all_dependent_settings': {
+        'defines': [
+          'USE_SYSTEM_MINIZIP',
+        ],
+      },
+      'defines': [
+        'USE_SYSTEM_MINIZIP',
+      ],
+      'link_settings': {
+        'libraries': [
+          '-lminizip',
+        ],
+      },
+    },
+    {
+      'target_name': 'zip',
+      'type': 'static_library',
+      'dependencies': [
+        'minizip',
+        '../../base/base.gyp:base',
+      ],
+      'include_dirs': [
+        '../..',
+      ],
+      'sources': [
+        'google/zip.cc',
+        'google/zip.h',
+        'google/zip_internal.cc',
+        'google/zip_internal.h',
+        'google/zip_reader.cc',
+        'google/zip_reader.h',
+      ],
+    },
+  ],
+}
diff --git a/build/ls.py b/build/ls.py
new file mode 100755
index 0000000..638c3bd
--- /dev/null
+++ b/build/ls.py
@@ -0,0 +1,31 @@
+#!/usr/bin/python
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Recursively list files of the target directory. Ignores dot files."""
+
+import argparse
+import os
+import sys
+
+def main(target_directory):
+  for root, dirs, files in os.walk(target_directory):
+    files = [f for f in files if not f[0] == '.']
+    dirs[:] = [d for d in dirs if not d[0] == '.']
+    for f in files:
+      path = os.path.join(root, f)
+      print path
+
+if __name__ == '__main__':
+  parser = argparse.ArgumentParser(
+      description="Recursively list files of the target directory")
+  parser.add_argument("--target-directory",
+                      dest="target_directory",
+                      metavar="<target-directory>",
+                      type=str,
+                      required=True,
+                      help="The target directory")
+
+  args = parser.parse_args()
+  sys.exit(main(args.target_directory))
diff --git a/build/mac/OWNERS b/build/mac/OWNERS
new file mode 100644
index 0000000..c56e89d
--- /dev/null
+++ b/build/mac/OWNERS
@@ -0,0 +1,2 @@
+mark@chromium.org
+thomasvl@chromium.org
diff --git a/build/mac/asan.gyp b/build/mac/asan.gyp
new file mode 100644
index 0000000..5231681
--- /dev/null
+++ b/build/mac/asan.gyp
@@ -0,0 +1,53 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+   'targets': [
+     {
+       'target_name': 'asan_dynamic_runtime',
+       'type': 'none',
+       'variables': {
+         # Every target is going to depend on asan_dynamic_runtime, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+         # Path is relative to this GYP file.
+         'asan_rtl_mask_path':
+             '../../third_party/llvm-build/Release+Asserts/lib/clang/*/lib/darwin',
+         'asan_osx_dynamic':
+             '<(asan_rtl_mask_path)/libclang_rt.asan_osx_dynamic.dylib',
+         'asan_iossim_dynamic':
+             '<(asan_rtl_mask_path)/libclang_rt.asan_iossim_dynamic.dylib',
+       },
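+       # The <!(/bin/ls ...) entries in the copy steps below are gyp command
+       # expansions; running ls at gyp time resolves the clang version
+       # wildcard in asan_rtl_mask_path to the concrete runtime dylib path.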
+       'conditions': [
+         ['OS=="mac"', {
+           'copies': [
+             {
+               'destination': '<(PRODUCT_DIR)',
+               'files': [
+                 '<!(/bin/ls <(asan_osx_dynamic))',
+               ],
+             },
+           ],
+         }],
+         # ASan works with iOS simulator only, not bare-metal iOS.
+         ['OS=="ios" and target_arch=="ia32"', {
+           'toolsets': ['host', 'target'],
+           'copies': [
+             {
+               'destination': '<(PRODUCT_DIR)',
+               'target_conditions': [
+                 ['_toolset=="host"', {
+                   'files': [ '<!(/bin/ls <(asan_osx_dynamic))'],
+                 }],
+                 ['_toolset=="target"', {
+                   'files': [ '<!(/bin/ls <(asan_iossim_dynamic))'],
+                 }],
+               ],
+             },
+           ],
+         }],
+       ],
+     },
+   ],
+}
diff --git a/build/mac/change_mach_o_flags.py b/build/mac/change_mach_o_flags.py
new file mode 100755
index 0000000..c2aeaec
--- /dev/null
+++ b/build/mac/change_mach_o_flags.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Usage: change_mach_o_flags.py [--executable-heap] [--no-pie] <executablepath>
+
+Arranges for the executable at |executable_path| to have its data (heap)
+pages protected to prevent execution on Mac OS X 10.7 ("Lion"), and to have
+the PIE (position independent executable) bit set to enable ASLR (address
+space layout randomization). With --executable-heap or --no-pie, the
+respective bits are cleared instead of set, making the heap executable or
+disabling PIE/ASLR.
+
+This script is able to operate on thin (single-architecture) Mach-O files
+and fat (universal, multi-architecture) files. When operating on fat files,
+it will set or clear the bits for each architecture contained therein.
+
+NON-EXECUTABLE HEAP
+
+Traditionally in Mac OS X, 32-bit processes did not have data pages set to
+prohibit execution. Although user programs could call mprotect and
+mach_vm_protect to deny execution of code in data pages, the kernel would
+silently ignore such requests without updating the page tables, and the
+hardware would happily execute code on such pages. 64-bit processes were
+always given proper hardware protection of data pages. This behavior was
+controllable on a system-wide level via the vm.allow_data_exec sysctl, which
+is set by default to 1. The bit with value 1 (set by default) allows code
+execution on data pages for 32-bit processes, and the bit with value 2
+(clear by default) does the same for 64-bit processes.
+
+In Mac OS X 10.7, executables can "opt in" to having hardware protection
+against code execution on data pages applied. This is done by setting a new
+bit in the |flags| field of an executable's |mach_header|. When
+MH_NO_HEAP_EXECUTION is set, proper protections will be applied, regardless
+of the setting of vm.allow_data_exec. See xnu-1699.22.73/osfmk/vm/vm_map.c
+override_nx and xnu-1699.22.73/bsd/kern/mach_loader.c load_machfile.
+
+The Apple toolchain has been revised to set the MH_NO_HEAP_EXECUTION when
+producing executables, provided that -allow_heap_execute is not specified
+at link time. Only linkers shipping with Xcode 4.0 and later (ld64-123.2 and
+later) have this ability. See ld64-123.2.1/src/ld/Options.cpp
+Options::reconfigureDefaults() and
+ld64-123.2.1/src/ld/HeaderAndLoadCommands.hpp
+HeaderAndLoadCommandsAtom<A>::flags().
+
+This script sets the MH_NO_HEAP_EXECUTION bit on Mach-O executables. It is
+intended for use with executables produced by a linker that predates Apple's
+modifications to set this bit itself. It is also useful for setting this bit
+for non-i386 executables, including x86_64 executables. Apple's linker only
+sets it for 32-bit i386 executables, presumably under the assumption that
+the value of vm.allow_data_exec is set in stone. However, if someone were to
+change vm.allow_data_exec to 2 or 3, 64-bit x86_64 executables would run
+without hardware protection against code execution on data pages. This
+script can set the bit for x86_64 executables, guaranteeing that they run
+with appropriate protection even when vm.allow_data_exec has been tampered
+with.
+
+POSITION-INDEPENDENT EXECUTABLES/ADDRESS SPACE LAYOUT RANDOMIZATION
+
+This script sets or clears the MH_PIE bit in an executable's Mach-O header,
+enabling or disabling position independence on Mac OS X 10.5 and later.
+Processes running position-independent executables have varying levels of
+ASLR protection depending on the OS release. The main executable's load
+address, shared library load addresses, and the heap and stack base
+addresses may be randomized. Position-independent executables are produced
+by supplying the -pie flag to the linker (or defeated by supplying -no_pie).
+Executables linked with a deployment target of 10.7 or higher have PIE on
+by default.
+
+This script is never strictly needed during the build to enable PIE, as all
+linkers used are recent enough to support -pie. However, it's used to
+disable the PIE bit as needed on already-linked executables.
+"""
+
+import optparse
+import os
+import struct
+import sys
+
+
+# <mach-o/fat.h>
+FAT_MAGIC = 0xcafebabe
+FAT_CIGAM = 0xbebafeca
+
+# <mach-o/loader.h>
+MH_MAGIC = 0xfeedface
+MH_CIGAM = 0xcefaedfe
+MH_MAGIC_64 = 0xfeedfacf
+MH_CIGAM_64 = 0xcffaedfe
+MH_EXECUTE = 0x2
+MH_PIE = 0x00200000
+MH_NO_HEAP_EXECUTION = 0x01000000
+
+
+class MachOError(Exception):
+  """A class for exceptions thrown by this module."""
+
+  pass
+
+
+def CheckedSeek(file, offset):
+  """Seeks the file-like object at |file| to offset |offset| and raises a
+  MachOError if anything funny happens."""
+
+  file.seek(offset, os.SEEK_SET)
+  new_offset = file.tell()
+  if new_offset != offset:
+    raise MachOError, \
+          'seek: expected offset %d, observed %d' % (offset, new_offset)
+
+
+def CheckedRead(file, count):
+  """Reads |count| bytes from the file-like |file| object, raising a
+  MachOError if any other number of bytes is read."""
+
+  bytes = file.read(count)
+  if len(bytes) != count:
+    raise MachOError, \
+          'read: expected length %d, observed %d' % (count, len(bytes))
+
+  return bytes
+
+
+def ReadUInt32(file, endian):
+  """Reads an unsinged 32-bit integer from the file-like |file| object,
+  treating it as having endianness specified by |endian| (per the |struct|
+  module), and returns it as a number. Raises a MachOError if the proper
+  length of data can't be read from |file|."""
+
+  bytes = CheckedRead(file, 4)
+
+  (uint32,) = struct.unpack(endian + 'I', bytes)
+  return uint32
+
+
+def ReadMachHeader(file, endian):
+  """Reads an entire |mach_header| structure (<mach-o/loader.h>) from the
+  file-like |file| object, treating it as having endianness specified by
+  |endian| (per the |struct| module), and returns a 7-tuple of its members
+  as numbers. Raises a MachOError if the proper length of data can't be read
+  from |file|."""
+
+  bytes = CheckedRead(file, 28)
+
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      struct.unpack(endian + '7I', bytes)
+  return magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags
+
+
+def ReadFatArch(file):
+  """Reads an entire |fat_arch| structure (<mach-o/fat.h>) from the file-like
+  |file| object, treating it as big-endian (fat structures are always stored
+  big-endian), and returns a 5-tuple of its members as numbers.
+  Raises a MachOError if the proper length of data can't be read from
+  |file|."""
+
+  bytes = CheckedRead(file, 20)
+
+  cputype, cpusubtype, offset, size, align = struct.unpack('>5I', bytes)
+  return cputype, cpusubtype, offset, size, align
+
+
+def WriteUInt32(file, uint32, endian):
+  """Writes |uint32| as an unsinged 32-bit integer to the file-like |file|
+  object, treating it as having endianness specified by |endian| (per the
+  |struct| module)."""
+
+  bytes = struct.pack(endian + 'I', uint32)
+  assert len(bytes) == 4
+
+  file.write(bytes)
+
+
+def HandleMachOFile(file, options, offset=0):
+  """Seeks the file-like |file| object to |offset|, reads its |mach_header|,
+  and rewrites the header's |flags| field if appropriate. The header's
+  endianness is detected. Both 32-bit and 64-bit Mach-O headers are supported
+  (mach_header and mach_header_64). Raises MachOError if used on a header that
+  does not have a known magic number or is not of type MH_EXECUTE. The
+  MH_PIE and MH_NO_HEAP_EXECUTION bits are set or cleared in the |flags| field
+  according to |options| and written to |file| if any changes need to be made.
+  If already set or clear as specified by |options|, nothing is written."""
+
+  CheckedSeek(file, offset)
+  magic = ReadUInt32(file, '<')
+  if magic == MH_MAGIC or magic == MH_MAGIC_64:
+    endian = '<'
+  elif magic == MH_CIGAM or magic == MH_CIGAM_64:
+    endian = '>'
+  else:
+    raise MachOError, \
+          'Mach-O file at offset %d has illusion of magic' % offset
+
+  CheckedSeek(file, offset)
+  magic, cputype, cpusubtype, filetype, ncmds, sizeofcmds, flags = \
+      ReadMachHeader(file, endian)
+  assert magic == MH_MAGIC or magic == MH_MAGIC_64
+  if filetype != MH_EXECUTE:
+    raise MachOError, \
+          'Mach-O file at offset %d is type 0x%x, expected MH_EXECUTE' % \
+              (offset, filetype)
+
+  original_flags = flags
+
+  if options.no_heap_execution:
+    flags |= MH_NO_HEAP_EXECUTION
+  else:
+    flags &= ~MH_NO_HEAP_EXECUTION
+
+  if options.pie:
+    flags |= MH_PIE
+  else:
+    flags &= ~MH_PIE
+
+  if flags != original_flags:
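+    # |flags| is the seventh 32-bit field of mach_header, so it sits at byte
+    # offset 24 from the start of the (possibly fat-embedded) header.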
+    CheckedSeek(file, offset + 24)
+    WriteUInt32(file, flags, endian)
+
+
+def HandleFatFile(file, options, fat_offset=0):
+  """Seeks the file-like |file| object to |offset| and loops over its
+  |fat_header| entries, calling HandleMachOFile for each."""
+
+  CheckedSeek(file, fat_offset)
+  magic = ReadUInt32(file, '>')
+  assert magic == FAT_MAGIC
+
+  nfat_arch = ReadUInt32(file, '>')
+
+  for index in xrange(0, nfat_arch):
+    cputype, cpusubtype, offset, size, align = ReadFatArch(file)
+    assert size >= 28
+
+    # HandleMachOFile will seek around. Come back here after calling it, in
+    # case it sought.
+    fat_arch_offset = file.tell()
+    HandleMachOFile(file, options, offset)
+    CheckedSeek(file, fat_arch_offset)
+
+
+def main(me, args):
+  parser = optparse.OptionParser('%prog [options] <executable_path>')
+  parser.add_option('--executable-heap', action='store_false',
+                    dest='no_heap_execution', default=True,
+                    help='Clear the MH_NO_HEAP_EXECUTION bit')
+  parser.add_option('--no-pie', action='store_false',
+                    dest='pie', default=True,
+                    help='Clear the MH_PIE bit')
+  (options, loose_args) = parser.parse_args(args)
+  if len(loose_args) != 1:
+    parser.print_usage()
+    return 1
+
+  executable_path = loose_args[0]
+  executable_file = open(executable_path, 'rb+')
+
+  magic = ReadUInt32(executable_file, '<')
+  if magic == FAT_CIGAM:
+    # Check FAT_CIGAM and not FAT_MAGIC because the read was little-endian.
+    HandleFatFile(executable_file, options)
+  elif magic == MH_MAGIC or magic == MH_CIGAM or \
+      magic == MH_MAGIC_64 or magic == MH_CIGAM_64:
+    HandleMachOFile(executable_file, options)
+  else:
+    raise MachOError, '%s is not a Mach-O or fat file' % executable_path
+
+  executable_file.close()
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[0], sys.argv[1:]))
diff --git a/build/mac/change_mach_o_flags_from_xcode.sh b/build/mac/change_mach_o_flags_from_xcode.sh
new file mode 100755
index 0000000..1824f8d
--- /dev/null
+++ b/build/mac/change_mach_o_flags_from_xcode.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a small wrapper script around change_mach_o_flags.py allowing it to
+# be invoked easily from Xcode. change_mach_o_flags.py expects its arguments
+# on the command line, but Xcode puts its parameters in the environment.
+
+set -e
+
+exec "$(dirname "${0}")/change_mach_o_flags.py" \
+     "${@}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/chrome_mac.croc b/build/mac/chrome_mac.croc
new file mode 100644
index 0000000..8cde00c
--- /dev/null
+++ b/build/mac/chrome_mac.croc
@@ -0,0 +1,36 @@
+# -*- python -*-
+# Crocodile config file for Chromium mac
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, linux, or windows specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|win|views)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_mac\\.',
+      'group' : 'test',
+    },
+
+    # Languages
+    {
+      'regexp' : '.*\\.m$',
+      'language' : 'ObjC',
+    },
+    {
+      'regexp' : '.*\\.mm$',
+      'language' : 'ObjC++',
+    },
+  ],
+}
diff --git a/build/mac/copy_asan_runtime_dylib.sh b/build/mac/copy_asan_runtime_dylib.sh
new file mode 100755
index 0000000..f221c4a
--- /dev/null
+++ b/build/mac/copy_asan_runtime_dylib.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# For app bundles built with ASan, copies the runtime lib
+# (libclang_rt.asan_osx_dynamic.dylib), on which their executables depend, from
+# the compiler installation path into the bundle and fixes the dylib's install
+# name in the binary to be relative to @executable_path.
+
+set -e
+
+BINARY="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if [[ ! -f "$BINARY" ]]; then
+  # This is neither an .app bundle nor a standalone executable.
+  # Most likely the script has been called for a data bundle.
+  exit 0
+fi
+
+BINARY_DIR="$(dirname "${BINARY}")"
+
+# Find the link to the ASan runtime encoded in the binary.
+BUILTIN_DYLIB_PATH=$(otool -L "${BINARY}" | \
+    sed -Ene 's/^[[:blank:]]+(.*libclang_rt\.asan_.*_dynamic\.dylib).*$/\1/p')
+
+if [[ "${BUILTIN_DYLIB_PATH}" == *asan_iossim_dynamic* ]]; then
+  ASAN_DYLIB_NAME=libclang_rt.asan_iossim_dynamic.dylib
+elif [[ "${BUILTIN_DYLIB_PATH}" == *asan_osx_dynamic* ]]; then
+  ASAN_DYLIB_NAME=libclang_rt.asan_osx_dynamic.dylib
+fi
+
+if [[ -z "${BUILTIN_DYLIB_PATH}" ]]; then
+  echo "${BINARY} does not depend on the ASan runtime library!" >&2
+  exit 1
+fi
+
+# TODO(glider): this doesn't work if we set CC and CXX to override the default
+# Clang.
+ASAN_DYLIB=$(find \
+    "${BUILT_PRODUCTS_DIR}/../../third_party/llvm-build/Release+Asserts/lib/clang/" \
+    -type f -path "*${ASAN_DYLIB_NAME}")
+
+DYLIB_BASENAME=$(basename "${ASAN_DYLIB}")
+if [[ "${DYLIB_BASENAME}" != "${ASAN_DYLIB_NAME}" ]]; then
+  echo "basename(${ASAN_DYLIB}) != ${ASAN_DYLIB_NAME}" >&2
+  exit 1
+fi
+
+# Check whether the directory containing the executable binary is named
+# "MacOS". In this case we're building a full-fledged OSX app and will put
+# the runtime into appname.app/Contents/Libraries/. Otherwise this is probably
+# an iOS gtest app, and the ASan runtime is put next to the executable.
+UPPER_DIR=$(dirname "${BINARY_DIR}")
+if [ "${UPPER_DIR}" == "MacOS" ]; then
+  LIBRARIES_DIR="${UPPER_DIR}/Libraries"
+  mkdir -p "${LIBRARIES_DIR}"
+  NEW_LC_ID_DYLIB="@executable_path/../Libraries/${ASAN_DYLIB_NAME}"
+else
+  LIBRARIES_DIR="${BINARY_DIR}"
+  NEW_LC_ID_DYLIB="@executable_path/${ASAN_DYLIB_NAME}"
+fi
+
+cp "${ASAN_DYLIB}" "${LIBRARIES_DIR}"
+
+# Make LC_ID_DYLIB of the runtime copy point to its location.
+install_name_tool \
+    -id "${NEW_LC_ID_DYLIB}" \
+    "${LIBRARIES_DIR}/${ASAN_DYLIB_NAME}"
+
+# Fix the rpath to the runtime library recorded in the binary.
+install_name_tool \
+    -change "${BUILTIN_DYLIB_PATH}" \
+    "${NEW_LC_ID_DYLIB}" \
+    "${BINARY}"
diff --git a/build/mac/copy_framework_unversioned.sh b/build/mac/copy_framework_unversioned.sh
new file mode 100755
index 0000000..380cc90
--- /dev/null
+++ b/build/mac/copy_framework_unversioned.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Copies a framework to its new home, "unversioning" it.
+#
+# Normally, frameworks are versioned bundles.  The contents of a framework are
+# stored in a versioned directory within the bundle, and symbolic links
+# provide access to the actual code and resources.  See
+# http://developer.apple.com/mac/library/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html
+#
+# The symbolic links usually found in frameworks create problems.  Symbolic
+# links are excluded from code signatures.  That means that it's possible to
+# remove or retarget a symbolic link within a framework without affecting the
+# seal.  In Chrome's case, the outer .app bundle contains a framework where
+# all application code and resources live.  In order for the signature on the
+# .app to be meaningful, it encompasses the framework.  Because framework
+# resources are accessed through the framework's symbolic links, this
+# arrangement results in a case where the resources can be altered without
+# affecting the .app signature's validity.
+#
+# Indirection through symbolic links also carries a runtime performance
+# penalty on open() operations, although open() typically completes so quickly
+# that this is not considered a major performance problem.
+#
+# To resolve these problems, the frameworks that ship within Chrome's .app
+# bundle are unversioned.  Unversioning is simple: instead of using the
+# original outer .framework directory as the framework that ships within the
+# .app, the inner versioned directory is used.  Instead of accessing bundled
+# resources through symbolic links, they are accessed directly.  In normal
+# situations, the only hard-coded use of the versioned directory is by dyld,
+# when loading the framework's code, but this is handled through a normal
+# Mach-O load command, and it is easy to adjust the load command to point to
+# the unversioned framework code rather than the versioned counterpart.
+#
+# The resulting framework bundles aren't strictly conforming, but they work
+# as well as normal versioned framework bundles.
+#
+# An option to skip running install_name_tool is available. By passing -I as
+# the first argument to this script, install_name_tool will be skipped. This
+# is only suitable for copied frameworks that will not be linked against, or
+# when install_name_tool will be run on any linker output when something is
+# linked against the copied framework. This option exists to allow signed
+# frameworks to pass through without subjecting them to any modifications that
+# would break their signatures.
+
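+# Illustrative invocation (names are hypothetical):
+#   copy_framework_unversioned.sh Foo.framework "${BUILT_PRODUCTS_DIR}/Frameworks"
+# copies Foo.framework/Versions/Current/ to
+# "${BUILT_PRODUCTS_DIR}/Frameworks/Foo.framework", leaving out the Versions
+# directory and its symbolic links.
+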
+set -e
+
+RUN_INSTALL_NAME_TOOL=1
+if [ $# -eq 3 ] && [ "${1}" = "-I" ] ; then
+  shift
+  RUN_INSTALL_NAME_TOOL=
+fi
+
+if [ $# -ne 2 ] ; then
+  echo "usage: ${0} [-I] FRAMEWORK DESTINATION_DIR" >& 2
+  exit 1
+fi
+
+# FRAMEWORK should be a path to a versioned framework bundle, ending in
+# .framework.  DESTINATION_DIR is the directory that the unversioned framework
+# bundle will be copied to.
+
+FRAMEWORK="${1}"
+DESTINATION_DIR="${2}"
+
+FRAMEWORK_NAME="$(basename "${FRAMEWORK}")"
+if [ "${FRAMEWORK_NAME: -10}" != ".framework" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not end in .framework" >& 2
+  exit 1
+fi
+FRAMEWORK_NAME_NOEXT="${FRAMEWORK_NAME:0:$((${#FRAMEWORK_NAME} - 10))}"
+
+# Find the current version.
+VERSIONS="${FRAMEWORK}/Versions"
+CURRENT_VERSION_LINK="${VERSIONS}/Current"
+CURRENT_VERSION_ID="$(readlink "${VERSIONS}/Current")"
+CURRENT_VERSION="${VERSIONS}/${CURRENT_VERSION_ID}"
+
+# Make sure that the framework's structure makes sense as a versioned bundle.
+if [ ! -e "${CURRENT_VERSION}/${FRAMEWORK_NAME_NOEXT}" ] ; then
+  echo "${0}: ${FRAMEWORK_NAME} does not contain a dylib" >& 2
+  exit 1
+fi
+
+DESTINATION="${DESTINATION_DIR}/${FRAMEWORK_NAME}"
+
+# Copy the versioned directory within the versioned framework to its
+# destination location.
+mkdir -p "${DESTINATION_DIR}"
+rsync -acC --delete --exclude Headers --exclude PrivateHeaders \
+    --include '*.so' "${CURRENT_VERSION}/" "${DESTINATION}"
+
+if [[ -n "${RUN_INSTALL_NAME_TOOL}" ]]; then
+  # Adjust the Mach-O LC_ID_DYLIB load command in the framework.  This does not
+  # change the LC_LOAD_DYLIB load commands in anything that may have already
+  # linked against the framework.  Not all frameworks will actually need this
+  # to be changed.  Some frameworks may already be built with the proper
+  # LC_ID_DYLIB for use as an unversioned framework.  Xcode users can do this
+  # by setting LD_DYLIB_INSTALL_NAME to
+  # $(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)
+  # If invoking ld via gcc or g++, pass the desired path to -Wl,-install_name
+  # at link time.
+  FRAMEWORK_DYLIB="${DESTINATION}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_OLD="$(otool -l "${FRAMEWORK_DYLIB}" |
+                         grep -A10 "^ *cmd LC_ID_DYLIB$" |
+                         grep -m1 "^ *name" |
+                         sed -Ee 's/^ *name (.*) \(offset [0-9]+\)$/\1/')"
+  VERSION_PATH="/Versions/${CURRENT_VERSION_ID}/${FRAMEWORK_NAME_NOEXT}"
+  LC_ID_DYLIB_NEW="$(echo "${LC_ID_DYLIB_OLD}" |
+                     sed -Ee "s%${VERSION_PATH}$%/${FRAMEWORK_NAME_NOEXT}%")"
+
+  if [ "${LC_ID_DYLIB_NEW}" != "${LC_ID_DYLIB_OLD}" ] ; then
+    install_name_tool -id "${LC_ID_DYLIB_NEW}" "${FRAMEWORK_DYLIB}"
+  fi
+fi
diff --git a/build/mac/edit_xibs.sh b/build/mac/edit_xibs.sh
new file mode 100755
index 0000000..b7b749e
--- /dev/null
+++ b/build/mac/edit_xibs.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is a convenience to run GYP for /src/chrome/chrome_nibs.gyp
+# with the Xcode generator (as you likely use ninja). Documentation:
+#   http://dev.chromium.org/developers/design-documents/mac-xib-files
+
+set -e
+
+RELSRC=$(dirname "$0")/../..
+SRC=$(cd "$RELSRC" && pwd)
+export PYTHONPATH="$PYTHONPATH:$SRC/build"
+export GYP_GENERATORS=xcode
+"$SRC/tools/gyp/gyp" -I"$SRC/build/common.gypi" "$SRC/chrome/chrome_nibs.gyp"
+echo "You can now edit XIB files in Xcode using:"
+echo "  $SRC/chrome/chrome_nibs.xcodeproj"
diff --git a/build/mac/find_sdk.py b/build/mac/find_sdk.py
new file mode 100755
index 0000000..0534766
--- /dev/null
+++ b/build/mac/find_sdk.py
@@ -0,0 +1,90 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Prints the lowest locally available SDK version greater than or equal to a
+given minimum sdk version to standard output.
+
+Usage:
+  python find_sdk.py 10.6  # Ignores SDKs < 10.6
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+
+from optparse import OptionParser
+
+
+def parse_version(version_str):
+  """'10.6' => [10, 6]"""
+  return map(int, re.findall(r'(\d+)', version_str))
+
+
+def main():
+  parser = OptionParser()
+  parser.add_option("--verify",
+                    action="store_true", dest="verify", default=False,
+                    help="return the sdk argument and warn if it doesn't exist")
+  parser.add_option("--sdk_path",
+                    action="store", type="string", dest="sdk_path", default="",
+                    help="user-specified SDK path; bypasses verification")
+  parser.add_option("--print_sdk_path",
+                    action="store_true", dest="print_sdk_path", default=False,
+                    help="Additionaly print the path the SDK (appears first).")
+  (options, args) = parser.parse_args()
+  min_sdk_version = args[0]
+
+  job = subprocess.Popen(['xcode-select', '-print-path'],
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.STDOUT)
+  out, err = job.communicate()
+  if job.returncode != 0:
+    print >> sys.stderr, out
+    print >> sys.stderr, err
+    raise Exception(('Error %d running xcode-select, you might have to run '
+      '|sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer| '
+      'if you are using Xcode 4.') % job.returncode)
+  # The Developer folder moved in Xcode 4.3.
+  xcode43_sdk_path = os.path.join(
+      out.rstrip(), 'Platforms/MacOSX.platform/Developer/SDKs')
+  if os.path.isdir(xcode43_sdk_path):
+    sdk_dir = xcode43_sdk_path
+  else:
+    sdk_dir = os.path.join(out.rstrip(), 'SDKs')
+  sdks = [re.findall('^MacOSX(10\.\d+)\.sdk$', s) for s in os.listdir(sdk_dir)]
+  sdks = [s[0] for s in sdks if s]  # [['10.5'], ['10.6']] => ['10.5', '10.6']
+  sdks = [s for s in sdks  # ['10.5', '10.6'] => ['10.6']
+          if parse_version(s) >= parse_version(min_sdk_version)]
+  if not sdks:
+    raise Exception('No %s+ SDK found' % min_sdk_version)
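+  # |sdks| was filtered to >= min_sdk_version above, so the numerically
+  # smallest remaining entry is the lowest acceptable SDK.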
+  best_sdk = sorted(sdks, key=parse_version)[0]
+
+  if options.verify and best_sdk != min_sdk_version and not options.sdk_path:
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           vvvvvvv'
+    print >> sys.stderr, ''
+    print >> sys.stderr, \
+        'This build requires the %s SDK, but it was not found on your system.' \
+        % min_sdk_version
+    print >> sys.stderr, \
+        'Either install it, or explicitly set mac_sdk in your GYP_DEFINES.'
+    print >> sys.stderr, ''
+    print >> sys.stderr, '                                           ^^^^^^^'
+    print >> sys.stderr, ''
+    return min_sdk_version
+
+  if options.print_sdk_path:
+    print subprocess.check_output(['xcodebuild', '-version', '-sdk',
+                                   'macosx' + best_sdk, 'Path']).strip()
+
+  return best_sdk
+
+
+if __name__ == '__main__':
+  if sys.platform != 'darwin':
+    raise Exception("This script only runs on Mac")
+  print main()
diff --git a/build/mac/make_more_helpers.sh b/build/mac/make_more_helpers.sh
new file mode 100755
index 0000000..6f5c474
--- /dev/null
+++ b/build/mac/make_more_helpers.sh
@@ -0,0 +1,91 @@
+#!/bin/bash
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: make_more_helpers.sh <directory_within_contents> <app_name>
+#
+# This script creates additional helper .app bundles for Chromium, based on
+# the existing helper .app bundle, changing their Mach-O header's flags to
+# enable and disable various features. Based on Chromium Helper.app, it will
+# create Chromium Helper EH.app, which has the MH_NO_HEAP_EXECUTION bit
+# cleared to support Chromium child processes that require an executable heap,
+# and Chromium Helper NP.app, which has the MH_PIE bit cleared to support
+# Chromium child processes that cannot tolerate ASLR.
+#
+# This script expects to be called from the chrome_exe target as a postbuild,
+# and operates directly within the built-up browser app's versioned directory.
+#
+# Each helper is adjusted by giving it the proper bundle name, renaming the
+# executable, adjusting several Info.plist keys, and changing the executable's
+# Mach-O flags.
+
+set -eu
+
+make_helper() {
+  local containing_dir="${1}"
+  local app_name="${2}"
+  local feature="${3}"
+  local flags="${4}"
+
+  local helper_name="${app_name} Helper"
+  local helper_stem="${containing_dir}/${helper_name}"
+  local original_helper="${helper_stem}.app"
+  if [[ ! -d "${original_helper}" ]]; then
+    echo "${0}: error: ${original_helper} is a required directory" >& 2
+    exit 1
+  fi
+  local original_helper_exe="${original_helper}/Contents/MacOS/${helper_name}"
+  if [[ ! -f "${original_helper_exe}" ]]; then
+    echo "${0}: error: ${original_helper_exe} is a required file" >& 2
+    exit 1
+  fi
+
+  local feature_helper="${helper_stem} ${feature}.app"
+
+  rsync -acC --delete --include '*.so' "${original_helper}/" "${feature_helper}"
+
+  local helper_feature="${helper_name} ${feature}"
+  local helper_feature_exe="${feature_helper}/Contents/MacOS/${helper_feature}"
+  mv "${feature_helper}/Contents/MacOS/${helper_name}" "${helper_feature_exe}"
+
+  local change_flags="$(dirname "${0}")/change_mach_o_flags.py"
+  "${change_flags}" ${flags} "${helper_feature_exe}"
+
+  local feature_info="${feature_helper}/Contents/Info"
+  local feature_info_plist="${feature_info}.plist"
+
+  defaults write "${feature_info}" "CFBundleDisplayName" "${helper_feature}"
+  defaults write "${feature_info}" "CFBundleExecutable" "${helper_feature}"
+
+  cfbundleid="$(defaults read "${feature_info}" "CFBundleIdentifier")"
+  feature_cfbundleid="${cfbundleid}.${feature}"
+  defaults write "${feature_info}" "CFBundleIdentifier" "${feature_cfbundleid}"
+
+  cfbundlename="$(defaults read "${feature_info}" "CFBundleName")"
+  feature_cfbundlename="${cfbundlename} ${feature}"
+  defaults write "${feature_info}" "CFBundleName" "${feature_cfbundlename}"
+
+  # As usual, defaults might have put the plist into whatever format excites
+  # it, but Info.plists get converted back to the expected XML format.
+  plutil -convert xml1 "${feature_info_plist}"
+
+  # `defaults` also changes the file permissions, so make the file
+  # world-readable again.
+  chmod a+r "${feature_info_plist}"
+}
+
+if [[ ${#} -ne 2 ]]; then
+  echo "usage: ${0} <directory_within_contents> <app_name>" >& 2
+  exit 1
+fi
+
+DIRECTORY_WITHIN_CONTENTS="${1}"
+APP_NAME="${2}"
+
+CONTENTS_DIR="${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}"
+CONTAINING_DIR="${CONTENTS_DIR}/${DIRECTORY_WITHIN_CONTENTS}"
+
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "EH" "--executable-heap"
+make_helper "${CONTAINING_DIR}" "${APP_NAME}" "NP" "--no-pie"
diff --git a/build/mac/strip_from_xcode b/build/mac/strip_from_xcode
new file mode 100755
index 0000000..c26b9fb
--- /dev/null
+++ b/build/mac/strip_from_xcode
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Copyright (c) 2008 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This is a handy wrapper script that figures out how to call the strip
+# utility (strip_save_dsym in this case), if it even needs to be called at all,
+# and then does it.  This script should be called by a post-link phase in
+# targets that might generate Mach-O executables, dynamic libraries, or
+# loadable bundles.
+#
+# An example "Strip If Needed" build phase placed after "Link Binary With
+# Libraries" would do:
+# exec "${XCODEPROJ_DEPTH}/build/mac/strip_from_xcode"
+
+if [ "${CONFIGURATION}" != "Release" ] ; then
+  # Only strip in release mode.
+  exit 0
+fi
+
+declare -a FLAGS
+
+# MACH_O_TYPE is not set for a command-line tool, so check PRODUCT_TYPE too.
+# Weird.
+if [ "${MACH_O_TYPE}" = "mh_execute" ] || \
+   [ "${PRODUCT_TYPE}" = "com.apple.product-type.tool" ] ; then
+  # Strip everything (no special flags).  No-op.
+  true
+elif [ "${MACH_O_TYPE}" = "mh_dylib" ] || \
+     [ "${MACH_O_TYPE}" = "mh_bundle" ]; then
+  # Strip debugging symbols and local symbols
+  FLAGS[${#FLAGS[@]}]=-S
+  FLAGS[${#FLAGS[@]}]=-x
+elif [ "${MACH_O_TYPE}" = "staticlib" ] ; then
+  # Don't strip static libraries.
+  exit 0
+else
+  # Warn, but don't treat this as an error.
+  echo $0: warning: unrecognized MACH_O_TYPE ${MACH_O_TYPE}
+  exit 0
+fi
+
+if [ -n "${STRIPFLAGS}" ] ; then
+  # Pick up the standard STRIPFLAGS Xcode setting, used for "Additional Strip
+  # Flags".
+  for stripflag in "${STRIPFLAGS}" ; do
+    FLAGS[${#FLAGS[@]}]="${stripflag}"
+  done
+fi
+
+if [ -n "${CHROMIUM_STRIP_SAVE_FILE}" ] ; then
+  # An Xcode project can communicate a file listing symbols to be saved in this
+  # environment variable by setting it as a build setting.  This isn't a
+  # standard Xcode setting.  It's used in preference to STRIPFLAGS to
+  # eliminate quoting ambiguity concerns.
+  FLAGS[${#FLAGS[@]}]=-s
+  FLAGS[${#FLAGS[@]}]="${CHROMIUM_STRIP_SAVE_FILE}"
+fi
+
+exec "$(dirname ${0})/strip_save_dsym" "${FLAGS[@]}" \
+     "${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
diff --git a/build/mac/strip_save_dsym b/build/mac/strip_save_dsym
new file mode 100755
index 0000000..c9cf226
--- /dev/null
+++ b/build/mac/strip_save_dsym
@@ -0,0 +1,335 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Usage: strip_save_dsym <whatever-arguments-you-would-pass-to-strip>
+#
+# strip_save_dsym is a wrapper around the standard strip utility.  Given an
+# input Mach-O file, strip_save_dsym will save a copy of the file in a "fake"
+# .dSYM bundle for debugging, and then call strip to strip the Mach-O file.
+# Note that the .dSYM file is a "fake" in that it's not a self-contained
+# .dSYM bundle, it just contains a copy of the original (unstripped) Mach-O
+# file, and therefore contains references to object files on the filesystem.
+# The generated .dSYM bundle is therefore unsuitable for debugging in the
+# absence of these .o files.
+#
+# If a .dSYM already exists and has a newer timestamp than the Mach-O file,
+# this utility does nothing.  That allows strip_save_dsym to be run on a file
+# that has already been stripped without trashing the .dSYM.
+#
+# Rationale: the "right" way to generate dSYM bundles, dsymutil, is incredibly
+# slow.  On the other hand, doing a file copy (which is really all that
+# dsymutil does) is comparatively fast.  Since we usually just want to strip
+# a release-mode executable but still be able to debug it, and we don't care
+# so much about generating a hermetic dSYM bundle, we'll prefer the file copy.
+# If a real dSYM is ever needed, it's still possible to create one by running
+# dsymutil and pointing it at the original Mach-O file inside the "fake"
+# bundle, provided that the object files are available.
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+
+# Returns a list of architectures contained in a Mach-O file.  The file can be
+# a universal (fat) file, in which case there will be one list element for
+# each contained architecture, or it can be a thin single-architecture Mach-O
+# file, in which case the list will contain a single element identifying the
+# architecture.  On error, returns an empty list.  Determines the architecture
+# list by calling file.
+def macho_archs(macho):
+  macho_types = ["executable",
+                 "dynamically linked shared library",
+                 "bundle"]
+  macho_types_re = "Mach-O (?:64-bit )?(?:" + "|".join(macho_types) + ")"
+
+  file_cmd = subprocess.Popen(["/usr/bin/file", "-b", "--", macho],
+                              stdout=subprocess.PIPE)
+
+  archs = []
+
+  type_line = file_cmd.stdout.readline()
+  type_match = re.match("^%s (.*)$" % macho_types_re, type_line)
+  if type_match:
+    archs.append(type_match.group(1))
+    return [type_match.group(1)]
+  else:
+    type_match = re.match("^Mach-O universal binary with (.*) architectures$",
+                          type_line)
+    if type_match:
+      for i in range(0, int(type_match.group(1))):
+        arch_line = file_cmd.stdout.readline()
+        arch_match = re.match(
+                     "^.* \(for architecture (.*)\):\t%s .*$" % macho_types_re,
+                     arch_line)
+        if arch_match:
+          archs.append(arch_match.group(1))
+
+  if file_cmd.wait() != 0:
+    archs = []
+
+  if len(archs) == 0:
+    print >> sys.stderr, "No architectures in %s" % macho
+
+  return archs
+
+# Returns a dictionary mapping architectures contained in the file as returned
+# by macho_archs to the LC_UUID load command for that architecture.
+# Architectures with no LC_UUID load command are omitted from the dictionary.
+# Determines the UUID value by calling otool.
+def macho_uuids(macho):
+  uuids = {}
+
+  archs = macho_archs(macho)
+  if len(archs) == 0:
+    return uuids
+
+  for arch in archs:
+    if arch == "":
+      continue
+
+    otool_cmd = subprocess.Popen(["/usr/bin/otool", "-arch", arch, "-l", "-",
+                                  macho],
+                                 stdout=subprocess.PIPE)
+    # state 0 is when nothing UUID-related has been seen yet.  State 1 is
+    # entered after a load command begins, but it may not be an LC_UUID load
+    # command.  States 2, 3, and 4 are intermediate states while reading an
+    # LC_UUID command.  State 5 is the terminal state for a successful LC_UUID
+    # read.  State 6 is the error state.
+    state = 0
+    uuid = ""
+    for otool_line in otool_cmd.stdout:
+      if state == 0:
+        if re.match("^Load command .*$", otool_line):
+          state = 1
+      elif state == 1:
+        if re.match("^     cmd LC_UUID$", otool_line):
+          state = 2
+        else:
+          state = 0
+      elif state == 2:
+        if re.match("^ cmdsize 24$", otool_line):
+          state = 3
+        else:
+          state = 6
+      elif state == 3:
+        # The UUID display format changed in the version of otool shipping
+        # with the Xcode 3.2.2 prerelease.  The new format is traditional:
+        #    uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # and with Xcode 3.2.6, the line is indented one more space:
+        #     uuid 4D7135B2-9C56-C5F5-5F49-A994258E0955
+        # The old format, from cctools-750 and older's otool, breaks the UUID
+        # up into a sequence of bytes:
+        #    uuid 0x4d 0x71 0x35 0xb2 0x9c 0x56 0xc5 0xf5
+        #         0x5f 0x49 0xa9 0x94 0x25 0x8e 0x09 0x55
+        new_uuid_match = re.match("^ {3,4}uuid (.{8}-.{4}-.{4}-.{4}-.{12})$",
+                                  otool_line)
+        if new_uuid_match:
+          uuid = new_uuid_match.group(1)
+
+          # Skip state 4, there is no second line to read.
+          state = 5
+        else:
+          old_uuid_match = re.match("^   uuid 0x(..) 0x(..) 0x(..) 0x(..) "
+                                    "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                    otool_line)
+          if old_uuid_match:
+            state = 4
+            uuid = old_uuid_match.group(1) + old_uuid_match.group(2) + \
+                   old_uuid_match.group(3) + old_uuid_match.group(4) + "-" + \
+                   old_uuid_match.group(5) + old_uuid_match.group(6) + "-" + \
+                   old_uuid_match.group(7) + old_uuid_match.group(8) + "-"
+          else:
+            state = 6
+      elif state == 4:
+        old_uuid_match = re.match("^        0x(..) 0x(..) 0x(..) 0x(..) "
+                                  "0x(..) 0x(..) 0x(..) 0x(..)$",
+                                  otool_line)
+        if old_uuid_match:
+          state = 5
+          uuid += old_uuid_match.group(1) + old_uuid_match.group(2) + "-" + \
+                  old_uuid_match.group(3) + old_uuid_match.group(4) + \
+                  old_uuid_match.group(5) + old_uuid_match.group(6) + \
+                  old_uuid_match.group(7) + old_uuid_match.group(8)
+        else:
+          state = 6
+
+    if otool_cmd.wait() != 0:
+      state = 6
+
+    if state == 5:
+      uuids[arch] = uuid.upper()
+
+  if len(uuids) == 0:
+    print >> sys.stderr, "No UUIDs in %s" % macho
+
+  return uuids
+
+# Given a path to a Mach-O file and possible information from the environment,
+# determines the desired path to the .dSYM.
+def dsym_path(macho):
+  # If building a bundle, the .dSYM should be placed next to the bundle.  Use
+  # WRAPPER_NAME to make this determination.  If called from xcodebuild,
+  # WRAPPER_NAME will be set to the name of the bundle.
+  dsym = ""
+  if "WRAPPER_NAME" in os.environ:
+    if "BUILT_PRODUCTS_DIR" in os.environ:
+      dsym = os.path.join(os.environ["BUILT_PRODUCTS_DIR"],
+                          os.environ["WRAPPER_NAME"])
+    else:
+      dsym = os.environ["WRAPPER_NAME"]
+  else:
+    dsym = macho
+
+  dsym += ".dSYM"
+
+  return dsym
+
+# Creates a fake .dSYM bundle at dsym for macho, a Mach-O image with the
+# architectures and UUIDs specified by the uuids map.
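+# The layout written below mirrors a real dSYM bundle:
+#   <name>.dSYM/Contents/Info.plist
+#   <name>.dSYM/Contents/Resources/DWARF/<name>   (unstripped Mach-O copy)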
+def make_fake_dsym(macho, dsym):
+  uuids = macho_uuids(macho)
+  if len(uuids) == 0:
+    return False
+
+  dwarf_dir = os.path.join(dsym, "Contents", "Resources", "DWARF")
+  dwarf_file = os.path.join(dwarf_dir, os.path.basename(macho))
+  try:
+    os.makedirs(dwarf_dir)
+  except OSError, (err, error_string):
+    if err != errno.EEXIST:
+      raise
+  shutil.copyfile(macho, dwarf_file)
+
+  # info_template is the same as what dsymutil would have written, with the
+  # addition of the fake_dsym key.
+  info_template = \
+'''<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+	<dict>
+		<key>CFBundleDevelopmentRegion</key>
+		<string>English</string>
+		<key>CFBundleIdentifier</key>
+		<string>com.apple.xcode.dsym.%(root_name)s</string>
+		<key>CFBundleInfoDictionaryVersion</key>
+		<string>6.0</string>
+		<key>CFBundlePackageType</key>
+		<string>dSYM</string>
+		<key>CFBundleSignature</key>
+		<string>????</string>
+		<key>CFBundleShortVersionString</key>
+		<string>1.0</string>
+		<key>CFBundleVersion</key>
+		<string>1</string>
+		<key>dSYM_UUID</key>
+		<dict>
+%(uuid_dict)s		</dict>
+		<key>fake_dsym</key>
+		<true/>
+	</dict>
+</plist>
+'''
+
+  root_name = os.path.basename(dsym)[:-5]  # whatever.dSYM without .dSYM
+  uuid_dict = ""
+  for arch in sorted(uuids):
+    uuid_dict += "\t\t\t<key>" + arch + "</key>\n"\
+                 "\t\t\t<string>" + uuids[arch] + "</string>\n"
+  info_dict = {
+    "root_name": root_name,
+    "uuid_dict": uuid_dict,
+  }
+  info_contents = info_template % info_dict
+  info_file = os.path.join(dsym, "Contents", "Info.plist")
+  info_fd = open(info_file, "w")
+  info_fd.write(info_contents)
+  info_fd.close()
+
+  return True
+
+# For a Mach-O file, determines where the .dSYM bundle should be located.  If
+# the bundle does not exist or has a modification time older than the Mach-O
+# file, calls make_fake_dsym to create a fake .dSYM bundle there, then strips
+# the Mach-O file and sets the modification time on the .dSYM bundle and Mach-O
+# file to be identical.
+def strip_and_make_fake_dsym(macho):
+  dsym = dsym_path(macho)
+  macho_stat = os.stat(macho)
+  dsym_stat = None
+  try:
+    dsym_stat = os.stat(dsym)
+  except OSError, (err, error_string):
+    if err != errno.ENOENT:
+      raise
+
+  if dsym_stat is None or dsym_stat.st_mtime < macho_stat.st_mtime:
+    # Make a .dSYM bundle
+    if not make_fake_dsym(macho, dsym):
+      return False
+
+    # Strip the Mach-O file
+    remove_dsym = True
+    try:
+      strip_cmdline = ['xcrun', 'strip'] + sys.argv[1:]
+      strip_cmd = subprocess.Popen(strip_cmdline)
+      if strip_cmd.wait() == 0:
+        remove_dsym = False
+    finally:
+      if remove_dsym:
+        shutil.rmtree(dsym)
+
+    # Update modification time on the Mach-O file and .dSYM bundle
+    now = time.time()
+    os.utime(macho, (now, now))
+    os.utime(dsym, (now, now))
+
+  return True
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  # This only supports operating on one file at a time.  Look at the arguments
+  # to strip to figure out what the source to be stripped is.  Arguments are
+  # processed in the same way that strip does, although to reduce complexity,
+  # this doesn't do all of the same checking as strip.  For example, strip
+  # has no -Z switch and would treat -Z on the command line as an error.  For
+  # the purposes served here, that's fine.
+  macho = None
+  process_switches = True
+  ignore_argument = False
+  for arg in argv[1:]:
+    if ignore_argument:
+      ignore_argument = False
+      continue
+    if process_switches:
+      if arg == "-":
+        process_switches = False
+      # These strip switches accept an argument:
+      if arg in ["-s", "-R", "-d", "-o", "-arch"]:
+        ignore_argument = True
+      if arg[0] == "-":
+        continue
+    if macho is None:
+      macho = arg
+    else:
+      print >> sys.stderr, "Too many things to strip"
+      return 1
+
+  if macho is None:
+    print >> sys.stderr, "Nothing to strip"
+    return 1
+
+  if not strip_and_make_fake_dsym(macho):
+    return 1
+
+  return 0
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
diff --git a/build/mac/tweak_info_plist.py b/build/mac/tweak_info_plist.py
new file mode 100755
index 0000000..2057bac
--- /dev/null
+++ b/build/mac/tweak_info_plist.py
@@ -0,0 +1,280 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+#
+# Xcode supports build variable substitutions and CPP; sadly, that doesn't work
+# because:
+#
+# 1. Xcode wants to do the Info.plist work before it runs any build phases;
+#    this means if we were to generate a .h file for INFOPLIST_PREFIX_HEADER
+#    we'd have to put it in another target so it runs in time.
+# 2. Xcode also doesn't check to see if the header being used as a prefix for
+#    the Info.plist has changed.  So even if we updated it, it's only looking
+#    at the modtime of the info.plist to see if that's changed.
+#
+# So, we work around all of this by making a script build phase that will run
+# during the app build, and simply update the info.plist in place.  This way
+# by the time the app target is done, the info.plist is correct.
+#
+
+import optparse
+import os
+from os import environ as env
+import plistlib
+import re
+import subprocess
+import sys
+import tempfile
+
+TOP = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
+
+
+def _GetOutput(args):
+  """Runs a subprocess and waits for termination. Returns (stdout, returncode)
+  of the process. stderr is attached to the parent."""
+  proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _GetOutputNoError(args):
+  """Similar to _GetOutput() but ignores stderr. If there's an error launching
+  the child (like file not found), the exception will be caught and (None, 1)
+  will be returned to mimic quiet failure."""
+  try:
+    proc = subprocess.Popen(args, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+  except OSError:
+    return (None, 1)
+  (stdout, stderr) = proc.communicate()
+  return (stdout, proc.returncode)
+
+
+def _RemoveKeys(plist, *keys):
+  """Removes a varargs of keys from the plist."""
+  for key in keys:
+    try:
+      del plist[key]
+    except KeyError:
+      pass
+
+
+def _AddVersionKeys(plist, version=None):
+  """Adds the product version number into the plist. Returns True on success and
+  False on error. The error will be printed to stderr."""
+  if version:
+    match = re.match('\d+\.\d+\.(\d+\.\d+)$', version)
+    if not match:
+      print >>sys.stderr, 'Invalid version string specified: "%s"' % version
+      return False
+
+    full_version = match.group(0)
+    bundle_version = match.group(1)
+
+  else:
+    # Pull in the Chrome version number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    VERSION_FILE = os.path.join(TOP, 'chrome/VERSION')
+
+    (stdout, retval1) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@MAJOR@.@MINOR@.@BUILD@.@PATCH@'])
+    full_version = stdout.rstrip()
+
+    (stdout, retval2) = _GetOutput([VERSION_TOOL, '-f', VERSION_FILE, '-t',
+                                    '@BUILD@.@PATCH@'])
+    bundle_version = stdout.rstrip()
+
+    # If either of the two version commands finished with non-zero returncode,
+    # report the error up.
+    if retval1 or retval2:
+      return False
+
+  # Add public version info so "Get Info" works.
+  plist['CFBundleShortVersionString'] = full_version
+
+  # Honor the 429496.72.95 limit.  The maximum comes from splitting 2^32 - 1
+  # into 6, 2, 2 digits.  The limitation was present in Tiger; it may have
+  # been fixed in a later OS release, but that hasn't been tested (it's easy
+  # enough to find out with "lsregister -dump").
+  # http://lists.apple.com/archives/carbon-dev/2006/Jun/msg00139.html
+  # BUILD will always be an increasing value, so BUILD.PATCH gives us
+  # something unique that meets what LS wants.
+  plist['CFBundleVersion'] = bundle_version
+
+  # Return with no error.
+  return True
+
+
+def _DoSCMKeys(plist, add_keys):
+  """Adds the SCM information, visible in about:version, to property list. If
+  |add_keys| is True, it will insert the keys, otherwise it will remove them."""
+  scm_revision = None
+  if add_keys:
+    # Pull in the Chrome revision number.
+    VERSION_TOOL = os.path.join(TOP, 'build/util/version.py')
+    LASTCHANGE_FILE = os.path.join(TOP, 'build/util/LASTCHANGE')
+    (stdout, retval) = _GetOutput([VERSION_TOOL, '-f', LASTCHANGE_FILE, '-t',
+                                  '@LASTCHANGE@'])
+    if retval:
+      return False
+    scm_revision = stdout.rstrip()
+
+  # Remove any existing key, re-add it if a revision was found, and warn
+  # otherwise.
+  _RemoveKeys(plist, 'SCMRevision')
+  if scm_revision != None:
+    plist['SCMRevision'] = scm_revision
+  elif add_keys:
+    print >>sys.stderr, 'Could not determine SCM revision.  This may be OK.'
+
+  return True
+
+
+def _AddBreakpadKeys(plist, branding):
+  """Adds the Breakpad keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |branding| argument."""
+  plist['BreakpadReportInterval'] = '3600'  # Deliberately a string.
+  plist['BreakpadProduct'] = '%s_Mac' % branding
+  plist['BreakpadProductDisplay'] = branding
+  plist['BreakpadVersion'] = plist['CFBundleShortVersionString']
+  # These are both deliberately strings and not boolean.
+  plist['BreakpadSendAndExit'] = 'YES'
+  plist['BreakpadSkipConfirm'] = 'YES'
+
+
+def _RemoveBreakpadKeys(plist):
+  """Removes any set Breakpad keys."""
+  _RemoveKeys(plist,
+      'BreakpadURL',
+      'BreakpadReportInterval',
+      'BreakpadProduct',
+      'BreakpadProductDisplay',
+      'BreakpadVersion',
+      'BreakpadSendAndExit',
+      'BreakpadSkipConfirm')
+
+
+def _TagSuffixes():
+  # Keep this list sorted in the order that tag suffix components are to
+  # appear in a tag value. That is to say, it should be sorted per ASCII.
+  components = ('32bit', 'full')
+  assert tuple(sorted(components)) == components
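+  # With the two components above, the loop below yields every combination:
+  # '', '-32bit', '-full', and '-32bit-full'.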
+
+  components_len = len(components)
+  combinations = 1 << components_len
+  tag_suffixes = []
+  for combination in xrange(0, combinations):
+    tag_suffix = ''
+    for component_index in xrange(0, components_len):
+      if combination & (1 << component_index):
+        tag_suffix += '-' + components[component_index]
+    tag_suffixes.append(tag_suffix)
+  return tag_suffixes
+
+
+def _AddKeystoneKeys(plist, bundle_identifier):
+  """Adds the Keystone keys. This must be called AFTER _AddVersionKeys() and
+  also requires the |bundle_identifier| argument (com.example.product)."""
+  plist['KSVersion'] = plist['CFBundleShortVersionString']
+  plist['KSProductID'] = bundle_identifier
+  plist['KSUpdateURL'] = 'https://tools.google.com/service/update2'
+
+  _RemoveKeys(plist, 'KSChannelID')
+  for tag_suffix in _TagSuffixes():
+    if tag_suffix:
+      plist['KSChannelID' + tag_suffix] = tag_suffix
+
+
+def _RemoveKeystoneKeys(plist):
+  """Removes any set Keystone keys."""
+  _RemoveKeys(plist,
+      'KSVersion',
+      'KSProductID',
+      'KSUpdateURL')
+
+  tag_keys = []
+  for tag_suffix in _TagSuffixes():
+    tag_keys.append('KSChannelID' + tag_suffix)
+  _RemoveKeys(plist, *tag_keys)
+
+
+def Main(argv):
+  parser = optparse.OptionParser('%prog [options]')
+  parser.add_option('--breakpad', dest='use_breakpad', action='store',
+      type='int', default=False, help='Enable Breakpad [1 or 0]')
+  parser.add_option('--breakpad_uploads', dest='breakpad_uploads',
+      action='store', type='int', default=False,
+      help='Enable Breakpad\'s uploading of crash dumps [1 or 0]')
+  parser.add_option('--keystone', dest='use_keystone', action='store',
+      type='int', default=False, help='Enable Keystone [1 or 0]')
+  parser.add_option('--scm', dest='add_scm_info', action='store', type='int',
+      default=True, help='Add SCM metadata [1 or 0]')
+  parser.add_option('--branding', dest='branding', action='store',
+      type='string', default=None, help='The branding of the binary')
+  parser.add_option('--bundle_id', dest='bundle_identifier',
+      action='store', type='string', default=None,
+      help='The bundle id of the binary')
+  parser.add_option('--version', dest='version', action='store', type='string',
+      default=None, help='The version string [major.minor.build.patch]')
+  (options, args) = parser.parse_args(argv)
+
+  if len(args) > 0:
+    print >>sys.stderr, parser.get_usage()
+    return 1
+
+  # Read the plist into its parsed format.
+  DEST_INFO_PLIST = os.path.join(env['TARGET_BUILD_DIR'], env['INFOPLIST_PATH'])
+  plist = plistlib.readPlist(DEST_INFO_PLIST)
+
+  # Insert the product version.
+  if not _AddVersionKeys(plist, version=options.version):
+    return 2
+
+  # Add Breakpad if configured to do so.
+  if options.use_breakpad:
+    if options.branding is None:
+      print >>sys.stderr, 'Use of Breakpad requires branding.'
+      return 1
+    _AddBreakpadKeys(plist, options.branding)
+    if options.breakpad_uploads:
+      plist['BreakpadURL'] = 'https://clients2.google.com/cr/report'
+    else:
+      # This allows crash dumping to a file without uploading the
+      # dump, for testing purposes.  Breakpad does not recognise
+      # "none" as a special value, but this does stop crash dump
+      # uploading from happening.  We need to specify something
+      # because if "BreakpadURL" is not present, Breakpad will not
+      # register its crash handler and no crash dumping will occur.
+      plist['BreakpadURL'] = 'none'
+  else:
+    _RemoveBreakpadKeys(plist)
+
+  # Only add Keystone in Release builds.
+  if options.use_keystone and env['CONFIGURATION'] == 'Release':
+    if options.bundle_identifier is None:
+      print >>sys.stderr, 'Use of Keystone requires the bundle id.'
+      return 1
+    _AddKeystoneKeys(plist, options.bundle_identifier)
+  else:
+    _RemoveKeystoneKeys(plist)
+
+  # Adds or removes any SCM keys.
+  if not _DoSCMKeys(plist, options.add_scm_info):
+    return 3
+
+  # Now that all keys have been mutated, rewrite the file.
+  temp_info_plist = tempfile.NamedTemporaryFile()
+  plistlib.writePlist(plist, temp_info_plist.name)
+
+  # Info.plist will work perfectly well in any plist format, but traditionally
+  # applications use xml1 for this, so convert it to ensure that it's valid.
+  proc = subprocess.Popen(['plutil', '-convert', 'xml1', '-o', DEST_INFO_PLIST,
+                           temp_info_plist.name])
+  proc.wait()
+  return proc.returncode
+
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1:]))
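For readers unfamiliar with the plist round-trip this script performs, here is a minimal standalone sketch of the same read-modify-write cycle. The plist path and key values are placeholders; the real script derives them from Xcode's TARGET_BUILD_DIR/INFOPLIST_PATH environment and its command-line options.

    import plistlib
    import subprocess
    import tempfile

    # Placeholder path; Main() builds this from the Xcode environment.
    plist_path = 'out/Release/Example.app/Contents/Info.plist'

    plist = plistlib.readPlist(plist_path)
    # Example mutations, mirroring _AddKeystoneKeys(); assumes the bundle's
    # CFBundleShortVersionString has already been set by _AddVersionKeys().
    plist['KSProductID'] = 'com.example.product'
    plist['KSVersion'] = plist['CFBundleShortVersionString']

    # Write to a temp file, then have plutil rewrite the destination as xml1,
    # just as the script above does.
    tmp = tempfile.NamedTemporaryFile()
    plistlib.writePlist(plist, tmp.name)
    subprocess.check_call(
        ['plutil', '-convert', 'xml1', '-o', plist_path, tmp.name])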
diff --git a/build/mac/verify_no_objc.sh b/build/mac/verify_no_objc.sh
new file mode 100755
index 0000000..e18a5ea
--- /dev/null
+++ b/build/mac/verify_no_objc.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script makes sure that no __OBJC,__image_info section appears in the
+# executable file built by the Xcode target that runs the script. If such a
+# section appears, the script prints an error message and exits nonzero.
+#
+# Why is this important?
+#
+# On 10.5, there's a bug in CFBundlePreflightExecutable that causes it to
+# crash when operating in an executable that has not loaded at its default
+# address (that is, when it's a position-independent executable with the
+# MH_PIE bit set in its mach_header) and the executable has an
+# __OBJC,__image_info section. See http://crbug.com/88697.
+#
+# Chrome's main executables don't use any Objective-C at all, and don't need
+# to carry this section around. Not linking them as Objective-C when they
+# don't need it anyway saves about 4kB in the linked executable, although most
+# of that 4kB is just filled with zeroes.
+#
+# This script makes sure that nobody goofs and accidentally introduces these
+# sections into the main executables.
+
+set -eu
+
+executable="${BUILT_PRODUCTS_DIR}/${EXECUTABLE_PATH}"
+
+if xcrun otool -arch i386 -o "${executable}" | grep -q '^Contents.*section$'; \
+then
+  echo "${0}: ${executable} has an __OBJC,__image_info section" 2>&1
+  exit 1
+fi
+
+if [[ ${PIPESTATUS[0]} -ne 0 ]]; then
+  echo "${0}: otool failed" 2>&1
+  exit 1
+fi
+
+exit 0
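A rough Python equivalent of the otool/grep check above may make the intent clearer; the executable path is a placeholder, and the match mirrors the grep regex in the script.

    import subprocess

    # Placeholder; the shell script reads BUILT_PRODUCTS_DIR/EXECUTABLE_PATH.
    executable = 'out/Release/Example.app/Contents/MacOS/Example'

    # 'otool -o' dumps the __OBJC segment; any "Contents ... section" line
    # means an __OBJC,__image_info section is present in the binary.
    output = subprocess.check_output(
        ['xcrun', 'otool', '-arch', 'i386', '-o', executable],
        universal_newlines=True)
    for line in output.splitlines():
        if line.startswith('Contents') and line.endswith('section'):
            raise SystemExit(
                '%s has an __OBJC,__image_info section' % executable)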
diff --git a/build/module_args/dart.gni b/build/module_args/dart.gni
new file mode 100644
index 0000000..ee6b038
--- /dev/null
+++ b/build/module_args/dart.gni
@@ -0,0 +1,6 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This variable should point to the Dart SDK.
+dart_sdk_root = "//third_party/dart-sdk/dart-sdk"
diff --git a/build/module_args/mojo.gni b/build/module_args/mojo.gni
new file mode 100644
index 0000000..fee9114
--- /dev/null
+++ b/build/module_args/mojo.gni
@@ -0,0 +1,16 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This variable should point to the parent directory of the Mojo SDK.
+mojo_sdk_root = "//"
+
+# To build the Mojo shell from source, set this variable to true. To use the
+# prebuilt shell, omit this variable or set it to false. Note that the prebuilt
+# shell will be used only on platforms for which it is published (currently
+# Linux and Android).
+mojo_build_mojo_shell_from_source = true
+
+# To build the network service from source, set this variable to true. To use
+# the prebuilt network service, omit this variable or set it to false.
+mojo_build_network_service_from_source = true
diff --git a/build/module_args/nacl.gni b/build/module_args/nacl.gni
new file mode 100644
index 0000000..61e0768
--- /dev/null
+++ b/build/module_args/nacl.gni
@@ -0,0 +1,6 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Override nacl's build directory.
+nacl_shared_build_dir = "//build"
diff --git a/build/module_args/v8.gni b/build/module_args/v8.gni
new file mode 100644
index 0000000..8b5204c
--- /dev/null
+++ b/build/module_args/v8.gni
@@ -0,0 +1,13 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_android) {
+  import("//build/config/android/config.gni")
+}
+
+# TODO(sky): nuke this. Temporary while sorting out http://crbug.com/465456.
+enable_correct_v8_arch = false
+
+v8_use_external_startup_data = !(is_chromeos || is_win)
+v8_extra_library_files = []
diff --git a/build/nocompile.gypi b/build/nocompile.gypi
new file mode 100644
index 0000000..8c0f288
--- /dev/null
+++ b/build/nocompile.gypi
@@ -0,0 +1,96 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to create a unittest that
+# invokes a set of no-compile tests.  A no-compile test is a test that asserts
+# a particular construct will not compile.
+#
+# Also see:
+#   http://dev.chromium.org/developers/testing/no-compile-tests
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_module_nc_unittests',
+#   'type': 'executable',
+#   'sources': [
+#     'nc_testset_1.nc',
+#     'nc_testset_2.nc',
+#   ],
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The .nc files are C++ files that contain code we wish to assert will not
+# compile.  Each individual test case in the file should be put in its own
+# #ifdef section.  The expected output should be appended with a C++-style
+# comment that has a python list of regular expressions.  This line will
+# likely be longer than 80 characters.  Giving a solid expected output test is
+# important so that random compile failures do not cause the test to pass.
+#
+# Example .nc file:
+#
+#   #if defined(TEST_NEEDS_SEMICOLON)  // [r"expected ',' or ';' at end of input"]
+#
+#   int a = 1
+#
+#   #elif defined(TEST_NEEDS_CAST)  // [r"invalid conversion from 'void*' to 'char*'"]
+#
+#   void* a = NULL;
+#   char* b = a;
+#
+#   #endif
+#
+# If we needed to disable TEST_NEEDS_SEMICOLON, then change the define to:
+#
+#   DISABLE_TEST_NEEDS_SEMICOLON
+#   TEST_NEEDS_CAST
+#
+# The lines above are parsed by a regexp so avoid getting creative with the
+# formatting or ifdef logic; it will likely just not work.
+#
+# Implementation notes:
+# The .nc files are actually processed by a python script which executes the
+# compiler and generates a .cc file that contains a series of #error lines on
+# failure, or a set of trivially passing gunit TEST() functions on success.
+# This allows us to fail at the compile step when something goes wrong, and to
+# know during the unittest run that the test was at least processed when
+# things go right.
+
+{
+  # TODO(awong): Disabled until http://crbug.com/105388 is resolved.
+  'sources/': [['exclude', '\\.nc$']],
+  'conditions': [
+    [ 'OS!="win" and clang==1', {
+      'rules': [
+        {
+          'variables': {
+            'nocompile_driver': '<(DEPTH)/tools/nocompile_driver.py',
+            'nc_result_path': ('<(INTERMEDIATE_DIR)/<(module_dir)/'
+                               '<(RULE_INPUT_ROOT)_nc.cc'),
+           },
+          'rule_name': 'run_nocompile',
+          'extension': 'nc',
+          'inputs': [
+            '<(nocompile_driver)',
+          ],
+          'outputs': [
+            '<(nc_result_path)'
+          ],
+          'action': [
+            'python',
+            '<(nocompile_driver)',
+            '4', # number of compilers to invoke in parallel.
+            '<(RULE_INPUT_PATH)',
+            '-Wall -Werror -Wfatal-errors -I<(DEPTH)',
+            '<(nc_result_path)',
+            ],
+          'message': 'Generating no compile results for <(RULE_INPUT_PATH)',
+          'process_outputs_as_sources': 1,
+        },
+      ],
+    }, {
+      'sources/': [['exclude', '\\.nc$']]
+    }],  # 'OS!="win" and clang=="1"'
+  ],
+}
+
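The driver script referenced above (tools/nocompile_driver.py) is not part of this change; the sketch below only illustrates the idea it implements, using a hypothetical g++ command line: compile one #ifdef section at a time and require that the compiler's error output match the expected regular expressions.

    import re
    import subprocess

    def run_no_compile_case(source, define, expected_patterns):
        """Returns True if compiling |source| with -D|define| fails with an
        error matching every regex in |expected_patterns|."""
        proc = subprocess.Popen(
            ['g++', '-c', '-D' + define, source, '-o', '/dev/null'],
            stderr=subprocess.PIPE, universal_newlines=True)
        _, stderr = proc.communicate()
        if proc.returncode == 0:
            return False  # The code compiled, so the no-compile test failed.
        return all(re.search(p, stderr) for p in expected_patterns)

    # Hypothetical use, mirroring the example .nc file above:
    # run_no_compile_case('nc_testset_1.nc', 'TEST_NEEDS_SEMICOLON',
    #                     [r"expected ',' or ';' at end of input"])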
diff --git a/build/output_dll_copy.rules b/build/output_dll_copy.rules
new file mode 100644
index 0000000..c6e9051
--- /dev/null
+++ b/build/output_dll_copy.rules
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<VisualStudioToolFile
+	Name="Output DLL copy"
+	Version="8.00"
+	>
+	<Rules>
+		<CustomBuildRule
+			Name="Output DLL copy"
+			CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
+			Outputs="$(OutDir)\$(InputFileName)"
+			FileExtensions="*.dll"
+			>
+			<Properties>
+			</Properties>
+		</CustomBuildRule>
+	</Rules>
+</VisualStudioToolFile>
diff --git a/build/precompile.cc b/build/precompile.cc
new file mode 100644
index 0000000..db1ef6d
--- /dev/null
+++ b/build/precompile.cc
@@ -0,0 +1,7 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header generator for Windows builds. No include is needed
+// in this file as the PCH include is forced via the "Forced Include File"
+// flag in the projects generated by GYP.
diff --git a/build/precompile.h b/build/precompile.h
new file mode 100644
index 0000000..32c2f11
--- /dev/null
+++ b/build/precompile.h
@@ -0,0 +1,109 @@
+// Copyright (c) 2012 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// Precompiled header for Chromium project on Windows, not used by
+// other build configurations. Using precompiled headers speeds the
+// build up significantly, around 1/4th on VS 2010 on an HP Z600 with 12
+// GB of memory.
+//
+// Numeric comments beside includes are the number of times they were
+// included under src/chrome/browser on 2011/8/20, which was used as a
+// baseline for deciding what to include in the PCH. Includes without
+// a numeric comment are generally included at least 5 times. It may
+// be possible to tweak the speed of the build by commenting out or
+// removing some of the less frequently used headers.
+
+#if defined(BUILD_PRECOMPILE_H_)
+#error You shouldn't include the precompiled header file more than once.
+#endif
+
+#define BUILD_PRECOMPILE_H_
+
+#define _USE_MATH_DEFINES
+
+// The Windows header needs to come before almost all the other
+// Windows-specific headers.
+#include <Windows.h>
+#include <dwmapi.h>
+#include <shellapi.h>
+#include <wtypes.h>  // 2
+
+// Defines in atlbase.h cause conflicts; if we could figure out how
+// this family of headers can be included in the PCH, it might speed
+// up the build as several of them are used frequently.
+/*
+#include <atlbase.h>
+#include <atlapp.h>
+#include <atlcom.h>
+#include <atlcrack.h>  // 2
+#include <atlctrls.h>  // 2
+#include <atlmisc.h>  // 2
+#include <atlsafe.h>  // 1
+#include <atltheme.h>  // 1
+#include <atlwin.h>  // 2
+*/
+
+// Objbase.h and other files that rely on it bring in [ #define
+// interface struct ] which can cause problems in a multi-platform
+// build like Chrome's. #undef-ing it does not work as there are
+// currently 118 targets that break if we do this, so these headers are
+// left out of the precompiled header for now.
+//#include <commctrl.h>  // 2
+//#include <commdlg.h>  // 3
+//#include <cryptuiapi.h>  // 2
+//#include <Objbase.h>  // 2
+//#include <objidl.h>  // 1
+//#include <ole2.h>  // 1
+//#include <oleacc.h>  // 2
+//#include <oleauto.h>  // 1
+//#include <oleidl.h>  // 1
+//#include <propkey.h>  // 2
+//#include <propvarutil.h>  // 2
+//#include <pstore.h>  // 2
+//#include <shlguid.h>  // 1
+//#include <shlwapi.h>  // 1
+//#include <shobjidl.h>  // 4
+//#include <urlhist.h>  // 2
+
+// Caused other conflicts in addition to the 'interface' issue above.
+// #include <shlobj.h>
+
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>  // 4
+#include <math.h>
+#include <memory.h>  // 1
+#include <signal.h>
+#include <stdarg.h>  // 1
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <time.h>  // 4
+
+#include <algorithm>
+#include <bitset>  // 3
+#include <cmath>
+#include <cstddef>
+#include <cstdio>  // 3
+#include <cstdlib>  // 2
+#include <cstring>
+#include <deque>
+#include <fstream>  // 3
+#include <functional>
+#include <iomanip>  // 2
+#include <iosfwd>  // 2
+#include <iterator>
+#include <limits>
+#include <list>
+#include <map>
+#include <numeric>  // 2
+#include <ostream>
+#include <queue>
+#include <set>
+#include <sstream>
+#include <stack>
+#include <string>
+#include <utility>
+#include <vector>
diff --git a/build/protoc.gypi b/build/protoc.gypi
new file mode 100644
index 0000000..fafdf9d
--- /dev/null
+++ b/build/protoc.gypi
@@ -0,0 +1,123 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. For Java targets, see
+# protoc_java.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'static_library',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'proto_out_dir': 'dir/for/my_proto_lib'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+# If necessary, you may add normal .cc files to the sources list or other gyp
+# dependencies.  The proto headers are guaranteed to be generated before any
+# source files, even within this target, are compiled.
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'proto_out_dir' variable specifies the path suffix that output
+# files are generated under.  Targets that gyp-depend on my_proto_lib
+# will be able to include the resulting proto headers with an include
+# like:
+#   #include "dir/for/my_proto_lib/foo.pb.h"
+#
+# If you need to add an EXPORT macro to a protobuf's c++ header, set the
+# 'cc_generator_options' variable with the value: 'dllexport_decl=FOO_EXPORT:'
+# e.g. 'dllexport_decl=BASE_EXPORT:'
+#
+# It is likely you also need to #include a file for the above EXPORT macro to
+# work. You can do so with the 'cc_include' variable.
+# e.g. 'base/base_export.h'
+#
+# Implementation notes:
+# A proto_out_dir of foo/bar produces
+#   <(SHARED_INTERMEDIATE_DIR)/protoc_out/foo/bar/{file1,file2}.pb.{cc,h}
+#   <(SHARED_INTERMEDIATE_DIR)/pyproto/foo/bar/{file1,file2}_pb2.py
+
+{
+  'variables': {
+    'protoc_wrapper': '<(DEPTH)/tools/protoc_wrapper/protoc_wrapper.py',
+    'cc_dir': '<(SHARED_INTERMEDIATE_DIR)/protoc_out/<(proto_out_dir)',
+    'py_dir': '<(PRODUCT_DIR)/pyproto/<(proto_out_dir)',
+    'cc_generator_options%': '',
+    'cc_include%': '',
+    'proto_in_dir%': '.',
+    'conditions': [
+      ['use_system_protobuf==0', {
+        'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)protoc<(EXECUTABLE_SUFFIX)',
+      }, { # use_system_protobuf==1
+        'protoc': '<!(which protoc)',
+      }],
+    ],
+  },
+  'rules': [
+    {
+      'rule_name': 'genproto',
+      'extension': 'proto',
+      'inputs': [
+        '<(protoc_wrapper)',
+        '<(protoc)',
+      ],
+      'outputs': [
+        '<(py_dir)/<(RULE_INPUT_ROOT)_pb2.py',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.cc',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+      ],
+      'action': [
+        'python',
+        '<(protoc_wrapper)',
+        '--include',
+        '<(cc_include)',
+        '--protobuf',
+        '<(cc_dir)/<(RULE_INPUT_ROOT).pb.h',
+        # Using the --arg val form (instead of --arg=val) allows gyp's msvs rule
+        # generation to correct 'val', which is a path.
+        '--proto-in-dir','<(proto_in_dir)',
+        # Naively you'd use <(RULE_INPUT_PATH) here, but protoc requires that
+        # --proto_path be a strict prefix of the path given as an argument.
+        '--proto-in-file','<(RULE_INPUT_ROOT)<(RULE_INPUT_EXT)',
+        '--use-system-protobuf=<(use_system_protobuf)',
+        '--',
+        '<(protoc)',
+        '--cpp_out', '<(cc_generator_options)<(cc_dir)',
+        '--python_out', '<(py_dir)',
+      ],
+      'message': 'Generating C++ and Python code from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 1,
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protoc#host',
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  'include_dirs': [
+    '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+    '<(DEPTH)',
+  ],
+  'direct_dependent_settings': {
+    'include_dirs': [
+      '<(SHARED_INTERMEDIATE_DIR)/protoc_out',
+      '<(DEPTH)',
+    ]
+  },
+  'export_dependent_settings': [
+    # The generated headers reference headers within protobuf_lite,
+    # so dependencies must be able to find those headers too.
+    '<(DEPTH)/third_party/protobuf/protobuf.gyp:protobuf_lite',
+  ],
+  # This target exports a hard dependency because it generates header
+  # files.
+  'hard_dependency': 1,
+}
diff --git a/build/protoc_java.gypi b/build/protoc_java.gypi
new file mode 100644
index 0000000..6fd80d85
--- /dev/null
+++ b/build/protoc_java.gypi
@@ -0,0 +1,83 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to invoke protoc in a consistent manner. This is only to be included
+# for Java targets. When including this file, a .jar file will be generated.
+# For other targets, see protoc.gypi.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'sources': [
+#     'foo.proto',
+#     'bar.proto',
+#   ],
+#   'variables': {
+#     'proto_in_dir': '.'
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# The 'proto_in_dir' variable must be the relative path to the
+# directory containing the .proto files.  If left out, it defaults to '.'.
+#
+# The 'output_java_files' variable specifies a list of output files that will
+# be generated. It is based on the package and java_outer_classname fields in
+# the proto. All the values must be prefixed with >(java_out_dir), since that
+# is the root directory of all the output.
+#
+# Implementation notes:
+# A target_name of foo and proto-specified 'package' java.package.path produces:
+#   <(PRODUCT_DIR)/java_proto/foo/{java/package/path/}{Foo,Bar}.java
+# where Foo and Bar are taken from 'java_outer_classname' of the protos.
+#
+# How the .jar file is created is different from how protoc is used for other
+# targets, and as such, this lives in its own file.
+
+{
+  'variables': {
+    'protoc': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)android_protoc<(EXECUTABLE_SUFFIX)',
+    'java_out_dir': '<(PRODUCT_DIR)/java_proto/<(_target_name)/src',
+    'proto_in_dir%': '.',
+    'stamp_file': '<(java_out_dir).stamp',
+    'script': '<(DEPTH)/build/protoc_java.py',
+
+    # The rest of the variables here are for the java.gypi include.
+    'java_in_dir': '<(DEPTH)/build/android/empty',
+    'generated_src_dirs': ['<(java_out_dir)'],
+    # Adding the |stamp_file| to |additional_input_paths| makes the actions in
+    # the include of java.gypi depend on the genproto_java action.
+    'additional_input_paths': ['<(stamp_file)'],
+    'run_findbugs': 0,
+  },
+  'actions': [
+    {
+      'action_name': 'genproto_java',
+      'inputs': [
+        '<(script)',
+        '<(protoc)',
+        '<@(_sources)',
+      ],
+      # We do not know the names of the generated files, so we use a stamp.
+      'outputs': [
+        '<(stamp_file)',
+      ],
+      'action': [
+        '<(script)',
+        '--protoc=<(protoc)',
+        '--proto-path=<(proto_in_dir)',
+        '--java-out-dir=<(java_out_dir)',
+        '--stamp=<(stamp_file)',
+        '<@(_sources)',
+      ],
+      'message': 'Generating Java code from protobuf files in <(proto_in_dir)',
+    },
+  ],
+  'dependencies': [
+    '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:android_protoc#host',
+    '<(DEPTH)/third_party/android_protobuf/android_protobuf.gyp:protobuf_nano_javalib',
+  ],
+  'includes': [ 'java.gypi' ],
+}
diff --git a/build/protoc_java.py b/build/protoc_java.py
new file mode 100755
index 0000000..470667c
--- /dev/null
+++ b/build/protoc_java.py
@@ -0,0 +1,68 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generate java source files from protobuf files.
+
+This is a helper file for the genproto_java action in protoc_java.gypi.
+
+It performs the following steps:
+1. Deletes all old sources (ensures deleted classes are not part of new jars).
+2. Creates source directory.
+3. Generates Java files using protoc (output into either --java-out-dir or
+   --srcjar).
+4. Creates a new stamp file.
+"""
+
+import os
+import optparse
+import shutil
+import subprocess
+import sys
+
+sys.path.append(os.path.join(os.path.dirname(__file__), "android", "gyp"))
+from util import build_utils
+
+def main(argv):
+  parser = optparse.OptionParser()
+  build_utils.AddDepfileOption(parser)
+  parser.add_option("--protoc", help="Path to protoc binary.")
+  parser.add_option("--proto-path", help="Path to proto directory.")
+  parser.add_option("--java-out-dir",
+      help="Path to output directory for java files.")
+  parser.add_option("--srcjar", help="Path to output srcjar.")
+  parser.add_option("--stamp", help="File to touch on success.")
+  options, args = parser.parse_args(argv)
+
+  build_utils.CheckOptions(options, parser, ['protoc', 'proto_path'])
+  if not options.java_out_dir and not options.srcjar:
+    print 'One of --java-out-dir or --srcjar must be specified.'
+    return 1
+
+  with build_utils.TempDir() as temp_dir:
+    # Specify arguments to the generator.
+    generator_args = ['optional_field_style=reftypes',
+                      'store_unknown_fields=true']
+    out_arg = '--javanano_out=' + ','.join(generator_args) + ':' + temp_dir
+    # Generate Java files using protoc.
+    build_utils.CheckOutput(
+        [options.protoc, '--proto_path', options.proto_path, out_arg]
+        + args)
+
+    if options.java_out_dir:
+      build_utils.DeleteDirectory(options.java_out_dir)
+      shutil.copytree(temp_dir, options.java_out_dir)
+    else:
+      build_utils.ZipDir(options.srcjar, temp_dir)
+
+  if options.depfile:
+    build_utils.WriteDepfile(
+        options.depfile,
+        args + [options.protoc] + build_utils.GetPythonDependencies())
+
+  if options.stamp:
+    build_utils.Touch(options.stamp)
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
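As a usage illustration, the script above can be invoked the same way the genproto_java action does; every path below is a placeholder that gyp normally supplies.

    import subprocess

    subprocess.check_call([
        'build/protoc_java.py',
        '--protoc=out/Release/android_protoc',
        '--proto-path=src/protos',
        '--java-out-dir=out/Release/java_proto/my_proto_lib/src',
        '--stamp=out/Release/java_proto/my_proto_lib/src.stamp',
        'src/protos/foo.proto',
        'src/protos/bar.proto',
    ])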
diff --git a/build/release.gypi b/build/release.gypi
new file mode 100644
index 0000000..9b8b11d
--- /dev/null
+++ b/build/release.gypi
@@ -0,0 +1,29 @@
+{
+  'conditions': [
+    # Handle build types.
+    ['buildtype=="Dev"', {
+      'includes': ['internal/release_impl.gypi'],
+    }],
+    ['buildtype=="Dev" and incremental_chrome_dll==1', {
+      'msvs_settings': {
+        'VCLinkerTool': {
+          # Enable incremental linking and disable conflicting link options:
+          # http://msdn.microsoft.com/en-us/library/4khtbfyf.aspx
+          'LinkIncremental': '2',
+          'OptimizeReferences': '1',
+          'EnableCOMDATFolding': '1',
+          'Profile': 'false',
+        },
+      },
+    }],
+    ['buildtype=="Official"', {
+      'includes': ['internal/release_impl_official.gypi'],
+    }],
+    # TODO(bradnelson): may also need:
+    #     checksenabled
+    #     coverage
+    #     dom_stats
+    #     pgo_instrument
+    #     pgo_optimize
+  ],
+}
diff --git a/build/repack_action.gypi b/build/repack_action.gypi
new file mode 100644
index 0000000..04b982a
--- /dev/null
+++ b/build/repack_action.gypi
@@ -0,0 +1,31 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit repack in a
+# consistent manner. To use this, the following variables need to be
+# defined:
+#   pak_inputs: list: paths of pak files that need to be combined.
+#   pak_output: string: the output pak file path.
+
+{
+  # GYP version: //tools/grit/repack.gni
+  'variables': {
+    'repack_path': '<(DEPTH)/tools/grit/grit/format/repack.py',
+    'repack_options%': [],
+  },
+  'inputs': [
+    '<(repack_path)',
+    '<@(pak_inputs)',
+  ],
+  'outputs': [
+    '<(pak_output)'
+  ],
+  'action': [
+    'python',
+    '<(repack_path)',
+    '<@(repack_options)',
+    '<(pak_output)',
+    '<@(pak_inputs)',
+  ],
+}
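For reference, a target that pulls in this action might look like the sketch below. The target and file names are purely illustrative; since gyp files are Python literals, the sketch is written as one.

    {
      'target_name': 'example_repack_resources',
      'type': 'none',
      'actions': [
        {
          'action_name': 'repack_example_pak',
          'variables': {
            'pak_inputs': [
              '<(SHARED_INTERMEDIATE_DIR)/example/example_resources.pak',
              '<(SHARED_INTERMEDIATE_DIR)/example/example_strings_en-US.pak',
            ],
            'pak_output': '<(PRODUCT_DIR)/example.pak',
          },
          'includes': [ 'build/repack_action.gypi' ],
        },
      ],
    }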
diff --git a/build/rmdir_and_stamp.py b/build/rmdir_and_stamp.py
new file mode 100755
index 0000000..6aa11f8
--- /dev/null
+++ b/build/rmdir_and_stamp.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Wipes out a directory recursively and then touches a stamp file.
+
+This odd pairing of operations is used to support build scripts which
+slurp up entire directories (e.g. build/android/javac.py when handling
+generated sources) as inputs.
+
+The general pattern of use is:
+
+  - Add a target which generates |gen_sources| into |out_path| from |inputs|.
+  - Include |stamp_file| as an input for that target or any of its rules which
+    generate files in |out_path|.
+  - Add an action which depends on |inputs| and which outputs |stamp_file|;
+    the action should run this script and pass |out_path| and |stamp_file| as
+    its arguments.
+
+The net result is that you will force |out_path| to be wiped and all
+|gen_sources| to be regenerated any time any file in |inputs| changes.
+
+See //third_party/mojo/mojom_bindings_generator.gypi for an example use case.
+
+"""
+
+import errno
+import os
+import shutil
+import sys
+
+
+def Main(dst_dir, stamp_file):
+  try:
+    shutil.rmtree(os.path.normpath(dst_dir))
+  except OSError as e:
+    # Ignore only "not found" errors.
+    if e.errno != errno.ENOENT:
+      raise e
+  with open(stamp_file, 'a'):
+    os.utime(stamp_file, None)
+
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv[1], sys.argv[2]))
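A quick, self-contained way to see the behavior (paths are throwaway placeholders, and build/ is assumed to be on sys.path so the module can be imported):

    import os
    import rmdir_and_stamp

    out_path = 'out/gen/example_sources'          # directory to wipe
    stamp_file = 'out/gen/example_sources.stamp'

    os.makedirs(out_path)                         # stand-in for generated output
    rmdir_and_stamp.Main(out_path, stamp_file)
    assert not os.path.exists(out_path)
    assert os.path.exists(stamp_file)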
diff --git a/build/sanitize-mac-build-log.sed b/build/sanitize-mac-build-log.sed
new file mode 100644
index 0000000..b4111c7
--- /dev/null
+++ b/build/sanitize-mac-build-log.sed
@@ -0,0 +1,33 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Mac build log into something readable.
+
+# Drop uninformative lines.
+/^distcc/d
+/^Check dependencies/d
+/^    setenv /d
+/^    cd /d
+/^make: Nothing to be done/d
+/^$/d
+
+# Xcode prints a short "compiling foobar.o" line followed by the lengthy
+# full command line.  These deletions drop the command line.
+\|^    /Developer/usr/bin/|d
+\|^    /Developer/Library/PrivateFrameworks/DevToolsCore\.framework/|d
+\|^    /Developer/Library/Xcode/Plug-ins/CoreBuildTasks\.xcplugin/|d
+
+# Drop any goma command lines as well.
+\|^    .*/gomacc |d
+
+# And, if you've overridden something from your own bin directory, remove those
+# full command lines, too.
+\|^    /Users/[^/]*/bin/|d
+
+# There's already a nice note for bindings, don't need the command line.
+\|^python scripts/rule_binding\.py|d
+
+# Shorten the "compiling foobar.o" line.
+s|^Distributed-CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
+s|^CompileC (.*) normal i386 c\+\+ com\.apple\.compilers\.gcc\.4_2|    CC \1|
diff --git a/build/sanitize-mac-build-log.sh b/build/sanitize-mac-build-log.sh
new file mode 100755
index 0000000..df5a7af
--- /dev/null
+++ b/build/sanitize-mac-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitize-win-build-log.sed b/build/sanitize-win-build-log.sed
new file mode 100644
index 0000000..c18e664
--- /dev/null
+++ b/build/sanitize-win-build-log.sed
@@ -0,0 +1,15 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Use this sed script to reduce a Windows build log into something
+# machine-parsable.
+
+# Drop uninformative lines.
+/The operation completed successfully\./d
+
+# Drop parallelization indicators on lines.
+s/^[0-9]+>//
+
+# Shorten bindings generation lines
+s/^.*"python".*idl_compiler\.py".*("[^"]+\.idl").*$/  idl_compiler \1/
diff --git a/build/sanitize-win-build-log.sh b/build/sanitize-win-build-log.sh
new file mode 100755
index 0000000..df5a7af
--- /dev/null
+++ b/build/sanitize-win-build-log.sh
@@ -0,0 +1,5 @@
+#!/bin/sh
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+sed -r -f `dirname "${0}"`/`basename "${0}" sh`sed
diff --git a/build/sanitizers/BUILD.gn b/build/sanitizers/BUILD.gn
new file mode 100644
index 0000000..4f81f3e
--- /dev/null
+++ b/build/sanitizers/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+if (is_linux && !is_chromeos) {
+  # TODO(GYP): Figure out which of these work and are needed on other platforms.
+  copy("copy_llvm_symbolizer") {
+    if (is_win) {
+      sources = [
+        "//third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer.exe",
+      ]
+      outputs = [
+        "$root_out_dir/llvm-symbolizer.exe",
+      ]
+    } else {
+      sources = [
+        "//third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer",
+      ]
+      outputs = [
+        "$root_out_dir/llvm-symbolizer",
+      ]
+    }
+  }
+}
diff --git a/build/sanitizers/OWNERS b/build/sanitizers/OWNERS
new file mode 100644
index 0000000..0be2be8
--- /dev/null
+++ b/build/sanitizers/OWNERS
@@ -0,0 +1,4 @@
+glider@chromium.org
+earthdok@chromium.org
+per-file tsan_suppressions.cc=*
+per-file lsan_suppressions.cc=*
diff --git a/build/sanitizers/asan_suppressions.cc b/build/sanitizers/asan_suppressions.cc
new file mode 100644
index 0000000..df94bc8
--- /dev/null
+++ b/build/sanitizers/asan_suppressions.cc
@@ -0,0 +1,23 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for AddressSanitizer.
+// It should only be used under very limited circumstances such as suppressing
+// a report caused by an interceptor call in a system-installed library.
+
+#if defined(ADDRESS_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kASanDefaultSuppressions which contains ASan suppressions delimited by
+// newlines.
+char kASanDefaultSuppressions[] =
+// http://crbug.com/178677
+"interceptor_via_lib:libsqlite3.so\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // ADDRESS_SANITIZER
diff --git a/build/sanitizers/lsan_suppressions.cc b/build/sanitizers/lsan_suppressions.cc
new file mode 100644
index 0000000..e9a8b7e
--- /dev/null
+++ b/build/sanitizers/lsan_suppressions.cc
@@ -0,0 +1,108 @@
+// Copyright 2015 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for LeakSanitizer.
+// You can also pass additional suppressions via LSAN_OPTIONS:
+// LSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/leaksanitizer for more info.
+
+#if defined(LEAK_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kLSanDefaultSuppressions which contains LSan suppressions delimited by
+// newlines. See http://dev.chromium.org/developers/testing/leaksanitizer
+// for the instructions on writing suppressions.
+char kLSanDefaultSuppressions[] =
+// Intentional leak used as sanity test for Valgrind/memcheck.
+"leak:base::ToolsSanityTest_MemoryLeak_Test::TestBody\n"
+
+// ================ Leaks in third-party code ================
+
+// False positives in libfontconfig. http://crbug.com/39050
+"leak:libfontconfig\n"
+
+// Leaks in Nvidia's libGL.
+"leak:libGL.so\n"
+
+// A small leak in V8. http://crbug.com/46571#c9
+"leak:blink::V8GCController::collectGarbage\n"
+
+// TODO(earthdok): revisit NSS suppressions after the switch to BoringSSL
+// NSS leaks in CertDatabaseNSSTest tests. http://crbug.com/51988
+"leak:net::NSSCertDatabase::ImportFromPKCS12\n"
+"leak:net::NSSCertDatabase::ListCerts\n"
+"leak:net::NSSCertDatabase::DeleteCertAndKey\n"
+"leak:crypto::ScopedTestNSSDB::ScopedTestNSSDB\n"
+// Another leak due to not shutting down NSS properly. http://crbug.com/124445
+"leak:error_get_my_stack\n"
+// The NSS suppressions above will not fire when the fast stack unwinder is
+// used, because it can't unwind through NSS libraries. Apply blanket
+// suppressions for now.
+"leak:libnssutil3\n"
+"leak:libnspr4\n"
+"leak:libnss3\n"
+"leak:libplds4\n"
+"leak:libnssckbi\n"
+
+// XRandR has several one time leaks.
+"leak:libxrandr\n"
+
+// xrandr leak. http://crbug.com/119677
+"leak:XRRFindDisplay\n"
+
+// Suppressions for objects which can be owned by the V8 heap. This is a
+// temporary workaround until LeakSanitizer supports the V8 heap.
+// Those should only fire in (browser)tests. If you see one of them in Chrome,
+// then it's a real leak.
+// http://crbug.com/328552
+"leak:WTF::StringImpl::createUninitialized\n"
+"leak:WTF::StringImpl::create8BitIfPossible\n"
+"leak:blink::MouseEvent::create\n"
+"leak:blink::*::*GetterCallback\n"
+"leak:blink::CSSComputedStyleDeclaration::create\n"
+"leak:blink::V8PerIsolateData::ensureDomInJSContext\n"
+"leak:gin/object_template_builder.h\n"
+"leak:gin::internal::Dispatcher\n"
+"leak:blink::LocalDOMWindow::getComputedStyle\n"
+// This should really be RemoteDOMWindow::create, but symbolization is
+// weird in release builds. https://crbug.com/484760
+"leak:blink::RemoteFrame::create\n"
+// Likewise, this should really be blink::WindowProxy::initializeIfNeeded.
+// https://crbug.com/484760
+"leak:blink::WindowProxy::createContext\n"
+
+// http://crbug.com/356785
+"leak:content::RenderViewImplTest_DecideNavigationPolicyForWebUI_Test::TestBody\n"
+
+// ================ Leaks in Chromium code ================
+// PLEASE DO NOT ADD SUPPRESSIONS FOR NEW LEAKS.
+// Instead, commits that introduce memory leaks should be reverted. Suppressing
+// the leak is acceptable in some cases when reverting is impossible, i.e. when
+// enabling leak detection for the first time for a test target with
+// pre-existing leaks.
+
+// Small test-only leak in ppapi_unittests. http://crbug.com/258113
+"leak:ppapi::proxy::PPP_Instance_Private_ProxyTest_PPPInstancePrivate_Test\n"
+
+// http://crbug.com/322671
+"leak:content::SpeechRecognitionBrowserTest::SetUpOnMainThread\n"
+
+// http://crbug.com/355641
+"leak:TrayAccessibilityTest\n"
+
+// http://crbug.com/354644
+"leak:CertificateViewerUITest::ShowModalCertificateViewer\n"
+
+// http://crbug.com/356306
+"leak:content::SetProcessTitleFromCommandLine\n"
+
+// http://crbug.com/506433
+"leak:blink::ResourceFetcher::garbageCollectDocumentResources\n"
+
+// PLEASE READ ABOVE BEFORE ADDING NEW SUPPRESSIONS.
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // LEAK_SANITIZER
diff --git a/build/sanitizers/sanitizer_options.cc b/build/sanitizers/sanitizer_options.cc
new file mode 100644
index 0000000..a659a22
--- /dev/null
+++ b/build/sanitizers/sanitizer_options.cc
@@ -0,0 +1,164 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// This file contains the default options for various compiler-based dynamic
+// tools.
+
+#include "build/build_config.h"
+
+#if defined(ADDRESS_SANITIZER) && defined(OS_MACOSX)
+#include <crt_externs.h>  // for _NSGetArgc, _NSGetArgv
+#include <string.h>
+#endif  // ADDRESS_SANITIZER && OS_MACOSX
+
+#if defined(ADDRESS_SANITIZER) || defined(LEAK_SANITIZER) || \
+    defined(MEMORY_SANITIZER) || defined(THREAD_SANITIZER)
+// Functions returning default options are declared weak in the tools' runtime
+// libraries. To make the linker pick the strong replacements for those
+// functions from this module, we explicitly force its inclusion by passing
+// -Wl,-u_sanitizer_options_link_helper
+extern "C"
+void _sanitizer_options_link_helper() { }
+
+// The callbacks we define here will be called from the sanitizer runtime, but
+// aren't referenced from the Chrome executable. We must ensure that those
+// callbacks are not sanitizer-instrumented, and that they aren't stripped by
+// the linker.
+#define SANITIZER_HOOK_ATTRIBUTE          \
+  extern "C"                              \
+  __attribute__((no_sanitize_address))    \
+  __attribute__((no_sanitize_memory))     \
+  __attribute__((no_sanitize_thread))     \
+  __attribute__((visibility("default")))  \
+  __attribute__((used))
+#endif
+
+#if defined(ADDRESS_SANITIZER)
+// Default options for AddressSanitizer in various configurations:
+//   malloc_context_size=5 - limit the size of stack traces collected by ASan
+//     for each malloc/free to 5 frames. These stack traces tend to accumulate
+//     very fast in applications using JIT (v8 in Chrome's case), see
+//     https://code.google.com/p/address-sanitizer/issues/detail?id=177
+//   symbolize=false - disable the in-process symbolization, which isn't 100%
+//     compatible with the existing sandboxes and doesn't make much sense for
+//     stripped official binaries.
+//   legacy_pthread_cond=1 - run in the libpthread 2.2.5 compatibility mode to
+//     work around libGL.so using the obsolete API, see
+//     http://crbug.com/341805. This may break if pthread_cond_t objects are
+//     accessed by both instrumented and non-instrumented binaries (e.g. if
+//     they reside in shared memory). This option is going to be deprecated in
+//     upstream AddressSanitizer and must not be used anywhere except the
+//     official builds.
+//   check_printf=1 - check the memory accesses to printf (and other formatted
+//     output routines) arguments.
+//   use_sigaltstack=1 - handle signals on an alternate signal stack. Useful
+//     for stack overflow detection.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports
+//     (if symbolize=true, which is set when running with LeakSanitizer).
+//   fast_unwind_on_fatal=1 - use the fast (frame-pointer-based) stack unwinder
+//     to print error reports. V8 doesn't generate debug info for the JIT code,
+//     so the slow unwinder may not work properly.
+//   detect_stack_use_after_return=1 - use fake stack to delay the reuse of
+//     stack allocations and detect stack-use-after-return errors.
+#if defined(OS_LINUX)
+#if defined(GOOGLE_CHROME_BUILD)
+// Default AddressSanitizer options for the official build. These do not affect
+// tests on buildbots (which don't set GOOGLE_CHROME_BUILD) or non-official
+// Chromium builds.
+const char kAsanDefaultOptions[] =
+    "legacy_pthread_cond=1 malloc_context_size=5 "
+    "symbolize=false check_printf=1 use_sigaltstack=1 detect_leaks=0 "
+    "strip_path_prefix=Release/../../ fast_unwind_on_fatal=1";
+#else
+// Default AddressSanitizer options for buildbots and non-official builds.
+const char *kAsanDefaultOptions =
+    "symbolize=false check_printf=1 use_sigaltstack=1 "
+    "detect_leaks=0 strip_path_prefix=Release/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 ";
+#endif  // GOOGLE_CHROME_BUILD
+
+#elif defined(OS_MACOSX)
+const char *kAsanDefaultOptions =
+    "check_printf=1 use_sigaltstack=1 "
+    "strip_path_prefix=Release/../../ fast_unwind_on_fatal=1 "
+    "detect_stack_use_after_return=1 detect_odr_violation=0 ";
+static const char kNaClDefaultOptions[] = "handle_segv=0";
+static const char kNaClFlag[] = "--type=nacl-loader";
+#endif  // OS_LINUX
+
+#if defined(OS_LINUX) || defined(OS_MACOSX)
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_options() {
+#if defined(OS_MACOSX)
+  char*** argvp = _NSGetArgv();
+  int* argcp = _NSGetArgc();
+  if (!argvp || !argcp) return kAsanDefaultOptions;
+  char** argv = *argvp;
+  int argc = *argcp;
+  for (int i = 0; i < argc; ++i) {
+    if (strcmp(argv[i], kNaClFlag) == 0) {
+      return kNaClDefaultOptions;
+    }
+  }
+#endif
+  return kAsanDefaultOptions;
+}
+
+extern "C" char kASanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__asan_default_suppressions() {
+  return kASanDefaultSuppressions;
+}
+#endif  // OS_LINUX || OS_MACOSX
+#endif  // ADDRESS_SANITIZER
+
+#if defined(THREAD_SANITIZER) && defined(OS_LINUX)
+// Default options for ThreadSanitizer in various configurations:
+//   detect_deadlocks=1 - enable deadlock (lock inversion) detection.
+//   second_deadlock_stack=1 - more verbose deadlock reports.
+//   report_signal_unsafe=0 - do not report async-signal-unsafe functions
+//     called from signal handlers.
+//   report_thread_leaks=0 - do not report unjoined threads at the end of
+//     the program execution.
+//   print_suppressions=1 - print the list of matched suppressions.
+//   history_size=7 - make the history buffer proportional to 2^7 (the maximum
+//     value) to keep more stack traces.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kTsanDefaultOptions[] =
+    "detect_deadlocks=1 second_deadlock_stack=1 report_signal_unsafe=0 "
+    "report_thread_leaks=0 print_suppressions=1 history_size=7 "
+    "strip_path_prefix=Release/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_options() {
+  return kTsanDefaultOptions;
+}
+
+extern "C" char kTSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__tsan_default_suppressions() {
+  return kTSanDefaultSuppressions;
+}
+
+#endif  // THREAD_SANITIZER && OS_LINUX
+
+#if defined(LEAK_SANITIZER)
+// Default options for LeakSanitizer:
+//   print_suppressions=1 - print the list of matched suppressions.
+//   strip_path_prefix=Release/../../ - prefixes up to and including this
+//     substring will be stripped from source file paths in symbolized reports.
+const char kLsanDefaultOptions[] =
+    "print_suppressions=1 strip_path_prefix=Release/../../ ";
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_options() {
+  return kLsanDefaultOptions;
+}
+
+extern "C" char kLSanDefaultSuppressions[];
+
+SANITIZER_HOOK_ATTRIBUTE const char *__lsan_default_suppressions() {
+  return kLSanDefaultSuppressions;
+}
+
+#endif  // LEAK_SANITIZER
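These compiled-in defaults can still be extended at run time through the usual sanitizer environment variables, which the runtimes apply on top of the __*_default_options() strings above. A hedged example of launching an instrumented test binary with extra ASan flags (the binary path and symbolizer location are placeholders):

    import os
    import subprocess

    env = dict(os.environ)
    # Applied on top of the defaults returned by __asan_default_options().
    env['ASAN_OPTIONS'] = ('symbolize=1 '
                           'external_symbolizer_path=./llvm-symbolizer')
    subprocess.check_call(['./out/Release/base_unittests'], env=env)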
diff --git a/build/sanitizers/sanitizers.gyp b/build/sanitizers/sanitizers.gyp
new file mode 100644
index 0000000..91dab8a
--- /dev/null
+++ b/build/sanitizers/sanitizers.gyp
@@ -0,0 +1,92 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'sanitizer_options',
+      'type': 'static_library',
+      'toolsets': ['host', 'target'],
+      'variables': {
+         # Every target is going to depend on sanitizer_options, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+         # Do not let 'none' targets depend on this one, they don't need to.
+         'link_dependency': 1,
+       },
+      'sources': [
+        'sanitizer_options.cc',
+      ],
+      'include_dirs': [
+        '../..',
+      ],
+      # Some targets may want to opt-out from ASan, TSan and MSan and link
+      # without the corresponding runtime libraries. We drop the libc++
+      # dependency and omit the compiler flags to avoid bringing instrumented
+      # code to those targets.
+      'conditions': [
+        ['use_custom_libcxx==1', {
+          'dependencies!': [
+            '../../buildtools/third_party/libc++/libc++.gyp:libcxx_proxy',
+          ],
+        }],
+        ['tsan==1', {
+          'sources': [
+            'tsan_suppressions.cc',
+          ],
+        }],
+        ['lsan==1', {
+          'sources': [
+            'lsan_suppressions.cc',
+          ],
+        }],
+        ['asan==1', {
+          'sources': [
+            'asan_suppressions.cc',
+          ],
+        }],
+      ],
+      'cflags/': [
+        ['exclude', '-fsanitize='],
+        ['exclude', '-fsanitize-'],
+      ],
+      'direct_dependent_settings': {
+        'ldflags': [
+          '-Wl,-u_sanitizer_options_link_helper',
+        ],
+        'target_conditions': [
+          ['_type=="executable"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-u,__sanitizer_options_link_helper',
+              ],
+            },
+          }],
+        ],
+      },
+    },
+    {
+      # Copy llvm-symbolizer to the product dir so that LKGR bots can package it.
+      'target_name': 'llvm-symbolizer',
+      'type': 'none',
+      'variables': {
+
+       # Path is relative to this GYP file.
+       'llvm_symbolizer_path':
+           '../../third_party/llvm-build/Release+Asserts/bin/llvm-symbolizer<(EXECUTABLE_SUFFIX)',
+      },
+      'conditions': [
+        ['clang==1', {
+          'copies': [{
+            'destination': '<(PRODUCT_DIR)',
+            'files': [
+              '<(llvm_symbolizer_path)',
+            ],
+          }],
+        }],
+      ],
+    },
+  ],
+}
+
diff --git a/build/sanitizers/tsan_suppressions.cc b/build/sanitizers/tsan_suppressions.cc
new file mode 100644
index 0000000..fe64dd2
--- /dev/null
+++ b/build/sanitizers/tsan_suppressions.cc
@@ -0,0 +1,318 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// This file contains the default suppressions for ThreadSanitizer.
+// You can also pass additional suppressions via TSAN_OPTIONS:
+// TSAN_OPTIONS=suppressions=/path/to/suppressions. Please refer to
+// http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for more info.
+
+#if defined(THREAD_SANITIZER)
+
+// Please make sure the code below declares a single string variable
+// kTSanDefaultSuppressions which contains TSan suppressions delimited by
+// newlines.
+// See http://dev.chromium.org/developers/testing/threadsanitizer-tsan-v2
+// for the instructions on writing suppressions.
+char kTSanDefaultSuppressions[] =
+// False positives in libflashplayer.so and libglib.so. Since we don't
+// instrument them, we cannot reason about the synchronization in them.
+"race:libflashplayer.so\n"
+"race:libglib*.so\n"
+
+// Intentional race in ToolsSanityTest.DataRace in base_unittests.
+"race:base/tools_sanity_unittest.cc\n"
+
+// Data race on WatchdogCounter [test-only].
+"race:base/threading/watchdog_unittest.cc\n"
+
+// Races in libevent, http://crbug.com/23244.
+"race:libevent/event.c\n"
+
+// http://crbug.com/46840.
+"race:base::HistogramSamples::IncreaseSum\n"
+"race:base::Histogram::Add\n"
+"race:base::HistogramSamples::Add\n"
+
+// http://crbug.com/84094.
+"race:sqlite3StatusSet\n"
+"race:pcache1EnforceMaxPage\n"
+"race:pcache1AllocPage\n"
+
+// http://crbug.com/102327.
+// Test-only race, won't fix.
+"race:tracked_objects::ThreadData::ShutdownSingleThreadedCleanup\n"
+
+// http://crbug.com/115540
+"race:*GetCurrentThreadIdentifier\n"
+
+// http://crbug.com/120808
+"race:base/threading/watchdog.cc\n"
+
+// http://crbug.com/157586
+"race:third_party/libvpx/source/libvpx/vp8/decoder/threading.c\n"
+
+// http://crbug.com/158718
+"race:third_party/ffmpeg/libavcodec/pthread.c\n"
+"race:third_party/ffmpeg/libavcodec/pthread_frame.c\n"
+"race:third_party/ffmpeg/libavcodec/vp8.c\n"
+"race:third_party/ffmpeg/libavutil/mem.c\n"
+"race:*HashFrameForTesting\n"
+"race:third_party/ffmpeg/libavcodec/h264pred.c\n"
+"race:media::ReleaseData\n"
+
+// http://crbug.com/158922
+"race:third_party/libvpx/source/libvpx/vp8/encoder/*\n"
+"race:third_party/libvpx/source/libvpx/vp9/encoder/*\n"
+
+// http://crbug.com/189177
+"race:thread_manager\n"
+"race:v8::Locker::Initialize\n"
+
+// http://crbug.com/239359
+"race:media::TestInputCallback::OnData\n"
+
+// http://crbug.com/244368
+"race:skia::BeginPlatformPaint\n"
+
+// http://crbug.com/244385
+"race:unixTempFileDir\n"
+
+// http://crbug.com/244755
+"race:v8::internal::Zone::NewExpand\n"
+"race:TooLateToEnableNow\n"
+"race:adjust_segment_bytes_allocated\n"
+
+// http://crbug.com/244774
+"race:webrtc::RTPReceiver::ProcessBitrate\n"
+"race:webrtc::RTPSender::ProcessBitrate\n"
+"race:webrtc::VideoCodingModuleImpl::Decode\n"
+"race:webrtc::RTPSender::SendOutgoingData\n"
+"race:webrtc::VP8EncoderImpl::GetEncodedPartitions\n"
+"race:webrtc::VP8EncoderImpl::Encode\n"
+"race:webrtc::ViEEncoder::DeliverFrame\n"
+"race:webrtc::vcm::VideoReceiver::Decode\n"
+"race:webrtc::VCMReceiver::FrameForDecoding\n"
+"race:*trace_event_unique_catstatic*\n"
+
+// http://crbug.com/244856
+"race:AutoPulseLock\n"
+
+// http://crbug.com/246968
+"race:webrtc::VideoCodingModuleImpl::RegisterPacketRequestCallback\n"
+
+// http://crbug.com/246974
+"race:content::GpuWatchdogThread::CheckArmed\n"
+
+// http://crbug.com/257396
+"race:base::trace_event::"
+    "TraceEventTestFixture_TraceSamplingScope_Test::TestBody\n"
+
+// http://crbug.com/258479
+"race:SamplingStateScope\n"
+"race:g_trace_state\n"
+
+// http://crbug.com/258499
+"race:third_party/skia/include/core/SkRefCnt.h\n"
+
+// http://crbug.com/268924
+"race:base::g_power_monitor\n"
+"race:base::PowerMonitor::PowerMonitor\n"
+"race:base::PowerMonitor::AddObserver\n"
+"race:base::PowerMonitor::RemoveObserver\n"
+"race:base::PowerMonitor::IsOnBatteryPower\n"
+
+// http://crbug.com/258935
+"race:base::Thread::StopSoon\n"
+
+// http://crbug.com/268941
+"race:tracked_objects::ThreadData::tls_index_\n"
+
+// http://crbug.com/272095
+"race:base::g_top_manager\n"
+
+// http://crbug.com/273047
+"race:base::*::g_lazy_tls_ptr\n"
+"race:IPC::SyncChannel::ReceivedSyncMsgQueue::lazy_tls_ptr_\n"
+
+// http://crbug.com/280466
+"race:content::WebRtcAudioCapturer::SetCapturerSource\n"
+
+// http://crbug.com/285242
+"race:media::PulseAudioOutputStream::SetVolume\n"
+
+// http://crbug.com/308590
+"race:CustomThreadWatcher::~CustomThreadWatcher\n"
+
+// http://crbug.com/310851
+"race:net::ProxyResolverV8Tracing::Job::~Job\n"
+
+// http://crbug.com/313726
+"race:CallbackWasCalled\n"
+
+// http://crbug.com/327330
+"race:PrepareTextureMailbox\n"
+"race:cc::LayerTreeHost::PaintLayerContents\n"
+
+// http://crbug.com/476529
+"deadlock:cc::VideoLayerImpl::WillDraw\n"
+
+// http://crbug.com/328826
+"race:gLCDOrder\n"
+"race:gLCDOrientation\n"
+
+// http://crbug.com/328868
+"race:PR_Lock\n"
+
+// http://crbug.com/329225
+"race:blink::currentTimeFunction\n"
+
+// http://crbug.com/329460
+"race:extensions::InfoMap::AddExtension\n"
+
+// http://crbug.com/333244
+"race:content::"
+    "VideoCaptureImplTest::MockVideoCaptureImpl::~MockVideoCaptureImpl\n"
+
+// http://crbug.com/333871
+"race:v8::internal::Interface::NewValue()::value_interface\n"
+"race:v8::internal::IsMinusZero(double)::minus_zero\n"
+"race:v8::internal::FastCloneShallowObjectStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedLoadStubCompiler::registers\n"
+"race:v8::internal::KeyedStoreStubCompiler::registers()::registers\n"
+"race:v8::internal::KeyedLoadFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::KeyedStoreFastElementStub::InitializeInterfaceDescriptor\n"
+"race:v8::internal::LoadStubCompiler::registers\n"
+"race:v8::internal::StoreStubCompiler::registers\n"
+"race:v8::internal::HValue::LoopWeight\n"
+
+// http://crbug.com/334140
+"race:CommandLine::HasSwitch\n"
+"race:CommandLine::current_process_commandline_\n"
+"race:CommandLine::GetSwitchValueASCII\n"
+
+// http://crbug.com/338675
+"race:blink::s_platform\n"
+"race:content::"
+    "RendererWebKitPlatformSupportImpl::~RendererWebKitPlatformSupportImpl\n"
+
+// http://crbug.com/345240
+"race:WTF::s_shutdown\n"
+
+// http://crbug.com/345245
+"race:jingle_glue::JingleThreadWrapper::~JingleThreadWrapper\n"
+"race:webrtc::voe::Channel::UpdatePacketDelay\n"
+"race:webrtc::voe::Channel::GetDelayEstimate\n"
+"race:webrtc::VCMCodecDataBase::DeregisterReceiveCodec\n"
+"race:webrtc::GainControlImpl::set_stream_analog_level\n"
+
+// http://crbug.com/345618
+"race:WebCore::AudioDestinationNode::render\n"
+
+// http://crbug.com/345624
+"race:media::DataSource::set_host\n"
+
+// http://crbug.com/347534
+"race:v8::internal::V8::TearDown\n"
+
+// http://crbug.com/347538
+"race:sctp_timer_start\n"
+
+// http://crbug.com/347548
+"race:cricket::WebRtcVideoMediaChannel::MaybeResetVieSendCodec\n"
+"race:cricket::WebRtcVideoMediaChannel::SetSendCodec\n"
+
+// http://crbug.com/347553
+"race:blink::WebString::reset\n"
+
+// http://crbug.com/348511
+"race:webrtc::acm1::AudioCodingModuleImpl::PlayoutData10Ms\n"
+
+// http://crbug.com/348982
+"race:cricket::P2PTransportChannel::OnConnectionDestroyed\n"
+"race:cricket::P2PTransportChannel::AddConnection\n"
+
+// http://crbug.com/348984
+"race:sctp_express_handle_sack\n"
+"race:system_base_info\n"
+
+// http://crbug.com/363999
+"race:v8::internal::EnterDebugger::*EnterDebugger\n"
+
+// https://code.google.com/p/v8/issues/detail?id=3143
+"race:v8::internal::FLAG_track_double_fields\n"
+
+// https://crbug.com/369257
+// TODO(mtklein): annotate properly and remove suppressions.
+"race:SandboxIPCHandler::HandleFontMatchRequest\n"
+"race:SkFontConfigInterfaceDirect::matchFamilyName\n"
+"race:SkFontConfigInterface::GetSingletonDirectInterface\n"
+"race:FcStrStaticName\n"
+
+// http://crbug.com/372807
+"deadlock:net::X509Certificate::CreateCertificateListFromBytes\n"
+"deadlock:net::X509Certificate::CreateFromBytes\n"
+"deadlock:net::SSLClientSocketNSS::Core::DoHandshakeLoop\n"
+
+// http://crbug.com/374135
+"race:media::AlsaWrapper::PcmWritei\n"
+
+// False positive in libc's tzset_internal, http://crbug.com/379738.
+"race:tzset_internal\n"
+
+// http://crbug.com/380554
+"deadlock:g_type_add_interface_static\n"
+
+// http://crbug.com/386385
+"race:content::AppCacheStorageImpl::DatabaseTask::CallRunCompleted\n"
+
+// http://crbug.com/388730
+"race:g_next_user_script_id\n"
+
+// http://crbug.com/389098
+"race:webrtc::voe::TransmitMixer::EnableStereoChannelSwapping\n"
+
+// http://crbug.com/397022
+"deadlock:"
+"base::trace_event::TraceEventTestFixture_ThreadOnceBlocking_Test::TestBody\n"
+
+// http://crbug.com/415472
+"deadlock:base::trace_event::TraceLog::GetCategoryGroupEnabled\n"
+
+// http://crbug.com/490856
+"deadlock:content::TracingControllerImpl::SetEnabledOnFileThread\n"
+
+// http://crbug.com/417193
+// Suppressing both AudioContext.{cpp,h}.
+"race:modules/webaudio/AudioContext\n"
+
+// https://code.google.com/p/skia/issues/detail?id=3294
+"race:SkBaseMutex::acquire\n"
+
+// https://crbug.com/430533
+"race:TileTaskGraphRunner::Run\n"
+
+// https://crbug.com/448203
+"race:blink::RemoteFrame::detach\n"
+
+// https://crbug.com/454652
+"race:net::NetworkChangeNotifier::SetTestNotificationsOnly\n"
+
+// https://crbug.com/455638
+"deadlock:dbus::Bus::ShutdownAndBlock\n"
+
+// https://crbug.com/455665
+"race:mojo::common::*::tick_clock\n"
+"race:mojo::common::internal::NowTicks\n"
+"race:tracked_objects::ThreadData::InitializeThreadContext\n"
+
+// https://crbug.com/459429
+"race:randomnessPid\n"
+
+// https://crbug.com/454655
+"race:content::BrowserTestBase::PostTaskToInProcessRendererAndWait\n"
+
+// End of suppressions.
+;  // Please keep this semicolon.
+
+#endif  // THREAD_SANITIZER
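The adjacent string literals above are concatenated by the compiler into one suppressions list that ThreadSanitizer reads at startup through its `__tsan_default_suppressions()` hook. A minimal, hedged sketch of how such a list is typically exposed (the variable name and the suppression entries below are illustrative, not taken from this patch):

```cpp
// Minimal sketch, assuming a TSan-instrumented build: the runtime calls
// __tsan_default_suppressions() once at startup and merges the returned
// entries with any file passed via TSAN_OPTIONS=suppressions=<path>.
#if defined(THREAD_SANITIZER)
extern "C" {

// kExampleSuppressions is an illustrative placeholder for the concatenated
// literal above; each entry is "<report-type>:<symbol-or-path-pattern>\n".
static const char kExampleSuppressions[] =
    "race:third_party/example/RacyHelper\n"
    "deadlock:example::LockInversionSite\n";

const char* __tsan_default_suppressions() {
  return kExampleSuppressions;
}

}  // extern "C"
#endif  // THREAD_SANITIZER
```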
diff --git a/build/secondary/testing/gmock/BUILD.gn b/build/secondary/testing/gmock/BUILD.gn
new file mode 100644
index 0000000..4ec6224
--- /dev/null
+++ b/build/secondary/testing/gmock/BUILD.gn
@@ -0,0 +1,54 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gmock_config") {
+  # Gmock headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+}
+
+static_library("gmock") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    # Sources based on files in r173 of gmock.
+    "include/gmock/gmock-actions.h",
+    "include/gmock/gmock-cardinalities.h",
+    "include/gmock/gmock-generated-actions.h",
+    "include/gmock/gmock-generated-function-mockers.h",
+    "include/gmock/gmock-generated-matchers.h",
+    "include/gmock/gmock-generated-nice-strict.h",
+    "include/gmock/gmock-matchers.h",
+    "include/gmock/gmock-spec-builders.h",
+    "include/gmock/gmock.h",
+    "include/gmock/internal/gmock-generated-internal-utils.h",
+    "include/gmock/internal/gmock-internal-utils.h",
+    "include/gmock/internal/gmock-port.h",
+
+    #"src/gmock-all.cc",  # Not needed by our build.
+    "src/gmock-cardinalities.cc",
+    "src/gmock-internal-utils.cc",
+    "src/gmock-matchers.cc",
+    "src/gmock-spec-builders.cc",
+    "src/gmock.cc",
+  ]
+
+  # This project includes some stuff from gtest's guts.
+  include_dirs = [ "../gtest/include" ]
+
+  public_configs = [
+    ":gmock_config",
+    "//testing/gtest:gtest_config",
+  ]
+}
+
+static_library("gmock_main") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "src/gmock_main.cc",
+  ]
+  deps = [
+    ":gmock",
+  ]
+}
diff --git a/build/secondary/testing/gtest/BUILD.gn b/build/secondary/testing/gtest/BUILD.gn
new file mode 100644
index 0000000..073faec
--- /dev/null
+++ b/build/secondary/testing/gtest/BUILD.gn
@@ -0,0 +1,135 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+config("gtest_config") {
+  visibility = [
+    ":*",
+    "//testing/gmock:*",  # gmock also shares this config.
+  ]
+
+  defines = [
+    # In order to allow regex matches in gtest to be shared between Windows
+    # and other systems, we tell gtest to always use its internal engine.
+    "GTEST_HAS_POSIX_RE=0",
+
+    # Chrome doesn't support / require C++11, yet.
+    "GTEST_LANG_CXX11=0",
+  ]
+
+  # Gtest headers need to be able to find themselves.
+  include_dirs = [ "include" ]
+
+  if (is_win) {
+    cflags = [ "/wd4800" ]  # Unused variable warning.
+  }
+
+  if (is_posix) {
+    defines += [
+      # gtest isn't able to figure out when RTTI is disabled for gcc
+      # versions older than 4.3.2, and assumes it's enabled.  Our Mac
+      # and Linux builds disable RTTI, and cannot guarantee that the
+      # compiler will be 4.3.2 or newer.  The Mac, for example, uses
+      # 4.2.1 as that is the latest available on that platform.  gtest
+      # must be instructed that RTTI is disabled here, and for any
+      # direct dependents that might include gtest headers.
+      "GTEST_HAS_RTTI=0",
+    ]
+  }
+
+  if (is_android) {
+    defines += [
+      # We want gtest features that use tr1::tuple, but we currently
+      # don't support the variadic templates used by libstdc++'s
+      # implementation. gtest supports this scenario by providing its
+      # own implementation but we must opt in to it.
+      "GTEST_USE_OWN_TR1_TUPLE=1",
+
+      # GTEST_USE_OWN_TR1_TUPLE only works if GTEST_HAS_TR1_TUPLE is set.
+      # gtest r625 made it so that GTEST_HAS_TR1_TUPLE is set to 0
+      # automatically on android, so it has to be set explicitly here.
+      "GTEST_HAS_TR1_TUPLE=1",
+    ]
+  }
+}
+
+config("gtest_direct_config") {
+  visibility = [ ":*" ]
+  defines = [ "UNIT_TEST" ]
+}
+
+static_library("gtest") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "include/gtest/gtest-death-test.h",
+    "include/gtest/gtest-message.h",
+    "include/gtest/gtest-param-test.h",
+    "include/gtest/gtest-printers.h",
+    "include/gtest/gtest-spi.h",
+    "include/gtest/gtest-test-part.h",
+    "include/gtest/gtest-typed-test.h",
+    "include/gtest/gtest.h",
+    "include/gtest/gtest_pred_impl.h",
+    "include/gtest/internal/gtest-death-test-internal.h",
+    "include/gtest/internal/gtest-filepath.h",
+    "include/gtest/internal/gtest-internal.h",
+    "include/gtest/internal/gtest-linked_ptr.h",
+    "include/gtest/internal/gtest-param-util-generated.h",
+    "include/gtest/internal/gtest-param-util.h",
+    "include/gtest/internal/gtest-port.h",
+    "include/gtest/internal/gtest-string.h",
+    "include/gtest/internal/gtest-tuple.h",
+    "include/gtest/internal/gtest-type-util.h",
+
+    #"gtest/src/gtest-all.cc",  # Not needed by our build.
+    "../multiprocess_func_list.cc",
+    "../multiprocess_func_list.h",
+    "../platform_test.h",
+    "src/gtest-death-test.cc",
+    "src/gtest-filepath.cc",
+    "src/gtest-internal-inl.h",
+    "src/gtest-port.cc",
+    "src/gtest-printers.cc",
+    "src/gtest-test-part.cc",
+    "src/gtest-typed-test.cc",
+    "src/gtest.cc",
+  ]
+
+  if (is_mac) {
+    sources += [
+      "../gtest_mac.h",
+      "../gtest_mac.mm",
+      "../platform_test_mac.mm",
+    ]
+  }
+
+  include_dirs = [ "." ]
+
+  all_dependent_configs = [ ":gtest_config" ]
+  public_configs = [ ":gtest_direct_config" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+
+  config("gtest_warnings") {
+    if (is_win && is_clang) {
+      # The Mutex constructor initializer list in gtest-port.cc is incorrectly
+      # ordered. See
+      # https://groups.google.com/d/msg/googletestframework/S5uSV8L2TX8/U1FaTDa6J6sJ.
+      cflags = [ "-Wno-reorder" ]
+    }
+  }
+  configs += [ ":gtest_warnings" ]
+}
+
+source_set("gtest_main") {
+  # TODO http://crbug.com/412064 enable this flag all the time.
+  testonly = !is_component_build
+  sources = [
+    "src/gtest_main.cc",
+  ]
+  deps = [
+    ":gtest",
+  ]
+}
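For context, a target that lists `//testing/gtest`, `//testing/gmock`, and one of the `*_main` targets above in its `deps` compiles ordinary googletest/googlemock code. A minimal, hedged example (the `Turtle` interface and test are illustrative, not part of this patch; the `MOCK_METHOD0` form matches the gmock r173 snapshot these sources are based on):

```cpp
// Illustrative test translation unit for a target depending on
// //testing/gtest, //testing/gmock and :gmock_main (so no main() is needed).
#include "gmock/gmock.h"
#include "gtest/gtest.h"

class Turtle {
 public:
  virtual ~Turtle() {}
  virtual void PenDown() = 0;
};

class MockTurtle : public Turtle {
 public:
  MOCK_METHOD0(PenDown, void());  // Old-style macro, matching gmock r173.
};

TEST(TurtleTest, PenDownIsCalledExactlyOnce) {
  MockTurtle turtle;
  EXPECT_CALL(turtle, PenDown()).Times(1);
  turtle.PenDown();  // Satisfies the expectation verified at destruction.
}
```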
diff --git a/build/secondary/third_party/android_tools/BUILD.gn b/build/secondary/third_party/android_tools/BUILD.gn
new file mode 100644
index 0000000..afafffc
--- /dev/null
+++ b/build/secondary/third_party/android_tools/BUILD.gn
@@ -0,0 +1,104 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/android/rules.gni")
+
+config("cpu_features_include") {
+  include_dirs = [ "ndk/sources/android/cpufeatures" ]
+}
+
+# This is the GN version of
+# //build/android/ndk.gyp:cpu_features
+source_set("cpu_features") {
+  sources = [
+    "ndk/sources/android/cpufeatures/cpu-features.c",
+  ]
+  public_configs = [ ":cpu_features_include" ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+}
+
+android_java_prebuilt("android_gcm_java") {
+  jar_path = "$android_sdk_root/extras/google/gcm/gcm-client/dist/gcm.jar"
+}
+
+android_java_prebuilt("uiautomator_java") {
+  jar_path = "$android_sdk/uiautomator.jar"
+}
+
+android_java_prebuilt("android_support_annotations_javalib") {
+  jar_path = "$android_sdk_root/extras/android/support/annotations/android-support-annotations.jar"
+}
+
+java_prebuilt("android_support_multidex_java") {
+  jar_path = "$android_sdk_root/extras/android/support/multidex/library/libs/android-support-multidex.jar"
+}
+
+android_java_prebuilt("android_support_v13_java") {
+  jar_path =
+      "$android_sdk_root/extras/android/support/v13/android-support-v13.jar"
+}
+
+android_resources("android_support_v7_appcompat_resources") {
+  v14_skip = true
+  resource_dirs =
+      [ "$android_sdk_root/extras/android/support/v7/appcompat/res" ]
+  custom_package = "android.support.v7.appcompat"
+}
+
+android_java_prebuilt("android_support_v7_appcompat_java") {
+  deps = [
+    ":android_support_v7_appcompat_resources",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/appcompat/libs/android-support-v7-appcompat.jar"
+}
+
+android_resources("android_support_v7_mediarouter_resources") {
+  v14_skip = true
+  resource_dirs =
+      [ "$android_sdk_root/extras/android/support/v7/mediarouter/res" ]
+  deps = [
+    ":android_support_v7_appcompat_resources",
+  ]
+  custom_package = "android.support.v7.mediarouter"
+}
+
+android_java_prebuilt("android_support_v7_mediarouter_java") {
+  deps = [
+    ":android_support_v7_mediarouter_resources",
+    ":android_support_v7_appcompat_java",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/mediarouter/libs/android-support-v7-mediarouter.jar"
+}
+
+android_resources("android_support_v7_recyclerview_resources") {
+  v14_skip = true
+  resource_dirs =
+      [ "$android_sdk_root/extras/android/support/v7/recyclerview/res" ]
+  custom_package = "android.support.v7.recyclerview"
+}
+
+android_java_prebuilt("android_support_v7_recyclerview_java") {
+  deps = [
+    ":android_support_v7_appcompat_java",
+    ":android_support_v7_recyclerview_resources",
+  ]
+  jar_path = "$android_sdk_root/extras/android/support/v7/recyclerview/libs/android-support-v7-recyclerview.jar"
+}
+
+android_resources("google_play_services_default_resources") {
+  v14_skip = true
+  resource_dirs = [ "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/res" ]
+  custom_package = "com.google.android.gms"
+}
+android_java_prebuilt("google_play_services_default_java") {
+  deps = [
+    ":android_support_v13_java",
+    ":android_support_v7_mediarouter_java",
+    ":google_play_services_default_resources",
+  ]
+  proguard_preprocess = false
+  jar_path = "$android_sdk_root/extras/google/google_play_services/libproject/google-play-services_lib/libs/google-play-services.jar"
+}
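The `:cpu_features` source_set above wraps the NDK's cpufeatures helper; callers include `cpu-features.h` (resolved through the `cpu_features_include` config) and query CPU capabilities at runtime. A small, hedged sketch (the `HasNeon()` wrapper is illustrative):

```cpp
// Minimal sketch of using the NDK cpufeatures API built by :cpu_features.
#include <cpu-features.h>

// Returns true if the device reports an ARM CPU that advertises NEON support.
bool HasNeon() {
  if (android_getCpuFamily() != ANDROID_CPU_FAMILY_ARM)
    return false;
  return (android_getCpuFeatures() & ANDROID_CPU_ARM_FEATURE_NEON) != 0;
}
```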
diff --git a/build/secondary/third_party/libjpeg_turbo/BUILD.gn b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
new file mode 100644
index 0000000..62e60ae
--- /dev/null
+++ b/build/secondary/third_party/libjpeg_turbo/BUILD.gn
@@ -0,0 +1,221 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Do not use the targets in this file unless you need a certain libjpeg
+# implementation. Use the meta target //third_party:jpeg instead.
+
+import("//build/config/sanitizers/sanitizers.gni")
+
+if (current_cpu == "arm") {
+  import("//build/config/arm.gni")
+}
+
+if (current_cpu == "x86" || current_cpu == "x64") {
+  import("//third_party/yasm/yasm_assemble.gni")
+
+  yasm_assemble("simd_asm") {
+    defines = []
+
+    if (current_cpu == "x86") {
+      sources = [
+        "simd/jccolor-mmx.asm",
+        "simd/jccolor-sse2.asm",
+        "simd/jcgray-mmx.asm",
+        "simd/jcgray-sse2.asm",
+        "simd/jchuff-sse2.asm",
+        "simd/jcsample-mmx.asm",
+        "simd/jcsample-sse2.asm",
+        "simd/jdcolor-mmx.asm",
+        "simd/jdcolor-sse2.asm",
+        "simd/jdmerge-mmx.asm",
+        "simd/jdmerge-sse2.asm",
+        "simd/jdsample-mmx.asm",
+        "simd/jdsample-sse2.asm",
+        "simd/jfdctflt-3dn.asm",
+        "simd/jfdctflt-sse.asm",
+        "simd/jfdctfst-mmx.asm",
+        "simd/jfdctfst-sse2.asm",
+        "simd/jfdctint-mmx.asm",
+        "simd/jfdctint-sse2.asm",
+        "simd/jidctflt-3dn.asm",
+        "simd/jidctflt-sse.asm",
+        "simd/jidctflt-sse2.asm",
+        "simd/jidctfst-mmx.asm",
+        "simd/jidctfst-sse2.asm",
+        "simd/jidctint-mmx.asm",
+        "simd/jidctint-sse2.asm",
+        "simd/jidctred-mmx.asm",
+        "simd/jidctred-sse2.asm",
+        "simd/jquant-3dn.asm",
+        "simd/jquant-mmx.asm",
+        "simd/jquant-sse.asm",
+        "simd/jquantf-sse2.asm",
+        "simd/jquanti-sse2.asm",
+        "simd/jsimdcpu.asm",
+      ]
+      defines += [
+        "__x86__",
+        "PIC",
+      ]
+    } else if (current_cpu == "x64") {
+      sources = [
+        "simd/jccolor-sse2-64.asm",
+        "simd/jcgray-sse2-64.asm",
+        "simd/jchuff-sse2-64.asm",
+        "simd/jcsample-sse2-64.asm",
+        "simd/jdcolor-sse2-64.asm",
+        "simd/jdmerge-sse2-64.asm",
+        "simd/jdsample-sse2-64.asm",
+        "simd/jfdctflt-sse-64.asm",
+        "simd/jfdctfst-sse2-64.asm",
+        "simd/jfdctint-sse2-64.asm",
+        "simd/jidctflt-sse2-64.asm",
+        "simd/jidctfst-sse2-64.asm",
+        "simd/jidctint-sse2-64.asm",
+        "simd/jidctred-sse2-64.asm",
+        "simd/jquantf-sse2-64.asm",
+        "simd/jquanti-sse2-64.asm",
+      ]
+      defines += [
+        "__x86_64__",
+        "PIC",
+      ]
+    }
+
+    if (is_win) {
+      defines += [ "MSVC" ]
+      include_dirs = [ "win" ]
+      if (current_cpu == "x86") {
+        defines += [ "WIN32" ]
+      } else {
+        defines += [ "WIN64" ]
+      }
+    } else if (is_mac) {
+      defines += [ "MACHO" ]
+      include_dirs = [ "mac" ]
+    } else if (is_linux || is_android) {
+      defines += [ "ELF" ]
+      include_dirs = [ "linux" ]
+    }
+  }
+}
+
+source_set("simd") {
+  if (current_cpu == "x86") {
+    deps = [
+      ":simd_asm",
+    ]
+    sources = [
+      "simd/jsimd_i386.c",
+    ]
+    if (is_win) {
+      cflags = [ "/wd4245" ]
+    }
+  } else if (current_cpu == "x64") {
+    deps = [
+      ":simd_asm",
+    ]
+    sources = [
+      "simd/jsimd_x86_64.c",
+    ]
+  } else if (current_cpu == "arm" && arm_version >= 7 &&
+             (arm_use_neon || arm_optionally_use_neon)) {
+    sources = [
+      "simd/jsimd_arm.c",
+      "simd/jsimd_arm_neon.S",
+    ]
+  } else {
+    sources = [
+      "jsimd_none.c",
+    ]
+  }
+  if (is_win) {
+    cflags = [ "/wd4245" ]
+  }
+}
+
+config("libjpeg_config") {
+  include_dirs = [ "." ]
+}
+
+source_set("libjpeg") {
+  sources = [
+    "jcapimin.c",
+    "jcapistd.c",
+    "jccoefct.c",
+    "jccolor.c",
+    "jcdctmgr.c",
+    "jchuff.c",
+    "jchuff.h",
+    "jcinit.c",
+    "jcmainct.c",
+    "jcmarker.c",
+    "jcmaster.c",
+    "jcomapi.c",
+    "jconfig.h",
+    "jcparam.c",
+    "jcphuff.c",
+    "jcprepct.c",
+    "jcsample.c",
+    "jdapimin.c",
+    "jdapistd.c",
+    "jdatadst.c",
+    "jdatasrc.c",
+    "jdcoefct.c",
+    "jdcolor.c",
+    "jdct.h",
+    "jddctmgr.c",
+    "jdhuff.c",
+    "jdhuff.h",
+    "jdinput.c",
+    "jdmainct.c",
+    "jdmarker.c",
+    "jdmaster.c",
+    "jdmerge.c",
+    "jdphuff.c",
+    "jdpostct.c",
+    "jdsample.c",
+    "jerror.c",
+    "jerror.h",
+    "jfdctflt.c",
+    "jfdctfst.c",
+    "jfdctint.c",
+    "jidctflt.c",
+    "jidctfst.c",
+    "jidctint.c",
+    "jidctred.c",
+    "jinclude.h",
+    "jmemmgr.c",
+    "jmemnobs.c",
+    "jmemsys.h",
+    "jmorecfg.h",
+    "jpegint.h",
+    "jpeglib.h",
+    "jpeglibmangler.h",
+    "jquant1.c",
+    "jquant2.c",
+    "jutils.c",
+    "jversion.h",
+  ]
+
+  defines = [
+    "WITH_SIMD",
+    "NO_GETENV",
+  ]
+
+  configs -= [ "//build/config/compiler:chromium_code" ]
+  configs += [ "//build/config/compiler:no_chromium_code" ]
+
+  public_configs = [ ":libjpeg_config" ]
+
+  # MemorySanitizer doesn't support assembly code, so keep it disabled in
+  # MSan builds for now.
+  if (is_msan) {
+    sources += [ "jsimd_none.c" ]
+  } else {
+    deps = [
+      ":simd",
+    ]
+  }
+}
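As the header comment notes, consumers should reach this code through the `//third_party:jpeg` meta target; at the source level they then use the standard jpeglib C API that these files implement. A hedged decompression sketch follows, assuming the usual jpeglib include setup (`DecodeJpeg`, the buffer parameters, and the omitted error handling are illustrative simplifications):

```cpp
// Minimal sketch: decode an in-memory JPEG with the jpeglib API.
// The default error manager calls exit() on failure; real callers install
// their own error handler instead.
#include <cstdio>  // jpeglib.h relies on <stdio.h> declarations (FILE).
#include <vector>

extern "C" {
#include "jpeglib.h"
}

std::vector<unsigned char> DecodeJpeg(unsigned char* jpeg_data,
                                      unsigned long jpeg_size) {
  jpeg_decompress_struct cinfo;
  jpeg_error_mgr jerr;
  cinfo.err = jpeg_std_error(&jerr);

  jpeg_create_decompress(&cinfo);
  jpeg_mem_src(&cinfo, jpeg_data, jpeg_size);  // Provided by jdatasrc.c.
  jpeg_read_header(&cinfo, TRUE);
  jpeg_start_decompress(&cinfo);

  const size_t row_stride = cinfo.output_width * cinfo.output_components;
  std::vector<unsigned char> pixels(row_stride * cinfo.output_height);
  while (cinfo.output_scanline < cinfo.output_height) {
    JSAMPROW row = &pixels[cinfo.output_scanline * row_stride];
    jpeg_read_scanlines(&cinfo, &row, 1);
  }

  jpeg_finish_decompress(&cinfo);
  jpeg_destroy_decompress(&cinfo);
  return pixels;  // Packed scanlines, output_components bytes per pixel.
}
```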
diff --git a/build/secondary/third_party/libsrtp/BUILD.gn b/build/secondary/third_party/libsrtp/BUILD.gn
new file mode 100644
index 0000000..7601bea
--- /dev/null
+++ b/build/secondary/third_party/libsrtp/BUILD.gn
@@ -0,0 +1,391 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  use_system_libsrtp = false
+  use_srtp_boringssl = true
+}
+
+config("libsrtp_config") {
+  defines = [
+    "HAVE_CONFIG_H",
+    "HAVE_STDLIB_H",
+    "HAVE_STRING_H",
+    "TESTAPP_SOURCE",
+  ]
+
+  include_dirs = [
+    "config",
+    "srtp/include",
+    "srtp/crypto/include",
+  ]
+
+  if (use_srtp_boringssl) {
+    defines += [ "OPENSSL" ]
+  }
+
+  if (is_posix) {
+    defines += [
+      "HAVE_INT16_T",
+      "HAVE_INT32_T",
+      "HAVE_INT8_T",
+      "HAVE_UINT16_T",
+      "HAVE_UINT32_T",
+      "HAVE_UINT64_T",
+      "HAVE_UINT8_T",
+      "HAVE_STDINT_H",
+      "HAVE_INTTYPES_H",
+      "HAVE_NETINET_IN_H",
+      "HAVE_ARPA_INET_H",
+      "HAVE_UNISTD_H",
+    ]
+    cflags = [ "-Wno-unused-variable" ]
+  }
+
+  if (is_win) {
+    defines += [
+      "HAVE_BYTESWAP_METHODS_H",
+
+      # All Windows architectures are this way.
+      "SIZEOF_UNSIGNED_LONG=4",
+      "SIZEOF_UNSIGNED_LONG_LONG=8",
+    ]
+  }
+
+  if (current_cpu == "x64" || current_cpu == "x86" || current_cpu == "arm") {
+    defines += [
+      # TODO(leozwang): CPU_RISC doesn't work properly on the android/arm
+      # platform for unknown reasons; the root cause still needs to be
+      # investigated. CPU_RISC is used for optimization only, and CPU_CISC
+      # should work just fine; it has been tested on android/arm with srtp
+      # test applications and libjingle.
+      "CPU_CISC",
+    ]
+  }
+
+  if (current_cpu == "mipsel") {
+    defines += [ "CPU_RISC" ]
+  }
+}
+
+config("system_libsrtp_config") {
+  defines = [ "USE_SYSTEM_LIBSRTP" ]
+  include_dirs = [ "/usr/include/srtp" ]
+}
+
+if (use_system_libsrtp) {
+  group("libsrtp") {
+    public_configs = [
+      ":libsrtp_config",
+      ":system_libsrtp_config",
+    ]
+    libs = [ "-lsrtp" ]
+  }
+} else {
+  static_library("libsrtp") {
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+    public_configs = [ ":libsrtp_config" ]
+
+    sources = [
+      # includes
+      "srtp/include/ekt.h",
+      "srtp/include/getopt_s.h",
+      "srtp/include/rtp.h",
+      "srtp/include/rtp_priv.h",
+      "srtp/include/srtp.h",
+      "srtp/include/srtp_priv.h",
+      "srtp/include/ut_sim.h",
+
+      # headers
+      "srtp/crypto/include/aes.h",
+      "srtp/crypto/include/aes_cbc.h",
+      "srtp/crypto/include/aes_icm.h",
+      "srtp/crypto/include/alloc.h",
+      "srtp/crypto/include/auth.h",
+      "srtp/crypto/include/cipher.h",
+      "srtp/crypto/include/crypto.h",
+      "srtp/crypto/include/crypto_kernel.h",
+      "srtp/crypto/include/crypto_math.h",
+      "srtp/crypto/include/crypto_types.h",
+      "srtp/crypto/include/cryptoalg.h",
+      "srtp/crypto/include/datatypes.h",
+      "srtp/crypto/include/err.h",
+      "srtp/crypto/include/gf2_8.h",
+      "srtp/crypto/include/hmac.h",
+      "srtp/crypto/include/integers.h",
+      "srtp/crypto/include/kernel_compat.h",
+      "srtp/crypto/include/key.h",
+      "srtp/crypto/include/null_auth.h",
+      "srtp/crypto/include/null_cipher.h",
+      "srtp/crypto/include/prng.h",
+      "srtp/crypto/include/rand_source.h",
+      "srtp/crypto/include/rdb.h",
+      "srtp/crypto/include/rdbx.h",
+      "srtp/crypto/include/sha1.h",
+      "srtp/crypto/include/stat.h",
+      "srtp/crypto/include/xfm.h",
+
+      # sources
+      "srtp/crypto/cipher/aes.c",
+      "srtp/crypto/cipher/aes_cbc.c",
+      "srtp/crypto/cipher/aes_icm.c",
+      "srtp/crypto/cipher/cipher.c",
+      "srtp/crypto/cipher/null_cipher.c",
+      "srtp/crypto/hash/auth.c",
+      "srtp/crypto/hash/hmac.c",
+      "srtp/crypto/hash/null_auth.c",
+      "srtp/crypto/hash/sha1.c",
+      "srtp/crypto/kernel/alloc.c",
+      "srtp/crypto/kernel/crypto_kernel.c",
+      "srtp/crypto/kernel/err.c",
+      "srtp/crypto/kernel/key.c",
+      "srtp/crypto/math/datatypes.c",
+      "srtp/crypto/math/gf2_8.c",
+      "srtp/crypto/math/stat.c",
+      "srtp/crypto/replay/rdb.c",
+      "srtp/crypto/replay/rdbx.c",
+      "srtp/crypto/replay/ut_sim.c",
+      "srtp/crypto/rng/ctr_prng.c",
+      "srtp/crypto/rng/prng.c",
+      "srtp/crypto/rng/rand_source.c",
+      "srtp/srtp/ekt.c",
+      "srtp/srtp/srtp.c",
+    ]
+
+    if (is_clang) {
+      cflags = [ "-Wno-implicit-function-declaration" ]
+    }
+
+    if (use_srtp_boringssl) {
+      deps = [
+        "//third_party/boringssl:boringssl",
+      ]
+      public_deps = [
+        "//third_party/boringssl:boringssl",
+      ]
+      sources -= [
+        "srtp/crypto/cipher/aes_cbc.c",
+        "srtp/crypto/cipher/aes_icm.c",
+        "srtp/crypto/hash/hmac.c",
+        "srtp/crypto/hash/sha1.c",
+        "srtp/crypto/rng/ctr_prng.c",
+        "srtp/crypto/rng/prng.c",
+      ]
+      sources += [
+        "srtp/crypto/cipher/aes_gcm_ossl.c",
+        "srtp/crypto/cipher/aes_icm_ossl.c",
+        "srtp/crypto/hash/hmac_ossl.c",
+        "srtp/crypto/include/aes_gcm_ossl.h",
+        "srtp/crypto/include/aes_icm_ossl.h",
+      ]
+    }
+  }
+
+  # TODO(GYP): A bunch of these tests don't compile (in gyp either). They're
+  # not very broken, so could probably be made to work if it's useful.
+  if (!is_win) {
+    executable("rdbx_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/rdbx_driver.c",
+      ]
+    }
+
+    executable("srtp_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/include/getopt_s.h",
+        "srtp/include/srtp_priv.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/srtp_driver.c",
+      ]
+    }
+
+    executable("roc_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/include/rdbx.h",
+        "srtp/include/ut_sim.h",
+        "srtp/test/roc_driver.c",
+      ]
+    }
+
+    executable("replay_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/include/rdbx.h",
+        "srtp/include/ut_sim.h",
+        "srtp/test/replay_driver.c",
+      ]
+    }
+
+    executable("rtpw") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/include/datatypes.h",
+        "srtp/include/getopt_s.h",
+        "srtp/include/rtp.h",
+        "srtp/include/srtp.h",
+        "srtp/test/getopt_s.c",
+        "srtp/test/rtp.c",
+        "srtp/test/rtpw.c",
+      ]
+      if (is_android) {
+        defines = [ "HAVE_SYS_SOCKET_H" ]
+      }
+      if (is_clang) {
+        cflags = [ "-Wno-implicit-function-declaration" ]
+      }
+    }
+
+    executable("srtp_test_cipher_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/cipher_driver.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_datatypes_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/datatypes_driver.c",
+      ]
+    }
+
+    executable("srtp_test_stat_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/stat_driver.c",
+      ]
+    }
+
+    executable("srtp_test_sha1_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/sha1_driver.c",
+      ]
+    }
+
+    executable("srtp_test_kernel_driver") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/kernel_driver.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_aes_calc") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/aes_calc.c",
+      ]
+    }
+
+    executable("srtp_test_rand_gen") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/rand_gen.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_rand_gen_soak") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/rand_gen_soak.c",
+        "srtp/include/getopt_s.h",
+        "srtp/test/getopt_s.c",
+      ]
+    }
+
+    executable("srtp_test_env") {
+      configs -= [ "//build/config/compiler:chromium_code" ]
+      configs += [ "//build/config/compiler:no_chromium_code" ]
+      deps = [
+        ":libsrtp",
+      ]
+      sources = [
+        "srtp/crypto/test/env.c",
+      ]
+    }
+
+    group("srtp_runtest") {
+      deps = [
+        ":rdbx_driver",
+        ":srtp_driver",
+        ":roc_driver",
+        ":replay_driver",
+        ":rtpw",
+        ":srtp_test_cipher_driver",
+        ":srtp_test_datatypes_driver",
+        ":srtp_test_stat_driver",
+        ":srtp_test_sha1_driver",
+        ":srtp_test_kernel_driver",
+        ":srtp_test_aes_calc",
+        ":srtp_test_rand_gen",
+        ":srtp_test_rand_gen_soak",
+        ":srtp_test_env",
+      ]
+    }
+  }
+}
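The two `declare_args()` values above choose between the bundled sources, the system library, and the BoringSSL-backed cipher/HMAC implementations. Code that links the resulting `libsrtp` target uses the classic libsrtp 1.x API; a hedged sketch follows (the key and packet parameters are illustrative, and the include path assumes the `libsrtp_config` include dirs above):

```cpp
// Minimal sketch of protecting one RTP packet with libsrtp 1.x as built here.
#include <cstring>

#include "srtp.h"  // From srtp/include via :libsrtp_config.

bool ProtectOnePacket(unsigned char* rtp_packet, int* packet_len,
                      unsigned char* key_30_bytes /* 16B key + 14B salt */) {
  if (srtp_init() != err_status_ok)  // One-time library initialization.
    return false;

  srtp_policy_t policy;
  std::memset(&policy, 0, sizeof(policy));
  crypto_policy_set_rtp_default(&policy.rtp);    // AES-CM-128 / HMAC-SHA1-80.
  crypto_policy_set_rtcp_default(&policy.rtcp);
  policy.ssrc.type = ssrc_any_outbound;          // Apply to any outbound SSRC.
  policy.key = key_30_bytes;
  policy.next = NULL;

  srtp_t session = NULL;
  if (srtp_create(&session, &policy) != err_status_ok)
    return false;

  // Encrypts in place and appends the auth tag; *packet_len is updated.
  bool ok = srtp_protect(session, rtp_packet, packet_len) == err_status_ok;
  srtp_dealloc(session);
  return ok;
}
```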
diff --git a/build/secondary/third_party/nss/BUILD.gn b/build/secondary/third_party/nss/BUILD.gn
new file mode 100644
index 0000000..25d449e
--- /dev/null
+++ b/build/secondary/third_party/nss/BUILD.gn
@@ -0,0 +1,1211 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/linux/pkg_config.gni")
+
+if (is_linux) {
+  # This is a dependency on NSS with no libssl. On Linux we use a built-in SSL
+  # library but the system NSS libraries. Non-Linux platforms using NSS use the
+  # hermetic one in //third_party/nss.
+  #
+  # Generally you should depend on //crypto:platform instead of using this
+  # config since that will properly pick up NSS or OpenSSL depending on
+  # platform and build config.
+  pkg_config("system_nss_no_ssl_config") {
+    packages = [ "nss" ]
+    extra_args = [
+      "-v",
+      "-lssl3",
+    ]
+  }
+} else {
+  include_nss_root_certs = is_ios
+  include_nss_libpkix = is_ios
+
+  config("nspr_config") {
+    defines = [ "NO_NSPR_10_SUPPORT" ]
+    include_dirs = [
+      "nspr/pr/include",
+      "nspr/lib/ds",
+      "nspr/lib/libc/include",
+    ]
+
+    if (component_mode != "shared_library") {
+      defines += [ "NSPR_STATIC" ]
+    }
+  }
+
+  component("nspr") {
+    output_name = "crnspr"
+    sources = [
+      "nspr/lib/ds/plarena.c",
+      "nspr/lib/ds/plarena.h",
+      "nspr/lib/ds/plarenas.h",
+      "nspr/lib/ds/plhash.c",
+      "nspr/lib/ds/plhash.h",
+      "nspr/lib/libc/include/plbase64.h",
+      "nspr/lib/libc/include/plerror.h",
+      "nspr/lib/libc/include/plgetopt.h",
+      "nspr/lib/libc/include/plstr.h",
+      "nspr/lib/libc/src/base64.c",
+      "nspr/lib/libc/src/plerror.c",
+      "nspr/lib/libc/src/plgetopt.c",
+      "nspr/lib/libc/src/strcase.c",
+      "nspr/lib/libc/src/strcat.c",
+      "nspr/lib/libc/src/strchr.c",
+      "nspr/lib/libc/src/strcmp.c",
+      "nspr/lib/libc/src/strcpy.c",
+      "nspr/lib/libc/src/strdup.c",
+      "nspr/lib/libc/src/strlen.c",
+      "nspr/lib/libc/src/strpbrk.c",
+      "nspr/lib/libc/src/strstr.c",
+      "nspr/lib/libc/src/strtok.c",
+      "nspr/pr/include/md/_darwin.cfg",
+      "nspr/pr/include/md/_darwin.h",
+      "nspr/pr/include/md/_pcos.h",
+      "nspr/pr/include/md/_pth.h",
+      "nspr/pr/include/md/_unix_errors.h",
+      "nspr/pr/include/md/_unixos.h",
+      "nspr/pr/include/md/_win32_errors.h",
+      "nspr/pr/include/md/_win95.cfg",
+      "nspr/pr/include/md/_win95.h",
+      "nspr/pr/include/md/prosdep.h",
+      "nspr/pr/include/nspr.h",
+      "nspr/pr/include/obsolete/pralarm.h",
+      "nspr/pr/include/obsolete/probslet.h",
+      "nspr/pr/include/obsolete/protypes.h",
+      "nspr/pr/include/obsolete/prsem.h",
+      "nspr/pr/include/pratom.h",
+      "nspr/pr/include/prbit.h",
+      "nspr/pr/include/prclist.h",
+      "nspr/pr/include/prcmon.h",
+      "nspr/pr/include/prcountr.h",
+      "nspr/pr/include/prcpucfg.h",
+      "nspr/pr/include/prcvar.h",
+      "nspr/pr/include/prdtoa.h",
+      "nspr/pr/include/prenv.h",
+      "nspr/pr/include/prerr.h",
+      "nspr/pr/include/prerror.h",
+      "nspr/pr/include/prinet.h",
+      "nspr/pr/include/prinit.h",
+      "nspr/pr/include/prinrval.h",
+      "nspr/pr/include/prio.h",
+      "nspr/pr/include/pripcsem.h",
+      "nspr/pr/include/private/pprio.h",
+      "nspr/pr/include/private/pprmwait.h",
+      "nspr/pr/include/private/pprthred.h",
+      "nspr/pr/include/private/primpl.h",
+      "nspr/pr/include/private/prpriv.h",
+      "nspr/pr/include/prlink.h",
+      "nspr/pr/include/prlock.h",
+      "nspr/pr/include/prlog.h",
+      "nspr/pr/include/prlong.h",
+      "nspr/pr/include/prmem.h",
+      "nspr/pr/include/prmon.h",
+      "nspr/pr/include/prmwait.h",
+      "nspr/pr/include/prnetdb.h",
+      "nspr/pr/include/prolock.h",
+      "nspr/pr/include/prpdce.h",
+      "nspr/pr/include/prprf.h",
+      "nspr/pr/include/prproces.h",
+      "nspr/pr/include/prrng.h",
+      "nspr/pr/include/prrwlock.h",
+      "nspr/pr/include/prshm.h",
+      "nspr/pr/include/prshma.h",
+      "nspr/pr/include/prsystem.h",
+      "nspr/pr/include/prthread.h",
+      "nspr/pr/include/prtime.h",
+      "nspr/pr/include/prtpool.h",
+      "nspr/pr/include/prtrace.h",
+      "nspr/pr/include/prtypes.h",
+      "nspr/pr/include/prvrsion.h",
+      "nspr/pr/include/prwin16.h",
+      "nspr/pr/src/io/prdir.c",
+      "nspr/pr/src/io/prfdcach.c",
+      "nspr/pr/src/io/prfile.c",
+      "nspr/pr/src/io/prio.c",
+      "nspr/pr/src/io/priometh.c",
+      "nspr/pr/src/io/pripv6.c",
+      "nspr/pr/src/io/prlayer.c",
+      "nspr/pr/src/io/prlog.c",
+      "nspr/pr/src/io/prmapopt.c",
+      "nspr/pr/src/io/prmmap.c",
+      "nspr/pr/src/io/prmwait.c",
+      "nspr/pr/src/io/prpolevt.c",
+      "nspr/pr/src/io/prprf.c",
+      "nspr/pr/src/io/prscanf.c",
+      "nspr/pr/src/io/prsocket.c",
+      "nspr/pr/src/io/prstdio.c",
+      "nspr/pr/src/linking/prlink.c",
+      "nspr/pr/src/malloc/prmalloc.c",
+      "nspr/pr/src/malloc/prmem.c",
+      "nspr/pr/src/md/prosdep.c",
+      "nspr/pr/src/md/unix/darwin.c",
+      "nspr/pr/src/md/unix/os_Darwin.s",
+      "nspr/pr/src/md/unix/unix.c",
+      "nspr/pr/src/md/unix/unix_errors.c",
+      "nspr/pr/src/md/unix/uxproces.c",
+      "nspr/pr/src/md/unix/uxrng.c",
+      "nspr/pr/src/md/unix/uxshm.c",
+      "nspr/pr/src/md/unix/uxwrap.c",
+      "nspr/pr/src/md/windows/ntgc.c",
+      "nspr/pr/src/md/windows/ntinrval.c",
+      "nspr/pr/src/md/windows/ntmisc.c",
+      "nspr/pr/src/md/windows/ntsec.c",
+      "nspr/pr/src/md/windows/ntsem.c",
+      "nspr/pr/src/md/windows/w32ipcsem.c",
+      "nspr/pr/src/md/windows/w32poll.c",
+      "nspr/pr/src/md/windows/w32rng.c",
+      "nspr/pr/src/md/windows/w32shm.c",
+      "nspr/pr/src/md/windows/w95cv.c",
+      "nspr/pr/src/md/windows/w95dllmain.c",
+      "nspr/pr/src/md/windows/w95io.c",
+      "nspr/pr/src/md/windows/w95sock.c",
+      "nspr/pr/src/md/windows/w95thred.c",
+      "nspr/pr/src/md/windows/win32_errors.c",
+      "nspr/pr/src/memory/prseg.c",
+      "nspr/pr/src/memory/prshm.c",
+      "nspr/pr/src/memory/prshma.c",
+      "nspr/pr/src/misc/pralarm.c",
+      "nspr/pr/src/misc/pratom.c",
+      "nspr/pr/src/misc/praton.c",
+      "nspr/pr/src/misc/prcountr.c",
+      "nspr/pr/src/misc/prdtoa.c",
+      "nspr/pr/src/misc/prenv.c",
+      "nspr/pr/src/misc/prerr.c",
+      "nspr/pr/src/misc/prerror.c",
+      "nspr/pr/src/misc/prerrortable.c",
+      "nspr/pr/src/misc/prinit.c",
+      "nspr/pr/src/misc/prinrval.c",
+      "nspr/pr/src/misc/pripc.c",
+      "nspr/pr/src/misc/pripcsem.c",
+      "nspr/pr/src/misc/prlog2.c",
+      "nspr/pr/src/misc/prlong.c",
+      "nspr/pr/src/misc/prnetdb.c",
+      "nspr/pr/src/misc/prolock.c",
+      "nspr/pr/src/misc/prrng.c",
+      "nspr/pr/src/misc/prsystem.c",
+      "nspr/pr/src/misc/prthinfo.c",
+      "nspr/pr/src/misc/prtime.c",
+      "nspr/pr/src/misc/prtpool.c",
+      "nspr/pr/src/misc/prtrace.c",
+      "nspr/pr/src/pthreads/ptio.c",
+      "nspr/pr/src/pthreads/ptmisc.c",
+      "nspr/pr/src/pthreads/ptsynch.c",
+      "nspr/pr/src/pthreads/ptthread.c",
+      "nspr/pr/src/threads/combined/prucpu.c",
+      "nspr/pr/src/threads/combined/prucv.c",
+      "nspr/pr/src/threads/combined/prulock.c",
+      "nspr/pr/src/threads/combined/prustack.c",
+      "nspr/pr/src/threads/combined/pruthr.c",
+      "nspr/pr/src/threads/prcmon.c",
+      "nspr/pr/src/threads/prcthr.c",
+      "nspr/pr/src/threads/prdump.c",
+      "nspr/pr/src/threads/prmon.c",
+      "nspr/pr/src/threads/prrwlock.c",
+      "nspr/pr/src/threads/prsem.c",
+      "nspr/pr/src/threads/prtpd.c",
+    ]
+
+    public_configs = [ ":nspr_config" ]
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    if (is_win) {
+      configs -= [
+        "//build/config/win:unicode",  # Requires 8-bit mode.
+        "//build/config/win:lean_and_mean",  # Won"t compile with lean and mean.
+      ]
+    }
+    configs += [
+      "//build/config/compiler:no_chromium_code",
+      "//build/config/compiler:no_size_t_to_int_warning",
+    ]
+
+    cflags = []
+    defines = [
+      "_NSPR_BUILD_",
+      "FORCE_PR_LOG",
+    ]
+
+    include_dirs = [ "nspr/pr/include/private" ]
+
+    if (is_win) {
+      cflags = [ "/wd4554" ]  # Check precidence.
+      defines += [
+        "XP_PC",
+        "WIN32",
+        "WIN95",
+        "_PR_GLOBAL_THREADS_ONLY",
+        "_CRT_SECURE_NO_WARNINGS",
+      ]
+    } else {
+      sources -= [
+        "nspr/pr/src/md/windows/ntgc.c",
+        "nspr/pr/src/md/windows/ntinrval.c",
+        "nspr/pr/src/md/windows/ntmisc.c",
+        "nspr/pr/src/md/windows/ntsec.c",
+        "nspr/pr/src/md/windows/ntsem.c",
+        "nspr/pr/src/md/windows/w32ipcsem.c",
+        "nspr/pr/src/md/windows/w32poll.c",
+        "nspr/pr/src/md/windows/w32rng.c",
+        "nspr/pr/src/md/windows/w32shm.c",
+        "nspr/pr/src/md/windows/w95cv.c",
+        "nspr/pr/src/md/windows/w95dllmain.c",
+        "nspr/pr/src/md/windows/w95io.c",
+        "nspr/pr/src/md/windows/w95sock.c",
+        "nspr/pr/src/md/windows/w95thred.c",
+        "nspr/pr/src/md/windows/win32_errors.c",
+        "nspr/pr/src/threads/combined/prucpu.c",
+        "nspr/pr/src/threads/combined/prucv.c",
+        "nspr/pr/src/threads/combined/prulock.c",
+        "nspr/pr/src/threads/combined/prustack.c",
+        "nspr/pr/src/threads/combined/pruthr.c",
+      ]
+    }
+
+    if (!is_posix) {
+      sources -= [
+        "nspr/pr/src/md/unix/darwin.c",
+        "nspr/pr/src/md/unix/os_Darwin.s",
+        "nspr/pr/src/md/unix/unix.c",
+        "nspr/pr/src/md/unix/unix_errors.c",
+        "nspr/pr/src/md/unix/uxproces.c",
+        "nspr/pr/src/md/unix/uxrng.c",
+        "nspr/pr/src/md/unix/uxshm.c",
+        "nspr/pr/src/md/unix/uxwrap.c",
+        "nspr/pr/src/pthreads/ptio.c",
+        "nspr/pr/src/pthreads/ptmisc.c",
+        "nspr/pr/src/pthreads/ptsynch.c",
+        "nspr/pr/src/pthreads/ptthread.c",
+      ]
+    }
+
+    if (current_cpu == "x86") {
+      defines += [ "_X86_" ]
+    } else if (current_cpu == "x64") {
+      defines += [ "_AMD64_" ]
+    }
+
+    if (is_mac || is_ios) {
+      sources -= [
+        "nspr/pr/src/io/prdir.c",
+        "nspr/pr/src/io/prfile.c",
+        "nspr/pr/src/io/prio.c",
+        "nspr/pr/src/io/prsocket.c",
+        "nspr/pr/src/misc/pripcsem.c",
+        "nspr/pr/src/threads/prcthr.c",
+        "nspr/pr/src/threads/prdump.c",
+        "nspr/pr/src/threads/prmon.c",
+        "nspr/pr/src/threads/prsem.c",
+      ]
+      defines += [
+        "XP_UNIX",
+        "DARWIN",
+        "XP_MACOSX",
+        "_PR_PTHREADS",
+        "HAVE_BSD_FLOCK",
+        "HAVE_DLADDR",
+        "HAVE_LCHOWN",
+        "HAVE_SOCKLEN_T",
+        "HAVE_STRERROR",
+      ]
+    }
+
+    if (is_mac) {
+      defines += [ "HAVE_CRT_EXTERNS_H" ]
+      libs = [
+        "CoreFoundation.framework",
+        "CoreServices.framework",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # nspr uses a bunch of deprecated functions (NSLinkModule etc) in
+        # prlink.c on mac.
+        "-Wno-deprecated-declarations",
+
+        # nspr passes "const char*" through "void*".
+        "-Wno-incompatible-pointer-types",
+
+        # nspr passes "int*" through "unsigned int*".
+        "-Wno-pointer-sign",
+      ]
+
+      # nspr uses assert(!"foo") instead of assert(false && "foo").
+      configs -= [ "//build/config/clang:extra_warnings" ]
+    }
+  }
+
+  component("nss") {
+    output_name = "crnss"
+    sources = [
+      # Ensure at least one object file is produced, so that MSVC does not
+      # warn when creating the static/shared library. See the note for
+      # the "nssckbi" target for why the "nss" target was split as such.
+      "nss/lib/nss/nssver.c",
+    ]
+
+    public_deps = [
+      ":nss_static",
+    ]
+
+    if (include_nss_root_certs) {
+      public_deps += [ ":nssckbi" ]
+    }
+
+    if (component_mode == "shared_library") {
+      if (is_mac) {
+        ldflags = [ "-all_load" ]
+      } else if (is_win) {
+        # Pass the def file to the linker.
+        ldflags =
+            [ "/DEF:" + rebase_path("nss/exports_win.def", root_build_dir) ]
+      }
+    }
+  }
+
+  config("nssckbi_config") {
+    include_dirs = [ "nss/lib/ckfw/builtins" ]
+  }
+
+  # This is really more of a pseudo-target to work around the fact that
+  # a single static_library target cannot contain two object files of the
+  # same name (hash.o / hash.obj). Logically, this is part of the
+  # "nss_static" target. By separating it out, it creates a possible
+  # circular dependency between "nss_static" and "nssckbi" when
+  # "exclude_nss_root_certs" is not specified, as "nss_static" depends on
+  # the "builtinsC_GetFunctionList" exported by this target. This is an
+  # artifact of how NSS is being statically built, which is not an
+  # officially supported configuration - normally, "nssckbi.dll/so" would
+  # depend on libnss3.dll/so, and the higher layer caller would instruct
+  # libnss3.dll to dynamically load nssckbi.dll, breaking the circle.
+  #
+  # TODO(rsleevi): http://crbug.com/128134 - Break the circular dependency
+  # without requiring nssckbi to be built as a shared library.
+  source_set("nssckbi") {
+    visibility = [ ":nss" ]  # This target is internal implementation detail.
+
+    sources = [
+      "nss/lib/ckfw/builtins/anchor.c",
+      "nss/lib/ckfw/builtins/bfind.c",
+      "nss/lib/ckfw/builtins/binst.c",
+      "nss/lib/ckfw/builtins/bobject.c",
+      "nss/lib/ckfw/builtins/bsession.c",
+      "nss/lib/ckfw/builtins/bslot.c",
+      "nss/lib/ckfw/builtins/btoken.c",
+      "nss/lib/ckfw/builtins/builtins.h",
+      "nss/lib/ckfw/builtins/certdata.c",
+      "nss/lib/ckfw/builtins/ckbiver.c",
+      "nss/lib/ckfw/builtins/constants.c",
+      "nss/lib/ckfw/builtins/nssckbi.h",
+      "nss/lib/ckfw/ck.h",
+      "nss/lib/ckfw/ckfw.h",
+      "nss/lib/ckfw/ckfwm.h",
+      "nss/lib/ckfw/ckfwtm.h",
+      "nss/lib/ckfw/ckmd.h",
+      "nss/lib/ckfw/ckt.h",
+      "nss/lib/ckfw/crypto.c",
+      "nss/lib/ckfw/find.c",
+      "nss/lib/ckfw/hash.c",
+      "nss/lib/ckfw/instance.c",
+      "nss/lib/ckfw/mechanism.c",
+      "nss/lib/ckfw/mutex.c",
+      "nss/lib/ckfw/nssck.api",
+      "nss/lib/ckfw/nssckepv.h",
+      "nss/lib/ckfw/nssckft.h",
+      "nss/lib/ckfw/nssckfw.h",
+      "nss/lib/ckfw/nssckfwc.h",
+      "nss/lib/ckfw/nssckfwt.h",
+      "nss/lib/ckfw/nssckg.h",
+      "nss/lib/ckfw/nssckmdt.h",
+      "nss/lib/ckfw/nssckt.h",
+      "nss/lib/ckfw/object.c",
+      "nss/lib/ckfw/session.c",
+      "nss/lib/ckfw/sessobj.c",
+      "nss/lib/ckfw/slot.c",
+      "nss/lib/ckfw/token.c",
+      "nss/lib/ckfw/wrap.c",
+    ]
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+
+    if (is_win) {
+      configs -= [ "//build/config/win:unicode" ]  # Requires 8-bit mode.
+    }
+    configs += [ "//build/config/compiler:no_chromium_code" ]
+
+    include_dirs = [ "nss/lib/ckfw" ]
+    public_configs = [ ":nssckbi_config" ]
+
+    public_deps = [
+      ":nss_static",
+    ]
+  }
+
+  config("nss_static_config") {
+    defines = [
+      "NSS_STATIC",
+      "NSS_USE_STATIC_LIBS",
+      "USE_UTIL_DIRECTLY",
+    ]
+    if (is_win) {
+      defines += [ "_WINDOWS" ]
+    }
+    include_dirs = [
+      "nspr/pr/include",
+      "nspr/lib/ds",
+      "nspr/lib/libc/include",
+      "nss/lib/base",
+      "nss/lib/certdb",
+      "nss/lib/certhigh",
+      "nss/lib/cryptohi",
+      "nss/lib/dev",
+      "nss/lib/freebl",
+      "nss/lib/freebl/ecl",
+      "nss/lib/nss",
+      "nss/lib/pk11wrap",
+      "nss/lib/pkcs7",
+      "nss/lib/pki",
+      "nss/lib/smime",
+      "nss/lib/softoken",
+      "nss/lib/util",
+    ]
+  }
+
+  if (is_win && current_cpu == "x86") {
+    source_set("nss_static_avx") {
+      sources = [
+        "nss/lib/freebl/intel-gcm-wrap.c",
+        "nss/lib/freebl/intel-gcm-x86-masm.asm",
+        "nss/lib/freebl/intel-gcm.h",
+      ]
+      defines = [
+        "_WINDOWS",
+        "_X86_",
+        "INTEL_GCM",
+        "MP_API_COMPATIBLE",
+        "MP_ASSEMBLY_DIV_2DX1D",
+        "MP_ASSEMBLY_MULTIPLY",
+        "MP_ASSEMBLY_SQUARE",
+        "MP_NO_MP_WORD",
+        "MP_USE_UINT_DIGIT",
+        "NSS_DISABLE_DBM",
+        "NSS_STATIC",
+        "NSS_USE_STATIC_LIBS",
+        "NSS_X86",
+        "NSS_X86_OR_X64",
+        "RIJNDAEL_INCLUDE_TABLES",
+        "SHLIB_PREFIX=\"\"",
+        "SHLIB_SUFFIX=\"dll\"",
+        "SHLIB_VERSION=\"3\"",
+        "SOFTOKEN_LIB_NAME=\"softokn3.dll\"",
+        "SOFTOKEN_SHLIB_VERSION=\"3\"",
+        "USE_HW_AES",
+        "USE_UTIL_DIRECTLY",
+        "WIN32",
+        "WIN95",
+        "XP_PC",
+      ]
+      include_dirs = [
+        "nspr/pr/include",
+        "nspr/lib/ds",
+        "nspr/lib/libc/include",
+        "nss/lib/freebl/ecl",
+        "nss/lib/util",
+      ]
+    }
+  }
+
+  source_set("nss_static") {
+    visibility = [ ":*" ]  # Internal implementation detail.
+
+    sources = [
+      "nss/lib/base/arena.c",
+      "nss/lib/base/base.h",
+      "nss/lib/base/baset.h",
+      "nss/lib/base/error.c",
+      "nss/lib/base/errorval.c",
+      "nss/lib/base/hash.c",
+      "nss/lib/base/hashops.c",
+      "nss/lib/base/item.c",
+      "nss/lib/base/libc.c",
+      "nss/lib/base/list.c",
+      "nss/lib/base/nssbase.h",
+      "nss/lib/base/nssbaset.h",
+      "nss/lib/base/nssutf8.c",
+      "nss/lib/base/tracker.c",
+      "nss/lib/certdb/alg1485.c",
+      "nss/lib/certdb/cert.h",
+      "nss/lib/certdb/certdb.c",
+      "nss/lib/certdb/certdb.h",
+      "nss/lib/certdb/certi.h",
+      "nss/lib/certdb/certt.h",
+      "nss/lib/certdb/certv3.c",
+      "nss/lib/certdb/certxutl.c",
+      "nss/lib/certdb/certxutl.h",
+      "nss/lib/certdb/crl.c",
+      "nss/lib/certdb/genname.c",
+      "nss/lib/certdb/genname.h",
+      "nss/lib/certdb/polcyxtn.c",
+      "nss/lib/certdb/secname.c",
+      "nss/lib/certdb/stanpcertdb.c",
+      "nss/lib/certdb/xauthkid.c",
+      "nss/lib/certdb/xbsconst.c",
+      "nss/lib/certdb/xconst.c",
+      "nss/lib/certdb/xconst.h",
+      "nss/lib/certhigh/certhigh.c",
+      "nss/lib/certhigh/certhtml.c",
+      "nss/lib/certhigh/certreq.c",
+      "nss/lib/certhigh/certvfy.c",
+      "nss/lib/certhigh/crlv2.c",
+      "nss/lib/certhigh/ocsp.c",
+      "nss/lib/certhigh/ocsp.h",
+      "nss/lib/certhigh/ocspi.h",
+      "nss/lib/certhigh/ocspsig.c",
+      "nss/lib/certhigh/ocspt.h",
+      "nss/lib/certhigh/ocspti.h",
+      "nss/lib/certhigh/xcrldist.c",
+      "nss/lib/cryptohi/cryptohi.h",
+      "nss/lib/cryptohi/cryptoht.h",
+      "nss/lib/cryptohi/dsautil.c",
+      "nss/lib/cryptohi/key.h",
+      "nss/lib/cryptohi/keyhi.h",
+      "nss/lib/cryptohi/keyi.h",
+      "nss/lib/cryptohi/keyt.h",
+      "nss/lib/cryptohi/keythi.h",
+      "nss/lib/cryptohi/sechash.c",
+      "nss/lib/cryptohi/sechash.h",
+      "nss/lib/cryptohi/seckey.c",
+      "nss/lib/cryptohi/secsign.c",
+      "nss/lib/cryptohi/secvfy.c",
+      "nss/lib/dev/ckhelper.c",
+      "nss/lib/dev/ckhelper.h",
+      "nss/lib/dev/dev.h",
+      "nss/lib/dev/devm.h",
+      "nss/lib/dev/devslot.c",
+      "nss/lib/dev/devt.h",
+      "nss/lib/dev/devtm.h",
+      "nss/lib/dev/devtoken.c",
+      "nss/lib/dev/devutil.c",
+      "nss/lib/dev/nssdev.h",
+      "nss/lib/dev/nssdevt.h",
+      "nss/lib/freebl/aeskeywrap.c",
+      "nss/lib/freebl/alg2268.c",
+      "nss/lib/freebl/alghmac.c",
+      "nss/lib/freebl/alghmac.h",
+      "nss/lib/freebl/arcfive.c",
+      "nss/lib/freebl/arcfour.c",
+      "nss/lib/freebl/blapi.h",
+      "nss/lib/freebl/blapii.h",
+      "nss/lib/freebl/blapit.h",
+      "nss/lib/freebl/camellia.c",
+      "nss/lib/freebl/camellia.h",
+      "nss/lib/freebl/chacha20/chacha20.c",
+      "nss/lib/freebl/chacha20/chacha20.h",
+      "nss/lib/freebl/chacha20/chacha20_vec.c",
+      "nss/lib/freebl/chacha20poly1305.c",
+      "nss/lib/freebl/chacha20poly1305.h",
+      "nss/lib/freebl/ctr.c",
+      "nss/lib/freebl/ctr.h",
+      "nss/lib/freebl/cts.c",
+      "nss/lib/freebl/cts.h",
+      "nss/lib/freebl/des.c",
+      "nss/lib/freebl/des.h",
+      "nss/lib/freebl/desblapi.c",
+      "nss/lib/freebl/dh.c",
+      "nss/lib/freebl/drbg.c",
+      "nss/lib/freebl/dsa.c",
+      "nss/lib/freebl/ec.c",
+      "nss/lib/freebl/ec.h",
+      "nss/lib/freebl/ecdecode.c",
+      "nss/lib/freebl/ecl/ec2.h",
+      "nss/lib/freebl/ecl/ec_naf.c",
+      "nss/lib/freebl/ecl/ecl-curve.h",
+      "nss/lib/freebl/ecl/ecl-exp.h",
+      "nss/lib/freebl/ecl/ecl-priv.h",
+      "nss/lib/freebl/ecl/ecl.c",
+      "nss/lib/freebl/ecl/ecl.h",
+      "nss/lib/freebl/ecl/ecl_curve.c",
+      "nss/lib/freebl/ecl/ecl_gf.c",
+      "nss/lib/freebl/ecl/ecl_mult.c",
+      "nss/lib/freebl/ecl/ecp.h",
+      "nss/lib/freebl/ecl/ecp_256.c",
+      "nss/lib/freebl/ecl/ecp_256_32.c",
+      "nss/lib/freebl/ecl/ecp_384.c",
+      "nss/lib/freebl/ecl/ecp_521.c",
+      "nss/lib/freebl/ecl/ecp_aff.c",
+      "nss/lib/freebl/ecl/ecp_jac.c",
+      "nss/lib/freebl/ecl/ecp_jm.c",
+      "nss/lib/freebl/ecl/ecp_mont.c",
+      "nss/lib/freebl/gcm.c",
+      "nss/lib/freebl/gcm.h",
+      "nss/lib/freebl/hmacct.c",
+      "nss/lib/freebl/hmacct.h",
+      "nss/lib/freebl/intel-aes-x86-masm.asm",
+      "nss/lib/freebl/intel-aes.h",
+      "nss/lib/freebl/jpake.c",
+      "nss/lib/freebl/md2.c",
+      "nss/lib/freebl/md5.c",
+      "nss/lib/freebl/mpi/logtab.h",
+      "nss/lib/freebl/mpi/mp_gf2m-priv.h",
+      "nss/lib/freebl/mpi/mp_gf2m.c",
+      "nss/lib/freebl/mpi/mp_gf2m.h",
+      "nss/lib/freebl/mpi/mpcpucache.c",
+      "nss/lib/freebl/mpi/mpi-config.h",
+      "nss/lib/freebl/mpi/mpi-priv.h",
+      "nss/lib/freebl/mpi/mpi.c",
+      "nss/lib/freebl/mpi/mpi.h",
+      "nss/lib/freebl/mpi/mpi_amd64.c",
+      "nss/lib/freebl/mpi/mpi_arm.c",
+      "nss/lib/freebl/mpi/mpi_arm_mac.c",
+      "nss/lib/freebl/mpi/mpi_x86_asm.c",
+      "nss/lib/freebl/mpi/mplogic.c",
+      "nss/lib/freebl/mpi/mplogic.h",
+      "nss/lib/freebl/mpi/mpmontg.c",
+      "nss/lib/freebl/mpi/mpprime.c",
+      "nss/lib/freebl/mpi/mpprime.h",
+      "nss/lib/freebl/mpi/primes.c",
+      "nss/lib/freebl/nss_build_config_mac.h",
+      "nss/lib/freebl/poly1305/poly1305-donna-x64-sse2-incremental-source.c",
+      "nss/lib/freebl/poly1305/poly1305.c",
+      "nss/lib/freebl/poly1305/poly1305.h",
+      "nss/lib/freebl/pqg.c",
+      "nss/lib/freebl/pqg.h",
+      "nss/lib/freebl/rawhash.c",
+      "nss/lib/freebl/rijndael.c",
+      "nss/lib/freebl/rijndael.h",
+      "nss/lib/freebl/rijndael32.tab",
+      "nss/lib/freebl/rsa.c",
+      "nss/lib/freebl/rsapkcs.c",
+      "nss/lib/freebl/secmpi.h",
+      "nss/lib/freebl/secrng.h",
+      "nss/lib/freebl/seed.c",
+      "nss/lib/freebl/seed.h",
+      "nss/lib/freebl/sha256.h",
+      "nss/lib/freebl/sha512.c",
+      "nss/lib/freebl/sha_fast.c",
+      "nss/lib/freebl/sha_fast.h",
+      "nss/lib/freebl/shsign.h",
+      "nss/lib/freebl/shvfy.c",
+      "nss/lib/freebl/sysrand.c",
+      "nss/lib/freebl/tlsprfalg.c",
+      "nss/lib/freebl/unix_rand.c",
+      "nss/lib/freebl/win_rand.c",
+      "nss/lib/nss/nss.h",
+      "nss/lib/nss/nssinit.c",
+      "nss/lib/nss/nssrenam.h",
+      "nss/lib/nss/utilwrap.c",
+      "nss/lib/pk11wrap/debug_module.c",
+      "nss/lib/pk11wrap/dev3hack.c",
+      "nss/lib/pk11wrap/dev3hack.h",
+      "nss/lib/pk11wrap/pk11akey.c",
+      "nss/lib/pk11wrap/pk11auth.c",
+      "nss/lib/pk11wrap/pk11cert.c",
+      "nss/lib/pk11wrap/pk11cxt.c",
+      "nss/lib/pk11wrap/pk11err.c",
+      "nss/lib/pk11wrap/pk11func.h",
+      "nss/lib/pk11wrap/pk11kea.c",
+      "nss/lib/pk11wrap/pk11list.c",
+      "nss/lib/pk11wrap/pk11load.c",
+      "nss/lib/pk11wrap/pk11mech.c",
+      "nss/lib/pk11wrap/pk11merge.c",
+      "nss/lib/pk11wrap/pk11nobj.c",
+      "nss/lib/pk11wrap/pk11obj.c",
+      "nss/lib/pk11wrap/pk11pars.c",
+      "nss/lib/pk11wrap/pk11pbe.c",
+      "nss/lib/pk11wrap/pk11pk12.c",
+      "nss/lib/pk11wrap/pk11pqg.c",
+      "nss/lib/pk11wrap/pk11pqg.h",
+      "nss/lib/pk11wrap/pk11priv.h",
+      "nss/lib/pk11wrap/pk11pub.h",
+      "nss/lib/pk11wrap/pk11sdr.c",
+      "nss/lib/pk11wrap/pk11sdr.h",
+      "nss/lib/pk11wrap/pk11skey.c",
+      "nss/lib/pk11wrap/pk11slot.c",
+      "nss/lib/pk11wrap/pk11util.c",
+      "nss/lib/pk11wrap/secmod.h",
+      "nss/lib/pk11wrap/secmodi.h",
+      "nss/lib/pk11wrap/secmodt.h",
+      "nss/lib/pk11wrap/secmodti.h",
+      "nss/lib/pk11wrap/secpkcs5.h",
+      "nss/lib/pkcs7/certread.c",
+      "nss/lib/pkcs7/p7common.c",
+      "nss/lib/pkcs7/p7create.c",
+      "nss/lib/pkcs7/p7decode.c",
+      "nss/lib/pkcs7/p7encode.c",
+      "nss/lib/pkcs7/p7local.c",
+      "nss/lib/pkcs7/p7local.h",
+      "nss/lib/pkcs7/pkcs7t.h",
+      "nss/lib/pkcs7/secmime.c",
+      "nss/lib/pkcs7/secmime.h",
+      "nss/lib/pkcs7/secpkcs7.h",
+      "nss/lib/pki/asymmkey.c",
+      "nss/lib/pki/certdecode.c",
+      "nss/lib/pki/certificate.c",
+      "nss/lib/pki/cryptocontext.c",
+      "nss/lib/pki/nsspki.h",
+      "nss/lib/pki/nsspkit.h",
+      "nss/lib/pki/pki.h",
+      "nss/lib/pki/pki3hack.c",
+      "nss/lib/pki/pki3hack.h",
+      "nss/lib/pki/pkibase.c",
+      "nss/lib/pki/pkim.h",
+      "nss/lib/pki/pkistore.c",
+      "nss/lib/pki/pkistore.h",
+      "nss/lib/pki/pkit.h",
+      "nss/lib/pki/pkitm.h",
+      "nss/lib/pki/symmkey.c",
+      "nss/lib/pki/tdcache.c",
+      "nss/lib/pki/trustdomain.c",
+      "nss/lib/smime/cms.h",
+      "nss/lib/smime/cmslocal.h",
+      "nss/lib/smime/cmsreclist.h",
+      "nss/lib/smime/cmst.h",
+      "nss/lib/smime/smime.h",
+      "nss/lib/softoken/fipsaudt.c",
+      "nss/lib/softoken/fipstest.c",
+      "nss/lib/softoken/fipstokn.c",
+      "nss/lib/softoken/jpakesftk.c",
+      "nss/lib/softoken/lgglue.c",
+      "nss/lib/softoken/lgglue.h",
+      "nss/lib/softoken/lowkey.c",
+      "nss/lib/softoken/lowkeyi.h",
+      "nss/lib/softoken/lowkeyti.h",
+      "nss/lib/softoken/lowpbe.c",
+      "nss/lib/softoken/lowpbe.h",
+      "nss/lib/softoken/padbuf.c",
+      "nss/lib/softoken/pkcs11.c",
+      "nss/lib/softoken/pkcs11c.c",
+      "nss/lib/softoken/pkcs11i.h",
+      "nss/lib/softoken/pkcs11ni.h",
+      "nss/lib/softoken/pkcs11u.c",
+      "nss/lib/softoken/sdb.c",
+      "nss/lib/softoken/sdb.h",
+      "nss/lib/softoken/sftkdb.c",
+      "nss/lib/softoken/sftkdb.h",
+      "nss/lib/softoken/sftkdbt.h",
+      "nss/lib/softoken/sftkdbti.h",
+      "nss/lib/softoken/sftkhmac.c",
+      "nss/lib/softoken/sftkpars.c",
+      "nss/lib/softoken/sftkpars.h",
+      "nss/lib/softoken/sftkpwd.c",
+      "nss/lib/softoken/softkver.c",
+      "nss/lib/softoken/softkver.h",
+      "nss/lib/softoken/softoken.h",
+      "nss/lib/softoken/softoknt.h",
+      "nss/lib/softoken/tlsprf.c",
+      "nss/lib/ssl/sslerr.h",
+      "nss/lib/util/SECerrs.h",
+      "nss/lib/util/base64.h",
+      "nss/lib/util/ciferfam.h",
+      "nss/lib/util/derdec.c",
+      "nss/lib/util/derenc.c",
+      "nss/lib/util/dersubr.c",
+      "nss/lib/util/dertime.c",
+      "nss/lib/util/errstrs.c",
+      "nss/lib/util/hasht.h",
+      "nss/lib/util/nssb64.h",
+      "nss/lib/util/nssb64d.c",
+      "nss/lib/util/nssb64e.c",
+      "nss/lib/util/nssb64t.h",
+      "nss/lib/util/nssilckt.h",
+      "nss/lib/util/nssilock.c",
+      "nss/lib/util/nssilock.h",
+      "nss/lib/util/nsslocks.h",
+      "nss/lib/util/nssrwlk.c",
+      "nss/lib/util/nssrwlk.h",
+      "nss/lib/util/nssrwlkt.h",
+      "nss/lib/util/nssutil.h",
+      "nss/lib/util/oidstring.c",
+      "nss/lib/util/pkcs11.h",
+      "nss/lib/util/pkcs11f.h",
+      "nss/lib/util/pkcs11n.h",
+      "nss/lib/util/pkcs11p.h",
+      "nss/lib/util/pkcs11t.h",
+      "nss/lib/util/pkcs11u.h",
+      "nss/lib/util/pkcs1sig.c",
+      "nss/lib/util/pkcs1sig.h",
+      "nss/lib/util/portreg.c",
+      "nss/lib/util/portreg.h",
+      "nss/lib/util/quickder.c",
+      "nss/lib/util/secalgid.c",
+      "nss/lib/util/secasn1.h",
+      "nss/lib/util/secasn1d.c",
+      "nss/lib/util/secasn1e.c",
+      "nss/lib/util/secasn1t.h",
+      "nss/lib/util/secasn1u.c",
+      "nss/lib/util/seccomon.h",
+      "nss/lib/util/secder.h",
+      "nss/lib/util/secdert.h",
+      "nss/lib/util/secdig.c",
+      "nss/lib/util/secdig.h",
+      "nss/lib/util/secdigt.h",
+      "nss/lib/util/secerr.h",
+      "nss/lib/util/secitem.c",
+      "nss/lib/util/secitem.h",
+      "nss/lib/util/secoid.c",
+      "nss/lib/util/secoid.h",
+      "nss/lib/util/secoidt.h",
+      "nss/lib/util/secport.c",
+      "nss/lib/util/secport.h",
+      "nss/lib/util/sectime.c",
+      "nss/lib/util/templates.c",
+      "nss/lib/util/utf8.c",
+      "nss/lib/util/utilmod.c",
+      "nss/lib/util/utilmodt.h",
+      "nss/lib/util/utilpars.c",
+      "nss/lib/util/utilpars.h",
+      "nss/lib/util/utilparst.h",
+      "nss/lib/util/utilrename.h",
+    ]
+
+    sources -= [
+      # mpi_arm.c is included by mpi_arm_mac.c.
+      # NOTE: mpi_arm.c can be used directly on Linux. mpi_arm.c will need
+      # to be excluded conditionally if we start to build NSS on Linux.
+      "nss/lib/freebl/mpi/mpi_arm.c",
+
+      # primes.c is included by mpprime.c.
+      "nss/lib/freebl/mpi/primes.c",
+
+      # unix_rand.c and win_rand.c are included by sysrand.c.
+      "nss/lib/freebl/unix_rand.c",
+      "nss/lib/freebl/win_rand.c",
+
+      # debug_module.c is included by pk11load.c.
+      "nss/lib/pk11wrap/debug_module.c",
+    ]
+
+    configs -= [ "//build/config/compiler:chromium_code" ]
+    if (is_win) {
+      configs -= [ "//build/config/win:unicode" ]  # Requires 8-bit mode.
+    }
+    configs += [
+      "//build/config/compiler:no_chromium_code",
+      "//build/config/compiler:no_size_t_to_int_warning",
+    ]
+    public_configs = [ ":nss_static_config" ]
+
+    cflags = []
+
+    # Only need the defines and includes not in nss_static_config.
+    defines = [
+      "MP_API_COMPATIBLE",
+      "NSS_DISABLE_DBM",
+      "RIJNDAEL_INCLUDE_TABLES",
+      "SHLIB_VERSION=\"3\"",
+      "SOFTOKEN_SHLIB_VERSION=\"3\"",
+    ]
+    include_dirs = [
+      "nss/lib/freebl/mpi",
+      "nss/lib/ssl",
+    ]
+
+    if (is_win) {
+      cflags += [ "/wd4101" ]  # Unreferenced local variable.
+    }
+
+    if (include_nss_libpkix) {
+      sources += [
+        "nss/lib/certhigh/certvfypkix.c",
+        "nss/lib/certhigh/certvfypkixprint.c",
+        "nss/lib/libpkix/include/pkix.h",
+        "nss/lib/libpkix/include/pkix_certsel.h",
+        "nss/lib/libpkix/include/pkix_certstore.h",
+        "nss/lib/libpkix/include/pkix_checker.h",
+        "nss/lib/libpkix/include/pkix_crlsel.h",
+        "nss/lib/libpkix/include/pkix_errorstrings.h",
+        "nss/lib/libpkix/include/pkix_params.h",
+        "nss/lib/libpkix/include/pkix_pl_pki.h",
+        "nss/lib/libpkix/include/pkix_pl_system.h",
+        "nss/lib/libpkix/include/pkix_results.h",
+        "nss/lib/libpkix/include/pkix_revchecker.h",
+        "nss/lib/libpkix/include/pkix_sample_modules.h",
+        "nss/lib/libpkix/include/pkix_util.h",
+        "nss/lib/libpkix/include/pkixt.h",
+        "nss/lib/libpkix/pkix/certsel/pkix_certselector.c",
+        "nss/lib/libpkix/pkix/certsel/pkix_certselector.h",
+        "nss/lib/libpkix/pkix/certsel/pkix_comcertselparams.c",
+        "nss/lib/libpkix/pkix/certsel/pkix_comcertselparams.h",
+        "nss/lib/libpkix/pkix/checker/pkix_basicconstraintschecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_basicconstraintschecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_certchainchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_certchainchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_crlchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_crlchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_ekuchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_ekuchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_expirationchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_expirationchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_namechainingchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_namechainingchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_nameconstraintschecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_nameconstraintschecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_ocspchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_ocspchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_policychecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_policychecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationchecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationmethod.c",
+        "nss/lib/libpkix/pkix/checker/pkix_revocationmethod.h",
+        "nss/lib/libpkix/pkix/checker/pkix_signaturechecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_signaturechecker.h",
+        "nss/lib/libpkix/pkix/checker/pkix_targetcertchecker.c",
+        "nss/lib/libpkix/pkix/checker/pkix_targetcertchecker.h",
+        "nss/lib/libpkix/pkix/crlsel/pkix_comcrlselparams.c",
+        "nss/lib/libpkix/pkix/crlsel/pkix_comcrlselparams.h",
+        "nss/lib/libpkix/pkix/crlsel/pkix_crlselector.c",
+        "nss/lib/libpkix/pkix/crlsel/pkix_crlselector.h",
+        "nss/lib/libpkix/pkix/params/pkix_procparams.c",
+        "nss/lib/libpkix/pkix/params/pkix_procparams.h",
+        "nss/lib/libpkix/pkix/params/pkix_resourcelimits.c",
+        "nss/lib/libpkix/pkix/params/pkix_resourcelimits.h",
+        "nss/lib/libpkix/pkix/params/pkix_trustanchor.c",
+        "nss/lib/libpkix/pkix/params/pkix_trustanchor.h",
+        "nss/lib/libpkix/pkix/params/pkix_valparams.c",
+        "nss/lib/libpkix/pkix/params/pkix_valparams.h",
+        "nss/lib/libpkix/pkix/results/pkix_buildresult.c",
+        "nss/lib/libpkix/pkix/results/pkix_buildresult.h",
+        "nss/lib/libpkix/pkix/results/pkix_policynode.c",
+        "nss/lib/libpkix/pkix/results/pkix_policynode.h",
+        "nss/lib/libpkix/pkix/results/pkix_valresult.c",
+        "nss/lib/libpkix/pkix/results/pkix_valresult.h",
+        "nss/lib/libpkix/pkix/results/pkix_verifynode.c",
+        "nss/lib/libpkix/pkix/results/pkix_verifynode.h",
+        "nss/lib/libpkix/pkix/store/pkix_store.c",
+        "nss/lib/libpkix/pkix/store/pkix_store.h",
+        "nss/lib/libpkix/pkix/top/pkix_build.c",
+        "nss/lib/libpkix/pkix/top/pkix_build.h",
+        "nss/lib/libpkix/pkix/top/pkix_lifecycle.c",
+        "nss/lib/libpkix/pkix/top/pkix_lifecycle.h",
+        "nss/lib/libpkix/pkix/top/pkix_validate.c",
+        "nss/lib/libpkix/pkix/top/pkix_validate.h",
+        "nss/lib/libpkix/pkix/util/pkix_error.c",
+        "nss/lib/libpkix/pkix/util/pkix_error.h",
+        "nss/lib/libpkix/pkix/util/pkix_errpaths.c",
+        "nss/lib/libpkix/pkix/util/pkix_list.c",
+        "nss/lib/libpkix/pkix/util/pkix_list.h",
+        "nss/lib/libpkix/pkix/util/pkix_logger.c",
+        "nss/lib/libpkix/pkix/util/pkix_logger.h",
+        "nss/lib/libpkix/pkix/util/pkix_tools.c",
+        "nss/lib/libpkix/pkix/util/pkix_tools.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_aiamgr.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_aiamgr.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_colcertstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_colcertstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpcertstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpcertstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpdefaultclient.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_httpdefaultclient.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_nsscontext.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_nsscontext.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_pk11certstore.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_pk11certstore.h",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_socket.c",
+        "nss/lib/libpkix/pkix_pl_nss/module/pkix_pl_socket.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_basicconstraints.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_basicconstraints.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_cert.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_cert.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyinfo.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyinfo.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicymap.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicymap.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyqualifier.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_certpolicyqualifier.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crl.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crl.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crldp.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crldp.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crlentry.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_crlentry.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_date.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_date.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_generalname.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_generalname.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_infoaccess.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_infoaccess.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_nameconstraints.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_nameconstraints.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspcertid.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspcertid.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocsprequest.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocsprequest.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspresponse.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_ocspresponse.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_publickey.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_publickey.h",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_x500name.c",
+        "nss/lib/libpkix/pkix_pl_nss/pki/pkix_pl_x500name.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bigint.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bigint.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bytearray.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_bytearray.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_common.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_common.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_error.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_hashtable.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_hashtable.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_lifecycle.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_lifecycle.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mem.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mem.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_monitorlock.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_monitorlock.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mutex.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_mutex.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_object.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_object.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_oid.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_oid.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_primhash.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_primhash.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_rwlock.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_rwlock.h",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_string.c",
+        "nss/lib/libpkix/pkix_pl_nss/system/pkix_pl_string.h",
+      ]
+
+      # Disable the LDAP code in libpkix.
+      defines += [ "NSS_PKIX_NO_LDAP" ]
+
+      include_dirs += [
+        "nss/lib/libpkix/include",
+        "nss/lib/libpkix/pkix/certsel",
+        "nss/lib/libpkix/pkix/checker",
+        "nss/lib/libpkix/pkix/crlsel",
+        "nss/lib/libpkix/pkix/params",
+        "nss/lib/libpkix/pkix/results",
+        "nss/lib/libpkix/pkix/store",
+        "nss/lib/libpkix/pkix/top",
+        "nss/lib/libpkix/pkix/util",
+        "nss/lib/libpkix/pkix_pl_nss/module",
+        "nss/lib/libpkix/pkix_pl_nss/pki",
+        "nss/lib/libpkix/pkix_pl_nss/system",
+      ]
+    } else {
+      defines += [ "NSS_DISABLE_LIBPKIX" ]
+    }
+
+    if (!include_nss_root_certs) {
+      defines += [ "NSS_DISABLE_ROOT_CERTS" ]
+    }
+
+    if (current_cpu == "x64" && !is_win) {
+      sources -= [
+        "nss/lib/freebl/chacha20/chacha20.c",
+        "nss/lib/freebl/poly1305/poly1305.c",
+      ]
+    } else {
+      sources -= [
+        "nss/lib/freebl/chacha20/chacha20_vec.c",
+        "nss/lib/freebl/poly1305/poly1305-donna-x64-sse2-incremental-source.c",
+      ]
+    }
+
+    if (is_mac || is_ios) {
+      sources -= [ "nss/lib/freebl/mpi/mpi_amd64.c" ]
+      cflags += [
+        "-include",
+        rebase_path("//third_party/nss/nss/lib/freebl/nss_build_config_mac.h",
+                    root_build_dir),
+      ]
+      defines += [
+        "XP_UNIX",
+        "DARWIN",
+        "HAVE_STRERROR",
+        "HAVE_BSD_FLOCK",
+        "SHLIB_SUFFIX=\"dylib\"",
+        "SHLIB_PREFIX=\"lib\"",
+        "SOFTOKEN_LIB_NAME=\"libsoftokn3.dylib\"",
+      ]
+
+      configs -= [ "//build/config/gcc:symbol_visibility_hidden" ]
+    } else {
+      # Not Mac/iOS.
+      sources -= [ "nss/lib/freebl/mpi/mpi_arm_mac.c" ]
+    }
+
+    if (is_win) {
+      defines += [
+        "SHLIB_SUFFIX=\"dll\"",
+        "SHLIB_PREFIX=\"\"",
+        "SOFTOKEN_LIB_NAME=\"softokn3.dll\"",
+        "XP_PC",
+        "WIN32",
+        "WIN95",
+      ]
+
+      if (current_cpu == "x86") {
+        defines += [
+          "NSS_X86_OR_X64",
+          "NSS_X86",
+          "_X86_",
+          "MP_ASSEMBLY_MULTIPLY",
+          "MP_ASSEMBLY_SQUARE",
+          "MP_ASSEMBLY_DIV_2DX1D",
+          "MP_USE_UINT_DIGIT",
+          "MP_NO_MP_WORD",
+          "USE_HW_AES",
+          "INTEL_GCM",
+        ]
+        sources -= [ "nss/lib/freebl/mpi/mpi_amd64.c" ]
+      } else if (current_cpu == "x64") {
+        sources -= [
+          "nss/lib/freebl/intel-aes-x86-masm.asm",
+          "nss/lib/freebl/mpi/mpi_amd64.c",
+          "nss/lib/freebl/mpi/mpi_x86_asm.c",
+        ]
+        defines += [
+          "NSS_USE_64",
+          "NSS_X86_OR_X64",
+          "NSS_X64",
+          "_AMD64_",
+          "MP_CHAR_STORE_SLOW",
+          "MP_IS_LITTLE_ENDIAN",
+          "WIN64",
+        ]
+      }
+    } else {
+      # Not Windows.
+      sources -= [
+        # mpi_x86_asm.c contains MSVC inline assembly code.
+        "nss/lib/freebl/mpi/mpi_x86_asm.c",
+      ]
+    }
+
+    if (is_clang) {
+      cflags += [
+        # nss doesn't explicitly cast between different enum types.
+        "-Wno-conversion",
+
+        # nss passes "const char*" through "void*".
+        "-Wno-incompatible-pointer-types",
+
+        # nss prefers `a && b || c` over `(a && b) || c`.
+        "-Wno-logical-op-parentheses",
+
+        # nss doesn't use exhaustive switches on enums.
+        "-Wno-switch",
+
+        # nss has some `unsigned < 0` checks.
+        "-Wno-tautological-compare",
+      ]
+    }
+
+    public_deps = [
+      ":nspr",
+    ]
+    deps = [
+      ":nspr",
+      "//third_party/sqlite",
+    ]
+
+    if (is_win && current_cpu == "x86") {
+      deps += [ ":nss_static_avx" ]
+    }
+  }
+}  # Windows/Mac/iOS.
diff --git a/build/secondary/tools/grit/BUILD.gn b/build/secondary/tools/grit/BUILD.gn
new file mode 100644
index 0000000..660bf1b
--- /dev/null
+++ b/build/secondary/tools/grit/BUILD.gn
@@ -0,0 +1,27 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This target creates a stamp file that depends on all the sources in the grit
+# directory. By depending on this, a target can force itself to be rebuilt if
+# grit itself changes.
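+#
+# For example, a target that shells out to grit can pick up this dependency as
+# follows (a sketch only; the target and script names are illustrative and not
+# part of this change):
+#
+#   action("generate_my_resources") {
+#     script = "generate_my_resources.py"
+#     outputs = [ "$target_gen_dir/my_resources.pak" ]
+#     args = rebase_path(outputs, root_build_dir)
+#     deps = [ "//tools/grit:grit_sources" ]
+#   }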
+action("grit_sources") {
+  depfile = "$target_out_dir/grit_sources.d"
+  script = "//build/secondary/tools/grit/stamp_grit_sources.py"
+
+  inputs = [
+    "grit.py",
+  ]
+
+  # Note that we can't call this "grit_sources.stamp" because that file is
+  # implicitly created by GN for script actions.
+  outputs = [
+    "$target_out_dir/grit_sources.script.stamp",
+  ]
+
+  args = [
+    rebase_path("//tools/grit", root_build_dir),
+    rebase_path(outputs[0], root_build_dir),
+    rebase_path(depfile, root_build_dir),
+  ]
+}
diff --git a/build/secondary/tools/grit/grit_rule.gni b/build/secondary/tools/grit/grit_rule.gni
new file mode 100644
index 0000000..bdf812f
--- /dev/null
+++ b/build/secondary/tools/grit/grit_rule.gni
@@ -0,0 +1,483 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Instantiate grit. This will produce a script target to run grit, and a
+# static library that compiles the .cc files.
+#
+# Parameters
+#
+#   source (required)
+#       Path to .grd file.
+#
+#   outputs (required)
+#       List of outputs from grit, relative to the target_gen_dir. Grit will
+#       verify at build time that this list is correct and will fail if there
+#       is a mismatch between the outputs specified by the .grd file and the
+#       outputs list here.
+#
+#       To get this list, you can look in the .grd file for
+#       <output filename="..." and put those filenames here. The base directory
+#       of the list in Grit and the output list specified in the GN grit target
+#       are the same (the target_gen_dir) so you can generally copy the names
+#       exactly.
+#
+#       To get the list of outputs programmatically, run:
+#           python tools/grit/grit_info.py --outputs . path/to/your.grd
+#       And strip the leading "./" from the output files.
+#
+#   defines (optional)
+#       Extra defines to pass to grit (on top of the global grit_defines list).
+#
+#   grit_flags (optional)
+#       List of strings containing extra command-line flags to pass to Grit.
+#
+#   resource_ids (optional)
+#       Path to a grit "firstidsfile". Default is
+#       //tools/gritsettings/resource_ids. Set to "" to use the value specified
+#       in the <grit> nodes of the processed files.
+#
+#   output_dir (optional)
+#       Directory for generated files. If you specify this, you will often
+#       want to specify output_name if the target name is not particularly
+#       unique, since this can cause files from multiple grit targets to
+#       overwrite each other.
+#
+#   output_name (optional)
+#       Provide an alternate base name for the generated files, like the .d
+#       files. Normally these are based on the target name and go in the
+#       output_dir, but if multiple targets with the same name end up in
+#       the same output_dir, they can collide.
+#
+#   depfile_dir (optional)
+#       If set, used to store the depfile and corresponding stamp file.
+#       Defaults to output_dir
+#
+#   use_qualified_include (optional)
+#       If set, output_dir is not added to include_dirs.
+#
+#   configs (optional)
+#       List of additional configs to be applied to the generated target.
+#   deps  (optional)
+#   inputs  (optional)
+#       List of additional files required for grit to process the source file.
+#   visibility  (optional)
+#       Normal meaning.
+#
+# Example
+#
+#   grit("my_resources") {
+#     # Source and outputs are required.
+#     source = "myfile.grd"
+#     outputs = [
+#       "foo_strings.h",
+#       "foo_strings.pak",
+#     ]
+#
+#     grit_flags = [ "-E", "foo=bar" ]  # Optional extra flags.
+#     # You can also put deps here if the grit source depends on generated
+#     # files.
+#   }
+import("//build/config/chrome_build.gni")
+import("//build/config/crypto.gni")
+import("//build/config/features.gni")
+import("//build/config/ui.gni")
+
+grit_defines = []
+
+# Mac and iOS want Title Case strings.
+use_titlecase_in_grd_files = is_mac || is_ios
+if (use_titlecase_in_grd_files) {
+  grit_defines += [
+    "-D",
+    "use_titlecase",
+  ]
+}
+
+if (is_chrome_branded) {
+  grit_defines += [
+    "-D",
+    "_google_chrome",
+    "-E",
+    "CHROMIUM_BUILD=google_chrome",
+  ]
+} else {
+  grit_defines += [
+    "-D",
+    "_chromium",
+    "-E",
+    "CHROMIUM_BUILD=chromium",
+  ]
+}
+
+if (is_chromeos) {
+  grit_defines += [
+    "-D",
+    "chromeos",
+    "-D",
+    "scale_factors=2x",
+  ]
+}
+
+if (is_desktop_linux) {
+  grit_defines += [
+    "-D",
+    "desktop_linux",
+  ]
+}
+
+if (toolkit_views) {
+  grit_defines += [
+    "-D",
+    "toolkit_views",
+  ]
+}
+
+if (use_aura) {
+  grit_defines += [
+    "-D",
+    "use_aura",
+  ]
+}
+
+if (use_ash) {
+  grit_defines += [
+    "-D",
+    "use_ash",
+  ]
+}
+
+if (use_nss_certs) {
+  grit_defines += [
+    "-D",
+    "use_nss_certs",
+  ]
+}
+
+if (use_ozone) {
+  grit_defines += [
+    "-D",
+    "use_ozone",
+  ]
+}
+
+if (enable_image_loader_extension) {
+  grit_defines += [
+    "-D",
+    "image_loader_extension",
+  ]
+}
+
+if (enable_remoting) {
+  grit_defines += [
+    "-D",
+    "remoting",
+  ]
+}
+
+if (is_android) {
+  grit_defines += [
+    "-t",
+    "android",
+    "-E",
+    "ANDROID_JAVA_TAGGED_ONLY=true",
+  ]
+}
+
+if (is_mac || is_ios) {
+  grit_defines += [
+    "-D",
+    "scale_factors=2x",
+  ]
+}
+
+if (is_ios) {
+  grit_defines += [
+    "-t",
+    "ios",
+
+    # iOS uses a whitelist to filter resources.
+    "-w",
+    rebase_path("//build/ios/grit_whitelist.txt", root_build_dir),
+  ]
+}
+
+if (enable_extensions) {
+  grit_defines += [
+    "-D",
+    "enable_extensions",
+  ]
+}
+if (enable_media_router) {
+  grit_defines += [
+    "-D",
+    "enable_media_router",
+  ]
+}
+if (enable_plugins) {
+  grit_defines += [
+    "-D",
+    "enable_plugins",
+  ]
+}
+if (enable_basic_printing || enable_print_preview) {
+  grit_defines += [
+    "-D",
+    "enable_printing",
+  ]
+  if (enable_print_preview) {
+    grit_defines += [
+      "-D",
+      "enable_print_preview",
+    ]
+  }
+}
+if (enable_themes) {
+  grit_defines += [
+    "-D",
+    "enable_themes",
+  ]
+}
+if (enable_app_list) {
+  grit_defines += [
+    "-D",
+    "enable_app_list",
+  ]
+}
+if (enable_settings_app) {
+  grit_defines += [
+    "-D",
+    "enable_settings_app",
+  ]
+}
+if (enable_google_now) {
+  grit_defines += [
+    "-D",
+    "enable_google_now",
+  ]
+}
+
+# Note: use_concatenated_impulse_responses is omitted. It is never used and
+# should probably be removed from the GYP build.
+if (enable_webrtc) {
+  grit_defines += [
+    "-D",
+    "enable_webrtc",
+  ]
+}
+if (enable_hangout_services_extension) {
+  grit_defines += [
+    "-D",
+    "enable_hangout_services_extension",
+  ]
+}
+if (enable_task_manager) {
+  grit_defines += [
+    "-D",
+    "enable_task_manager",
+  ]
+}
+if (enable_notifications) {
+  grit_defines += [
+    "-D",
+    "enable_notifications",
+  ]
+}
+if (enable_wifi_bootstrapping) {
+  grit_defines += [
+    "-D",
+    "enable_wifi_bootstrapping",
+  ]
+}
+if (enable_service_discovery) {
+  grit_defines += [
+    "-D",
+    "enable_service_discovery",
+  ]
+}
+if (mac_views_browser) {
+  grit_defines += [
+    "-D",
+    "mac_views_browser",
+  ]
+}
+
+grit_resource_id_file = "//tools/gritsettings/resource_ids"
+grit_info_script = "//tools/grit/grit_info.py"
+
+template("grit") {
+  assert(defined(invoker.source),
+         "\"source\" must be defined for the grit template $target_name")
+
+  grit_inputs = [ invoker.source ]
+
+  if (defined(invoker.resource_ids)) {
+    resource_ids = invoker.resource_ids
+  } else {
+    resource_ids = grit_resource_id_file
+  }
+  if (resource_ids != "") {
+    # The script depends on the ID file. Only add this dependency if the ID
+    # file is specified.
+    grit_inputs += [ resource_ids ]
+  }
+
+  if (defined(invoker.output_dir)) {
+    output_dir = invoker.output_dir
+  } else {
+    output_dir = target_gen_dir
+  }
+
+  if (defined(invoker.output_name)) {
+    grit_output_name = invoker.output_name
+  } else {
+    grit_output_name = target_name
+  }
+
+  if (defined(invoker.depfile_dir)) {
+    depfile_dir = invoker.depfile_dir
+  } else {
+    depfile_dir = output_dir
+  }
+
+  # These are all passed as arguments to the script, so they have to be
+  # relative to the build directory.
+  if (resource_ids != "") {
+    resource_ids = rebase_path(resource_ids, root_build_dir)
+  }
+  rebased_output_dir = rebase_path(output_dir, root_build_dir)
+  source_path = rebase_path(invoker.source, root_build_dir)
+
+  if (defined(invoker.grit_flags)) {
+    grit_flags = invoker.grit_flags
+  } else {
+    grit_flags = []  # These are optional so default to empty list.
+  }
+
+  assert_files_flags = []
+
+  # We want to make sure the declared outputs actually match what Grit is
+  # writing. We write the list to a file (some of the output lists are long
+  # enough to not fit on a Windows command line) and ask Grit to verify those
+  # are the actual outputs at runtime.
+  asserted_list_file =
+      "$target_out_dir/${grit_output_name}_expected_outputs.txt"
+  write_file(asserted_list_file,
+             rebase_path(invoker.outputs, root_build_dir, output_dir))
+  assert_files_flags += [ "--assert-file-list=" +
+                          rebase_path(asserted_list_file, root_build_dir) ]
+  grit_outputs =
+      get_path_info(rebase_path(invoker.outputs, ".", output_dir), "abspath")
+
+  # The config and the action below get this visibility so only the generated
+  # source set can depend on them. The variable "target_name" will get
+  # overwritten inside the inner targets, so we need to compute it here.
+  target_visibility = [ ":$target_name" ]
+
+  # The current grit setup makes a file in $output_dir/grit/foo.h that
+  # the source code expects to include via "grit/foo.h". It would be nice to
+  # change this to including absolute paths relative to the root gen directory
+  # (like "mycomponent/foo.h"). This config sets up the include path.
+  grit_config = target_name + "_grit_config"
+  config(grit_config) {
+    if (!defined(invoker.use_qualified_include) ||
+        !invoker.use_qualified_include) {
+      include_dirs = [ output_dir ]
+    }
+    visibility = target_visibility
+  }
+
+  grit_custom_target = target_name + "_grit"
+  action(grit_custom_target) {
+    script = "//tools/grit/grit.py"
+    inputs = grit_inputs
+
+    depfile = "$depfile_dir/${grit_output_name}_stamp.d"
+    outputs = [ "${depfile}.stamp" ] + grit_outputs
+
+    args = [
+      "-i",
+      source_path,
+      "build",
+    ]
+    if (resource_ids != "") {
+      args += [
+        "-f",
+        resource_ids,
+      ]
+    }
+    args += [
+              "-o",
+              rebased_output_dir,
+              "--depdir",
+              ".",
+              "--depfile",
+              rebase_path(depfile, root_build_dir),
+              "--write-only-new=1",
+              "--depend-on-stamp",
+            ] + grit_defines
+
+    # Add extra defines with -D flags.
+    if (defined(invoker.defines)) {
+      foreach(i, invoker.defines) {
+        args += [
+          "-D",
+          i,
+        ]
+      }
+    }
+
+    args += grit_flags + assert_files_flags
+
+    if (defined(invoker.visibility)) {
+      # This needs to include both what the invoker specified (since they
+      # probably include generated headers from this target), as well as the
+      # generated source set (since there's no guarantee that the visibility
+      # specified by the invoker includes our target).
+      #
+      # Only define visibility at all if the invoker specified it. Otherwise,
+      # we want to keep the public "no visibility specified" default.
+      visibility = target_visibility + invoker.visibility
+    }
+
+    deps = [
+      "//tools/grit:grit_sources",
+    ]
+    if (defined(invoker.deps)) {
+      deps += invoker.deps
+    }
+    if (defined(invoker.inputs)) {
+      inputs += invoker.inputs
+    }
+  }
+
+  # This is the thing that people actually link with, it must be named the
+  # same as the argument the template was invoked with.
+  source_set(target_name) {
+    # Since we generate a file, we need to be run before the targets that
+    # depend on us.
+    sources = grit_outputs
+
+    # Deps set on the template invocation will go on the action that runs
+    # grit above rather than this library. This target needs to depend on the
+    # action publicly so other scripts can take the outputs from the grit
+    # script as inputs.
+    public_deps = [
+      ":$grit_custom_target",
+    ]
+    public_configs = [ ":$grit_config" ]
+
+    if (defined(invoker.public_configs)) {
+      public_configs += invoker.public_configs
+    }
+
+    if (defined(invoker.configs)) {
+      configs += invoker.configs
+    }
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+    output_name = grit_output_name
+  }
+}
diff --git a/build/secondary/tools/grit/repack.gni b/build/secondary/tools/grit/repack.gni
new file mode 100644
index 0000000..1030674
--- /dev/null
+++ b/build/secondary/tools/grit/repack.gni
@@ -0,0 +1,47 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file defines a template to invoke grit repack in a consistent manner.
+#
+# Parameters:
+#   sources  [required]
+#       List of pak files that need to be combined.
+#
+#   output  [required]
+#       File name (single string) of the output file.
+#
+#   repack_options  [optional]
+#       List of extra arguments to pass.
+#
+#   deps  [optional]
+#   visibility  [optional]
+#       Normal meaning.
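+#
+# Example (a sketch only; the target and pak names are placeholders, not real
+# targets in this change):
+#
+#   repack("my_packed_resources") {
+#     sources = [
+#       "$root_gen_dir/my_component/foo_resources.pak",
+#       "$root_gen_dir/my_component/bar_strings.pak",
+#     ]
+#     output = "$root_gen_dir/my_component/resources.pak"
+#     deps = [
+#       ":foo_resources",
+#       ":bar_strings",
+#     ]
+#   }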
+template("repack") {
+  action(target_name) {
+    assert(defined(invoker.sources), "Need sources for $target_name")
+    assert(defined(invoker.output), "Need output for $target_name")
+
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    script = "//tools/grit/grit/format/repack.py"
+
+    inputs = invoker.sources
+    outputs = [
+      invoker.output,
+    ]
+
+    args = []
+    if (defined(invoker.repack_options)) {
+      args += invoker.repack_options
+    }
+    args += [ rebase_path(invoker.output, root_build_dir) ]
+    args += rebase_path(invoker.sources, root_build_dir)
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
diff --git a/build/secondary/tools/grit/stamp_grit_sources.py b/build/secondary/tools/grit/stamp_grit_sources.py
new file mode 100644
index 0000000..d43d4b8
--- /dev/null
+++ b/build/secondary/tools/grit/stamp_grit_sources.py
@@ -0,0 +1,55 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script enumerates the files in the given directory, writing an empty
+# stamp file and a .d file listing the inputs required to make the stamp. This
+# allows us to dynamically depend on the grit sources without enumerating the
+# grit directory for every invocation of grit (which is what adding the source
+# files to every .grd file's .d file would entail) or shelling out to grit
+# synchronously during GN execution to get the list (which would be slow).
+#
+# Usage:
+#    stamp_grit_sources.py <directory> <stamp-file> <.d-file>
+
+import os
+import sys
+
+def GritSourceFiles(grit_root_dir):
+  files = []
+  for root, _, filenames in os.walk(grit_root_dir):
+    grit_src = [os.path.join(root, f) for f in filenames
+                if f.endswith('.py') and not f.endswith('_unittest.py')]
+    files.extend(grit_src)
+  files = [f.replace('\\', '/') for f in files]
+  return sorted(files)
+
+
+def WriteDepFile(dep_file, stamp_file, source_files):
+  with open(dep_file, "w") as f:
+    f.write(stamp_file)
+    f.write(": ")
+    f.write(' '.join(source_files))
+
+
+def WriteStampFile(stamp_file):
+  with open(stamp_file, "w"):
+    pass
+
+
+def main(argv):
+  if len(argv) != 4:
+    print "Error: expecting 3 args."
+    return 1
+
+  # Use the argv passed to main() rather than reaching back to sys.argv.
+  grit_root_dir = argv[1]
+  stamp_file = argv[2]
+  dep_file = argv[3]
+
+  WriteStampFile(stamp_file)
+  WriteDepFile(dep_file, stamp_file, GritSourceFiles(grit_root_dir))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv))
diff --git a/build/set_clang_warning_flags.gypi b/build/set_clang_warning_flags.gypi
new file mode 100644
index 0000000..f6d7aea
--- /dev/null
+++ b/build/set_clang_warning_flags.gypi
@@ -0,0 +1,58 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included to set clang-specific compiler flags.
+# To use this the following variable can be defined:
+#   clang_warning_flags:       list: Compiler flags to pass to clang.
+#   clang_warning_flags_unset: list: Compiler flags to not pass to clang.
+#
+# Only use this in third-party code. In chromium_code, fix your code to not
+# warn instead!
+#
+# Note that the gypi file is included in target_defaults, so it does not need
+# to be explicitly included.
+#
+# Warning flags set by this will be used on all platforms. If you want to set
+# warning flags on only some platforms, you have to do so manually.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_target',
+#   'variables': {
+#     'clang_warning_flags': ['-Wno-awesome-warning'],
+#     'clang_warning_flags_unset': ['-Wpreviously-set-flag'],
+#   }
+# }
+
+{
+  'variables': {
+    'clang_warning_flags_unset%': [],  # Provide a default value.
+  },
+  'conditions': [
+    ['clang==1', {
+      # This uses >@ instead of <@ to also see clang_warning_flags set in
+      # targets directly, not just the clang_warning_flags in target_defaults.
+      'cflags': [ '>@(clang_warning_flags)' ],
+      'cflags!': [ '>@(clang_warning_flags_unset)' ],
+      'xcode_settings': {
+        'WARNING_CFLAGS': ['>@(clang_warning_flags)'],
+        'WARNING_CFLAGS!': ['>@(clang_warning_flags_unset)'],
+      },
+      'msvs_settings': {
+        'VCCLCompilerTool': {
+          'AdditionalOptions': [ '>@(clang_warning_flags)' ],
+          'AdditionalOptions!': [ '>@(clang_warning_flags_unset)' ],
+        },
+      },
+    }],
+    ['clang==0 and host_clang==1', {
+      'target_conditions': [
+        ['_toolset=="host"', {
+          'cflags': [ '>@(clang_warning_flags)' ],
+          'cflags!': [ '>@(clang_warning_flags_unset)' ],
+        }],
+      ],
+    }],
+  ],
+}
diff --git a/build/shim_headers.gypi b/build/shim_headers.gypi
new file mode 100644
index 0000000..56d8d3a
--- /dev/null
+++ b/build/shim_headers.gypi
@@ -0,0 +1,60 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to handle shim headers
+# in a consistent manner. To use this the following variables need to be
+# defined:
+#   headers_root_path: string: path to directory containing headers
+#   header_filenames: list: list of header file names
+
+{
+  'variables': {
+    'shim_headers_path': '<(SHARED_INTERMEDIATE_DIR)/shim_headers/<(_target_name)/<(_toolset)',
+    'shim_generator_additional_args%': [],
+  },
+  'include_dirs++': [
+    '<(shim_headers_path)',
+  ],
+  'all_dependent_settings': {
+    # Repeating this with different numbers of plusses is unfortunately required
+    # to make sure that even if this include is inside nested conditions/etc, it
+    # still gets inserted at the beginning of the include_dirs list. See
+    # http://crbug.com/263818 for details.
+    'include_dirs+++': [
+      '<(shim_headers_path)',
+    ],
+    'include_dirs++++': [
+      '<(shim_headers_path)',
+    ],
+    'include_dirs+++++': [
+      '<(shim_headers_path)',
+    ],
+  },
+  'actions': [
+    {
+      'variables': {
+        'generator_path': '<(DEPTH)/tools/generate_shim_headers/generate_shim_headers.py',
+        'generator_args': [
+          '--headers-root', '<(headers_root_path)',
+          '--output-directory', '<(shim_headers_path)',
+          '<@(shim_generator_additional_args)',
+          '<@(header_filenames)',
+        ],
+      },
+      'action_name': 'generate_<(_target_name)_shim_headers',
+      'inputs': [
+        '<(generator_path)',
+      ],
+      'outputs': [
+        '<!@pymod_do_main(generate_shim_headers <@(generator_args) --outputs)',
+      ],
+      'action': ['python',
+                 '<(generator_path)',
+                 '<@(generator_args)',
+                 '--generate',
+      ],
+      'message': 'Generating <(_target_name) shim headers',
+    },
+  ],
+}
diff --git a/build/slave/OWNERS b/build/slave/OWNERS
new file mode 100644
index 0000000..f562c92
--- /dev/null
+++ b/build/slave/OWNERS
@@ -0,0 +1,20 @@
+set noparent
+agable@chromium.org
+agable@google.com
+cmp@chromium.org
+cmp@google.com
+dpranke@chromium.org
+iannucci@chromium.org
+iannucci@google.com
+johnw@chromium.org
+johnw@google.com
+maruel@chromium.org
+maruel@google.com
+mmoss@chromium.org
+mmoss@google.com
+pschmidt@chromium.org
+pschmidt@google.com
+stip@chromium.org
+stip@google.com
+szager@chromium.org
+szager@google.com
diff --git a/build/slave/README b/build/slave/README
new file mode 100644
index 0000000..e3718b2
--- /dev/null
+++ b/build/slave/README
@@ -0,0 +1,8 @@
+This is a directory which contains configuration information for the
+buildsystem.
+
+* Under recipes, the buildsystem should use only this directory as an
+  entry point into src/.
+
+* Scripts in this directory must not import from outside this directory or
+  shell out to scripts outside this directory.
diff --git a/build/some.gyp b/build/some.gyp
new file mode 100644
index 0000000..44a1dd5
--- /dev/null
+++ b/build/some.gyp
@@ -0,0 +1,24 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+{
+  'targets': [
+    {
+      'target_name': 'some',
+      'type': 'none',
+      'dependencies': [
+        # This file is intended to be locally modified. List the targets you use
+        # regularly. The generated some.sln will contain projects for only
+        # those targets and the targets they are transitively dependent on. This
+        # can result in a solution that loads and unloads faster in Visual
+        # Studio.
+        #
+        # Tip: Create a dummy CL to hold your local edits to this file, so they
+        # don't accidentally get added to another CL that you are editing.
+        #
+        # Example:
+        # '../chrome/chrome.gyp:chrome',
+      ],
+    },
+  ],
+}
diff --git a/build/symlink.py b/build/symlink.py
new file mode 100755
index 0000000..1c5d3dd
--- /dev/null
+++ b/build/symlink.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Make a symlink and optionally touch a file (to handle dependencies)."""
+import errno
+import optparse
+import os.path
+import shutil
+import sys
+def Main(argv):
+  parser = optparse.OptionParser()
+  parser.add_option('-f', '--force', action='store_true')
+  parser.add_option('--touch')
+  options, args = parser.parse_args(argv[1:])
+  if len(args) < 2:
+    parser.error('at least two arguments required.')
+  target = args[-1]
+  sources = args[:-1]
+  for s in sources:
+    t = os.path.join(target, os.path.basename(s))
+    if len(sources) == 1 and not os.path.isdir(target):
+      t = target
+    try:
+      os.symlink(s, t)
+    except OSError as e:
+      if e.errno == errno.EEXIST and options.force:
+        if os.path.isdir(t):
+          shutil.rmtree(t, ignore_errors=True)
+        else:
+          os.remove(t)
+        os.symlink(s, t)
+      else:
+        raise
+  if options.touch:
+    with open(options.touch, 'w') as f:
+      pass
+if __name__ == '__main__':
+  sys.exit(Main(sys.argv))
diff --git a/build/temp_gyp/README.chromium b/build/temp_gyp/README.chromium
new file mode 100644
index 0000000..8045d61
--- /dev/null
+++ b/build/temp_gyp/README.chromium
@@ -0,0 +1,3 @@
+This directory will be removed once the files in it are committed upstream and
+Chromium imports an upstream revision with these files.  Contact mark for
+details.
diff --git a/build/temp_gyp/pdfsqueeze.gyp b/build/temp_gyp/pdfsqueeze.gyp
new file mode 100644
index 0000000..2b3b1ff
--- /dev/null
+++ b/build/temp_gyp/pdfsqueeze.gyp
@@ -0,0 +1,40 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'targets': [
+    {
+      'target_name': 'pdfsqueeze',
+      'type': 'executable',
+      'sources': [
+        '../../third_party/pdfsqueeze/pdfsqueeze.m',
+      ],
+      'defines': [
+        # Use defines to map the full path names that will be used for
+        # the vars into the short forms expected by pdfsqueeze.m.
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter=ApplyGenericRGB_qfilter',
+        '______third_party_pdfsqueeze_ApplyGenericRGB_qfilter_len=ApplyGenericRGB_qfilter_len',
+      ],
+      'include_dirs': [
+        '<(INTERMEDIATE_DIR)',
+      ],
+      'libraries': [
+        '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+        '$(SDKROOT)/System/Library/Frameworks/Quartz.framework',
+      ],
+      'actions': [
+        {
+          'action_name': 'Generate inline filter data',
+          'inputs': [
+            '../../third_party/pdfsqueeze/ApplyGenericRGB.qfilter',
+          ],
+          'outputs': [
+            '<(INTERMEDIATE_DIR)/ApplyGenericRGB.h',
+          ],
+          'action': ['xxd', '-i', '<@(_inputs)', '<@(_outputs)'],
+        },
+      ],
+    },
+  ],
+}
diff --git a/build/toolchain/OWNERS b/build/toolchain/OWNERS
new file mode 100644
index 0000000..c6cda3f
--- /dev/null
+++ b/build/toolchain/OWNERS
@@ -0,0 +1,3 @@
+brettw@chromium.org
+dpranke@chromium.org
+scottmg@chromium.org
diff --git a/build/toolchain/android/BUILD.gn b/build/toolchain/android/BUILD.gn
new file mode 100644
index 0000000..e543fc6
--- /dev/null
+++ b/build/toolchain/android/BUILD.gn
@@ -0,0 +1,155 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")  # Imports android/config.gni.
+import("//build/toolchain/ccache.gni")
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/goma.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+# The Android GCC toolchains share most of the same parameters, so we have this
+# wrapper around gcc_toolchain to avoid duplication of logic.
+#
+# Parameters:
+#  - android_ndk_sysroot
+#      Sysroot for this architecture.
+#  - android_ndk_lib_dir
+#      Subdirectory inside of android_ndk_sysroot where libs go.
+#  - tool_prefix
+#      Prefix to be added to the tool names.
+#  - toolchain_cpu
+#      Same as gcc_toolchain
+template("android_gcc_toolchain") {
+  gcc_toolchain(target_name) {
+    # Make our manually injected libs relative to the build dir.
+    android_ndk_lib = rebase_path(
+            invoker.android_ndk_sysroot + "/" + invoker.android_ndk_lib_dir,
+            root_build_dir)
+
+    libs_section_prefix = "$android_ndk_lib/crtbegin_dynamic.o"
+    libs_section_postfix = "$android_ndk_lib/crtend_android.o"
+
+    solink_libs_section_prefix = "$android_ndk_lib/crtbegin_so.o"
+    solink_libs_section_postfix = "$android_ndk_lib/crtend_so.o"
+
+    # The tools should be run relative to the build dir.
+    tool_prefix = rebase_path(invoker.tool_prefix, root_build_dir)
+
+    if (use_goma) {
+      assert(!use_ccache, "Goma and ccache can't be used together.")
+      compiler_prefix = "$goma_dir/gomacc "
+    } else if (use_ccache) {
+      compiler_prefix = "ccache "
+    } else {
+      compiler_prefix = ""
+    }
+
+    is_clang = invoker.is_clang
+    if (is_clang) {
+      prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                           root_build_dir)
+
+      cc = compiler_prefix + prefix + "/clang"
+      cxx = compiler_prefix + prefix + "/clang++"
+    } else {
+      cc = compiler_prefix + tool_prefix + "gcc"
+      cxx = compiler_prefix + tool_prefix + "g++"
+    }
+
+    ar = tool_prefix + "ar"
+    ld = cxx
+    readelf = compiler_prefix + tool_prefix + "readelf"
+    nm = compiler_prefix + tool_prefix + "nm"
+
+    toolchain_os = "android"
+    toolchain_cpu = invoker.toolchain_cpu
+
+    # We make the assumption that the gcc_toolchain will produce a soname with
+    # the following definition.
+    soname = "{{target_output_name}}{{output_extension}}"
+
+    stripped_soname = "lib.stripped/${soname}"
+    temp_stripped_soname = "${stripped_soname}.tmp"
+
+    android_strip = "${tool_prefix}strip"
+
+    strip_command =
+        "$android_strip --strip-unneeded -o $temp_stripped_soname $soname"
+    replace_command = "if ! cmp -s $temp_stripped_soname $stripped_soname; then mv $temp_stripped_soname $stripped_soname; fi"
+    postsolink = "$strip_command && $replace_command"
+    solink_outputs = [ stripped_soname ]
+    default_output_extension = android_product_extension
+
+    # We make the assumption that the gcc_toolchain will produce an exe with
+    # the following definition.
+    exe = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+    stripped_exe = "exe.stripped/$exe"
+    postlink = "$android_strip --strip-unneeded -o $stripped_exe $exe"
+    link_outputs = [ stripped_exe ]
+  }
+}
+
+template("android_gcc_toolchains_helper") {
+  android_gcc_toolchain(target_name) {
+    android_ndk_sysroot = invoker.android_ndk_sysroot
+    android_ndk_lib_dir = invoker.android_ndk_lib_dir
+    tool_prefix = invoker.tool_prefix
+    toolchain_cpu = invoker.toolchain_cpu
+  }
+  android_gcc_toolchain("clang_$target_name") {
+    android_ndk_sysroot = invoker.android_ndk_sysroot
+    android_ndk_lib_dir = invoker.android_ndk_lib_dir
+    tool_prefix = invoker.tool_prefix
+    toolchain_cpu = invoker.toolchain_cpu
+    is_clang = true
+  }
+}
+
+android_gcc_toolchains_helper("x86") {
+  android_ndk_sysroot = "$android_ndk_root/$x86_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$x86_android_toolchain_root/bin/i686-linux-android-"
+  toolchain_cpu = "x86"
+}
+
+android_gcc_toolchains_helper("arm") {
+  android_ndk_sysroot = "$android_ndk_root/$arm_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$arm_android_toolchain_root/bin/arm-linux-androideabi-"
+  toolchain_cpu = "arm"
+}
+
+android_gcc_toolchains_helper("mipsel") {
+  android_ndk_sysroot = "$android_ndk_root/$mips_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$mips_android_toolchain_root/bin/mipsel-linux-android-"
+  toolchain_cpu = "mipsel"
+}
+
+android_gcc_toolchains_helper("x64") {
+  android_ndk_sysroot = "$android_ndk_root/$x86_64_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib64"
+
+  tool_prefix = "$x86_64_android_toolchain_root/bin/x86_64-linux-android-"
+  toolchain_cpu = "x86_64"
+}
+
+android_gcc_toolchains_helper("arm64") {
+  android_ndk_sysroot = "$android_ndk_root/$arm64_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib"
+
+  tool_prefix = "$arm64_android_toolchain_root/bin/arm-linux-androideabi-"
+  toolchain_cpu = "aarch64"
+}
+
+android_gcc_toolchains_helper("mips64el") {
+  android_ndk_sysroot = "$android_ndk_root/$mips64_android_sysroot_subdir"
+  android_ndk_lib_dir = "usr/lib64"
+
+  tool_prefix = "$mips64_android_toolchain_root/bin/mipsel-linux-android-"
+  toolchain_cpu = "mipsel64el"
+}
diff --git a/build/toolchain/ccache.gni b/build/toolchain/ccache.gni
new file mode 100644
index 0000000..806e079
--- /dev/null
+++ b/build/toolchain/ccache.gni
@@ -0,0 +1,25 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of ccache - a c/c++ compiler cache which can
+# greatly reduce recompilation times.
+#
+# TIPS:
+#
+# Set clang_use_chrome_plugins=false if using ccache 3.1.9 or earlier, since
+# these versions don't support -Xclang.  (3.1.10 and later will silently
+# ignore -Xclang, so it doesn't matter if you disable clang_use_chrome_plugins
+# or not).
+#
+# Use ccache 3.2 or later to avoid clang unused argument warnings:
+# https://bugzilla.samba.org/show_bug.cgi?id=8118
+#
+# To avoid -Wparentheses-equality clang warnings, at some cost in terms of
+# speed, you can do:
+# export CCACHE_CPP2=yes
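+#
+# To enable ccache, set the GN arg below, e.g. in your build directory's
+# args.gn:
+#   use_ccache = true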
+
+declare_args() {
+  # Set to true to enable ccache.  Probably doesn't work on Windows.
+  use_ccache = false
+}
diff --git a/build/toolchain/clang.gni b/build/toolchain/clang.gni
new file mode 100644
index 0000000..c680384
--- /dev/null
+++ b/build/toolchain/clang.gni
@@ -0,0 +1,9 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Enable the optional type profiler in Clang, which will tag heap allocations
+  # with the allocation type.
+  use_clang_type_profiler = false
+}
diff --git a/build/toolchain/cros/BUILD.gn b/build/toolchain/cros/BUILD.gn
new file mode 100644
index 0000000..140958b
--- /dev/null
+++ b/build/toolchain/cros/BUILD.gn
@@ -0,0 +1,35 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+  # The CrOS build system supports many different kinds of targets across
+  # many different architectures. Bringing your own toolchain is also supported,
+  # so it's actually impossible to enumerate all toolchains for all targets
+  # as GN toolchain specifications.
+  # These arguments provide a mechanism for specifying your CC, CXX and AR at
+  # buildfile-generation time, allowing the CrOS build system to always use
+  # the right tools for the current target.
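+  #
+  # For example (values are illustrative only), in the build's args.gn:
+  #   cros_target_cc = "armv7a-cros-linux-gnueabi-gcc"
+  #   cros_target_cxx = "armv7a-cros-linux-gnueabi-g++"
+  #   cros_target_ar = "armv7a-cros-linux-gnueabi-ar"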
+  cros_target_cc = ""
+  cros_target_cxx = ""
+  cros_target_ar = ""
+}
+
+gcc_toolchain("target") {
+  assert(cros_target_cc != "", "Must provide target CC.")
+  assert(cros_target_cxx != "", "Must provide target CXX.")
+  assert(cros_target_ar != "", "Must provide target AR.")
+
+  cc = "${cros_target_cc}"
+  cxx = "${cros_target_cxx}"
+
+  ar = "${cros_target_ar}"
+  ld = cxx
+
+  toolchain_cpu = "${target_cpu}"
+  toolchain_os = "linux"
+  is_clang = is_clang
+}
diff --git a/build/toolchain/fnl/BUILD.gn b/build/toolchain/fnl/BUILD.gn
new file mode 100644
index 0000000..1e4a075
--- /dev/null
+++ b/build/toolchain/fnl/BUILD.gn
@@ -0,0 +1,24 @@
+# Copyright 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/toolchain/gcc_toolchain.gni")
+
+declare_args() {
+  toolchain_prefix = ""
+}
+
+gcc_toolchain("target") {
+  assert(toolchain_prefix != "", "Must provide toolchain_prefix")
+
+  cc = "${toolchain_prefix}gcc"
+  cxx = "${toolchain_prefix}g++"
+  ar = "${toolchain_prefix}ar"
+  ld = cxx
+  readelf = "${toolchain_prefix}readelf"
+  nm = "${toolchain_prefix}nm"
+
+  toolchain_cpu = "${target_cpu}"
+  toolchain_os = "linux"
+  is_clang = is_clang
+}
diff --git a/build/toolchain/gcc_toolchain.gni b/build/toolchain/gcc_toolchain.gni
new file mode 100644
index 0000000..bb1d791
--- /dev/null
+++ b/build/toolchain/gcc_toolchain.gni
@@ -0,0 +1,252 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This value will be inherited in the toolchain below.
+concurrent_links = exec_script("get_concurrent_links.py", [], "value")
+
+# This template defines a toolchain for something that works like gcc
+# (including clang).
+#
+# It requires the following variables specifying the executables to run:
+#  - cc
+#  - cxx
+#  - ar
+#  - ld
+#  - readelf
+#  - nm
+# and the following, which are used in the toolchain_args:
+#  - toolchain_cpu  (What "current_cpu" should be set to when invoking a
+#                    build using this toolchain.)
+#  - toolchain_os  (What "current_os" should be set to when invoking a
+#                   build using this toolchain.)
+#
+# Optional parameters:
+#  - libs_section_prefix
+#  - libs_section_postfix
+#      The contents of these strings, if specified, will be placed around
+#      the libs section of the linker line. It allows one to inject libraries
+#      at the beginning and end for all targets in a toolchain.
+#  - solink_libs_section_prefix
+#  - solink_libs_section_postfix
+#      Same as libs_section_{pre,post}fix except used for solink instead of link.
+#  - postsolink
+#      The content of this string, if specified, will be appended to the solink
+#      command.
+#  - deps
+#      Just forwarded to the toolchain definition.
+#  - is_clang
+#  - strip
+#      Location of the strip executable. When specified, strip will be run on
+#      all shared libraries and executables as they are built. The pre-stripped
+#      artifacts will be put in lib.stripped/ and exe.stripped/.
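+#
+# A minimal invocation might look like the following (a sketch only; it
+# assumes native tools named gcc/g++/ar/readelf/nm on PATH and is not tied to
+# any real toolchain defined in this change):
+#
+#   gcc_toolchain("linux_x64_example") {
+#     cc = "gcc"
+#     cxx = "g++"
+#     ar = "ar"
+#     ld = cxx
+#     readelf = "readelf"
+#     nm = "nm"
+#     toolchain_cpu = "x64"
+#     toolchain_os = "linux"
+#     is_clang = false
+#   }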
+template("gcc_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.cc), "gcc_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "gcc_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ar), "gcc_toolchain() must specify a \"ar\" value")
+    assert(defined(invoker.ld), "gcc_toolchain() must specify a \"ld\" value")
+    assert(defined(invoker.readelf),
+           "gcc_toolchain() must specify a \"readelf\" value")
+    assert(defined(invoker.nm), "gcc_toolchain() must specify a \"nm\" value")
+    assert(defined(invoker.toolchain_cpu),
+           "gcc_toolchain() must specify a \"toolchain_cpu\"")
+    assert(defined(invoker.toolchain_os),
+           "gcc_toolchain() must specify a \"toolchain_os\"")
+
+    # We can't do string interpolation ($ in strings) on things with dots in
+    # them. To allow us to use $cc below, for example, we create copies of
+    # these values in our scope.
+    cc = invoker.cc
+    cxx = invoker.cxx
+    ar = invoker.ar
+    ld = invoker.ld
+    readelf = invoker.readelf
+    nm = invoker.nm
+
+    # Bring these into our scope for string interpolation with default values.
+    if (defined(invoker.libs_section_prefix)) {
+      libs_section_prefix = invoker.libs_section_prefix
+    } else {
+      libs_section_prefix = ""
+    }
+
+    if (defined(invoker.libs_section_postfix)) {
+      libs_section_postfix = invoker.libs_section_postfix
+    } else {
+      libs_section_postfix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_prefix)) {
+      solink_libs_section_prefix = invoker.solink_libs_section_prefix
+    } else {
+      solink_libs_section_prefix = ""
+    }
+
+    if (defined(invoker.solink_libs_section_postfix)) {
+      solink_libs_section_postfix = invoker.solink_libs_section_postfix
+    } else {
+      solink_libs_section_postfix = ""
+    }
+
+    # These library switches can apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "rm -f {{output}} && $ar rcs {{output}} @$rspfile"
+      description = "AR {{output}}"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      soname = "{{target_output_name}}{{output_extension}}"  # e.g. "libfoo.so".
+      sofile = "{{root_out_dir}}/$soname"  # Possibly including toolchain dir.
+      rspfile = sofile + ".rsp"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .so, (2) extracting the symbols from that file
+      # to a temporary file, (3) if the temporary file has differences from the
+      # existing .TOC file, overwrite it, otherwise, don't change it.
+      tocfile = sofile + ".TOC"
+      temporary_tocname = sofile + ".tmp"
+      link_command =
+          "$ld -shared {{ldflags}} -o $sofile -Wl,-soname=$soname @$rspfile"
+      toc_command = "{ $readelf -d $sofile | grep SONAME ; $nm -gD -f p $sofile | cut -f1-2 -d' '; } > $temporary_tocname"
+      replace_command = "if ! cmp -s $temporary_tocname $tocfile; then mv $temporary_tocname $tocfile; fi"
+
+      command = "$link_command && $toc_command && $replace_command"
+      if (defined(invoker.postsolink)) {
+        command += " && " + invoker.postsolink
+      }
+      rspfile_content = "-Wl,--whole-archive {{inputs}} {{solibs}} -Wl,--no-whole-archive $solink_libs_section_prefix {{libs}} $solink_libs_section_postfix"
+
+      description = "SOLINK $sofile"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = ".so"
+      if (defined(invoker.default_output_extension)) {
+        default_output_extension = invoker.default_output_extension
+      }
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the sofile but use the
+      # tocfile for dependency management.
+      outputs = [
+        sofile,
+        tocfile,
+      ]
+      if (defined(invoker.solink_outputs)) {
+        outputs += invoker.solink_outputs
+      }
+      link_output = sofile
+      depend_output = tocfile
+    }
+
+    tool("link") {
+      exename = "{{target_output_name}}{{output_extension}}"
+      outfile = "{{root_out_dir}}/$exename"
+      rspfile = "$outfile.rsp"
+      unstripped_outfile = outfile
+
+      if (defined(invoker.strip)) {
+        unstripped_outfile = "{{root_out_dir}}/exe.unstripped/$exename"
+      }
+
+      command = "$ld {{ldflags}} -o $unstripped_outfile -Wl,--start-group @$rspfile {{solibs}} -Wl,--end-group $libs_section_prefix {{libs}} $libs_section_postfix"
+      if (defined(invoker.strip)) {
+        strip = invoker.strip
+        strip_command = "${strip} --strip-unneeded -o $outfile $unstripped_outfile"
+        command += " && " + strip_command
+      }
+      if (defined(invoker.postlink)) {
+        command += " && " + invoker.postlink
+      }
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs}}"
+      outputs = [
+        outfile,
+      ]
+      if (outfile != unstripped_outfile) {
+        outputs += [ unstripped_outfile ]
+      }
+      if (defined(invoker.link_outputs)) {
+        outputs += invoker.link_outputs
+      }
+    }
+
+    tool("stamp") {
+      command = "touch {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    # When this toolchain is invoked as a non-default toolchain, these args are
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args() {
+      current_cpu = invoker.toolchain_cpu
+      current_os = invoker.toolchain_os
+
+      # These values need to be passed through unchanged.
+      target_os = target_os
+      target_cpu = target_cpu
+
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+}
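The solink tool above implements a .TOC (table of contents) trick: each link regenerates a file holding the SONAME and exported dynamic symbols, but only replaces the existing .TOC when that interface actually changed, and restat = true lets Ninja skip relinking dependents whose .TOC timestamp did not move. A minimal Python sketch of that update step, assuming a hypothetical shared library and the stock readelf/nm binaries (illustrative only, not part of the toolchain definition):

import filecmp
import os
import subprocess

def update_toc(sofile, readelf='readelf', nm='nm'):
    # Rebuild "<lib>.TOC" (SONAME plus exported dynamic symbols), but only move
    # it into place when it differs from the existing one, so the .TOC timestamp
    # is preserved and a restat-aware Ninja can skip relinking dependents.
    tocfile = sofile + '.TOC'
    tmpfile = sofile + '.tmp'
    dynamic = subprocess.check_output([readelf, '-d', sofile],
                                      universal_newlines=True)
    symbols = subprocess.check_output([nm, '-gD', '-f', 'p', sofile],
                                      universal_newlines=True)
    with open(tmpfile, 'w') as f:
        for line in dynamic.splitlines():
            if 'SONAME' in line:
                f.write(line + '\n')
        for line in symbols.splitlines():
            f.write(' '.join(line.split()[:2]) + '\n')  # symbol name and type
    if os.path.exists(tocfile) and filecmp.cmp(tmpfile, tocfile, shallow=False):
        os.remove(tmpfile)  # interface unchanged: keep the old .TOC timestamp
    else:
        os.rename(tmpfile, tocfile)  # interface changed: publish the new .TOC

if __name__ == '__main__':
    import sys
    update_toc(sys.argv[1])  # e.g. the path to a freshly linked libfoo.so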
diff --git a/build/toolchain/get_concurrent_links.py b/build/toolchain/get_concurrent_links.py
new file mode 100644
index 0000000..6a40101
--- /dev/null
+++ b/build/toolchain/get_concurrent_links.py
@@ -0,0 +1,64 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script computes the number of concurrent links we want to run in the build
+# as a function of machine spec. It's based on GetDefaultConcurrentLinks in GYP.
+
+import os
+import re
+import subprocess
+import sys
+
+def GetDefaultConcurrentLinks():
+  # Inherit the legacy environment variable for people that have set it in GYP.
+  pool_size = int(os.getenv('GYP_LINK_CONCURRENCY', 0))
+  if pool_size:
+    return pool_size
+
+  if sys.platform in ('win32', 'cygwin'):
+    import ctypes
+
+    class MEMORYSTATUSEX(ctypes.Structure):
+      _fields_ = [
+        ("dwLength", ctypes.c_ulong),
+        ("dwMemoryLoad", ctypes.c_ulong),
+        ("ullTotalPhys", ctypes.c_ulonglong),
+        ("ullAvailPhys", ctypes.c_ulonglong),
+        ("ullTotalPageFile", ctypes.c_ulonglong),
+        ("ullAvailPageFile", ctypes.c_ulonglong),
+        ("ullTotalVirtual", ctypes.c_ulonglong),
+        ("ullAvailVirtual", ctypes.c_ulonglong),
+        ("sullAvailExtendedVirtual", ctypes.c_ulonglong),
+      ]
+
+    stat = MEMORYSTATUSEX(dwLength=ctypes.sizeof(MEMORYSTATUSEX))
+    ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat))
+
+    mem_limit = max(1, stat.ullTotalPhys / (4 * (2 ** 30)))  # total / 4GB
+    hard_cap = max(1, int(os.getenv('GYP_LINK_CONCURRENCY_MAX', 2**32)))
+    return min(mem_limit, hard_cap)
+  elif sys.platform.startswith('linux'):
+    if os.path.exists("/proc/meminfo"):
+      with open("/proc/meminfo") as meminfo:
+        memtotal_re = re.compile(r'^MemTotal:\s*(\d*)\s*kB')
+        for line in meminfo:
+          match = memtotal_re.match(line)
+          if not match:
+            continue
+          # Allow 8 GB per link on Linux because Gold is quite memory-hungry.
+          return max(1, int(match.group(1)) / (8 * (2 ** 20)))
+    return 1
+  elif sys.platform == 'darwin':
+    try:
+      avail_bytes = int(subprocess.check_output(['sysctl', '-n', 'hw.memsize']))
+      # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
+      # 4GB per ld process allows for some more bloat.
+      return max(1, avail_bytes / (4 * (2 ** 30)))  # total / 4GB
+    except Exception:
+      return 1
+  else:
+    # TODO(scottmg): Implement this for other platforms.
+    return 1
+
+print GetDefaultConcurrentLinks()
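As a worked example of the Linux branch above (the MemTotal value is invented): a machine whose /proc/meminfo reports 32 GB of RAM yields 33554432 kB / (8 * 2**20 kB) = 4 concurrent links, and the max() guard keeps the pool at one or more:

import re

# Hypothetical /proc/meminfo line, used only to illustrate the arithmetic in
# GetDefaultConcurrentLinks(): 32 GB of RAM at roughly 8 GB per link gives 4.
line = 'MemTotal:       33554432 kB'
match = re.match(r'^MemTotal:\s*(\d*)\s*kB', line)
print(max(1, int(match.group(1)) // (8 * (2 ** 20))))  # prints 4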
diff --git a/build/toolchain/goma.gni b/build/toolchain/goma.gni
new file mode 100644
index 0000000..c0f4cf2
--- /dev/null
+++ b/build/toolchain/goma.gni
@@ -0,0 +1,22 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Defines the configuration of Goma.
+#
+# This is currently designed to match the GYP build exactly, so as not to break
+# people during the transition.
+
+declare_args() {
+  # Set to true to enable distributed compilation using Goma.
+  use_goma = false
+
+  # Set the default value based on the platform.
+  if (is_win) {
+    # Absolute directory containing the Goma source code.
+    goma_dir = "C:\goma\goma-win"
+  } else {
+    # Absolute directory containing the Goma source code.
+    goma_dir = getenv("HOME") + "/goma"
+  }
+}
diff --git a/build/toolchain/linux/BUILD.gn b/build/toolchain/linux/BUILD.gn
new file mode 100644
index 0000000..c16e31c
--- /dev/null
+++ b/build/toolchain/linux/BUILD.gn
@@ -0,0 +1,115 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import("//build/config/sysroot.gni")
+import("//build/toolchain/ccache.gni")
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/gcc_toolchain.gni")
+import("//build/toolchain/goma.gni")
+
+if (use_goma) {
+  assert(!use_ccache, "Goma and ccache can't be used together.")
+  compiler_prefix = "$goma_dir/gomacc "
+} else if (use_ccache) {
+  compiler_prefix = "ccache "
+} else {
+  compiler_prefix = ""
+}
+
+gcc_toolchain("arm") {
+  cc = "${compiler_prefix}arm-linux-gnueabi-gcc"
+  cxx = "${compiler_prefix}arm-linux-gnueabi-g++"
+
+  ar = "arm-linux-gnueabi-ar"
+  ld = cxx
+  readelf = "arm-linux-gnueabi-readelf"
+  nm = "arm-linux-gnueabi-nm"
+
+  toolchain_cpu = "arm"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("clang_x86") {
+  if (use_clang_type_profiler) {
+    prefix = rebase_path("//third_party/llvm-allocated-type/Linux_ia32/bin",
+                         root_build_dir)
+  } else {
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+  }
+  cc = "${compiler_prefix}$prefix/clang"
+  cxx = "${compiler_prefix}$prefix/clang++"
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu = "x86"
+  toolchain_os = "linux"
+  is_clang = true
+}
+
+gcc_toolchain("x86") {
+  cc = "${compiler_prefix}gcc"
+  cxx = "$compiler_prefix}g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu = "x86"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("clang_x64") {
+  if (use_clang_type_profiler) {
+    prefix = rebase_path("//third_party/llvm-allocated-type/Linux_x64/bin",
+                         root_build_dir)
+  } else {
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+  }
+  cc = "${compiler_prefix}$prefix/clang"
+  cxx = "${compiler_prefix}$prefix/clang++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+  strip = "strip"
+
+  toolchain_cpu = "x64"
+  toolchain_os = "linux"
+  is_clang = true
+}
+
+gcc_toolchain("x64") {
+  cc = "${compiler_prefix}gcc"
+  cxx = "${compiler_prefix}g++"
+
+  readelf = "readelf"
+  nm = "nm"
+  ar = "ar"
+  ld = cxx
+
+  toolchain_cpu = "x64"
+  toolchain_os = "linux"
+  is_clang = false
+}
+
+gcc_toolchain("mipsel") {
+  cc = "mipsel-linux-gnu-gcc"
+  cxx = "mipsel-linux-gnu-g++"
+  ar = "mipsel-linux-gnu-ar"
+  ld = cxx
+  readelf = "mipsel-linux-gnu-readelf"
+  nm = "mipsel-linux-gnu-nm"
+
+  toolchain_cpu = "mipsel"
+  toolchain_os = "linux"
+  is_clang = false
+}
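The compiler_prefix logic at the top of this file simply prepends a launcher to every compiler command, and the assert keeps Goma and ccache mutually exclusive. A small Python mirror of that selection, with a placeholder goma_dir rather than a real configuration value:

def compiler_prefix(use_goma, use_ccache, goma_dir='/path/to/goma'):
    # Mirrors the use_goma / use_ccache selection in linux/BUILD.gn above.
    assert not (use_goma and use_ccache), "Goma and ccache can't be used together."
    if use_goma:
        return goma_dir + '/gomacc '
    if use_ccache:
        return 'ccache '
    return ''

# With use_goma=true the "arm" toolchain's cxx becomes "<goma_dir>/gomacc arm-linux-gnueabi-g++".
print(compiler_prefix(True, False) + 'arm-linux-gnueabi-g++')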
diff --git a/build/toolchain/mac/BUILD.gn b/build/toolchain/mac/BUILD.gn
new file mode 100644
index 0000000..8efdd5c
--- /dev/null
+++ b/build/toolchain/mac/BUILD.gn
@@ -0,0 +1,261 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO(brettw) Use "gcc_toolchain.gni" like the Linux toolchains. This requires
+# some enhancements since the commands on Mac are slightly different than on
+# Linux.
+
+import("../goma.gni")
+import("//build/config/ios/ios_sdk.gni")
+import("//build/config/mac/mac_sdk.gni")
+
+assert(host_os == "mac")
+
+import("//build/toolchain/clang.gni")
+import("//build/toolchain/goma.gni")
+import("//build/config/sysroot.gni")
+
+if (use_goma) {
+  goma_prefix = "$goma_dir/gomacc "
+} else {
+  goma_prefix = ""
+}
+
+# This will copy the gyp-mac-tool to the build directory. We pass in the source
+# file of the mac tool.
+gyp_mac_tool_source =
+    rebase_path("//tools/gyp/pylib/gyp/mac_tool.py", root_build_dir)
+exec_script("setup_toolchain.py", [ gyp_mac_tool_source ])
+
+# Shared toolchain definition. Invocations should set toolchain_os to set the
+# build args in this definition.
+template("mac_toolchain") {
+  toolchain(target_name) {
+    assert(defined(invoker.cc), "mac_toolchain() must specify a \"cc\" value")
+    assert(defined(invoker.cxx), "mac_toolchain() must specify a \"cxx\" value")
+    assert(defined(invoker.ld), "mac_toolchain() must specify a \"ld\" value")
+    assert(defined(invoker.toolchain_cpu),
+           "mac_toolchain() must specify a \"toolchain_cpu\"")
+    assert(defined(invoker.toolchain_os),
+           "mac_toolchain() must specify a \"toolchain_os\"")
+
+    # We can't do string interpolation ($ in strings) on things with dots in
+    # them. To allow us to use $cc below, for example, we create copies of
+    # these values in our scope.
+    cc = invoker.cc
+    cxx = invoker.cxx
+    ld = invoker.ld
+
+    # Make these apply to all tools below.
+    lib_switch = "-l"
+    lib_dir_switch = "-L"
+
+    sysroot_flags = ""
+
+    if (defined(invoker.sysroot_flags)) {
+      sysroot_flags = invoker.sysroot_flags
+    }
+
+    toolchain_flags = ""
+    if (invoker.toolchain_cpu == "i386") {
+      toolchain_flags = "-m32"
+    }
+
+    tool("cc") {
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("cxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_cc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("asm") {
+      # For GCC we can just use the C compiler to compile assembly.
+      depfile = "{{output}}.d"
+      command = "$cc -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_c}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objc") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_c}} {{cflags_objc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("objcxx") {
+      depfile = "{{output}}.d"
+      command = "$cxx -MMD -MF $depfile {{defines}} {{include_dirs}} $sysroot_flags $toolchain_flags {{cflags}} {{cflags_cc}} {{cflags_objcc}} -c {{source}} -o {{output}}"
+      depsformat = "gcc"
+      description = "OBJCXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.o",
+      ]
+    }
+
+    tool("alink") {
+      command = "rm -f {{output}} && ./gyp-mac-tool filter-libtool libtool -static -o {{output}} {{inputs}}"
+      description = "LIBTOOL-STATIC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+      default_output_extension = ".a"
+      output_prefix = "lib"
+    }
+
+    tool("solink") {
+      dylib = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"  # eg "./libfoo.dylib"
+      rspfile = dylib + ".rsp"
+
+      # These variables are not built into GN but are helpers that implement
+      # (1) linking to produce a .so, (2) extracting the symbols from that file
+      # to a temporary file, and (3) overwriting the existing .TOC file with the
+      # temporary one only if they differ; otherwise the .TOC is left untouched.
+      #
+      # As a special case, if the library reexports symbols from other dynamic
+      # libraries, we always update the .TOC and skip the temporary file and
+      # diffing steps, since that library always needs to be re-linked.
+      tocname = dylib + ".TOC"
+      temporary_tocname = dylib + ".tmp"
+
+      does_reexport_command = "[ ! -e $dylib -o ! -e $tocname ] || otool -l $dylib | grep -q LC_REEXPORT_DYLIB"
+      link_command = "$ld -shared $sysroot_flags $toolchain_flags {{ldflags}} -o $dylib -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+      replace_command = "if ! cmp -s $temporary_tocname $tocname; then mv $temporary_tocname $tocname"
+      extract_toc_command = "{ otool -l $dylib | grep LC_ID_DYLIB -A 5; nm -gP $dylib | cut -f1-2 -d' ' | grep -v U\$\$; true; }"
+
+      command = "if $does_reexport_command ; then $link_command && $extract_toc_command > $tocname; else $link_command && $extract_toc_command > $temporary_tocname && $replace_command ; fi; fi"
+
+      rspfile_content = "{{inputs_newline}}"
+
+      description = "SOLINK {{output}}"
+
+      # Use this for {{output_extension}} expansions unless a target manually
+      # overrides it (in which case {{output_extension}} will be what the target
+      # specifies).
+      default_output_extension = ".dylib"
+
+      output_prefix = "lib"
+
+      # Since the above commands only update the .TOC file when it changes, ask
+      # Ninja to check if the timestamp actually changed to know if downstream
+      # dependencies should be recompiled.
+      restat = true
+
+      # Tell GN about the output files. It will link to the dylib but use the
+      # tocname for dependency management.
+      outputs = [
+        dylib,
+        tocname,
+      ]
+      link_output = dylib
+      depend_output = tocname
+    }
+
+    tool("link") {
+      outfile = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"
+      rspfile = "$outfile.rsp"
+
+      command = "$ld $sysroot_flags $toolchain_flags {{ldflags}} -Xlinker -rpath -Xlinker @executable_path/Frameworks -o $outfile -Wl,-filelist,$rspfile {{solibs}} {{libs}}"
+      description = "LINK $outfile"
+      rspfile_content = "{{inputs_newline}}"
+      outputs = [
+        outfile,
+      ]
+    }
+
+    tool("stamp") {
+      command = "touch {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command = "ln -f {{source}} {{output}} 2>/dev/null || (rm -rf {{output}} && cp -af {{source}} {{output}})"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    toolchain_args() {
+      current_cpu = invoker.toolchain_cpu
+      current_os = invoker.toolchain_os
+
+      # These values need to be passed through unchanged.
+      target_os = target_os
+      target_cpu = target_cpu
+
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+  }
+}
+
+# Toolchain used for iOS device targets.
+mac_toolchain("ios_clang_arm") {
+  toolchain_cpu = "arm"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $ios_device_sdk_path -miphoneos-version-min=$ios_deployment_target"
+}
+
+# Toolchain used for iOS simulator targets.
+mac_toolchain("ios_clang_x64") {
+  toolchain_cpu = "x64"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $ios_simulator_sdk_path -mios-simulator-version-min=$ios_deployment_target"
+}
+
+# Toolchain used for Mac host targets.
+mac_toolchain("clang_x64") {
+  toolchain_cpu = "x64"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $mac_sdk_path -mmacosx-version-min=$mac_sdk_min"
+}
+
+# Toolchain used for Mac host (i386) targets.
+mac_toolchain("clang_i386") {
+  toolchain_cpu = "i386"
+  toolchain_os = "mac"
+  prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                       root_build_dir)
+  cc = "${goma_prefix}$prefix/clang"
+  cxx = "${goma_prefix}$prefix/clang++"
+  ld = cxx
+  is_clang = true
+  sysroot_flags = "-isysroot $mac_sdk_path -mmacosx-version-min=$mac_sdk_min"
+}
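The does_reexport_command in the solink tool above picks between the two branches of the combined command: when the dylib or its .TOC is missing, or the dylib re-exports symbols from other dynamic libraries, the .TOC is rewritten unconditionally; otherwise the temporary-file-and-compare path runs. A hedged Python sketch of just that check, using the same otool invocation as the command string (file names are hypothetical):

import os
import subprocess

def always_rewrite_toc(dylib, tocname):
    # Mirrors does_reexport_command: rewrite the .TOC unconditionally when either
    # file is missing or the dylib re-exports symbols from other dylibs.
    if not (os.path.exists(dylib) and os.path.exists(tocname)):
        return True
    load_commands = subprocess.check_output(['otool', '-l', dylib],
                                            universal_newlines=True)
    return 'LC_REEXPORT_DYLIB' in load_commands

print(always_rewrite_toc('libfoo.dylib', 'libfoo.dylib.TOC'))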
diff --git a/build/toolchain/mac/setup_toolchain.py b/build/toolchain/mac/setup_toolchain.py
new file mode 100644
index 0000000..431078f
--- /dev/null
+++ b/build/toolchain/mac/setup_toolchain.py
@@ -0,0 +1,29 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import os
+import stat
+import sys
+
+def CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir).
+  out_path = 'gyp-mac-tool'
+  with open(out_path, 'w') as tool_file:
+    tool_file.write(''.join([tool_source[0],
+                             '# Generated by setup_toolchain.py do not edit.\n']
+                            + tool_source[1:]))
+  st = os.stat(out_path)
+  os.chmod(out_path, st.st_mode | stat.S_IEXEC)
+
+# Find the tool source (the first argument) and copy it.
+if len(sys.argv) != 2:
+  print "Need one argument (mac_tool source path)."
+  sys.exit(1)
+CopyTool(sys.argv[1])
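This script is normally driven by the exec_script call in build/toolchain/mac/BUILD.gn; run by hand from the root build directory it would look roughly like the following, where the relative paths are assumptions about the checkout layout rather than values taken from the build files:

import os
import subprocess

# Illustrative manual run of the copy step; exec_script normally performs it
# with the gyp_mac_tool_source path computed in build/toolchain/mac/BUILD.gn.
subprocess.check_call([
    'python', '../../build/toolchain/mac/setup_toolchain.py',
    '../../tools/gyp/pylib/gyp/mac_tool.py',
])
assert os.access('gyp-mac-tool', os.X_OK)  # CopyTool marks the copy executable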
diff --git a/build/toolchain/nacl/BUILD.gn b/build/toolchain/nacl/BUILD.gn
new file mode 100644
index 0000000..5fa637c
--- /dev/null
+++ b/build/toolchain/nacl/BUILD.gn
@@ -0,0 +1,63 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+toolchain("x86_newlib") {
+  toolprefix = "gen/sdk/toolchain/linux_x86_newlib/bin/x86_64-nacl-"
+  cc = toolprefix + "gcc"
+  cxx = toolprefix + "g++"
+  ld = toolprefix + "g++"
+
+  tool("cc") {
+    command = "$cc -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_c -c \$in -o \$out"
+    description = "CC(NaCl x86 Newlib) \$out"
+    depfile = "\$out.d"
+    depsformat = "gcc"
+  }
+  tool("cxx") {
+    # cflags_pch_cc
+    command = "$cxx -MMD -MF \$out.d \$defines \$includes \$cflags \$cflags_cc -c \$in -o \$out"
+    description = "CXX(NaCl x86 Newlib) \$out"
+    depfile = "\$out.d"
+    depsformat = "gcc"
+  }
+  tool("alink") {
+    command = "rm -f \$out && ${toolprefix}ar rcs \$out \$in"
+    description = "AR(NaCl x86 Newlib) \$out"
+  }
+  tool("solink") {
+    command = "if [ ! -e \$lib -o ! -e \${lib}.TOC ]; then $ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname -Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive \$libs && { readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.TOC; else $ld -shared \$ldflags -o \$lib -Wl,-soname=\$soname -Wl,--whole-archive \$in \$solibs -Wl,--no-whole-archive \$libs && { readelf -d \${lib} | grep SONAME ; nm -gD -f p \${lib} | cut -f1-2 -d' '; } > \${lib}.tmp && if ! cmp -s \${lib}.tmp \${lib}.TOC; then mv \${lib}.tmp \${lib}.TOC ; fi; fi"
+    description = "SOLINK(NaCl x86 Newlib) \$lib"
+
+    #pool = "link_pool"
+    restat = "1"
+  }
+  tool("link") {
+    command = "$ld \$ldflags -o \$out -Wl,--start-group \$in \$solibs -Wl,--end-group \$libs"
+    description = "LINK(NaCl x86 Newlib) \$out"
+
+    #pool = "link_pool"
+  }
+
+  if (is_win) {
+    tool("stamp") {
+      command = "$python_path gyp-win-tool stamp \$out"
+      description = "STAMP \$out"
+    }
+  } else {
+    tool("stamp") {
+      command = "touch \$out"
+      description = "STAMP \$out"
+    }
+  }
+
+  toolchain_args() {
+    # Override the default OS detection. The build config will set the is_*
+    # flags accordingly.
+    current_os = "nacl"
+
+    # Component build not supported in NaCl, since it does not support shared
+    # libraries.
+    is_component_build = false
+  }
+}
diff --git a/build/toolchain/win/BUILD.gn b/build/toolchain/win/BUILD.gn
new file mode 100644
index 0000000..454cdde
--- /dev/null
+++ b/build/toolchain/win/BUILD.gn
@@ -0,0 +1,253 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+declare_args() {
+  # Path to the directory containing the VC binaries for the right
+  # combination of host and target architectures. Currently only the
+  # 64-bit host toolchain is supported, with either 32-bit or 64-bit targets.
+  # If vc_bin_dir is not specified on the command line (and it normally
+  # isn't), we will dynamically determine the right value to use at runtime.
+  vc_bin_dir = ""
+}
+
+import("//build/config/win/visual_studio_version.gni")
+import("//build/toolchain/goma.gni")
+
+# Should only be running on Windows.
+assert(is_win)
+
+# Set up the Visual Studio state.
+#
+# setup_toolchain.py takes the VS path, the gyp win tool path, the Windows SDK
+# path, the runtime DLL directories, and the target CPU. It writes
+# "environment.x86" and "environment.x64" to the build directory and returns a
+# scope containing the detected vc_bin_dir.
+gyp_win_tool_path =
+    rebase_path("//tools/gyp/pylib/gyp/win_tool.py", root_build_dir)
+
+toolchain_data = exec_script("setup_toolchain.py",
+                             [
+                               visual_studio_path,
+                               gyp_win_tool_path,
+                               windows_sdk_path,
+                               visual_studio_runtime_dirs,
+                               current_cpu,
+                             ],
+                             "scope")
+
+if (vc_bin_dir == "") {
+  vc_bin_dir = toolchain_data.vc_bin_dir
+}
+
+if (use_goma) {
+  goma_prefix = "$goma_dir/gomacc.exe "
+} else {
+  goma_prefix = ""
+}
+
+# This value will be inherited in the toolchain below.
+concurrent_links = exec_script("../get_concurrent_links.py", [], "value")
+
+# Parameters:
+#  current_cpu: current_cpu to pass as a build arg
+#  environment: File name of environment file.
+template("msvc_toolchain") {
+  if (defined(invoker.concurrent_links)) {
+    concurrent_links = invoker.concurrent_links
+  }
+
+  env = invoker.environment
+
+  if (is_debug) {
+    configuration = "Debug"
+  } else {
+    configuration = "Release"
+  }
+  exec_script("../../vs_toolchain.py",
+              [
+                "copy_dlls",
+                rebase_path(root_build_dir),
+                configuration,
+                invoker.current_cpu,
+              ])
+
+  cl = invoker.cl
+
+  toolchain(target_name) {
+    # Make these apply to all tools below.
+    lib_switch = ""
+    lib_dir_switch = "/LIBPATH:"
+
+    tool("cc") {
+      rspfile = "{{output}}.rsp"
+
+      # TODO(brettw) enable this when GN support in the binary has been rolled.
+      #precompiled_header_type = "msvc"
+      pdbname = "{{target_out_dir}}/{{target_output_name}}_c.pdb"
+      command = "ninja -t msvc -e $env -- $cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+      depsformat = "msvc"
+      description = "CC {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.obj",
+      ]
+      rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_c}}"
+    }
+
+    tool("cxx") {
+      rspfile = "{{output}}.rsp"
+
+      # TODO(brettw) enable this when GN support in the binary has been rolled.
+      #precompiled_header_type = "msvc"
+
+      # The PDB name needs to be different between C and C++ compiled files.
+      pdbname = "{{target_out_dir}}/{{target_output_name}}_cc.pdb"
+      command = "ninja -t msvc -e $env -- $cl /nologo /showIncludes /FC @$rspfile /c {{source}} /Fo{{output}} /Fd$pdbname"
+      depsformat = "msvc"
+      description = "CXX {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.obj",
+      ]
+      rspfile_content = "{{defines}} {{include_dirs}} {{cflags}} {{cflags_cc}}"
+    }
+
+    tool("rc") {
+      command = "$python_path gyp-win-tool rc-wrapper $env rc.exe {{defines}} {{include_dirs}} /fo{{output}} {{source}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.res",
+      ]
+      description = "RC {{output}}"
+    }
+
+    tool("asm") {
+      # TODO(brettw): "/safeseh" assembler argument is hardcoded here. Extract
+      # assembler flags to a variable like cflags. crbug.com/418613
+      command = "$python_path gyp-win-tool asm-wrapper $env ml.exe {{defines}} {{include_dirs}} /safeseh /c /Fo {{output}} {{source}}"
+      description = "ASM {{output}}"
+      outputs = [
+        "{{target_out_dir}}/{{target_output_name}}/{{source_name_part}}.obj",
+      ]
+    }
+
+    tool("alink") {
+      rspfile = "{{output}}.rsp"
+      command = "$python_path gyp-win-tool link-wrapper $env False lib.exe /nologo /ignore:4221 /OUT:{{output}} @$rspfile"
+      description = "LIB {{output}}"
+      outputs = [
+        # Ignore {{output_extension}} and always use .lib; there's no reason to
+        # allow targets to override this extension on Windows.
+        "{{target_out_dir}}/{{target_output_name}}.lib",
+      ]
+      default_output_extension = ".lib"
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}}"
+    }
+
+    tool("solink") {
+      dllname = "{{root_out_dir}}/{{target_output_name}}{{output_extension}}"  # e.g. foo.dll
+      libname =
+          "{{root_out_dir}}/{{target_output_name}}{{output_extension}}.lib"  # e.g. foo.dll.lib
+      rspfile = "${dllname}.rsp"
+
+      link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /IMPLIB:$libname /DLL /OUT:$dllname /PDB:${dllname}.pdb @$rspfile"
+
+      # TODO(brettw) support manifests
+      #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:${dllname}.manifest"
+      #command = "cmd /c $link_command && $manifest_command"
+      command = link_command
+
+      default_output_extension = ".dll"
+      description = "LINK(DLL) {{output}}"
+      outputs = [
+        dllname,
+        libname,
+      ]
+      link_output = libname
+      depend_output = libname
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{libs}} {{solibs}} {{inputs_newline}} {{ldflags}}"
+    }
+
+    tool("link") {
+      rspfile = "{{output}}.rsp"
+
+      link_command = "$python_path gyp-win-tool link-wrapper $env False link.exe /nologo /OUT:{{output}} /PDB:{{output}}.pdb @$rspfile"
+
+      # TODO(brettw) support manifests
+      #manifest_command = "$python_path gyp-win-tool manifest-wrapper $env mt.exe -nologo -manifest $manifests -out:{{output}}.manifest"
+      #command = "cmd /c $link_command && $manifest_command"
+      command = link_command
+
+      default_output_extension = ".exe"
+      description = "LINK {{output}}"
+      outputs = [
+        "{{root_out_dir}}/{{target_output_name}}{{output_extension}}",
+      ]
+
+      # The use of inputs_newline is to work around a fixed per-line buffer
+      # size in the linker.
+      rspfile_content = "{{inputs_newline}} {{libs}} {{solibs}} {{ldflags}}"
+    }
+
+    tool("stamp") {
+      command = "$python_path gyp-win-tool stamp {{output}}"
+      description = "STAMP {{output}}"
+    }
+
+    tool("copy") {
+      command =
+          "$python_path gyp-win-tool recursive-mirror {{source}} {{output}}"
+      description = "COPY {{source}} {{output}}"
+    }
+
+    # When this toolchain is invoked as a non-default toolchain, these args are
+    # passed to the build. They are ignored when this is the default toolchain.
+    toolchain_args() {
+      current_cpu = invoker.current_cpu
+      if (defined(invoker.is_clang)) {
+        is_clang = invoker.is_clang
+      }
+    }
+  }
+}
+
+# TODO(dpranke): Declare both toolchains all of the time once we have sorted
+# out how we want to support them both in a single build.
+# Right now only one of these can be enabled at a time because the
+# runtime libraries get copied to root_build_dir and would collide.
+if (current_cpu == "x86") {
+  msvc_toolchain("x86") {
+    environment = "environment.x86"
+    current_cpu = "x86"
+    cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\""
+    is_clang = false
+  }
+  msvc_toolchain("clang_x86") {
+    environment = "environment.x86"
+    current_cpu = "x86"
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+    cl = "${goma_prefix}$prefix/clang-cl.exe"
+    is_clang = true
+  }
+}
+
+if (current_cpu == "x64") {
+  msvc_toolchain("x64") {
+    environment = "environment.x64"
+    current_cpu = "x64"
+    cl = "${goma_prefix}\"${vc_bin_dir}/cl.exe\""
+    is_clang = false
+  }
+  msvc_toolchain("clang_x64") {
+    environment = "environment.x64"
+    current_cpu = "x64"
+    prefix = rebase_path("//third_party/llvm-build/Release+Asserts/bin",
+                         root_build_dir)
+    cl = "${goma_prefix}$prefix/clang-cl.exe"
+    is_clang = true
+  }
+}
diff --git a/build/toolchain/win/midl.gni b/build/toolchain/win/midl.gni
new file mode 100644
index 0000000..3e7fbec
--- /dev/null
+++ b/build/toolchain/win/midl.gni
@@ -0,0 +1,113 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+assert(is_win)
+
+import("//build/config/win/visual_studio_version.gni")
+
+# This template defines a rule to invoke the MS IDL compiler.
+#
+# Parameters
+#
+#   sources
+#       List of .idl files to process.
+#
+#   out_dir (optional)
+#       Directory to write the generated files to. Defaults to target_gen_dir.
+#
+#   deps (optional)
+#   visibility (optional)
+
+template("midl") {
+  action_name = "${target_name}_idl_action"
+  source_set_name = target_name
+
+  assert(defined(invoker.sources), "sources must be defined for $target_name")
+
+  if (defined(invoker.out_dir)) {
+    out_dir = invoker.out_dir
+  } else {
+    out_dir = target_gen_dir
+  }
+
+  header_file = "{{source_name_part}}.h"
+  dlldata_file = "{{source_name_part}}.dlldata.c"
+  interface_identifier_file = "{{source_name_part}}_i.c"
+  proxy_file = "{{source_name_part}}_p.c"
+  type_library_file = "{{source_name_part}}.tlb"
+
+  action_foreach(action_name) {
+    visibility = [ ":$source_set_name" ]
+
+    # This functionality is handled by the win-tool because the GYP build has
+    # MIDL support built-in.
+    # TODO(brettw) move this to a separate MIDL wrapper script for better
+    # clarity once GYP support is not needed.
+    script = "$root_build_dir/gyp-win-tool"
+
+    sources = invoker.sources
+
+    # Note that the .tlb file is not included in the outputs, as it is not
+    # always generated; whether it is depends on the content of the input .idl
+    # file.
+    outputs = [
+      "$out_dir/$header_file",
+      "$out_dir/$dlldata_file",
+      "$out_dir/$interface_identifier_file",
+      "$out_dir/$proxy_file",
+    ]
+
+    if (current_cpu == "x86") {
+      win_tool_arch = "environment.x86"
+      idl_target_platform = "win32"
+    } else if (current_cpu == "x64") {
+      win_tool_arch = "environment.x64"
+      idl_target_platform = "x64"
+    } else {
+      assert(false, "Need environment for this arch")
+    }
+
+    args = [
+      "midl-wrapper",
+      win_tool_arch,
+      rebase_path(out_dir, root_build_dir),
+      type_library_file,
+      header_file,
+      dlldata_file,
+      interface_identifier_file,
+      proxy_file,
+      "{{source}}",
+      "/char",
+      "signed",
+      "/env",
+      idl_target_platform,
+      "/Oicf",
+    ]
+
+    if (defined(invoker.deps)) {
+      deps = invoker.deps
+    }
+  }
+
+  source_set(target_name) {
+    if (defined(invoker.visibility)) {
+      visibility = invoker.visibility
+    }
+
+    # We only compile the IID files from the IDL tool rather than all outputs.
+    sources = process_file_template(invoker.sources,
+                                    [ "$out_dir/$interface_identifier_file" ])
+
+    public_deps = [
+      ":$action_name",
+    ]
+
+    config("midl_warnings") {
+      if (is_clang) {
+        # MIDL generates code like "#endif !_MIDL_USE_GUIDDEF_"
+        cflags = [ "-Wno-extra-tokens" ]
+      }
+    }
+    configs += [ ":midl_warnings" ]
+  }
+}
diff --git a/build/toolchain/win/setup_toolchain.py b/build/toolchain/win/setup_toolchain.py
new file mode 100644
index 0000000..bc9bd1e
--- /dev/null
+++ b/build/toolchain/win/setup_toolchain.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Copies the given "win tool" (which the toolchain uses to wrap compiler
+# invocations) and the environment blocks for the 32-bit and 64-bit builds on
+# Windows to the build directory.
+#
+# The arguments are the Visual Studio install location, the win tool location,
+# the Windows SDK path, the runtime DLL directories, and the target CPU. The
+# script assumes that the root build directory is the current dir and the files
+# will be written to the current directory.
+
+import errno
+import os
+import re
+import subprocess
+import sys
+
+
+def _ExtractImportantEnvironment(output_of_set):
+  """Extracts environment variables required for the toolchain to run from
+  a textual dump output by the cmd.exe 'set' command."""
+  envvars_to_save = (
+      'goma_.*', # TODO(scottmg): This is ugly, but needed for goma.
+      'include',
+      'lib',
+      'libpath',
+      'path',
+      'pathext',
+      'systemroot',
+      'temp',
+      'tmp',
+      )
+  env = {}
+  for line in output_of_set.splitlines():
+    for envvar in envvars_to_save:
+      if re.match(envvar + '=', line.lower()):
+        var, setting = line.split('=', 1)
+        if envvar == 'path':
+          # Our own rules (for running gyp-win-tool) and other actions in
+          # Chromium rely on python being in the path. Add the path to this
+          # python here so that if it's not in the path when ninja is run
+          # later, python will still be found.
+          setting = os.path.dirname(sys.executable) + os.pathsep + setting
+        env[var.upper()] = setting
+        break
+  for required in ('SYSTEMROOT', 'TEMP', 'TMP'):
+    if required not in env:
+      raise Exception('Environment variable "%s" '
+                      'required to be set to valid path' % required)
+  return env
+
+
+def _SetupScript(target_cpu, sdk_dir):
+  """Returns a command (with arguments) to be used to set up the
+  environment."""
+  # Check if we are running in the SDK command line environment and use
+  # the setup script from the SDK if so. |target_cpu| should be either
+  # 'x86' or 'x64'.
+  assert target_cpu in ('x86', 'x64')
+  if bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1))) and sdk_dir:
+    return [os.path.normpath(os.path.join(sdk_dir, 'Bin/SetEnv.Cmd')),
+            '/' + target_cpu]
+  else:
+    # We only support x64-hosted tools.
+    # TODO(scottmg|dpranke): Non-depot_tools toolchain: need to get Visual
+    # Studio install location from registry.
+    return [os.path.normpath(os.path.join(os.environ['GYP_MSVS_OVERRIDE_PATH'],
+                                          'VC/vcvarsall.bat')),
+            'amd64_x86' if target_cpu == 'x86' else 'amd64']
+
+
+def _FormatAsEnvironmentBlock(envvar_dict):
+  """Format as an 'environment block' directly suitable for CreateProcess.
+  Briefly this is a list of key=value\0, terminated by an additional \0. See
+  CreateProcess documentation for more details."""
+  block = ''
+  nul = '\0'
+  for key, value in envvar_dict.iteritems():
+    block += key + '=' + value + nul
+  block += nul
+  return block
+
+
+def _CopyTool(source_path):
+  """Copies the given tool to the current directory, including a warning not
+  to edit it."""
+  with open(source_path) as source_file:
+    tool_source = source_file.readlines()
+
+  # Add header and write it out to the current directory (which should be the
+  # root build dir).
+  with open("gyp-win-tool", 'w') as tool_file:
+    tool_file.write(''.join([tool_source[0],
+                             '# Generated by setup_toolchain.py do not edit.\n']
+                            + tool_source[1:]))
+
+
+def main():
+  if len(sys.argv) != 6:
+    print('Usage: setup_toolchain.py '
+          '<visual studio path> <win tool path> <win sdk path> '
+          '<runtime dirs> <target_cpu>')
+    sys.exit(2)
+  tool_source = sys.argv[2]
+  win_sdk_path = sys.argv[3]
+  runtime_dirs = sys.argv[4]
+  target_cpu = sys.argv[5]
+
+  _CopyTool(tool_source)
+
+  cpus = ('x86', 'x64')
+  assert target_cpu in cpus
+  vc_bin_dir = ''
+
+  # TODO(scottmg|goma): Do we need an equivalent of
+  # ninja_use_custom_environment_files?
+
+  for cpu in cpus:
+    # Extract environment variables for subprocesses.
+    args = _SetupScript(cpu, win_sdk_path)
+    args.extend(('&&', 'set'))
+    popen = subprocess.Popen(
+        args, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    variables, _ = popen.communicate()
+    env = _ExtractImportantEnvironment(variables)
+    env['PATH'] = runtime_dirs + ';' + env['PATH']
+
+    if cpu == target_cpu:
+      for path in env['PATH'].split(os.pathsep):
+        if os.path.exists(os.path.join(path, 'cl.exe')):
+          vc_bin_dir = os.path.realpath(path)
+          break
+
+    # The Windows SDK include directories must come first: both the SDK and
+    # Visual Studio provide a sal.h, and the SDK copy is newer and uses some
+    # features that are not present in the Visual Studio one.
+
+    if win_sdk_path:
+      additional_includes = ('{sdk_dir}\\Include\\shared;' +
+                             '{sdk_dir}\\Include\\um;' +
+                             '{sdk_dir}\\Include\\winrt;').format(
+                                  sdk_dir=win_sdk_path)
+      env['INCLUDE'] = additional_includes + env['INCLUDE']
+    env_block = _FormatAsEnvironmentBlock(env)
+    with open('environment.' + cpu, 'wb') as f:
+      f.write(env_block)
+
+  assert vc_bin_dir
+  print 'vc_bin_dir = "%s"' % vc_bin_dir
+
+
+if __name__ == '__main__':
+  main()
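To make the environment-block format concrete, here is a small round trip with invented variable values; the environment.x86 and environment.x64 files written above are exactly such blocks, later consumed by the "ninja -t msvc -e $env" commands in build/toolchain/win/BUILD.gn:

def format_block(env):
    # Same shape as _FormatAsEnvironmentBlock above: key=value pairs, each
    # NUL-terminated, with one extra NUL closing the block.
    return ''.join('%s=%s\0' % item for item in sorted(env.items())) + '\0'

def parse_block(block):
    return dict(entry.split('=', 1) for entry in block.split('\0') if entry)

env = {'SYSTEMROOT': 'C:\\Windows', 'TEMP': 'C:\\Temp', 'TMP': 'C:\\Temp'}
assert parse_block(format_block(env)) == env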
diff --git a/build/tree_truth.sh b/build/tree_truth.sh
new file mode 100755
index 0000000..617092d
--- /dev/null
+++ b/build/tree_truth.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# Script for printing recent commits in a buildbot run.
+
+# Return the sha1 of the given tag.  If not present, return "".
+# $1: path to repo
+# $2: tag name
+tt_sha1_for_tag() {
+  oneline=$(cd $1 && git log -1 $2 --format='%H' 2>/dev/null)
+  if [ $? -eq 0 ] ; then
+    echo $oneline
+  fi
+}
+
+# Return the sha1 of HEAD, or ""
+# $1: path to repo
+tt_sha1_for_head() {
+  ( cd $1 && git log HEAD -n1 --format='%H' | cat )
+}
+
+# For the given repo, set tag to HEAD.
+# $1: path to repo
+# $2: tag name
+tt_tag_head() {
+  ( cd $1 && git tag -f $2 )
+}
+
+# For the given repo, delete the tag.
+# $1: path to repo
+# $2: tag name
+tt_delete_tag() {
+  ( cd $1 && git tag -d $2 )
+}
+
+# For the given repo, set tag to "three commits ago" (for testing).
+# $1: path to repo
+# $2: tag name
+tt_tag_three_ago() {
+ local sh=$(cd $1 && git log --pretty=oneline -n 3 | tail -1 | awk '{print $1}')
+  ( cd $1 && git tag -f $2 $sh )
+}
+
+# List the commits between the given tag and HEAD.
+# If the tag does not exist, only list the last few.
+# If the tag is at HEAD, list nothing.
+# Output format has distinct build steps for repos with changes.
+# $1: path to repo
+# $2: tag name
+# $3: simple/short repo name to use for display
+tt_list_commits() {
+  local tag_sha1=$(tt_sha1_for_tag $1 $2)
+  local head_sha1=$(tt_sha1_for_head $1)
+  local display_name=$(echo $3 | sed 's#/#_#g')
+  if [ "${tag_sha1}" = "${head_sha1}" ] ; then
+    return
+  fi
+  if [ "${tag_sha1}" = "" ] ; then
+    echo "@@@BUILD_STEP Recent commits in repo $display_name@@@"
+    echo "NOTE: git tag was not found so we have no baseline."
+    echo "Here are some recent commits, but they may not be new for this build."
+    ( cd $1 && git log -n 10 --stat | cat)
+  else
+    echo "@@@BUILD_STEP New commits in repo $display_name@@@"
+    ( cd $1 && git log -n 500 $2..HEAD --stat | cat)
+  fi
+}
+
+# Clean out the tree truth tags in all repos.  For testing.
+tt_clean_all() {
+ for project in $@; do
+   tt_delete_tag $CHROME_SRC/../$project tree_truth
+ done
+}
+
+# Print tree truth for all clank repos.
+tt_print_all() {
+ for project in $@; do
+   local full_path=$CHROME_SRC/../$project
+   tt_list_commits $full_path tree_truth $project
+   tt_tag_head $full_path tree_truth
+ done
+}
+
+# Print a summary of the last 10 commits for each repo.
+tt_brief_summary() {
+  echo "@@@BUILD_STEP Brief summary of recent CLs in every branch@@@"
+  for project in $@; do
+    echo $project:
+    local full_path=$CHROME_SRC/../$project
+    (cd $full_path && git log -n 10 --format="   %H %s   %an, %ad" | cat)
+    echo "================================================================="
+  done
+}
+
+CHROME_SRC=$1
+shift
+PROJECT_LIST=$@
+tt_brief_summary $PROJECT_LIST
+tt_print_all $PROJECT_LIST
diff --git a/build/uiautomator_test.gypi b/build/uiautomator_test.gypi
new file mode 100644
index 0000000..e9bd0bf
--- /dev/null
+++ b/build/uiautomator_test.gypi
@@ -0,0 +1,37 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build a dexed uiautomator test jar.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'test_suite_name',
+#   'type': 'none',
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+
+{
+  'dependencies': [
+    '<(DEPTH)/build/android/pylib/device/commands/commands.gyp:chromium_commands',
+    '<(DEPTH)/tools/android/android_tools.gyp:android_tools',
+  ],
+  'variables': {
+    'output_dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+  },
+  'actions': [
+    {
+      'action_name': 'dex_<(_target_name)',
+      'message': 'Dexing <(_target_name) jar',
+      'variables': {
+        'dex_input_paths': [
+          '>@(library_dexed_jars_paths)',
+        ],
+        'output_path': '<(output_dex_path)',
+      },
+      'includes': [ 'android/dex_action.gypi' ],
+    },
+  ],
+}
diff --git a/build/update-linux-sandbox.sh b/build/update-linux-sandbox.sh
new file mode 100755
index 0000000..735733a
--- /dev/null
+++ b/build/update-linux-sandbox.sh
@@ -0,0 +1,75 @@
+#!/bin/sh
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+BUILDTYPE="${BUILDTYPE:-Debug}"
+CHROME_SRC_DIR="${CHROME_SRC_DIR:-$(dirname -- $(readlink -fn -- "$0"))/..}"
+CHROME_OUT_DIR="${CHROME_SRC_DIR}/${CHROMIUM_OUT_DIR:-out}/${BUILDTYPE}"
+CHROME_SANDBOX_BUILD_PATH="${CHROME_OUT_DIR}/chrome_sandbox"
+CHROME_SANDBOX_INST_PATH="/usr/local/sbin/chrome-devel-sandbox"
+CHROME_SANDBOX_INST_DIR=$(dirname -- "$CHROME_SANDBOX_INST_PATH")
+
+TARGET_DIR_TYPE=$(stat -f -c %t -- "${CHROME_SANDBOX_INST_DIR}" 2>/dev/null)
+if [ $? -ne 0 ]; then
+  echo "Could not get status of ${CHROME_SANDBOX_INST_DIR}"
+  exit 1
+fi
+
+# Make sure the path is not on NFS.
+if [ "${TARGET_DIR_TYPE}" = "6969" ]; then
+  echo "Please make sure ${CHROME_SANDBOX_INST_PATH} is not on NFS!"
+  exit 1
+fi
+
+installsandbox() {
+  echo "(using sudo so you may be asked for your password)"
+  sudo -- cp "${CHROME_SANDBOX_BUILD_PATH}" \
+    "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chown root:root "${CHROME_SANDBOX_INST_PATH}" &&
+  sudo -- chmod 4755 "${CHROME_SANDBOX_INST_PATH}"
+  return $?
+}
+
+if [ ! -d "${CHROME_OUT_DIR}" ]; then
+  echo -n "${CHROME_OUT_DIR} does not exist. Use \"BUILDTYPE=Release ${0}\" "
+  echo "If you are building in Release mode"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_BUILD_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_BUILD_PATH}, "
+  echo "please make sure you build the chrome_sandbox target"
+  exit 1
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo -n "Could not find ${CHROME_SANDBOX_INST_PATH}, "
+  echo "installing it now."
+  installsandbox
+fi
+
+if [ ! -f "${CHROME_SANDBOX_INST_PATH}" ]; then
+  echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+  exit 1
+fi
+
+CURRENT_API=$("${CHROME_SANDBOX_BUILD_PATH}" --get-api)
+INSTALLED_API=$("${CHROME_SANDBOX_INST_PATH}" --get-api)
+
+if [ "${CURRENT_API}" != "${INSTALLED_API}" ]; then
+  echo "Your installed setuid sandbox is too old, installing it now."
+  if ! installsandbox; then
+    echo "Failed to install ${CHROME_SANDBOX_INST_PATH}"
+    exit 1
+  fi
+else
+  echo "Your setuid sandbox is up to date"
+  if [ "${CHROME_DEVEL_SANDBOX}" != "${CHROME_SANDBOX_INST_PATH}" ]; then
+    echo -n "Make sure you have \"export "
+    echo -n "CHROME_DEVEL_SANDBOX=${CHROME_SANDBOX_INST_PATH}\" "
+    echo "somewhere in your .bashrc"
+    echo "This variable is currently: ${CHROME_DEVEL_SANDBOX:-empty}"
+  fi
+fi
diff --git a/build/util/BUILD.gn b/build/util/BUILD.gn
new file mode 100644
index 0000000..29dd943
--- /dev/null
+++ b/build/util/BUILD.gn
@@ -0,0 +1,48 @@
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+action("webkit_version") {
+  script = "version.py"
+
+  lastchange_file = "LASTCHANGE.blink"
+
+  # TODO(brettw) move from content to this directory.
+  template_file = "//content/webkit_version.h.in"
+  inputs = [
+    lastchange_file,
+    template_file,
+  ]
+
+  output_file = "$root_gen_dir/webkit_version.h"
+  outputs = [
+    output_file,
+  ]
+
+  args = [
+    "-f",
+    rebase_path(lastchange_file, root_build_dir),
+    rebase_path(template_file, root_build_dir),
+    rebase_path(output_file, root_build_dir),
+  ]
+}
+
+action("chrome_version_json") {
+  script = "version.py"
+  _chrome_version_path = "//chrome/VERSION"
+  inputs = [
+    _chrome_version_path,
+  ]
+  _output_file = "$root_gen_dir/CHROME_VERSION.json"
+  outputs = [
+    _output_file,
+  ]
+  args = [
+    "--file",
+    rebase_path(_chrome_version_path, root_build_dir),
+    "--template",
+    "{\"full-quoted\": \"\\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\\"\"}",
+    "--output",
+    rebase_path(_output_file, root_build_dir),
+  ]
+}
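The --template argument of chrome_version_json implies a simple @KEY@ substitution from the values in //chrome/VERSION; version.py itself is not shown in this section, so the following is only an inference, with invented version numbers:

# Inferred @KEY@ substitution for the chrome_version_json template above; the
# MAJOR/MINOR/BUILD/PATCH values are made up for illustration.
values = {'MAJOR': '53', 'MINOR': '0', 'BUILD': '2785', 'PATCH': '0'}
template = '{"full-quoted": "\\"@MAJOR@.@MINOR@.@BUILD@.@PATCH@\\""}'
output = template
for key, value in values.items():
    output = output.replace('@%s@' % key, value)
print(output)  # {"full-quoted": "\"53.0.2785.0\""}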
diff --git a/build/util/LASTCHANGE b/build/util/LASTCHANGE
new file mode 100644
index 0000000..438a0fe
--- /dev/null
+++ b/build/util/LASTCHANGE
@@ -0,0 +1 @@
+LASTCHANGE=a757125bae5bce3daacf60f00502f7dd6490b875
diff --git a/build/util/lastchange.py b/build/util/lastchange.py
new file mode 100755
index 0000000..3f3ee4a
--- /dev/null
+++ b/build/util/lastchange.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+lastchange.py -- Chromium revision fetching utility.
+"""
+
+import re
+import optparse
+import os
+import subprocess
+import sys
+
+_GIT_SVN_ID_REGEX = re.compile(r'.*git-svn-id:\s*([^@]*)@([0-9]+)', re.DOTALL)
+
+class VersionInfo(object):
+  def __init__(self, url, revision):
+    self.url = url
+    self.revision = revision
+
+
+def FetchSVNRevision(directory, svn_url_regex):
+  """
+  Fetch the Subversion branch and revision for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  try:
+    proc = subprocess.Popen(['svn', 'info'],
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+  except OSError:
+    # command is apparently either not installed or not executable.
+    return None
+  if not proc:
+    return None
+
+  attrs = {}
+  for line in proc.stdout:
+    line = line.strip()
+    if not line:
+      continue
+    key, val = line.split(': ', 1)
+    attrs[key] = val
+
+  try:
+    match = svn_url_regex.search(attrs['URL'])
+    if match:
+      url = match.group(2)
+    else:
+      url = ''
+    revision = attrs['Revision']
+  except KeyError:
+    return None
+
+  return VersionInfo(url, revision)
+
+
+def RunGitCommand(directory, command):
+  """
+  Launches git subcommand.
+
+  Errors are swallowed.
+
+  Returns:
+    A process object or None.
+  """
+  command = ['git'] + command
+  # Force shell usage under cygwin. This is a workaround for
+  # mysterious loss of cwd while invoking cygwin's git.
+  # We can't just pass shell=True to Popen, as under win32 this will
+  # cause CMD to be used, while we explicitly want a cygwin shell.
+  if sys.platform == 'cygwin':
+    command = ['sh', '-c', ' '.join(command)]
+  try:
+    proc = subprocess.Popen(command,
+                            stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE,
+                            cwd=directory,
+                            shell=(sys.platform=='win32'))
+    return proc
+  except OSError:
+    return None
+
+
+def FetchGitRevision(directory):
+  """
+  Fetch the Git hash for a given directory.
+
+  Errors are swallowed.
+
+  Returns:
+    A VersionInfo object or None on error.
+  """
+  hsh = ''
+  proc = RunGitCommand(directory, ['rev-parse', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      hsh = output
+  if not hsh:
+    return None
+  pos = ''
+  proc = RunGitCommand(directory, ['cat-file', 'commit', 'HEAD'])
+  if proc:
+    output = proc.communicate()[0]
+    if proc.returncode == 0 and output:
+      for line in reversed(output.splitlines()):
+        if line.startswith('Cr-Commit-Position:'):
+          pos = line.rsplit()[-1].strip()
+          break
+  if not pos:
+    return VersionInfo('git', hsh)
+  return VersionInfo('git', '%s-%s' % (hsh, pos))
+
+
+def FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper):
+  """
+  Fetch the Subversion URL and revision through Git.
+
+  Errors are swallowed.
+
+  Returns:
+    A tuple containing the Subversion URL and revision.
+  """
+  git_args = ['log', '-1', '--format=%b']
+  if go_deeper:
+    git_args.append('--grep=git-svn-id')
+  proc = RunGitCommand(directory, git_args)
+  if proc:
+    output = proc.communicate()[0].strip()
+    if proc.returncode == 0 and output:
+      # Extract the latest SVN revision and the SVN URL.
+      # The target line is the last "git-svn-id: ..." line like this:
+      # git-svn-id: svn://svn.chromium.org/chrome/trunk/src@85528 0039d316....
+      match = _GIT_SVN_ID_REGEX.search(output)
+      if match:
+        revision = match.group(2)
+        url_match = svn_url_regex.search(match.group(1))
+        if url_match:
+          url = url_match.group(2)
+        else:
+          url = ''
+        return url, revision
+  return None, None
+
+
+def FetchGitSVNRevision(directory, svn_url_regex, go_deeper):
+  """
+  Fetch the Git-SVN identifier for the local tree.
+
+  Errors are swallowed.
+  """
+  url, revision = FetchGitSVNURLAndRevision(directory, svn_url_regex, go_deeper)
+  if url and revision:
+    return VersionInfo(url, revision)
+  return None
+
+
+def FetchVersionInfo(default_lastchange, directory=None,
+                     directory_regex_prior_to_src_url='chrome|blink|svn',
+                     go_deeper=False):
+  """
+  Returns the last change, as a VersionInfo object (URL and revision), from
+  the first revision control system that reports one (SVN, Git-SVN, or Git).
+  """
+  svn_url_regex = re.compile(
+      r'.*/(' + directory_regex_prior_to_src_url + r')(/.*)')
+
+  version_info = (FetchSVNRevision(directory, svn_url_regex) or
+                  FetchGitSVNRevision(directory, svn_url_regex, go_deeper) or
+                  FetchGitRevision(directory))
+  if not version_info:
+    if default_lastchange and os.path.exists(default_lastchange):
+      revision = open(default_lastchange, 'r').read().strip()
+      version_info = VersionInfo(None, revision)
+    else:
+      version_info = VersionInfo(None, None)
+  return version_info
+
+def GetHeaderGuard(path):
+  """
+  Returns the header #define guard for the given file path.
+  This treats everything after the last instance of "src/" as being a
+  relevant part of the guard. If there is no "src/", then the entire path
+  is used.
+  """
+  src_index = path.rfind('src/')
+  if src_index != -1:
+    guard = path[src_index + 4:]
+  else:
+    guard = path
+  guard = guard.upper()
+  return guard.replace('/', '_').replace('.', '_').replace('\\', '_') + '_'
+
+def GetHeaderContents(path, define, version):
+  """
+  Returns the contents that the header file should have to indicate the given
+  revision. Note that the #define is specified as a string, even though it's
+  currently always an SVN revision number, in case we need to move to git hashes.
+  """
+  header_guard = GetHeaderGuard(path)
+
+  header_contents = """/* Generated by lastchange.py, do not edit.*/
+
+#ifndef %(header_guard)s
+#define %(header_guard)s
+
+#define %(define)s "%(version)s"
+
+#endif  // %(header_guard)s
+"""
+  header_contents = header_contents % { 'header_guard': header_guard,
+                                        'define': define,
+                                        'version': version }
+  return header_contents
+
+def WriteIfChanged(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+
+  parser = optparse.OptionParser(usage="lastchange.py [options]")
+  parser.add_option("-d", "--default-lastchange", metavar="FILE",
+                    help="Default last change input FILE.")
+  parser.add_option("-m", "--version-macro",
+                    help="Name of C #define when using --header. Defaults to " +
+                    "LAST_CHANGE.",
+                    default="LAST_CHANGE")
+  parser.add_option("-o", "--output", metavar="FILE",
+                    help="Write last change to FILE. " +
+                    "Can be combined with --header to write both files.")
+  parser.add_option("", "--header", metavar="FILE",
+                    help="Write last change to FILE as a C/C++ header. " +
+                    "Can be combined with --output to write both files.")
+  parser.add_option("--revision-only", action='store_true',
+                    help="Just print the SVN revision number. Overrides any " +
+                    "file-output-related options.")
+  parser.add_option("-s", "--source-dir", metavar="DIR",
+                    help="Use repository in the given directory.")
+  parser.add_option("--git-svn-go-deeper", action='store_true',
+                    help="In a Git-SVN repo, dig down to the last committed " +
+                    "SVN change (historic behaviour).")
+  opts, args = parser.parse_args(argv[1:])
+
+  out_file = opts.output
+  header = opts.header
+
+  if args and out_file is None:
+    out_file = args.pop(0)
+  if args:
+    sys.stderr.write('Unexpected arguments: %r\n\n' % args)
+    parser.print_help()
+    sys.exit(2)
+
+  if opts.source_dir:
+    src_dir = opts.source_dir
+  else:
+    src_dir = os.path.dirname(os.path.abspath(__file__))
+
+  version_info = FetchVersionInfo(opts.default_lastchange,
+                                  directory=src_dir,
+                                  go_deeper=opts.git_svn_go_deeper)
+
+  if version_info.revision is None:
+    version_info.revision = '0'
+
+  if opts.revision_only:
+    print version_info.revision
+  else:
+    contents = "LASTCHANGE=%s\n" % version_info.revision
+    if not out_file and not opts.header:
+      sys.stdout.write(contents)
+    else:
+      if out_file:
+        WriteIfChanged(out_file, contents)
+      if header:
+        WriteIfChanged(header,
+                       GetHeaderContents(header, opts.version_macro,
+                                         version_info.revision))
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/util/lib/common/__init__.py b/build/util/lib/common/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/build/util/lib/common/__init__.py
diff --git a/build/util/lib/common/perf_result_data_type.py b/build/util/lib/common/perf_result_data_type.py
new file mode 100644
index 0000000..67b550a
--- /dev/null
+++ b/build/util/lib/common/perf_result_data_type.py
@@ -0,0 +1,20 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+DEFAULT = 'default'
+UNIMPORTANT = 'unimportant'
+HISTOGRAM = 'histogram'
+UNIMPORTANT_HISTOGRAM = 'unimportant-histogram'
+INFORMATIONAL = 'informational'
+
+ALL_TYPES = [DEFAULT, UNIMPORTANT, HISTOGRAM, UNIMPORTANT_HISTOGRAM,
+             INFORMATIONAL]
+
+
+def IsValidType(datatype):
+  return datatype in ALL_TYPES
+
+
+def IsHistogram(datatype):
+  return (datatype == HISTOGRAM or datatype == UNIMPORTANT_HISTOGRAM)
diff --git a/build/util/lib/common/perf_tests_results_helper.py b/build/util/lib/common/perf_tests_results_helper.py
new file mode 100644
index 0000000..6cb058b
--- /dev/null
+++ b/build/util/lib/common/perf_tests_results_helper.py
@@ -0,0 +1,166 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import re
+import sys
+
+import json
+import logging
+import math
+
+import perf_result_data_type
+
+
+# Mapping from result type to test output
+RESULT_TYPES = {perf_result_data_type.UNIMPORTANT: 'RESULT ',
+                perf_result_data_type.DEFAULT: '*RESULT ',
+                perf_result_data_type.INFORMATIONAL: '',
+                perf_result_data_type.UNIMPORTANT_HISTOGRAM: 'HISTOGRAM ',
+                perf_result_data_type.HISTOGRAM: '*HISTOGRAM '}
+
+
+def _EscapePerfResult(s):
+  """Escapes |s| for use in a perf result."""
+  return re.sub('[\:|=/#&,]', '_', s)
+
+
+def FlattenList(values):
+  """Returns a simple list without sub-lists."""
+  ret = []
+  for entry in values:
+    if isinstance(entry, list):
+      ret.extend(FlattenList(entry))
+    else:
+      ret.append(entry)
+  return ret
+
+
+def GeomMeanAndStdDevFromHistogram(histogram_json):
+  histogram = json.loads(histogram_json)
+  # Handle empty histograms gracefully.
+  if not 'buckets' in histogram:
+    return 0.0, 0.0
+  count = 0
+  sum_of_logs = 0
+  for bucket in histogram['buckets']:
+    if 'high' in bucket:
+      bucket['mean'] = (bucket['low'] + bucket['high']) / 2.0
+    else:
+      bucket['mean'] = bucket['low']
+    if bucket['mean'] > 0:
+      sum_of_logs += math.log(bucket['mean']) * bucket['count']
+      count += bucket['count']
+
+  if count == 0:
+    return 0.0, 0.0
+
+  sum_of_squares = 0
+  geom_mean = math.exp(sum_of_logs / count)
+  for bucket in histogram['buckets']:
+    if bucket['mean'] > 0:
+      sum_of_squares += (bucket['mean'] - geom_mean) ** 2 * bucket['count']
+  return geom_mean, math.sqrt(sum_of_squares / count)
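+
+
+# Illustrative example (not part of the original helper): a histogram whose
+# bucket means are 2 and 8, with one sample each, has geometric mean 4:
+#
+#   GeomMeanAndStdDevFromHistogram(
+#       '{"buckets": [{"low": 1, "high": 3, "count": 1},'
+#       ' {"low": 8, "count": 1}]}')
+#   # -> (4.0, ~3.162)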
+
+
+def _ValueToString(v):
+  # Special case for floats so we don't print using scientific notation.
+  if isinstance(v, float):
+    return '%f' % v
+  else:
+    return str(v)
+
+
+def _MeanAndStdDevFromList(values):
+  avg = None
+  sd = None
+  if len(values) > 1:
+    try:
+      value = '[%s]' % ','.join([_ValueToString(v) for v in values])
+      avg = sum([float(v) for v in values]) / len(values)
+      sqdiffs = [(float(v) - avg) ** 2 for v in values]
+      variance = sum(sqdiffs) / (len(values) - 1)
+      sd = math.sqrt(variance)
+    except ValueError:
+      value = ', '.join(values)
+  else:
+    value = values[0]
+  return value, avg, sd
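+
+
+# Illustrative example (not part of the original helper):
+#   _MeanAndStdDevFromList([1, 2, 3]) -> ('[1,2,3]', 2.0, 1.0)
+# (the standard deviation is the sample standard deviation, using N - 1).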
+
+
+def PrintPages(page_list):
+  """Prints list of pages to stdout in the format required by perf tests."""
+  print 'Pages: [%s]' % ','.join([_EscapePerfResult(p) for p in page_list])
+
+
+def PrintPerfResult(measurement, trace, values, units,
+                    result_type=perf_result_data_type.DEFAULT,
+                    print_to_stdout=True):
+  """Prints numerical data to stdout in the format required by perf tests.
+
+  The string args may be empty but they must not contain any colons (:) or
+  equals signs (=).
+  This is parsed by the buildbot using:
+  http://src.chromium.org/viewvc/chrome/trunk/tools/build/scripts/slave/process_log_utils.py
+
+  Args:
+    measurement: A description of the quantity being measured, e.g. "vm_peak".
+        On the dashboard, this maps to a particular graph. Mandatory.
+    trace: A description of the particular data point, e.g. "reference".
+        On the dashboard, this maps to a particular "line" in the graph.
+        Mandatory.
+    values: A list of numeric measured values. An N-dimensional list will be
+        flattened and treated as a simple list.
+    units: A description of the units of measure, e.g. "bytes".
+    result_type: Accepts values of perf_result_data_type.ALL_TYPES.
+    print_to_stdout: If True, prints the output in stdout instead of returning
+        the output to caller.
+
+  Returns:
+    String of the formatted perf result.
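+
+  Example (illustrative): a DEFAULT result for measurement 'warm_times',
+  trace 't', values [0.1, 0.2] and units 'ms' produces a line of the form
+
+    *RESULT warm_times: t= [0.100000,0.200000] ms
+
+  followed by 'Avg warm_times: ...ms' and 'Sd  warm_times: ...ms' lines.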
+  """
+  assert perf_result_data_type.IsValidType(result_type), \
+         'result type: %s is invalid' % result_type
+
+  trace_name = _EscapePerfResult(trace)
+
+  if (result_type == perf_result_data_type.UNIMPORTANT or
+      result_type == perf_result_data_type.DEFAULT or
+      result_type == perf_result_data_type.INFORMATIONAL):
+    assert isinstance(values, list)
+    assert '/' not in measurement
+    flattened_values = FlattenList(values)
+    assert len(flattened_values)
+    value, avg, sd = _MeanAndStdDevFromList(flattened_values)
+    output = '%s%s: %s%s%s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        # Do not show the equal sign if the trace is empty. This usually
+        # happens when the measurement alone is clear enough to describe the
+        # result.
+        '= ' if trace_name else '',
+        value,
+        units)
+  else:
+    assert perf_result_data_type.IsHistogram(result_type)
+    assert isinstance(values, list)
+    # The histograms can only be printed individually, there's no computation
+    # across different histograms.
+    assert len(values) == 1
+    value = values[0]
+    output = '%s%s: %s= %s %s' % (
+        RESULT_TYPES[result_type],
+        _EscapePerfResult(measurement),
+        trace_name,
+        value,
+        units)
+    avg, sd = GeomMeanAndStdDevFromHistogram(value)
+
+  if avg:
+    output += '\nAvg %s: %f%s' % (measurement, avg, units)
+  if sd:
+    output += '\nSd  %s: %f%s' % (measurement, sd, units)
+  if print_to_stdout:
+    print output
+    sys.stdout.flush()
+  return output
diff --git a/build/util/lib/common/unittest_util.py b/build/util/lib/common/unittest_util.py
new file mode 100644
index 0000000..189f587
--- /dev/null
+++ b/build/util/lib/common/unittest_util.py
@@ -0,0 +1,153 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utilities for dealing with the python unittest module."""
+
+import fnmatch
+import sys
+import unittest
+
+
+class _TextTestResult(unittest._TextTestResult):
+  """A test result class that can print formatted text results to a stream.
+
+  Results printed in conformance with gtest output format, like:
+  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
+  [         OK ] autofill.AutofillTest.testAutofillInvalid
+  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
+  [         OK ] autofill.AutofillTest.testFillProfile
+  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
+  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters
+  """
+  def __init__(self, stream, descriptions, verbosity):
+    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
+    self._fails = set()
+
+  def _GetTestURI(self, test):
+    return '%s.%s.%s' % (test.__class__.__module__,
+                         test.__class__.__name__,
+                         test._testMethodName)
+
+  def getDescription(self, test):
+    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())
+
+  def startTest(self, test):
+    unittest.TestResult.startTest(self, test)
+    self.stream.writeln('[ RUN        ] %s' % self.getDescription(test))
+
+  def addSuccess(self, test):
+    unittest.TestResult.addSuccess(self, test)
+    self.stream.writeln('[         OK ] %s' % self._GetTestURI(test))
+
+  def addError(self, test, err):
+    unittest.TestResult.addError(self, test, err)
+    self.stream.writeln('[      ERROR ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def addFailure(self, test, err):
+    unittest.TestResult.addFailure(self, test, err)
+    self.stream.writeln('[     FAILED ] %s' % self._GetTestURI(test))
+    self._fails.add(self._GetTestURI(test))
+
+  def getRetestFilter(self):
+    return ':'.join(self._fails)
+
+
+class TextTestRunner(unittest.TextTestRunner):
+  """Test Runner for displaying test results in textual format.
+
+  Results are displayed in conformance with google test output.
+  """
+
+  def __init__(self, verbosity=1):
+    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
+                                     verbosity=verbosity)
+
+  def _makeResult(self):
+    return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+
+def GetTestsFromSuite(suite):
+  """Returns all the tests from a given test suite."""
+  tests = []
+  for x in suite:
+    if isinstance(x, unittest.TestSuite):
+      tests += GetTestsFromSuite(x)
+    else:
+      tests += [x]
+  return tests
+
+
+def GetTestNamesFromSuite(suite):
+  """Returns a list of every test name in the given suite."""
+  return map(lambda x: GetTestName(x), GetTestsFromSuite(suite))
+
+
+def GetTestName(test):
+  """Gets the test name of the given unittest test."""
+  return '.'.join([test.__class__.__module__,
+                   test.__class__.__name__,
+                   test._testMethodName])
+
+
+def FilterTestSuite(suite, gtest_filter):
+  """Returns a new filtered tests suite based on the given gtest filter.
+
+  See http://code.google.com/p/googletest/wiki/AdvancedGuide
+  for gtest_filter specification.
+  """
+  return unittest.TestSuite(FilterTests(GetTestsFromSuite(suite), gtest_filter))
+
+
+def FilterTests(all_tests, gtest_filter):
+  """Filter a list of tests based on the given gtest filter.
+
+  Args:
+    all_tests: List of tests (unittest.TestSuite)
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of tests.
+  """
+  test_names = [GetTestName(test) for test in all_tests]
+  filtered_names = FilterTestNames(test_names, gtest_filter)
+  return [test for test in all_tests if GetTestName(test) in filtered_names]
+
+
+def FilterTestNames(all_tests, gtest_filter):
+  """Filter a list of test names based on the given gtest filter.
+
+  See http://code.google.com/p/googletest/wiki/AdvancedGuide
+  for gtest_filter specification.
+
+  Args:
+    all_tests: List of test names.
+    gtest_filter: Filter to apply.
+
+  Returns:
+    Filtered subset of the given list of test names.
+  """
+  pattern_groups = gtest_filter.split('-')
+  positive_patterns = ['*']
+  if pattern_groups[0]:
+    positive_patterns = pattern_groups[0].split(':')
+  negative_patterns = None
+  if len(pattern_groups) > 1:
+    negative_patterns = pattern_groups[1].split(':')
+
+  tests = []
+  for test in all_tests:
+    # Test name must be matched by at least one positive pattern.
+    for pattern in positive_patterns:
+      if fnmatch.fnmatch(test, pattern):
+        break
+    else:
+      continue
+    # Test name must not be matched by any negative patterns.
+    for pattern in negative_patterns or []:
+      if fnmatch.fnmatch(test, pattern):
+        break
+    else:
+      tests += [test]
+  return tests
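+
+
+# Illustrative example (not part of the original module):
+#   FilterTestNames(['a.B.testFoo', 'a.B.testBar', 'c.D.testBaz'],
+#                   'a.B.*-*.testBar')
+#   # -> ['a.B.testFoo']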
diff --git a/build/util/lib/common/util.py b/build/util/lib/common/util.py
new file mode 100644
index 0000000..a415b1f
--- /dev/null
+++ b/build/util/lib/common/util.py
@@ -0,0 +1,151 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Generic utilities for all python scripts."""
+
+import atexit
+import httplib
+import os
+import signal
+import socket
+import stat
+import subprocess
+import sys
+import tempfile
+import urlparse
+
+
+def GetPlatformName():
+  """Return a string to be used in paths for the platform."""
+  if IsWindows():
+    return 'win'
+  if IsMac():
+    return 'mac'
+  if IsLinux():
+    return 'linux'
+  raise NotImplementedError('Unknown platform "%s".' % sys.platform)
+
+
+def IsWindows():
+  return sys.platform == 'cygwin' or sys.platform.startswith('win')
+
+
+def IsLinux():
+  return sys.platform.startswith('linux')
+
+
+def IsMac():
+  return sys.platform.startswith('darwin')
+
+
+def _DeleteDir(path):
+  """Deletes a directory recursively, which must exist."""
+  # Don't use shutil.rmtree because it can't delete read-only files on Win.
+  for root, dirs, files in os.walk(path, topdown=False):
+    for name in files:
+      filename = os.path.join(root, name)
+      os.chmod(filename, stat.S_IWRITE)
+      os.remove(filename)
+    for name in dirs:
+      os.rmdir(os.path.join(root, name))
+  os.rmdir(path)
+
+
+def Delete(path):
+  """Deletes the given file or directory (recursively), which must exist."""
+  if os.path.isdir(path):
+    _DeleteDir(path)
+  else:
+    os.remove(path)
+
+
+def MaybeDelete(path):
+  """Deletes the given file or directory (recurisvely), if it exists."""
+  if os.path.exists(path):
+    Delete(path)
+
+
+def MakeTempDir(parent_dir=None):
+  """Creates a temporary directory and returns an absolute path to it.
+
+  The temporary directory is automatically deleted when the python interpreter
+  exits normally.
+
+  Args:
+    parent_dir: the directory to create the temp dir in. If None, the system
+                temp dir is used.
+
+  Returns:
+    The absolute path to the temporary directory.
+  """
+  path = tempfile.mkdtemp(dir=parent_dir)
+  atexit.register(MaybeDelete, path)
+  return path
+
+
+def Unzip(zip_path, output_dir):
+  """Unzips the given zip file using a system installed unzip tool.
+
+  Args:
+    zip_path: zip file to unzip.
+    output_dir: directory to unzip the contents of the zip file. The directory
+                must exist.
+
+  Raises:
+    RuntimeError if the unzip operation fails.
+  """
+  if IsWindows():
+    unzip_cmd = ['C:\\Program Files\\7-Zip\\7z.exe', 'x', '-y']
+  else:
+    unzip_cmd = ['unzip', '-o']
+  unzip_cmd += [zip_path]
+  if RunCommand(unzip_cmd, output_dir) != 0:
+    raise RuntimeError('Unable to unzip %s to %s' % (zip_path, output_dir))
+
+
+def Kill(pid):
+  """Terminate the given pid."""
+  if IsWindows():
+    subprocess.call(['taskkill.exe', '/T', '/F', '/PID', str(pid)])
+  else:
+    os.kill(pid, signal.SIGTERM)
+
+
+def RunCommand(cmd, cwd=None):
+  """Runs the given command and returns the exit code.
+
+  Args:
+    cmd: list of command arguments.
+    cwd: working directory to execute the command, or None if the current
+         working directory should be used.
+
+  Returns:
+    The exit code of the command.
+  """
+  process = subprocess.Popen(cmd, cwd=cwd)
+  process.wait()
+  return process.returncode
+
+
+def DoesUrlExist(url):
+  """Determines whether a resource exists at the given URL.
+
+  Args:
+    url: URL to be verified.
+
+  Returns:
+    True if url exists, otherwise False.
+  """
+  parsed = urlparse.urlparse(url)
+  try:
+    conn = httplib.HTTPConnection(parsed.netloc)
+    conn.request('HEAD', parsed.path)
+    response = conn.getresponse()
+  except (socket.gaierror, socket.error):
+    return False
+  finally:
+    conn.close()
+  # Follow both permanent (301) and temporary (302) redirects.
+  if response.status == 302 or response.status == 301:
+    return DoesUrlExist(response.getheader('location'))
+  return response.status == 200
diff --git a/build/util/version.gypi b/build/util/version.gypi
new file mode 100644
index 0000000..327a5c2
--- /dev/null
+++ b/build/util/version.gypi
@@ -0,0 +1,20 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+  'variables': {
+    'variables': {
+      'version_py_path': '<(DEPTH)/build/util/version.py',
+      'version_path': '<(DEPTH)/chrome/VERSION',
+      'lastchange_path': '<(DEPTH)/build/util/LASTCHANGE',
+    },
+    'version_py_path': '<(version_py_path)',
+    'version_path': '<(version_path)',
+    'lastchange_path': '<(lastchange_path)',
+    'version_full':
+        '<!(python <(version_py_path) -f <(version_path) -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@")',
+    'version_mac_dylib':
+        '<!(python <(version_py_path) -f <(version_path) -t "@BUILD@.@PATCH_HI@.@PATCH_LO@" -e "PATCH_HI=int(PATCH)/256" -e "PATCH_LO=int(PATCH)%256")',
+  },  # variables
+}
diff --git a/build/util/version.py b/build/util/version.py
new file mode 100755
index 0000000..4d3691a
--- /dev/null
+++ b/build/util/version.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+import argparse
+import os
+import sys
+
+
+def fetch_values_from_file(values_dict, file_name):
+  """
+  Fetches KEYWORD=VALUE settings from the specified file.
+
+  Everything to the left of the first '=' is the keyword,
+  everything to the right is the value.  No stripping of
+  white space, so beware.
+
+  The file must exist, otherwise you get the Python exception from open().
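+
+  Example input (illustrative; the keys consumed elsewhere in this script are
+  MAJOR, MINOR, BUILD, PATCH and LASTCHANGE):
+
+    MAJOR=1
+    MINOR=18
+    BUILD=0
+    PATCH=0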
+  """
+  for line in open(file_name, 'r').readlines():
+    key, val = line.rstrip('\r\n').split('=', 1)
+    values_dict[key] = val
+
+
+def fetch_values(file_list):
+  """
+  Returns a dictionary of values to be used for substitution, populating
+  the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
+
+  Explicitly adds the following value from internal calculations:
+
+    OFFICIAL_BUILD
+  """
+  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+  if CHROME_BUILD_TYPE == '_official':
+    official_build = '1'
+  else:
+    official_build = '0'
+
+  values = dict(
+    OFFICIAL_BUILD = official_build,
+  )
+
+  for file_name in file_list:
+    fetch_values_from_file(values, file_name)
+
+  return values
+
+
+def subst_template(contents, values):
+  """
+  Returns the template with substituted values from the specified dictionary.
+
+  Keywords to be substituted are surrounded by '@':  @KEYWORD@.
+
+  No attempt is made to avoid recursive substitution.  The order
+  of evaluation is random based on the order of the keywords returned
+  by the Python dictionary.  So do NOT substitute a value that
+  contains any @KEYWORD@ strings expecting them to be recursively
+  substituted, okay?
+  """
+  for key, val in values.iteritems():
+    try:
+      contents = contents.replace('@' + key + '@', val)
+    except TypeError:
+      print repr(key), repr(val)
+  return contents
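+
+
+# Illustrative example (not part of the upstream script):
+#   subst_template('@MAJOR@.@MINOR@', {'MAJOR': '1', 'MINOR': '18'})
+#   # -> '1.18'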
+
+
+def subst_file(file_name, values):
+  """
+  Returns the contents of the specified file_name with substituted
+  values from the specified dictionary.
+
+  This is like subst_template, except it operates on a file.
+  """
+  template = open(file_name, 'r').read()
+  return subst_template(template, values)
+
+
+def write_if_changed(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    pass
+  else:
+    if contents == old_contents:
+      return
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-f', '--file', action='append', default=[],
+                      help='Read variables from FILE.')
+  parser.add_argument('-i', '--input', default=None,
+                      help='Read strings to substitute from FILE.')
+  parser.add_argument('-o', '--output', default=None,
+                      help='Write substituted strings to FILE.')
+  parser.add_argument('-t', '--template', default=None,
+                      help='Use TEMPLATE as the strings to substitute.')
+  parser.add_argument('-e', '--eval', action='append', default=[],
+                      help='Evaluate VAL after reading variables. Can be used '
+                           'to synthesize variables, e.g. '
+                           '-e \'PATCH_HI=int(PATCH)/256\'.')
+  parser.add_argument('args', nargs=argparse.REMAINDER,
+                      help='For compatibility: INPUT and OUTPUT can be '
+                           'passed as positional arguments.')
+  options = parser.parse_args()
+
+  evals = {}
+  for expression in options.eval:
+    try:
+      evals.update(dict([expression.split('=', 1)]))
+    except ValueError:
+      parser.error('-e requires VAR=VAL')
+
+  # Compatibility with old versions that considered the first two positional
+  # arguments shorthands for --input and --output.
+  while len(options.args) and (options.input is None or \
+                               options.output is None):
+    if options.input is None:
+      options.input = options.args.pop(0)
+    elif options.output is None:
+      options.output = options.args.pop(0)
+  if options.args:
+    parser.error('Unexpected arguments: %r' % options.args)
+
+  values = fetch_values(options.file)
+  for key, val in evals.iteritems():
+    values[key] = str(eval(val, globals(), values))
+
+  if options.template is not None:
+    contents = subst_template(options.template, values)
+  elif options.input:
+    contents = subst_file(options.input, values)
+  else:
+    # Generate a default set of version information.
+    contents = """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+  if options.output is not None:
+    write_if_changed(options.output, contents)
+  else:
+    print contents
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/vs_toolchain.py b/build/vs_toolchain.py
new file mode 100644
index 0000000..16f4477
--- /dev/null
+++ b/build/vs_toolchain.py
@@ -0,0 +1,259 @@
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import json
+import os
+import pipes
+import shutil
+import subprocess
+import sys
+
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+json_data_file = os.path.join(script_dir, 'win_toolchain.json')
+
+
+import gyp
+
+
+def SetEnvironmentAndGetRuntimeDllDirs():
+  """Sets up os.environ to use the depot_tools VS toolchain with gyp, and
+  returns the location of the VS runtime DLLs so they can be copied into
+  the output directory after gyp generation.
+  """
+  vs2013_runtime_dll_dirs = None
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+    if not os.path.exists(json_data_file):
+      Update()
+    with open(json_data_file, 'r') as tempf:
+      toolchain_data = json.load(tempf)
+
+    toolchain = toolchain_data['path']
+    version = toolchain_data['version']
+    win_sdk = toolchain_data.get('win_sdk')
+    if not win_sdk:
+      win_sdk = toolchain_data['win8sdk']
+    wdk = toolchain_data['wdk']
+    # TODO(scottmg): The order unfortunately matters in these. They should be
+    # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
+    # below). http://crbug.com/345992
+    vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']
+
+    os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+    os.environ['GYP_MSVS_VERSION'] = version
+    # We need to make sure windows_sdk_path is set to the automated
+    # toolchain values in GYP_DEFINES, but don't want to override any other
+    # values there.
+    gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
+    gyp_defines_dict['windows_sdk_path'] = win_sdk
+    os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
+        for k, v in gyp_defines_dict.iteritems())
+    os.environ['WINDOWSSDKDIR'] = win_sdk
+    os.environ['WDK_DIR'] = wdk
+    # Include the VS runtime in the PATH in case it's not machine-installed.
+    runtime_path = ';'.join(vs2013_runtime_dll_dirs)
+    os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
+  return vs2013_runtime_dll_dirs
+
+
+def _VersionNumber():
+  """Gets the standard version number ('120', '140', etc.) based on
+  GYP_MSVS_VERSION."""
+  if os.environ['GYP_MSVS_VERSION'] == '2013':
+    return '120'
+  elif os.environ['GYP_MSVS_VERSION'] == '2015':
+    return '140'
+  else:
+    raise ValueError('Unexpected GYP_MSVS_VERSION')
+
+
+def _CopyRuntimeImpl(target, source):
+  """Copy |source| to |target| if it doesn't already exist or if it
+  needs to be updated.
+  """
+  if (os.path.isdir(os.path.dirname(target)) and
+      (not os.path.isfile(target) or
+      os.stat(target).st_mtime != os.stat(source).st_mtime)):
+    print 'Copying %s to %s...' % (source, target)
+    if os.path.exists(target):
+      os.unlink(target)
+    shutil.copy2(source, target)
+
+
+def _CopyRuntime2013(target_dir, source_dir, dll_pattern):
+  """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
+  exist, but the target directory does exist."""
+  for file_part in ('p', 'r'):
+    dll = dll_pattern % file_part
+    target = os.path.join(target_dir, dll)
+    source = os.path.join(source_dir, dll)
+    _CopyRuntimeImpl(target, source)
+
+
+def _CopyRuntime2015(target_dir, source_dir, dll_pattern):
+  """Copy both the msvcp and vccorlib runtime DLLs, only if the target doesn't
+  exist, but the target directory does exist."""
+  for file_part in ('msvcp', 'vccorlib'):
+    dll = dll_pattern % file_part
+    target = os.path.join(target_dir, dll)
+    source = os.path.join(source_dir, dll)
+    _CopyRuntimeImpl(target, source)
+
+
+def CopyVsRuntimeDlls(output_dir, runtime_dirs):
+  """Copies the VS runtime DLLs from the given |runtime_dirs| to the output
+  directory so that even if not system-installed, built binaries are likely to
+  be able to run.
+
+  This needs to be run after gyp has been run so that the expected target
+  output directories are already created.
+  """
+  assert sys.platform.startswith(('win32', 'cygwin'))
+
+  x86, x64 = runtime_dirs
+  out_debug = os.path.join(output_dir, 'Debug')
+  out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
+  out_release = os.path.join(output_dir, 'Release')
+  out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
+  out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
+  out_release_x64 = os.path.join(output_dir, 'Release_x64')
+
+  if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
+    os.makedirs(out_debug_nacl64)
+  if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
+    os.makedirs(out_release_nacl64)
+  if os.environ.get('GYP_MSVS_VERSION') == '2015':
+    _CopyRuntime2015(out_debug,          x86, '%s140d.dll')
+    _CopyRuntime2015(out_release,        x86, '%s140.dll')
+    _CopyRuntime2015(out_debug_x64,      x64, '%s140d.dll')
+    _CopyRuntime2015(out_release_x64,    x64, '%s140.dll')
+    _CopyRuntime2015(out_debug_nacl64,   x64, '%s140d.dll')
+    _CopyRuntime2015(out_release_nacl64, x64, '%s140.dll')
+  else:
+    # VS2013 is the default.
+    _CopyRuntime2013(out_debug,          x86, 'msvc%s120d.dll')
+    _CopyRuntime2013(out_release,        x86, 'msvc%s120.dll')
+    _CopyRuntime2013(out_debug_x64,      x64, 'msvc%s120d.dll')
+    _CopyRuntime2013(out_release_x64,    x64, 'msvc%s120.dll')
+    _CopyRuntime2013(out_debug_nacl64,   x64, 'msvc%s120d.dll')
+    _CopyRuntime2013(out_release_nacl64, x64, 'msvc%s120.dll')
+
+  # Copy the PGO runtime library to the release directories.
+  if os.environ.get('GYP_MSVS_OVERRIDE_PATH'):
+    pgo_x86_runtime_dir = os.path.join(os.environ.get('GYP_MSVS_OVERRIDE_PATH'),
+                                       'VC', 'bin')
+    pgo_x64_runtime_dir = os.path.join(pgo_x86_runtime_dir, 'amd64')
+    pgo_runtime_dll = 'pgort' + _VersionNumber() + '.dll'
+    source_x86 = os.path.join(pgo_x86_runtime_dir, pgo_runtime_dll)
+    if os.path.exists(source_x86):
+      _CopyRuntimeImpl(os.path.join(out_release, pgo_runtime_dll), source_x86)
+    source_x64 = os.path.join(pgo_x64_runtime_dir, pgo_runtime_dll)
+    if os.path.exists(source_x64):
+      _CopyRuntimeImpl(os.path.join(out_release_x64, pgo_runtime_dll),
+                       source_x64)
+
+
+def CopyDlls(target_dir, configuration, target_cpu):
+  """Copy the VS runtime DLLs into the requested directory as needed.
+
+  configuration is one of 'Debug' or 'Release'.
+  target_cpu is one of 'x86' or 'x64'.
+
+  The debug configuration gets both the debug and release DLLs; the
+  release config only the latter.
+  """
+  vs2013_runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+  if not vs2013_runtime_dll_dirs:
+    return
+
+  x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+  runtime_dir = x64_runtime if target_cpu == 'x64' else x86_runtime
+  _CopyRuntime2013(
+      target_dir, runtime_dir, 'msvc%s' + _VersionNumber() + '.dll')
+  if configuration == 'Debug':
+    _CopyRuntime2013(
+        target_dir, runtime_dir, 'msvc%s' + _VersionNumber() + 'd.dll')
+
+
+def _GetDesiredVsToolchainHashes():
+  """Load a list of SHA1s corresponding to the toolchains that we want installed
+  to build with."""
+  # TODO(scottmg): If explicitly set to VS2015 override hashes to the VS2015 RC
+  # toolchain. http://crbug.com/492774.
+  if os.environ.get('GYP_MSVS_VERSION') == '2015':
+    return ['40721575c85171cea5d7afe5ec17bd108a94796e']
+  else:
+    # Default to VS2013.
+    return ['ee7d718ec60c2dc5d255bbe325909c2021a7efef']
+
+
+def Update():
+  """Requests an update of the toolchain to the specific hashes we have at
+  this revision. The update outputs a .json of the various configuration
+  information required to pass to gyp which we use in |GetToolchainDir()|.
+  """
+  depot_tools_win_toolchain = \
+      bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+  if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+    import find_depot_tools
+    depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+    get_toolchain_args = [
+        sys.executable,
+        os.path.join(depot_tools_path,
+                    'win_toolchain',
+                    'get_toolchain_if_necessary.py'),
+        '--output-json', json_data_file,
+      ] + _GetDesiredVsToolchainHashes()
+    subprocess.check_call(get_toolchain_args)
+
+  return 0
+
+
+def GetToolchainDir():
+  """Gets location information about the current toolchain (must have been
+  previously updated by 'update'). This is used for the GN build."""
+  runtime_dll_dirs = SetEnvironmentAndGetRuntimeDllDirs()
+
+  # If WINDOWSSDKDIR is not set, search the default SDK path and set it.
+  if not 'WINDOWSSDKDIR' in os.environ:
+    default_sdk_path = 'C:\\Program Files (x86)\\Windows Kits\\8.1'
+    if os.path.isdir(default_sdk_path):
+      os.environ['WINDOWSSDKDIR'] = default_sdk_path
+
+  print '''vs_path = "%s"
+sdk_path = "%s"
+vs_version = "%s"
+wdk_dir = "%s"
+runtime_dirs = "%s"
+''' % (
+      os.environ['GYP_MSVS_OVERRIDE_PATH'],
+      os.environ['WINDOWSSDKDIR'],
+      os.environ['GYP_MSVS_VERSION'],
+      os.environ.get('WDK_DIR', ''),
+      ';'.join(runtime_dll_dirs or ['None']))
+
+
+def main():
+  if not sys.platform.startswith(('win32', 'cygwin')):
+    return 0
+  commands = {
+      'update': Update,
+      'get_toolchain_dir': GetToolchainDir,
+      'copy_dlls': CopyDlls,
+  }
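+  # Illustrative invocations:
+  #   python vs_toolchain.py update
+  #   python vs_toolchain.py copy_dlls <target-dir> Debug x64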
+  if len(sys.argv) < 2 or sys.argv[1] not in commands:
+    print >>sys.stderr, 'Expected one of: %s' % ', '.join(commands)
+    return 1
+  return commands[sys.argv[1]](*sys.argv[2:])
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/whitespace_file.txt b/build/whitespace_file.txt
new file mode 100644
index 0000000..ea82f4e
--- /dev/null
+++ b/build/whitespace_file.txt
@@ -0,0 +1,156 @@
+Copyright 2014 The Chromium Authors. All rights reserved.
+Use of this useless file is governed by a BSD-style license that can be
+found in the LICENSE file.
+
+
+This file is used for making non-code changes to trigger buildbot cycles. Make
+any modification below this line.
+
+======================================================================
+
+Let's make a story. Add zero+ sentences for every commit:
+
+CHÄPTER 1:
+It was a dark and blinky night; the rain fell in torrents -- except at
+occasional intervals, when it was checked by a violent gust of wind which
+swept up the streets (for it is in London that our scene lies), rattling along
+the housetops, and fiercely agitating the scanty flame of the lamps that
+struggled against the elements. A hooded figure emerged.
+
+It was a Domo-Kun.
+
+"What took you so long?", inquired his wife.
+
+Silence. Oblivious to his silence, she continued, "Did Mr. Usagi enjoy the
+waffles you brought him?" "You know him, he's not one to forego a waffle,
+no matter how burnt," he snickered.
+
+The pause was filled with the sound of compile errors.
+
+CHAPTER 2:
+The jelly was as dark as night, and just as runny.
+The Domo-Kun shuddered, remembering the way Mr. Usagi had speared his waffles
+with his fork, watching the runny jelly spread and pool across his plate,
+like the blood of a dying fawn. "It reminds me of that time --" he started, as
+his wife cut in quickly: "-- please. I can't bear to hear it.". A flurry of
+images coming from the past flowed through his mind.
+
+"You recall what happened on Mulholland drive?" The ceiling fan rotated slowly
+overhead, barely disturbing the thick cigarette smoke. No doubt was left about
+when the fan was last cleaned.
+
+There was a poignant pause.
+
+CHAPTER 3:
+Mr. Usagi felt that something wasn't right. Shortly after the Domo-Kun left he
+began feeling sick. He thought out loud to himself, "No, he wouldn't have done
+that to me." He considered that perhaps he shouldn't have pushed so hard.
+Perhaps he shouldn't have been so cold and sarcastic, after the unimaginable
+horror that had occurred just the week before.
+
+Next time, there won't be any sushi. Why sushi with waffles anyway?  It's like
+adorning breakfast cereal with halibut -- shameful.
+
+CHAPTER 4:
+The taste of stale sushi in his mouth the next morning was unbearable. He
+wondered where the sushi came from as he attempted to wash the taste away with
+a bottle of 3000¥ sake. He tries to recall the cook's face.  Purple? Probably.
+
+CHAPTER 5:
+Many tears later, Mr. Usagi would laugh at the memory of the earnest,
+well-intentioned Domo-Kun. Another day in the life. That is when he realized that
+life goes on.
+
+TRUISMS (1978-1983)
+JENNY HOLZER
+A LITTLE KNOWLEDGE CAN GO A LONG WAY
+A LOT OF PROFESSIONALS ARE CRACKPOTS
+A MAN CAN'T KNOW WHAT IT IS TO BE A MOTHER
+A NAME MEANS A LOT JUST BY ITSELF
+A POSITIVE ATTITUDE MEANS ALL THE DIFFERENCE IN THE WORLD
+A RELAXED MAN IS NOT NECESSARILY A BETTER MAN
+NO ONE SHOULD EVER USE SVN
+AN INFLEXIBLE POSITION SOMETIMES IS A SIGN OF PARALYSIS
+IT IS MANS FATE TO OUTSMART HIMSELF
+BEING SURE OF YOURSELF MEANS YOU'RE A FOOL
+AM NOT
+ARE TOO
+IF AT FIRST YOU DON'T SUCCEED: TRY, EXCEPT, FINALLY
+AND THEN, TIME LEAPT BACKWARDS
+AAAAAAAAAAAAAAAAAAAAAAAAAAAAAaaaaaaaaaaaaaaaaaaaaaaaaaaaahhhh LOT
+I'm really tempted to change something above the line.
+Reeccciiiipppppeeeeeesssssss!!!!!!!!!
+PEOPLE SAY "FAILURE IS NOT AN OPTION", BUT FAILURE IS ALWAYS AN OPTION.
+WHAT GOES UP MUST HAVE A NON-ZERO VELOCITY
+
+I can feel the heat closing in, feel them out there making their moves...
+What could possibly go wrong? We've already ate our cake.
+
+Stand Still. Pause Clocks. We can make the World Stop.
+WUBWUBWUBWUBWUB
+
+I want a 1917 build and you will give me what I want.
+
+This sentence is false.
+
+Beauty is in the eyes of a Beholder.
+
+I'm the best at space.
+
+The first time Yossarian saw the chaplain, he fell madly in love with him.
+*
+*
+*
+Give not thyself up, then, to fire, lest it invert thee, deaden thee; as for
+the time it did me. There is a wisdom that is woe; but there is a woe that is
+madness. And there is a Catskill eagle in some souls that can alike dive down
+into the blackest gorges, and soar out of them again and become invisible in
+the sunny spaces. And even if he for ever flies within the gorge, that gorge
+is in the mountains; so that even in his lowest swoop the mountain eagle is
+still higher than other birds upon the plain, even though they soar.
+*
+*
+*
+
+I'm here to commit lines and drop rhymes
+*
+This is a line to test and try uploading a cl.
+
+And lo, in the year 2014, there was verily an attempt to upgrade to GCC 4.8 on
+the Android bots, and it was good. Except on one bot, where it was bad. And
+lo, the change was reverted, and GCC went back to 4.6, where code is slower
+and less optimized. And verily did it break the build, because artifacts had
+been created with 4.8, and alignment was no longer the same, and a great
+sadness descended upon the Android GN buildbot, and it did refuseth to build
+any more. But the sheriffs thought to themselves: Placebo! Let us clobber the
+bot, and perhaps it will rebuild with GCC 4.6, which hath worked for many many
+seasons. And so they modified the whitespace file with these immortal lines,
+and visited it upon the bots, that great destruction might be wrought upon
+their outdated binaries. In clobberus, veritas.
+
+As the git approaches, light begins to shine through the SCM thrice again...
+However, the git, is, after all, quite stupid.
+
+Suddenly Domo-Kun found itself in a room filled with dazzling mirrors.
+
+A herd of wild gits appears!  Time for CQ :D
+And one more for sizes.py...
+
+Sigh.
+
+It was love at first sight.  The moment Yossarian first laid eyes on the chaplain, he fell madly in love with him.
+
+Cool whitespace change for git-cl land
+
+Oh god the bots are red! I'm blind! Mmmm, cronuts.
+
+If you stand on your head, you will get footprints in your hair.
+
+sigh
+sigher
+pick up cls
+
+In the BUILD we trust.
+^_^
+
+In the masters we don't.
diff --git a/build/win/asan.gyp b/build/win/asan.gyp
new file mode 100644
index 0000000..d938426
--- /dev/null
+++ b/build/win/asan.gyp
@@ -0,0 +1,30 @@
+# Copyright (c) 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+   'targets': [
+     {
+       'target_name': 'asan_dynamic_runtime',
+       'type': 'none',
+       'variables': {
+         # Every target is going to depend on asan_dynamic_runtime, so allow
+         # this one to depend on itself.
+         'prune_self_dependency': 1,
+       },
+       'conditions': [
+         ['OS=="win"', {
+           'copies': [
+             {
+               'destination': '<(PRODUCT_DIR)',
+               'files': [
+                 # Path is relative to this GYP file.
+                 '<(DEPTH)/<(make_clang_dir)/lib/clang/<!(python <(DEPTH)/tools/clang/scripts/update.py --print-clang-version)/lib/windows/clang_rt.asan_dynamic-i386.dll',
+               ],
+             },
+           ],
+         }],
+       ],
+     },
+   ],
+}
diff --git a/build/win/chrome_win.croc b/build/win/chrome_win.croc
new file mode 100644
index 0000000..e1e3bb7
--- /dev/null
+++ b/build/win/chrome_win.croc
@@ -0,0 +1,26 @@
+# -*- python -*-
+# Crocodile config file for Chromium windows
+
+{
+  # List of rules, applied in order
+  'rules' : [
+    # Specify inclusions before exclusions, since rules are in order.
+
+    # Don't include chromeos, posix, or linux specific files
+    {
+      'regexp' : '.*(_|/)(chromeos|linux|posix)(\\.|_)',
+      'include' : 0,
+    },
+    # Don't include ChromeOS dirs
+    {
+      'regexp' : '.*/chromeos/',
+      'include' : 0,
+    },
+
+    # Groups
+    {
+      'regexp' : '.*_test_win\\.',
+      'group' : 'test',
+    },
+  ],
+}
diff --git a/build/win/compatibility.manifest b/build/win/compatibility.manifest
new file mode 100644
index 0000000..10d10da
--- /dev/null
+++ b/build/win/compatibility.manifest
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
+  <compatibility xmlns="urn:schemas-microsoft-com:compatibility.v1">
+    <application>
+      <!--The ID below indicates application support for Windows Vista -->
+      <supportedOS Id="{e2011457-1546-43c5-a5fe-008deee3d3f0}"/>
+      <!--The ID below indicates application support for Windows 7 -->
+      <supportedOS Id="{35138b9a-5d96-4fbd-8e2d-a2440225f93a}"/>
+      <!--The ID below indicates application support for Windows 8 -->
+      <supportedOS Id="{4a2f28e3-53b9-4441-ba9c-d69d4a4a6e38}"/>
+      <!--The ID below indicates application support for Windows 8.1 -->
+      <supportedOS Id="{1f676c76-80e1-4239-95bb-83d0f6d0da78}"/>
+      <!--The ID below indicates application support for Windows 10 -->
+      <supportedOS Id="{8e0f7a12-bfb3-4fe8-b9a5-48fd50a15a9a}"/>
+    </application>
+  </compatibility>
+</assembly>
diff --git a/build/win/dbghelp_xp/README.chromium b/build/win/dbghelp_xp/README.chromium
new file mode 100644
index 0000000..a52cfad
--- /dev/null
+++ b/build/win/dbghelp_xp/README.chromium
@@ -0,0 +1,2 @@
+This dbghelp.dll is the redistributable version from the Windows 7 SDK, the
+last one to work on Windows XP.
diff --git a/build/win/dbghelp_xp/dbghelp.dll b/build/win/dbghelp_xp/dbghelp.dll
new file mode 100755
index 0000000..9f52a5d
--- /dev/null
+++ b/build/win/dbghelp_xp/dbghelp.dll
Binary files differ
diff --git a/build/win/importlibs/create_import_lib.gypi b/build/win/importlibs/create_import_lib.gypi
new file mode 100644
index 0000000..9cb0d345
--- /dev/null
+++ b/build/win/importlibs/create_import_lib.gypi
@@ -0,0 +1,53 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to create import libraries from an import description file in a consistent
+# manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+#   'target_name': 'my_proto_lib',
+#   'type': 'none',
+#   'sources': [
+#     'foo.imports',
+#     'bar.imports',
+#   ],
+#   'variables': {
+#     # Optional, see below: 'proto_in_dir': '.'
+#     'create_importlib': 'path-to-script',
+#     'lib_dir': 'path-to-output-directory',
+#   },
+#   'includes': ['path/to/this/gypi/file'],
+# }
+#
+# This will generate import libraries named 'foo.lib' and 'bar.lib' in the
+# specified lib directory.
+
+{
+  'variables': {
+    'create_importlib': '<(DEPTH)/build/win/importlibs/create_importlib_win.py',
+    'lib_dir': '<(PRODUCT_DIR)/lib',
+  },
+  'rules': [
+    {
+      'rule_name': 'create_import_lib',
+      'extension': 'imports',
+      'inputs': [
+        '<(create_importlib)',
+      ],
+      'outputs': [
+        '<(lib_dir)/<(RULE_INPUT_ROOT).lib',
+      ],
+      'action': [
+        'python',
+        '<(create_importlib)',
+        '--output-file', '<@(_outputs)',
+        '<(RULE_INPUT_PATH)',
+      ],
+      'message': 'Generating import library from <(RULE_INPUT_PATH)',
+      'process_outputs_as_sources': 0,
+    },
+  ],
+}
diff --git a/build/win/importlibs/create_importlib_win.py b/build/win/importlibs/create_importlib_win.py
new file mode 100755
index 0000000..bb6a2f0
--- /dev/null
+++ b/build/win/importlibs/create_importlib_win.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+"""Creates an import library from an import description file."""
+import ast
+import logging
+import optparse
+import os
+import os.path
+import shutil
+import subprocess
+import sys
+import tempfile
+
+
+_USAGE = """\
+Usage: %prog [options] [imports-file]
+
+Creates an import library from imports-file.
+
+Note: this script uses the microsoft assembler (ml.exe) and the library tool
+    (lib.exe), both of which must be in path.
+"""
+
+
+_ASM_STUB_HEADER = """\
+; This file is autogenerated by create_importlib_win.py, do not edit.
+.386
+.MODEL FLAT, C
+.CODE
+
+; Stubs to provide mangled names to lib.exe for the
+; correct generation of import libs.
+"""
+
+
+_DEF_STUB_HEADER = """\
+; This file is autogenerated by create_importlib_win.py, do not edit.
+
+; Export declarations for generating import libs.
+"""
+
+
+_LOGGER = logging.getLogger()
+
+
+
+class _Error(Exception):
+  pass
+
+
+class _ImportLibraryGenerator(object):
+  def __init__(self, temp_dir):
+    self._temp_dir = temp_dir
+
+  def _Shell(self, cmd, **kw):
+    ret = subprocess.call(cmd, **kw)
+    _LOGGER.info('Running "%s" returned %d.', cmd, ret)
+    if ret != 0:
+      raise _Error('Command "%s" returned %d.' % (cmd, ret))
+
+  def _ReadImportsFile(self, imports_file):
+    # Slurp the imports file.
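+    # The file holds a Python-literal dict; for example (illustrative, using
+    # the keys consumed by CreateImportLib below):
+    #   {'dll_name': 'user32.dll',
+    #    'architecture': 'x86',
+    #    'imports': ['AllowSetForegroundWindow@4']}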
+    return ast.literal_eval(open(imports_file).read())
+
+  def _WriteStubsFile(self, import_names, output_file):
+    output_file.write(_ASM_STUB_HEADER)
+
+    for name in import_names:
+      output_file.write('%s PROC\n' % name)
+      output_file.write('%s ENDP\n' % name)
+
+    output_file.write('END\n')
+
+  def _WriteDefFile(self, dll_name, import_names, output_file):
+    output_file.write(_DEF_STUB_HEADER)
+    output_file.write('NAME %s\n' % dll_name)
+    output_file.write('EXPORTS\n')
+    for name in import_names:
+      name = name.split('@')[0]
+      output_file.write('  %s\n' % name)
+
+  def _CreateObj(self, dll_name, imports):
+    """Writes an assembly file containing empty declarations.
+
+    For each imported function of the form:
+
+    AddClipboardFormatListener@4 PROC
+    AddClipboardFormatListener@4 ENDP
+
+    The resulting object file is then supplied to lib.exe with a .def file
+    declaring the corresponding non-adorned exports as they appear on the
+    exporting DLL, e.g.
+
+    EXPORTS
+      AddClipboardFormatListener
+
+    In combination, the .def file and the .obj file cause lib.exe to generate
+    an x86 import lib with public symbols named like
+    "__imp__AddClipboardFormatListener@4", binding to exports named like
+    "AddClipboardFormatListener".
+
+    All of this is perpetrated in a temporary directory, as the intermediate
+    artifacts are quick and easy to produce, and of no interest to anyone
+    after the fact."""
+
+    # Create an .asm file to provide stdcall-like stub names to lib.exe.
+    asm_name = dll_name + '.asm'
+    _LOGGER.info('Writing asm file "%s".', asm_name)
+    with open(os.path.join(self._temp_dir, asm_name), 'wb') as stubs_file:
+      self._WriteStubsFile(imports, stubs_file)
+
+    # Invoke on the assembler to compile it to .obj.
+    obj_name = dll_name + '.obj'
+    cmdline = ['ml.exe', '/nologo', '/c', asm_name, '/Fo', obj_name]
+    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull, 'w'))
+
+    return obj_name
+
+  def _CreateImportLib(self, dll_name, imports, architecture, output_file):
+    """Creates an import lib binding imports to dll_name for architecture.
+
+    On success, writes the import library to output file.
+    """
+    obj_file = None
+
+    # For x86 architecture we have to provide an object file for correct
+    # name mangling between the import stubs and the exported functions.
+    if architecture == 'x86':
+      obj_file = self._CreateObj(dll_name, imports)
+
+    # Create the corresponding .def file. This file has the non stdcall-adorned
+    # names, as exported by the destination DLL.
+    def_name = dll_name + '.def'
+    _LOGGER.info('Writing def file "%s".', def_name)
+    with open(os.path.join(self._temp_dir, def_name), 'wb') as def_file:
+      self._WriteDefFile(dll_name, imports, def_file)
+
+    # Invoke on lib.exe to create the import library.
+    # We generate everything into the temporary directory, as the .exp export
+    # files will be generated at the same path as the import library, and we
+    # don't want those files potentially gunking the works.
+    dll_base_name, ext = os.path.splitext(dll_name)
+    lib_name = dll_base_name + '.lib'
+    cmdline = ['lib.exe',
+               '/machine:%s' % architecture,
+               '/def:%s' % def_name,
+               '/out:%s' % lib_name]
+    if obj_file:
+      cmdline.append(obj_file)
+
+    self._Shell(cmdline, cwd=self._temp_dir, stdout=open(os.devnull, 'w'))
+
+    # Copy the .lib file to the output directory.
+    shutil.copyfile(os.path.join(self._temp_dir, lib_name), output_file)
+    _LOGGER.info('Created "%s".', output_file)
+
+  def CreateImportLib(self, imports_file, output_file):
+    # Read the imports file.
+    imports = self._ReadImportsFile(imports_file)
+
+    # Creates the requested import library in the output directory.
+    self._CreateImportLib(imports['dll_name'],
+                          imports['imports'],
+                          imports.get('architecture', 'x86'),
+                          output_file)
+
+
+def main():
+  parser = optparse.OptionParser(usage=_USAGE)
+  parser.add_option('-o', '--output-file',
+                    help='Specifies the output file path.')
+  parser.add_option('-k', '--keep-temp-dir',
+                    action='store_true',
+                    help='Keep the temporary directory.')
+  parser.add_option('-v', '--verbose',
+                    action='store_true',
+                    help='Verbose logging.')
+
+  options, args = parser.parse_args()
+
+  if len(args) != 1:
+    parser.error('You must provide an imports file.')
+
+  if not options.output_file:
+    parser.error('You must provide an output file.')
+
+  options.output_file = os.path.abspath(options.output_file)
+
+  if options.verbose:
+    logging.basicConfig(level=logging.INFO)
+  else:
+    logging.basicConfig(level=logging.WARN)
+
+
+  temp_dir = tempfile.mkdtemp()
+  _LOGGER.info('Created temporary directory "%s."', temp_dir)
+  try:
+    # Create a generator and create the import lib.
+    generator = _ImportLibraryGenerator(temp_dir)
+
+    generator.CreateImportLib(args[0], options.output_file)
+    ret = 0
+  except Exception:
+    _LOGGER.exception('Failed to create import lib.')
+    ret = 1
+  finally:
+    if not options.keep_temp_dir:
+      shutil.rmtree(temp_dir)
+      _LOGGER.info('Deleted temporary directory "%s."', temp_dir)
+
+  return ret
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/importlibs/filter_export_list.py b/build/win/importlibs/filter_export_list.py
new file mode 100755
index 0000000..c2489a9d
--- /dev/null
+++ b/build/win/importlibs/filter_export_list.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+"""Help maintaining DLL import lists."""
+import ast
+import optparse
+import re
+import sys
+
+
+_EXPORT_RE = re.compile(r"""
+  ^\s*(?P<ordinal>[0-9]+)  # The ordinal field.
+  \s+(?P<hint>[0-9A-F]+)   # The hint field.
+  \s(?P<rva>........)      # The RVA field.
+  \s+(?P<name>[^ ]+)       # And finally the name we're really after.
+""", re.VERBOSE)
+
+
+_USAGE = r"""\
+Usage: %prog [options] [master-file]
+
+This script filters a list of exports from a DLL, generated from something
+like the following command line:
+
+C:\> dumpbin /exports user32.dll
+
+against a master list of imports built from e.g.
+
+C:\> dumpbin /exports user32.lib
+
+The point of this is to trim non-public exports from the list, and to
+normalize the names to their stdcall-mangled form for the generation of
+import libraries.
+Note that the export names from the latter incantation are stdcall-mangled,
+e.g. they are suffixed with "@" and the number of argument bytes to the
+function.
+"""
+
+def _ReadMasterFile(master_file):
+  # Slurp the master file.
+  with open(master_file) as f:
+    master_exports = ast.literal_eval(f.read())
+
+  master_mapping = {}
+  for export in master_exports:
+    name = export.split('@')[0]
+    master_mapping[name] = export
+
+  return master_mapping
+
+
+def main():
+  parser = optparse.OptionParser(usage=_USAGE)
+  parser.add_option('-r', '--reverse',
+                    action='store_true',
+                    help='Reverse the matching, i.e. return the functions '
+                         'in the master list that aren\'t in the input.')
+
+  options, args = parser.parse_args()
+  if len(args) != 1:
+    parser.error('Must provide a master file.')
+
+  master_mapping = _ReadMasterFile(args[0])
+
+  found_exports = []
+  for line in sys.stdin:
+    match = _EXPORT_RE.match(line)
+    if match:
+      export_name = master_mapping.get(match.group('name'))
+      if export_name:
+        found_exports.append(export_name)
+
+  if options.reverse:
+    # Keep only the master exports that were not matched in the input.
+    found_exports = set(master_mapping.values()) - set(found_exports)
+
+  # Sort the found exports for tidy output.
+  print '\n'.join(sorted(found_exports))
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/build/win/importlibs/x86/user32.winxp.imports b/build/win/importlibs/x86/user32.winxp.imports
new file mode 100644
index 0000000..24403a8
--- /dev/null
+++ b/build/win/importlibs/x86/user32.winxp.imports
@@ -0,0 +1,670 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file is used to create a custom import library for Chrome's use of
+# user32.dll exports. The set of exports defined below is limited to those
+# available on Windows XP; see the notes above the 'imports' list.
+{
+  'architecture': 'x86',
+
+  # The DLL to bind to.
+  'dll_name': 'user32.dll',
+
+  # Name of the generated import library.
+  'importlib_name': 'user32.winxp.lib',
+
+  # This is the set of exports observed on a user32.dll from Windows XP SP2.
+  # The version of the DLL where these were observed is 5.1.2600.2180.
+  # Incidentally this set of exports also coincides with Windows XP SP3, where
+  # the version of the DLL is 5.1.2600.5512.
+  # Don't add new imports here unless and until the minimal supported
+  # Windows version has been bumped past Windows XP SP2+.
+  'imports': [
+    'ActivateKeyboardLayout@8',
+    'AdjustWindowRect@12',
+    'AdjustWindowRectEx@16',
+    'AllowSetForegroundWindow@4',
+    'AnimateWindow@12',
+    'AnyPopup@0',
+    'AppendMenuA@16',
+    'AppendMenuW@16',
+    'ArrangeIconicWindows@4',
+    'AttachThreadInput@12',
+    'BeginDeferWindowPos@4',
+    'BeginPaint@8',
+    'BlockInput@4',
+    'BringWindowToTop@4',
+    'BroadcastSystemMessage@20',
+    'BroadcastSystemMessageA@20',
+    'BroadcastSystemMessageExA@24',
+    'BroadcastSystemMessageExW@24',
+    'BroadcastSystemMessageW@20',
+    'CallMsgFilter@8',
+    'CallMsgFilterA@8',
+    'CallMsgFilterW@8',
+    'CallNextHookEx@16',
+    'CallWindowProcA@20',
+    'CallWindowProcW@20',
+    'CascadeChildWindows@8',
+    'CascadeWindows@20',
+    'ChangeClipboardChain@8',
+    'ChangeDisplaySettingsA@8',
+    'ChangeDisplaySettingsExA@20',
+    'ChangeDisplaySettingsExW@20',
+    'ChangeDisplaySettingsW@8',
+    'ChangeMenuA@20',
+    'ChangeMenuW@20',
+    'CharLowerA@4',
+    'CharLowerBuffA@8',
+    'CharLowerBuffW@8',
+    'CharLowerW@4',
+    'CharNextA@4',
+    'CharNextExA@12',
+    'CharNextW@4',
+    'CharPrevA@8',
+    'CharPrevExA@16',
+    'CharPrevW@8',
+    'CharToOemA@8',
+    'CharToOemBuffA@12',
+    'CharToOemBuffW@12',
+    'CharToOemW@8',
+    'CharUpperA@4',
+    'CharUpperBuffA@8',
+    'CharUpperBuffW@8',
+    'CharUpperW@4',
+    'CheckDlgButton@12',
+    'CheckMenuItem@12',
+    'CheckMenuRadioItem@20',
+    'CheckRadioButton@16',
+    'ChildWindowFromPoint@12',
+    'ChildWindowFromPointEx@16',
+    'ClientToScreen@8',
+    'ClipCursor@4',
+    'CloseClipboard@0',
+    'CloseDesktop@4',
+    'CloseWindow@4',
+    'CloseWindowStation@4',
+    'CopyAcceleratorTableA@12',
+    'CopyAcceleratorTableW@12',
+    'CopyIcon@4',
+    'CopyImage@20',
+    'CopyRect@8',
+    'CountClipboardFormats@0',
+    'CreateAcceleratorTableA@8',
+    'CreateAcceleratorTableW@8',
+    'CreateCaret@16',
+    'CreateCursor@28',
+    'CreateDesktopA@24',
+    'CreateDesktopW@24',
+    'CreateDialogIndirectParamA@20',
+    'CreateDialogIndirectParamW@20',
+    'CreateDialogParamA@20',
+    'CreateDialogParamW@20',
+    'CreateIcon@28',
+    'CreateIconFromResource@16',
+    'CreateIconFromResourceEx@28',
+    'CreateIconIndirect@4',
+    'CreateMDIWindowA@40',
+    'CreateMDIWindowW@40',
+    'CreateMenu@0',
+    'CreatePopupMenu@0',
+    'CreateWindowExA@48',
+    'CreateWindowExW@48',
+    'CreateWindowStationA@16',
+    'CreateWindowStationW@16',
+    'DdeAbandonTransaction@12',
+    'DdeAccessData@8',
+    'DdeAddData@16',
+    'DdeClientTransaction@32',
+    'DdeCmpStringHandles@8',
+    'DdeConnect@16',
+    'DdeConnectList@20',
+    'DdeCreateDataHandle@28',
+    'DdeCreateStringHandleA@12',
+    'DdeCreateStringHandleW@12',
+    'DdeDisconnect@4',
+    'DdeDisconnectList@4',
+    'DdeEnableCallback@12',
+    'DdeFreeDataHandle@4',
+    'DdeFreeStringHandle@8',
+    'DdeGetData@16',
+    'DdeGetLastError@4',
+    'DdeImpersonateClient@4',
+    'DdeInitializeA@16',
+    'DdeInitializeW@16',
+    'DdeKeepStringHandle@8',
+    'DdeNameService@16',
+    'DdePostAdvise@12',
+    'DdeQueryConvInfo@12',
+    'DdeQueryNextServer@8',
+    'DdeQueryStringA@20',
+    'DdeQueryStringW@20',
+    'DdeReconnect@4',
+    'DdeSetQualityOfService@12',
+    'DdeSetUserHandle@12',
+    'DdeUnaccessData@4',
+    'DdeUninitialize@4',
+    'DefDlgProcA@16',
+    'DefDlgProcW@16',
+    'DefFrameProcA@20',
+    'DefFrameProcW@20',
+    'DefMDIChildProcA@16',
+    'DefMDIChildProcW@16',
+    'DefRawInputProc@12',
+    'DefWindowProcA@16',
+    'DefWindowProcW@16',
+    'DeferWindowPos@32',
+    'DeleteMenu@12',
+    'DeregisterShellHookWindow@4',
+    'DestroyAcceleratorTable@4',
+    'DestroyCaret@0',
+    'DestroyCursor@4',
+    'DestroyIcon@4',
+    'DestroyMenu@4',
+    'DestroyWindow@4',
+    'DialogBoxIndirectParamA@20',
+    'DialogBoxIndirectParamW@20',
+    'DialogBoxParamA@20',
+    'DialogBoxParamW@20',
+    'DisableProcessWindowsGhosting@0',
+    'DispatchMessageA@4',
+    'DispatchMessageW@4',
+    'DlgDirListA@20',
+    'DlgDirListComboBoxA@20',
+    'DlgDirListComboBoxW@20',
+    'DlgDirListW@20',
+    'DlgDirSelectComboBoxExA@16',
+    'DlgDirSelectComboBoxExW@16',
+    'DlgDirSelectExA@16',
+    'DlgDirSelectExW@16',
+    'DragDetect@12',
+    'DragObject@20',
+    'DrawAnimatedRects@16',
+    'DrawCaption@16',
+    'DrawEdge@16',
+    'DrawFocusRect@8',
+    'DrawFrame@16',
+    'DrawFrameControl@16',
+    'DrawIcon@16',
+    'DrawIconEx@36',
+    'DrawMenuBar@4',
+    'DrawStateA@40',
+    'DrawStateW@40',
+    'DrawTextA@20',
+    'DrawTextExA@24',
+    'DrawTextExW@24',
+    'DrawTextW@20',
+    'EditWndProc@16',
+    'EmptyClipboard@0',
+    'EnableMenuItem@12',
+    'EnableScrollBar@12',
+    'EnableWindow@8',
+    'EndDeferWindowPos@4',
+    'EndDialog@8',
+    'EndMenu@0',
+    'EndPaint@8',
+    'EndTask@12',
+    'EnumChildWindows@12',
+    'EnumClipboardFormats@4',
+    'EnumDesktopWindows@12',
+    'EnumDesktopsA@12',
+    'EnumDesktopsW@12',
+    'EnumDisplayDevicesA@16',
+    'EnumDisplayDevicesW@16',
+    'EnumDisplayMonitors@16',
+    'EnumDisplaySettingsA@12',
+    'EnumDisplaySettingsExA@16',
+    'EnumDisplaySettingsExW@16',
+    'EnumDisplaySettingsW@12',
+    'EnumPropsA@8',
+    'EnumPropsExA@12',
+    'EnumPropsExW@12',
+    'EnumPropsW@8',
+    'EnumThreadWindows@12',
+    'EnumWindowStationsA@8',
+    'EnumWindowStationsW@8',
+    'EnumWindows@8',
+    'EqualRect@8',
+    'ExcludeUpdateRgn@8',
+    'ExitWindowsEx@8',
+    'FillRect@12',
+    'FindWindowA@8',
+    'FindWindowExA@16',
+    'FindWindowExW@16',
+    'FindWindowW@8',
+    'FlashWindow@8',
+    'FlashWindowEx@4',
+    'FrameRect@12',
+    'FreeDDElParam@8',
+    'GetActiveWindow@0',
+    'GetAltTabInfo@20',
+    'GetAltTabInfoA@20',
+    'GetAltTabInfoW@20',
+    'GetAncestor@8',
+    'GetAsyncKeyState@4',
+    'GetCapture@0',
+    'GetCaretBlinkTime@0',
+    'GetCaretPos@4',
+    'GetClassInfoA@12',
+    'GetClassInfoExA@12',
+    'GetClassInfoExW@12',
+    'GetClassInfoW@12',
+    'GetClassLongA@8',
+    'GetClassLongW@8',
+    'GetClassNameA@12',
+    'GetClassNameW@12',
+    'GetClassWord@8',
+    'GetClientRect@8',
+    'GetClipCursor@4',
+    'GetClipboardData@4',
+    'GetClipboardFormatNameA@12',
+    'GetClipboardFormatNameW@12',
+    'GetClipboardOwner@0',
+    'GetClipboardSequenceNumber@0',
+    'GetClipboardViewer@0',
+    'GetComboBoxInfo@8',
+    'GetCursor@0',
+    'GetCursorInfo@4',
+    'GetCursorPos@4',
+    'GetDC@4',
+    'GetDCEx@12',
+    'GetDesktopWindow@0',
+    'GetDialogBaseUnits@0',
+    'GetDlgCtrlID@4',
+    'GetDlgItem@8',
+    'GetDlgItemInt@16',
+    'GetDlgItemTextA@16',
+    'GetDlgItemTextW@16',
+    'GetDoubleClickTime@0',
+    'GetFocus@0',
+    'GetForegroundWindow@0',
+    'GetGUIThreadInfo@8',
+    'GetGuiResources@8',
+    'GetIconInfo@8',
+    'GetInputDesktop@0',
+    'GetInputState@0',
+    'GetKBCodePage@0',
+    'GetKeyNameTextA@12',
+    'GetKeyNameTextW@12',
+    'GetKeyState@4',
+    'GetKeyboardLayout@4',
+    'GetKeyboardLayoutList@8',
+    'GetKeyboardLayoutNameA@4',
+    'GetKeyboardLayoutNameW@4',
+    'GetKeyboardState@4',
+    'GetKeyboardType@4',
+    'GetLastActivePopup@4',
+    'GetLastInputInfo@4',
+    'GetLayeredWindowAttributes@16',
+    'GetListBoxInfo@4',
+    'GetMenu@4',
+    'GetMenuBarInfo@16',
+    'GetMenuCheckMarkDimensions@0',
+    'GetMenuContextHelpId@4',
+    'GetMenuDefaultItem@12',
+    'GetMenuInfo@8',
+    'GetMenuItemCount@4',
+    'GetMenuItemID@8',
+    'GetMenuItemInfoA@16',
+    'GetMenuItemInfoW@16',
+    'GetMenuItemRect@16',
+    'GetMenuState@12',
+    'GetMenuStringA@20',
+    'GetMenuStringW@20',
+    'GetMessageA@16',
+    'GetMessageExtraInfo@0',
+    'GetMessagePos@0',
+    'GetMessageTime@0',
+    'GetMessageW@16',
+    'GetMonitorInfoA@8',
+    'GetMonitorInfoW@8',
+    'GetMouseMovePointsEx@20',
+    'GetNextDlgGroupItem@12',
+    'GetNextDlgTabItem@12',
+    'GetOpenClipboardWindow@0',
+    'GetParent@4',
+    'GetPriorityClipboardFormat@8',
+    'GetProcessDefaultLayout@4',
+    'GetProcessWindowStation@0',
+    'GetPropA@8',
+    'GetPropW@8',
+    'GetQueueStatus@4',
+    'GetRawInputBuffer@12',
+    'GetRawInputData@20',
+    'GetRawInputDeviceInfoA@16',
+    'GetRawInputDeviceInfoW@16',
+    'GetRawInputDeviceList@12',
+    'GetRegisteredRawInputDevices@12',
+    'GetScrollBarInfo@12',
+    'GetScrollInfo@12',
+    'GetScrollPos@8',
+    'GetScrollRange@16',
+    'GetShellWindow@0',
+    'GetSubMenu@8',
+    'GetSysColor@4',
+    'GetSysColorBrush@4',
+    'GetSystemMenu@8',
+    'GetSystemMetrics@4',
+    'GetTabbedTextExtentA@20',
+    'GetTabbedTextExtentW@20',
+    'GetThreadDesktop@4',
+    'GetTitleBarInfo@8',
+    'GetTopWindow@4',
+    'GetUpdateRect@12',
+    'GetUpdateRgn@12',
+    'GetUserObjectInformationA@20',
+    'GetUserObjectInformationW@20',
+    'GetUserObjectSecurity@20',
+    'GetWindow@8',
+    'GetWindowContextHelpId@4',
+    'GetWindowDC@4',
+    'GetWindowInfo@8',
+    'GetWindowLongA@8',
+    'GetWindowLongW@8',
+    'GetWindowModuleFileName@12',
+    'GetWindowModuleFileNameA@12',
+    'GetWindowModuleFileNameW@12',
+    'GetWindowPlacement@8',
+    'GetWindowRect@8',
+    'GetWindowRgn@8',
+    'GetWindowRgnBox@8',
+    'GetWindowTextA@12',
+    'GetWindowTextLengthA@4',
+    'GetWindowTextLengthW@4',
+    'GetWindowTextW@12',
+    'GetWindowThreadProcessId@8',
+    'GetWindowWord@8',
+    'GrayStringA@36',
+    'GrayStringW@36',
+    'HideCaret@4',
+    'HiliteMenuItem@16',
+    'IMPGetIMEA@8',
+    'IMPGetIMEW@8',
+    'IMPQueryIMEA@4',
+    'IMPQueryIMEW@4',
+    'IMPSetIMEA@8',
+    'IMPSetIMEW@8',
+    'ImpersonateDdeClientWindow@8',
+    'InSendMessage@0',
+    'InSendMessageEx@4',
+    'InflateRect@12',
+    'InsertMenuA@20',
+    'InsertMenuItemA@16',
+    'InsertMenuItemW@16',
+    'InsertMenuW@20',
+    'InternalGetWindowText@12',
+    'IntersectRect@12',
+    'InvalidateRect@12',
+    'InvalidateRgn@12',
+    'InvertRect@8',
+    'IsCharAlphaA@4',
+    'IsCharAlphaNumericA@4',
+    'IsCharAlphaNumericW@4',
+    'IsCharAlphaW@4',
+    'IsCharLowerA@4',
+    'IsCharLowerW@4',
+    'IsCharUpperA@4',
+    'IsCharUpperW@4',
+    'IsChild@8',
+    'IsClipboardFormatAvailable@4',
+    'IsDialogMessage@8',
+    'IsDialogMessageA@8',
+    'IsDialogMessageW@8',
+    'IsDlgButtonChecked@8',
+    'IsGUIThread@4',
+    'IsHungAppWindow@4',
+    'IsIconic@4',
+    'IsMenu@4',
+    'IsRectEmpty@4',
+    'IsWinEventHookInstalled@4',
+    'IsWindow@4',
+    'IsWindowEnabled@4',
+    'IsWindowUnicode@4',
+    'IsWindowVisible@4',
+    'IsZoomed@4',
+    'KillTimer@8',
+    'LoadAcceleratorsA@8',
+    'LoadAcceleratorsW@8',
+    'LoadBitmapA@8',
+    'LoadBitmapW@8',
+    'LoadCursorA@8',
+    'LoadCursorFromFileA@4',
+    'LoadCursorFromFileW@4',
+    'LoadCursorW@8',
+    'LoadIconA@8',
+    'LoadIconW@8',
+    'LoadImageA@24',
+    'LoadImageW@24',
+    'LoadKeyboardLayoutA@8',
+    'LoadKeyboardLayoutW@8',
+    'LoadMenuA@8',
+    'LoadMenuIndirectA@4',
+    'LoadMenuIndirectW@4',
+    'LoadMenuW@8',
+    'LoadStringA@16',
+    'LoadStringW@16',
+    'LockSetForegroundWindow@4',
+    'LockWindowUpdate@4',
+    'LockWorkStation@0',
+    'LookupIconIdFromDirectory@8',
+    'LookupIconIdFromDirectoryEx@20',
+    'MapDialogRect@8',
+    'MapVirtualKeyA@8',
+    'MapVirtualKeyExA@12',
+    'MapVirtualKeyExW@12',
+    'MapVirtualKeyW@8',
+    'MapWindowPoints@16',
+    'MenuItemFromPoint@16',
+    'MessageBeep@4',
+    'MessageBoxA@16',
+    'MessageBoxExA@20',
+    'MessageBoxExW@20',
+    'MessageBoxIndirectA@4',
+    'MessageBoxIndirectW@4',
+    'MessageBoxTimeoutA@24',
+    'MessageBoxTimeoutW@24',
+    'MessageBoxW@16',
+    'ModifyMenuA@20',
+    'ModifyMenuW@20',
+    'MonitorFromPoint@12',
+    'MonitorFromRect@8',
+    'MonitorFromWindow@8',
+    'MoveWindow@24',
+    'MsgWaitForMultipleObjects@20',
+    'MsgWaitForMultipleObjectsEx@20',
+    'NotifyWinEvent@16',
+    'OemKeyScan@4',
+    'OemToCharA@8',
+    'OemToCharBuffA@12',
+    'OemToCharBuffW@12',
+    'OemToCharW@8',
+    'OffsetRect@12',
+    'OpenClipboard@4',
+    'OpenDesktopA@16',
+    'OpenDesktopW@16',
+    'OpenIcon@4',
+    'OpenInputDesktop@12',
+    'OpenWindowStationA@12',
+    'OpenWindowStationW@12',
+    'PackDDElParam@12',
+    'PaintDesktop@4',
+    'PeekMessageA@20',
+    'PeekMessageW@20',
+    'PostMessageA@16',
+    'PostMessageW@16',
+    'PostQuitMessage@4',
+    'PostThreadMessageA@16',
+    'PostThreadMessageW@16',
+    'PrintWindow@12',
+    'PrivateExtractIconsA@32',
+    'PrivateExtractIconsW@32',
+    'PtInRect@12',
+    'RealChildWindowFromPoint@12',
+    'RealGetWindowClass@12',
+    'RealGetWindowClassA@12',
+    'RealGetWindowClassW@12',
+    'RedrawWindow@16',
+    'RegisterClassA@4',
+    'RegisterClassExA@4',
+    'RegisterClassExW@4',
+    'RegisterClassW@4',
+    'RegisterClipboardFormatA@4',
+    'RegisterClipboardFormatW@4',
+    'RegisterDeviceNotificationA@12',
+    'RegisterDeviceNotificationW@12',
+    'RegisterHotKey@16',
+    'RegisterRawInputDevices@12',
+    'RegisterShellHookWindow@4',
+    'RegisterWindowMessageA@4',
+    'RegisterWindowMessageW@4',
+    'ReleaseCapture@0',
+    'ReleaseDC@8',
+    'RemoveMenu@12',
+    'RemovePropA@8',
+    'RemovePropW@8',
+    'ReplyMessage@4',
+    'ReuseDDElParam@20',
+    'ScreenToClient@8',
+    'ScrollDC@28',
+    'ScrollWindow@20',
+    'ScrollWindowEx@32',
+    'SendDlgItemMessageA@20',
+    'SendDlgItemMessageW@20',
+    'SendIMEMessageExA@8',
+    'SendIMEMessageExW@8',
+    'SendInput@12',
+    'SendMessageA@16',
+    'SendMessageCallbackA@24',
+    'SendMessageCallbackW@24',
+    'SendMessageTimeoutA@28',
+    'SendMessageTimeoutW@28',
+    'SendMessageW@16',
+    'SendNotifyMessageA@16',
+    'SendNotifyMessageW@16',
+    'SetActiveWindow@4',
+    'SetCapture@4',
+    'SetCaretBlinkTime@4',
+    'SetCaretPos@8',
+    'SetClassLongA@12',
+    'SetClassLongW@12',
+    'SetClassWord@12',
+    'SetClipboardData@8',
+    'SetClipboardViewer@4',
+    'SetCursor@4',
+    'SetCursorPos@8',
+    'SetDebugErrorLevel@4',
+    'SetDeskWallpaper@4',
+    'SetDlgItemInt@16',
+    'SetDlgItemTextA@12',
+    'SetDlgItemTextW@12',
+    'SetDoubleClickTime@4',
+    'SetFocus@4',
+    'SetForegroundWindow@4',
+    'SetKeyboardState@4',
+    'SetLastErrorEx@8',
+    'SetLayeredWindowAttributes@16',
+    'SetMenu@8',
+    'SetMenuContextHelpId@8',
+    'SetMenuDefaultItem@12',
+    'SetMenuInfo@8',
+    'SetMenuItemBitmaps@20',
+    'SetMenuItemInfoA@16',
+    'SetMenuItemInfoW@16',
+    'SetMessageExtraInfo@4',
+    'SetMessageQueue@4',
+    'SetParent@8',
+    'SetProcessDefaultLayout@4',
+    'SetProcessWindowStation@4',
+    'SetPropA@12',
+    'SetPropW@12',
+    'SetRect@20',
+    'SetRectEmpty@4',
+    'SetScrollInfo@16',
+    'SetScrollPos@16',
+    'SetScrollRange@20',
+    'SetShellWindow@4',
+    'SetSysColors@12',
+    'SetSystemCursor@8',
+    'SetThreadDesktop@4',
+    'SetTimer@16',
+    'SetUserObjectInformationA@16',
+    'SetUserObjectInformationW@16',
+    'SetUserObjectSecurity@12',
+    'SetWinEventHook@28',
+    'SetWindowContextHelpId@8',
+    'SetWindowLongA@12',
+    'SetWindowLongW@12',
+    'SetWindowPlacement@8',
+    'SetWindowPos@28',
+    'SetWindowRgn@12',
+    'SetWindowTextA@8',
+    'SetWindowTextW@8',
+    'SetWindowWord@12',
+    'SetWindowsHookA@8',
+    'SetWindowsHookExA@16',
+    'SetWindowsHookExW@16',
+    'SetWindowsHookW@8',
+    'ShowCaret@4',
+    'ShowCursor@4',
+    'ShowOwnedPopups@8',
+    'ShowScrollBar@12',
+    'ShowWindow@8',
+    'ShowWindowAsync@8',
+    'SubtractRect@12',
+    'SwapMouseButton@4',
+    'SwitchDesktop@4',
+    'SwitchToThisWindow@8',
+    'SystemParametersInfoA@16',
+    'SystemParametersInfoW@16',
+    'TabbedTextOutA@32',
+    'TabbedTextOutW@32',
+    'TileChildWindows@8',
+    'TileWindows@20',
+    'ToAscii@20',
+    'ToAsciiEx@24',
+    'ToUnicode@24',
+    'ToUnicodeEx@28',
+    'TrackMouseEvent@4',
+    'TrackPopupMenu@28',
+    'TrackPopupMenuEx@24',
+    'TranslateAccelerator@12',
+    'TranslateAcceleratorA@12',
+    'TranslateAcceleratorW@12',
+    'TranslateMDISysAccel@8',
+    'TranslateMessage@4',
+    'UnhookWinEvent@4',
+    'UnhookWindowsHook@8',
+    'UnhookWindowsHookEx@4',
+    'UnionRect@12',
+    'UnloadKeyboardLayout@4',
+    'UnpackDDElParam@16',
+    'UnregisterClassA@8',
+    'UnregisterClassW@8',
+    'UnregisterDeviceNotification@4',
+    'UnregisterHotKey@8',
+    'UpdateLayeredWindow@36',
+    'UpdateWindow@4',
+    'UserHandleGrantAccess@12',
+    'ValidateRect@8',
+    'ValidateRgn@8',
+    'VkKeyScanA@4',
+    'VkKeyScanExA@8',
+    'VkKeyScanExW@8',
+    'VkKeyScanW@4',
+    'WINNLSEnableIME@8',
+    'WINNLSGetEnableStatus@4',
+    'WINNLSGetIMEHotkey@4',
+    'WaitForInputIdle@8',
+    'WaitMessage@0',
+    'WinHelpA@16',
+    'WinHelpW@16',
+    'WindowFromDC@4',
+    'WindowFromPoint@8',
+    'keybd_event@16',
+    'mouse_event@20',
+    'wsprintfA',
+    'wsprintfW',
+    'wvsprintfA@12',
+    'wvsprintfW@12',
+  ]
+}
diff --git a/build/win/importlibs/x86/user32.winxp.lib b/build/win/importlibs/x86/user32.winxp.lib
new file mode 100644
index 0000000..deb5577
--- /dev/null
+++ b/build/win/importlibs/x86/user32.winxp.lib
Binary files differ
diff --git a/build/win/reorder-imports.py b/build/win/reorder-imports.py
new file mode 100755
index 0000000..281668f
--- /dev/null
+++ b/build/win/reorder-imports.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+import glob
+import optparse
+import os
+import shutil
+import subprocess
+import sys
+
+def reorder_imports(input_dir, output_dir, architecture):
+  """Run swapimports.exe on the initial chrome.exe, and write to the output
+  directory. Also copy over any related files that might be needed
+  (pdbs, manifests etc.).
+  """
+
+  input_image = os.path.join(input_dir, 'chrome.exe')
+  output_image = os.path.join(output_dir, 'chrome.exe')
+
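+  # Note: the path to swapimport.exe (from the Syzygy binaries) is built
+  # relative to this script's own location; joining __file__ with '..'
+  # resolves to the directory containing the script.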
+  swap_exe = os.path.join(
+    __file__,
+    '..\\..\\..\\third_party\\syzygy\\binaries\\exe\\swapimport.exe')
+
+  args = [swap_exe, '--input-image=%s' % input_image,
+      '--output-image=%s' % output_image, '--overwrite', '--no-logo']
+
+  if architecture == 'x64':
+    args.append('--x64')
+
+  args.append('chrome_elf.dll')
+
+  # Propagate swapimport.exe failures rather than silently reporting success.
+  ret = subprocess.call(args)
+  if ret != 0:
+    return ret
+
+  for fname in glob.iglob(os.path.join(input_dir, 'chrome.exe.*')):
+    shutil.copy(fname, os.path.join(output_dir, os.path.basename(fname)))
+  return 0
+
+
+def main(argv):
+  usage = 'reorder-imports.py -i <input_dir> -o <output_dir> -a <target_arch>'
+  parser = optparse.OptionParser(usage=usage)
+  parser.add_option('-i', '--input', help='reorder chrome.exe in DIR',
+      metavar='DIR')
+  parser.add_option('-o', '--output', help='write new chrome.exe to DIR',
+      metavar='DIR')
+  parser.add_option('-a', '--arch', help='architecture of build (optional)',
+      default='ia32')
+  opts, args = parser.parse_args()
+
+  if not opts.input or not opts.output:
+    parser.error('Please provide an input and an output directory.')
+  return reorder_imports(opts.input, opts.output, opts.arch)
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv[1:]))
diff --git a/build/win/use_ansi_codes.py b/build/win/use_ansi_codes.py
new file mode 100755
index 0000000..cff5f43
--- /dev/null
+++ b/build/win/use_ansi_codes.py
@@ -0,0 +1,10 @@
+#!/usr/bin/env python
+# Copyright (c) 2015 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Prints if the the terminal is likely to understand ANSI codes."""
+
+import os
+
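+# The printed value ('True' or 'False') is consumed as a string by the build
+# files that invoke this script, e.g. (illustrative) via a gyp command
+# expansion like '<!(python build/win/use_ansi_codes.py)'.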
+# Add more terminals here as needed.
+print 'ANSICON' in os.environ
diff --git a/build/win_is_xtree_patched.py b/build/win_is_xtree_patched.py
new file mode 100755
index 0000000..3f1994f
--- /dev/null
+++ b/build/win_is_xtree_patched.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Determines if the VS xtree header has been patched to disable C4702."""
+
+import os
+
+
+def IsPatched():
+  # TODO(scottmg): For now, just return whether we're using the packaged
+  # toolchain script (because we know it's patched). Another case could be
+  # added here to query the active VS installation and actually check the
+  # contents of xtree.
+  # http://crbug.com/346399.
+  return int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', 1)) == 1
+
+
+def DoMain(_):
+  """Hook to be called from gyp without starting a separate python
+  interpreter."""
+  return "1" if IsPatched() else "0"
+
+
+if __name__ == '__main__':
+  print DoMain([])
diff --git a/build/win_precompile.gypi b/build/win_precompile.gypi
new file mode 100644
index 0000000..fb86076
--- /dev/null
+++ b/build/win_precompile.gypi
@@ -0,0 +1,20 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Include this file to make targets in your .gyp use the default precompiled
+# header on Windows. Precompiled headers are used in debug builds only,
+# because the official builders run out of memory when they are enabled for
+# release builds.
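+#
+# Typical usage (illustrative; the include path is relative to the including
+# .gyp file):
+#
+#   'includes': [ '../build/win_precompile.gypi' ],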
+
+{
+  'conditions': [
+    ['OS=="win" and chromium_win_pch==1', {
+        'target_defaults': {
+          'msvs_precompiled_header': '<(DEPTH)/build/precompile.h',
+          'msvs_precompiled_source': '<(DEPTH)/build/precompile.cc',
+          'sources': ['<(DEPTH)/build/precompile.cc'],
+        }
+      }],
+  ],
+}
diff --git a/dart.gyp b/dart.gyp
index b942371..f513625 100644
--- a/dart.gyp
+++ b/dart.gyp
@@ -35,6 +35,13 @@
       ],
     },
     {
+      'target_name': 'fuchsia_test',
+      'type': 'none',
+      'dependencies': [
+        'runtime/dart-runtime.gyp:fuchsia_test',
+      ],
+    },
+    {
       # This is the target that is built on the VM build bots.  It
       # must depend on anything that is required by the VM test
       # suites.
diff --git a/docs/language/dartLangSpec.tex b/docs/language/dartLangSpec.tex
index 886d65b..140a8df 100644
--- a/docs/language/dartLangSpec.tex
+++ b/docs/language/dartLangSpec.tex
@@ -7,8 +7,8 @@
 \usepackage{lmodern}
 \newcommand{\code}[1]{{\sf #1}}
 \title{Dart Programming Language  Specification  \\
-(4th edition draft)\\
-{\large Version 1.11}}
+{4th edition draft}\\
+{\large Version 1.14}}
 
 % For information about Location Markers (and in particular the
 % commands \LMHash and \LMLabel), see the long comment at the
@@ -35,7 +35,7 @@
 \LMLabel{ecmaConformance}
 
 \LMHash{}
-A conforming  implementation of the Dart programming language must provide and support all the  APIs (libraries, types, functions, getters, setters, whether top-level, static, instance or local) mandated in this specification. 
+A conforming  implementation of the Dart programming language must provide and support all the  APIs (libraries, types, functions, getters, setters, whether top-level, static, instance or local) mandated in this specification.
 
 \LMHash{}
 A conforming implementation is permitted to provide additional APIs, but not additional syntax, except for experimental features in support of null-aware cascades and tear-offs that are likely to be introduced in the next revision of this specification.
@@ -61,7 +61,7 @@
 Terms and definitions used in this specification are given in the body of the specification proper. Such terms are highlighted in italics when they are introduced, e.g., `we use the term {\em verbosity} to refer to the property of excess verbiage'.
 % End Ecma Boilerplate
 
-  
+
 \section{Notation}
 \LMLabel{notation}
 
@@ -104,16 +104,16 @@
 % need a match anything or a production that does that, so we can correct bugs wrt use
 % ~. ~ does not actually parse stuff - it just looks ahead and checks. To get the effect of
 % parsing anything but X, one needs ~X ANYTHING, not just ~X. There are bugs in the
-% grammar related to this. 
-% The alternative is to define ~X as anything but X, or to introduce an anthingBut(X) 
+% grammar related to this.
+% The alternative is to define ~X as anything but X, or to introduce an anythingBut(X)
 % combinator, such as !X
 
 \LMHash{}
-Both syntactic and lexical productions are represented this way. Lexical productions are distinguished by their names. The names of lexical productions consist exclusively of upper case characters and underscores.  As always, within grammatical productions, whitespace and comments between elements of the production are implicitly ignored unless stated otherwise. 
-Punctuation tokens appear in quotes. 
+Both syntactic and lexical productions are represented this way. Lexical productions are distinguished by their names. The names of lexical productions consist exclusively of upper case characters and underscores.  As always, within grammatical productions, whitespace and comments between elements of the production are implicitly ignored unless stated otherwise.
+Punctuation tokens appear in quotes.
 
 \LMHash{}
-Productions are embedded, as much as possible, in the discussion of the constructs they represent. 
+Productions are embedded, as much as possible, in the discussion of the constructs they represent.
 
 \LMHash{}
 A list $x_1, \ldots, x_n$ denotes any list of $n$ elements of the form $x_i, 1 \le i \le n$. Note that $n$ may be zero, in which case the list is empty. We use such lists extensively throughout this specification.
@@ -128,9 +128,9 @@
 The specifications of operators often involve statements such as $x$ $op$ $y$ is equivalent to the method invocation $x.op(y)$. Such specifications should be understood as a shorthand for:
 \begin{itemize}
 \item
- $x$ $op$ $y$ is equivalent to the method invocation $x.op^\prime(y)$, assuming the class of $x$ actually declared a non-operator method named $op^\prime$ defining the same function as the operator $op$. 
+ $x$ $op$ $y$ is equivalent to the method invocation $x.op^\prime(y)$, assuming the class of $x$ actually declared a non-operator method named $op^\prime$ defining the same function as the operator $op$.
  \end{itemize}
- 
+
  \rationale{This circumlocution is required because x.op(y), where op is an operator, is not legal syntax. However, it is painfully verbose, and we prefer to state this rule once here, and use a concise and clear notation across the specification.
  }
 
@@ -141,7 +141,7 @@
 References to otherwise unspecified names of program entities (such as classes or functions) are interpreted as the names of members of the Dart core library.
 
 \commentary{
-Examples would be the classes \code{Object} and \code{Type} representing the root of the class hierarchy and the reification of runtime types respectively. 
+Examples would be the classes \code{Object} and \code{Type} representing the root of the class hierarchy and the reification of runtime types respectively.
 }
 
 \section{Overview}
@@ -151,15 +151,15 @@
 Dart is a class-based, single-inheritance, pure object-oriented programming language. Dart is optionally typed (\ref{types}) and supports reified generics. The run-time type of every object is represented as an instance of class \code{Type}  which can be obtained by calling the getter  \code{runtimeType} declared in class \code{Object}, the root of the Dart class hierarchy.
 
 \LMHash{}
-Dart programs may be statically checked. The static checker will report some violations of the type rules, but such violations do not abort compilation or preclude execution. 
+Dart programs may be statically checked. The static checker will report some violations of the type rules, but such violations do not abort compilation or preclude execution.
 
 \LMHash{}
-Dart programs may be executed in one of two modes: production mode or checked mode. In production mode, static type annotations (\ref{staticTypes}) have absolutely no effect on execution with the exception of reflection and structural type tests. 
+Dart programs may be executed in one of two modes: production mode or checked mode. In production mode, static type annotations (\ref{staticTypes}) have absolutely no effect on execution with the exception of reflection and structural type tests.
 
 \commentary{
 Reflection, by definition, examines the program structure. If we provide reflective access to the type of a declaration, or to source code, it will inevitably produce results that depend on the types used in the underlying code.
 
-Type tests also examine the types in a program explicitly. Nevertheless, in most cases, these will not depend on type annotations. The exceptions to this rule are type tests involving function types. Function types are structural, and so depend on the types declared for their parameters and on their return types. 
+Type tests also examine the types in a program explicitly. Nevertheless, in most cases, these will not depend on type annotations. The exceptions to this rule are type tests involving function types. Function types are structural, and so depend on the types declared for their parameters and on their return types.
 }
 
 \LMHash{}
@@ -168,17 +168,17 @@
 \commentary{
 The coexistence between optional typing and reification is based on the following:
 \begin{enumerate}
-\item Reified type information reflects the types of objects at runtime and may always be queried by dynamic typechecking constructs (the analogs of instanceOf, casts, typecase etc. in other languages). Reified type information includes class declarations, the runtime type (aka class) of an object, and type arguments to constructors. 
-\item Static type annotations determine the types of variables and function declarations (including methods and constructors). 
+\item Reified type information reflects the types of objects at runtime and may always be queried by dynamic typechecking constructs (the analogs of instanceOf, casts, typecase etc. in other languages). Reified type information includes class declarations, the runtime type (aka class) of an object, and type arguments to constructors.
+\item Static type annotations determine the types of variables and function declarations (including methods and constructors).
 \item Production mode respects optional typing. Static type annotations do not affect runtime behavior.
 \item Checked mode utilizes static type annotations and dynamic type information aggressively yet selectively to provide early error detection during development.
 \end{enumerate}
 }
 
 \LMHash{}
-Dart programs are organized in a modular fashion into units called {\em libraries} (\ref{librariesAndScripts}). Libraries are units of encapsulation and may be mutually recursive. 
+Dart programs are organized in a modular fashion into units called {\em libraries} (\ref{librariesAndScripts}). Libraries are units of encapsulation and may be mutually recursive.
 
-\commentary{However they are not first class.  To get multiple copies of a library running simultaneously, one needs to spawn an isolate. 
+\commentary{However they are not first class.  To get multiple copies of a library running simultaneously, one needs to spawn an isolate.
 }
 
 \subsection{Scoping}
@@ -192,7 +192,7 @@
 
 \commentary{It is therefore impossible, e.g.,  to define a class that declares a method and a field with the same name in Dart. Similarly one cannot declare a top-level function with the same name as a library variable or class.
   }
-  
+
 \LMHash{}
 It is a compile-time error if there is more than one entity with the same name declared in the same scope.
 
@@ -201,11 +201,11 @@
 }
 
 \LMHash{}
-Dart is lexically scoped.    Scopes may nest.  A name or declaration $d$ is {\em available in scope} $S$ if $d$ is in the namespace induced by $S$ or if $d$ is available in the lexically enclosing scope of $S$. We  say that a name or declaration $d$ is {\em in scope} if $d$ is available in the current scope. 
+Dart is lexically scoped.    Scopes may nest.  A name or declaration $d$ is {\em available in scope} $S$ if $d$ is in the namespace induced by $S$ or if $d$ is available in the lexically enclosing scope of $S$. We  say that a name or declaration $d$ is {\em in scope} if $d$ is available in the current scope.
 
 
 \LMHash{}
-If a  declaration $d$ named $n$ is in the namespace induced by a scope $S$, then $d$ {\em hides} any declaration named $n$ that is available in the lexically enclosing scope of $S$. 
+If a  declaration $d$ named $n$ is in the namespace induced by a scope $S$, then $d$ {\em hides} any declaration named $n$ that is available in the lexically enclosing scope of $S$.
 
 \commentary {
 A consequence of these rules is that it is possible to hide a type with a method or variable.
@@ -251,8 +251,8 @@
 
 These considerations become even more important if one introduces constructs such as nested classes, which might be considered in future versions of the language.
 
-Good tooling should of course endeavor to inform programmers of such situations (discreetly). For example, an identifier that is both inherited and lexically visible could be highlighted (via underlining or colorization). Better yet, tight integration of source control with language aware tools would detect such changes when they occur. 
- 
+Good tooling should of course endeavor to inform programmers of such situations (discreetly). For example, an identifier that is both inherited and lexically visible could be highlighted (via underlining or colorization). Better yet, tight integration of source control with language aware tools would detect such changes when they occur.
+
 }
 
 
@@ -263,7 +263,7 @@
 
 \LMHash{}
 Dart supports two levels of privacy: {\em public} and {\em private}.  A declaration is {\em private} iff its name is private, otherwise it is {\em public.}  A  name $q$ is private iff any one of the identifiers that comprise $q$ is private,  otherwise it is {\em public.}  An identifier is private iff it
-begins with an underscore (the \_ character) otherwise it is {\em public.} 
+begins with an underscore (the \_ character) otherwise it is {\em public.}
 
 \LMHash{}
 A declaration $m$ is {\em accessible to library $L$}  if $m$ is declared in $L$ or if $m$ is public.
@@ -273,7 +273,7 @@
 }
 
 \LMHash{}
-Privacy applies only to declarations within a library, not to library declarations themselves. 
+Privacy applies only to declarations within a library, not to library declarations themselves.
 
 \rationale{Libraries do not reference each other by name and so the idea of a private library is meaningless.
 Thus, if the name of a library begins with an underscore, it has no effect on the accessibility of the library or its members.
@@ -290,7 +290,7 @@
 Dart code is always single threaded. There is no shared-state concurrency in Dart. Concurrency is supported via actor-like entities called {\em isolates}.
 
 \LMHash{}
-An isolate is a unit of concurrency. It has its own memory and its own thread of control. Isolates communicate by message passing (\ref{sendingMessages}). No state is ever shared between isolates. Isolates are created by spawning (\ref{spawningAnIsolate}). 
+An isolate is a unit of concurrency. It has its own memory and its own thread of control. Isolates communicate by message passing (\ref{sendingMessages}). No state is ever shared between isolates. Isolates are created by spawning (\ref{spawningAnIsolate}).
 
 
 \section{Errors and Warnings}
@@ -300,7 +300,7 @@
 This specification distinguishes between several kinds of errors.
 
 \LMHash{}
-{\em Compile-time errors} are errors that preclude execution. A compile-time error must be reported by a Dart compiler before the erroneous code is executed. 
+{\em Compile-time errors} are errors that preclude execution. A compile-time error must be reported by a Dart compiler before the erroneous code is executed.
 
 \rationale{A Dart implementation has considerable freedom as to when compilation takes place. Modern programming language implementations often interleave compilation and execution, so that compilation of a method may be delayed, e.g.,  until it is first invoked. Consequently, compile-time errors in a method $m$ may be reported as late as the time of $m$'s first invocation.
 
@@ -312,8 +312,8 @@
 \LMHash{}
 If an uncaught compile-time error occurs within the code of a running isolate $A$, $A$ is immediately suspended.  The only circumstance where a compile-time error could be caught would be via code run reflectively, where the mirror system can catch it.
 
-\rationale{Typically, once a compile-time error is thrown and $A$ is suspended, $A$ will then be terminated. However, this depends on the overall environment. 
-A Dart engine runs in the context of an {\em embedder}, 
+\rationale{Typically, once a compile-time error is thrown and $A$ is suspended, $A$ will then be terminated. However, this depends on the overall environment.
+A Dart engine runs in the context of an {\em embedder},
 a program that interfaces between the engine and the surrounding computing environment. The embedder will often be a web browser, but need not be; it may be a C++ program on the server for example. When an isolate fails with a compile-time error as described above, control returns to the embedder, along with an exception describing the problem.  This is necessary so that the embedder can clean up resources etc. It is then the embedder's decision whether to terminate the isolate or not.
 }
 
@@ -324,23 +324,23 @@
 {\em Dynamic type errors} are type errors reported in checked mode.
 
 \LMHash{}
-{\em Run-time errors} are exceptions raised during execution. Whenever we say that an exception $ex$ is {\em raised} or {\em thrown}, we mean that a throw expression  (\ref{throw}) of the form: \code{\THROW{} $ex$;} was implicitly evaluated or that a rethrow statement (\ref{rethrow}) of the form \code{\RETHROW} was executed. When we say that {\em a} $C$ {\em is thrown}, where $C$ is a class, we mean that an instance of class $C$ is thrown.
+{\em Run-time errors} are exceptions raised during execution. Whenever we say that an exception $ex$ is {\em raised} or {\em thrown}, we mean that a throw expression  (\ref{throw}) of the form: \code{\THROW{} $ex$;} was implicitly evaluated or that a rethrow statement (\ref{rethrow}) of the form \code{\RETHROW} was executed. When we say that {\em a} $C$ {\em is thrown}, where $C$ is a class, we mean that an instance of class $C$ is thrown. When we say that a stream raises an exception, we mean that an exception occurred while computing the value(s) of the stream.
 
 \LMHash{}
-If an uncaught exception is thrown by a running isolate $A$, $A$ is immediately suspended. 
+If an uncaught exception is thrown by a running isolate $A$, $A$ is immediately suspended.
 
 
 \section{Variables}
 \LMLabel{variables}
 
 \LMHash{}
-Variables are storage locations in memory.  
+Variables are storage locations in memory.
 
 \begin{grammar}
 {\bf variableDeclaration:}
       declaredIdentifier (`,' identifier)*
       .
-      
+
 {\bf declaredIdentifier:}
       metadata finalConstVarOrType identifier
     .
@@ -349,7 +349,7 @@
            \CONST{} type?;
 	varOrType
     .
-    
+
  {\bf varOrType:}\VAR{};
 	type
     .
@@ -361,7 +361,7 @@
 {\bf initializedIdentifier:}
       identifier (`=' expression)? % could do top-level here
     .
-    
+
 {\bf initializedIdentifierList:}
       initializedIdentifier (`,' initializedIdentifier)*
     .
@@ -375,22 +375,22 @@
 A variable that has not been initialized has the initial value \NULL{} (\ref{null}).
 
 \LMHash{}
-A variable declared at the top-level of a library is referred to as either a {\em library variable} or simply a top-level variable. 
+A variable declared at the top-level of a library is referred to as either a {\em library variable} or simply a top-level variable.
 
 \LMHash{}
-A {\em static variable} is a variable that is not associated with a particular instance, but rather with an entire library or class.  Static variables include library variables and class variables. Class variables are variables whose declaration is immediately nested inside a class declaration and includes the modifier \STATIC{}. A library variable is implicitly static. It is a compile-time error to preface a top-level variable declaration with the built-in identifier  (\ref{identifierReference}) \STATIC{}.  
+A {\em static variable} is a variable that is not associated with a particular instance, but rather with an entire library or class.  Static variables include library variables and class variables. Class variables are variables whose declaration is immediately nested inside a class declaration and includes the modifier \STATIC{}. A library variable is implicitly static. It is a compile-time error to preface a top-level variable declaration with the built-in identifier  (\ref{identifierReference}) \STATIC{}.
 
 \LMHash{}
-Static variable declarations are initialized lazily. When a static variable $v$ is read, iff it has not yet been assigned,  it is set to the result of evaluating its initializer. The precise rules are given in section \ref{evaluationOfImplicitVariableGetters}. 
+Static variable declarations are initialized lazily. When a static variable $v$ is read, iff it has not yet been assigned,  it is set to the result of evaluating its initializer. The precise rules are given in section \ref{evaluationOfImplicitVariableGetters}.
 
 \rationale{The lazy semantics are given because we do not want a language where one tends to define expensive initialization computations, causing long application startup times. This is especially crucial for Dart, which must support the coding of  client applications.
 }
 
 \LMHash{}
-A {\em final variable} is a variable whose binding is fixed upon initialization; a final variable $v$ will always refer to the same object after $v$ has been initialized. The declaration of a final variable must  include the modifier \FINAL{}. 
+A {\em final variable} is a variable whose binding is fixed upon initialization; a final variable $v$ will always refer to the same object after $v$ has been initialized. The declaration of a final variable must  include the modifier \FINAL{}.
 
 \LMHash{}
-It is a static warning if a final instance variable that has been initialized at its point of declaration  is  also initialized in a constructor. 
+It is a static warning if a final instance variable that has been initialized at its point of declaration  is  also initialized in a constructor.
 % It is a static warning if a final instance variable that has been initialized by means of an initializing formal of a constructor is  also initialized elsewhere in the same constructor.
 It is a compile-time error if a local variable $v$ is final and $v$ is not initialized at its point of declaration.
 
@@ -410,24 +410,24 @@
 We say that a variable $v$ is {\em potentially mutated} in some scope $s$ if $v$ is not final or constant and an assignment to $v$ occurs in $s$.
 
 \LMHash{}
-If a variable declaration does not explicitly specify a type, the type of the declared variable(s) is  \DYNAMIC{}, the unknown type (\ref{typeDynamic}). 
+If a variable declaration does not explicitly specify a type, the type of the declared variable(s) is  \DYNAMIC{}, the unknown type (\ref{typeDynamic}).
 
 \LMHash{}
 A variable is {\em mutable} if it is not final.
 Static and instance variable declarations always induce implicit getters. If the variable is mutable it also introduces an implicit setter.
-The scope into which the implicit getters and setters are introduced depends on the kind of variable declaration involved. 
+The scope into which the implicit getters and setters are introduced depends on the kind of variable declaration involved.
 
 \LMHash{}
-A library variable introduces a getter into the top level scope of the enclosing library. A static class variable introduces a static getter into the immediately enclosing class. An instance variable introduces an instance getter into the immediately enclosing class. 
+A library variable introduces a getter into the top level scope of the enclosing library. A static class variable introduces a static getter into the immediately enclosing class. An instance variable introduces an instance getter into the immediately enclosing class.
 
 \LMHash{}
-A mutable library variable introduces a setter into the top level scope of the enclosing library. A mutable static class variable introduces a static setter into the immediately enclosing class. A mutable instance variable introduces an instance setter into the immediately enclosing class. 
+A mutable library variable introduces a setter into the top level scope of the enclosing library. A mutable static class variable introduces a static setter into the immediately enclosing class. A mutable instance variable introduces an instance setter into the immediately enclosing class.
 
 \LMHash{}
 Local variables are added to the innermost enclosing scope.  They do not induce getters and setters.  A local variable may only be referenced at a source code location that is after its initializer, if any, is complete, or a compile-time error occurs.  The error may be reported either at the point where the premature reference occurs, or at the variable declaration.
 
 \rationale {
-We allow the error to be reported at the declaration to allow implementations to avoid an extra processing phase. 
+We allow the error to be reported at the declaration to allow implementations to avoid an extra processing phase.
 }
 
 \commentary{
@@ -438,7 +438,7 @@
 \VAR{} x = 0;
 
 f(y) \{
-  \VAR{} z = x; // compile-time error 
+  \VAR{} z = x; // compile-time error
   if (y) \{
     x = x + 1; // two compile time errors
     print(x); // compile time error
@@ -455,7 +455,7 @@
 }
 
 \commentary {
-As another example  \code{\VAR{} x = 3, y = x;} is legal, because \code{x} is referenced after its initializer. 
+As another example  \code{\VAR{} x = 3, y = x;} is legal, because \code{x} is referenced after its initializer.
 
 A particularly perverse example involves a local variable name shadowing a type. This is possible because Dart has a single namespace for types, functions and variables.
 }
@@ -473,19 +473,19 @@
 }
 
 \rationale{
-As a rule, type annotations are ignored in production mode. However, we do 
+As a rule, type annotations are ignored in production mode. However, we do
  not want to allow programs to compile legally in one mode and not another, and in this extremely odd situation, that consideration takes precedence.
 }
 
 \end{dartCode}
 
-% the grammar does not support local getters and setters. The local var discussion does not seem to mention getters and setters based semantics. It simply discusses the creation of the variable, not its access. Access is either assignment or identifiers. Identifiers ignore the getter story. 
+% the grammar does not support local getters and setters. The local var discussion does not seem to mention getters and setters based semantics. It simply discusses the creation of the variable, not its access. Access is either assignment or identifiers. Identifiers ignore the getter story.
 
 \LMHash{}
 The following rules apply to all static and instance variables.
 
 \LMHash{}
-A  variable declaration  of one of the forms \code{$T$ $v$;},  \code{$T$ $v$ = $e$;} ,  \code{\CONST{} $T$ $v$ = $e$;}, \code{\FINAL{} $T$ $v$;}  or \code{\FINAL{} $T$ $v$ = $e$;} always induces an implicit  getter function (\ref{getters}) with signature 
+A  variable declaration  of one of the forms \code{$T$ $v$;},  \code{$T$ $v$ = $e$;} ,  \code{\CONST{} $T$ $v$ = $e$;}, \code{\FINAL{} $T$ $v$;}  or \code{\FINAL{} $T$ $v$ = $e$;} always induces an implicit  getter function (\ref{getters}) with signature
 
 $T$ \GET{} $v$
 
@@ -493,21 +493,21 @@
 
 
 \LMHash{}
-A  variable declaration  of one of the forms \code{\VAR{} $v$;},  \code{\VAR{} $v$ = $e$;} ,  \code{\CONST{} $v$ = $e$;}, \code{\FINAL{} $v$;} or \code{\FINAL{}  $v$ = $e$;}  always induces an implicit  getter function with signature 
+A  variable declaration  of one of the forms \code{\VAR{} $v$;},  \code{\VAR{} $v$ = $e$;} ,  \code{\CONST{} $v$ = $e$;}, \code{\FINAL{} $v$;} or \code{\FINAL{}  $v$ = $e$;}  always induces an implicit  getter function with signature
 
  \GET{} $v$
 
 whose  invocation evaluates as described below (\ref{evaluationOfImplicitVariableGetters}).
 
 \LMHash{}
-A non-final  variable declaration  of the form \code{{} $T$ $v$;} or the form  \code{$T$ $v$ = $e$;}   always induces an implicit  setter function (\ref{setters}) with signature 
+A non-final  variable declaration  of the form \code{{} $T$ $v$;} or the form  \code{$T$ $v$ = $e$;}   always induces an implicit  setter function (\ref{setters}) with signature
 
  \VOID{} \SET{} $v=(T$ $x)$
 
 whose execution sets the value of $v$ to the incoming argument $x$.
 
 \LMHash{}
-A  non-final variable declaration  of the form \code{\VAR{} $v$;} or the form  \code{\VAR{} $v$ = $e$;}   always induces an implicit  setter function with signature 
+A  non-final variable declaration  of the form \code{\VAR{} $v$;} or the form  \code{\VAR{} $v$ = $e$;}   always induces an implicit  setter function with signature
 
 \SET{} $v=(x)$
 
@@ -519,12 +519,12 @@
 
 \LMHash{}
 Let $d$ be the declaration of a static or instance variable $v$.  If $d$ is an instance variable, then the invocation of the implicit getter  of $v$ evaluates to the value stored in $v$.
-If $d$ is a static or library variable then the implicit getter method of $v$ executes as follows: 
+If $d$ is a static or library variable then the implicit getter method of $v$ executes as follows:
 \begin{itemize}
-\item {\bf Non-constant variable declaration with initializer}. If $d$ is of one of the forms \code{\VAR{} $v$ = $e$;} ,  \code{$T$ $v$ = $e$;} ,   \code{\FINAL{} $v$ = $e$;} ,  \code{\FINAL{} $T$ $v$ = $e$;}, \code{\STATIC{} $v$ = $e$; }, \code{\STATIC{} $T$ $v$ = $e$; }, \code{\STATIC{} \FINAL{} $v$ = $e$; } or \code{\STATIC{} \FINAL{} $T$ $v$ = $e$;} and no value has yet been stored into $v$ then the initializer expression $e$ is evaluated. If, during the evaluation of $e$, the getter for $v$ is invoked, a \code{CyclicInitializationError} is thrown. If the evaluation succeeded yielding an object $o$, let $r = o$, otherwise let $r = \NULL{}$. In any case, $r$ is stored into $v$. The result of executing the getter is $r$. 
-\item  {\bf Constant variable declaration}. If $d$ is of one of the forms \code{\CONST{} $v$ = $e$; } ,  \code{\CONST{} $T$  $v$ = $e$; },  \code{\STATIC{} \CONST{} $v$ = $e$; }  or \code{\STATIC{} \CONST{} $T$ $v$ = $e$;} the result of the getter is the value of the compile time constant $e$. \commentary{Note that a compile time constant cannot depend on itself, so no cyclic references can occur.} 
+\item {\bf Non-constant variable declaration with initializer}. If $d$ is of one of the forms \code{\VAR{} $v$ = $e$;} ,  \code{$T$ $v$ = $e$;} ,   \code{\FINAL{} $v$ = $e$;} ,  \code{\FINAL{} $T$ $v$ = $e$;}, \code{\STATIC{} $v$ = $e$; }, \code{\STATIC{} $T$ $v$ = $e$; }, \code{\STATIC{} \FINAL{} $v$ = $e$; } or \code{\STATIC{} \FINAL{} $T$ $v$ = $e$;} and no value has yet been stored into $v$ then the initializer expression $e$ is evaluated. If, during the evaluation of $e$, the getter for $v$ is invoked, a \code{CyclicInitializationError} is thrown. If the evaluation succeeded yielding an object $o$, let $r = o$, otherwise let $r = \NULL{}$. In any case, $r$ is stored into $v$. The result of executing the getter is $r$.
+\item  {\bf Constant variable declaration}. If $d$ is of one of the forms \code{\CONST{} $v$ = $e$; } ,  \code{\CONST{} $T$  $v$ = $e$; },  \code{\STATIC{} \CONST{} $v$ = $e$; }  or \code{\STATIC{} \CONST{} $T$ $v$ = $e$;} the result of the getter is the value of the compile time constant $e$. \commentary{Note that a compile time constant cannot depend on itself, so no cyclic references can occur.}
 Otherwise
-\item {\bf Variable declaration without initializer}. The result of executing the getter method is the value stored in $v$.  
+\item {\bf Variable declaration without initializer}. The result of executing the getter method is the value stored in $v$.
 \end{itemize}
 
 
@@ -541,7 +541,7 @@
 {\bf functionSignature:}
     metadata returnType? identifier formalParameterList
     .
-    
+
 {\bf returnType:}
       \VOID{};
       type
@@ -558,7 +558,7 @@
 \end{grammar}
 
 \LMHash{}
-Functions include  function declarations (\ref{functionDeclarations}), methods (\ref{instanceMethods},  \ref{staticMethods}), getters  (\ref{getters}), setters  (\ref{setters}), constructors  (\ref{constructors}) and function literals  (\ref{functionExpressions}).  
+Functions include  function declarations (\ref{functionDeclarations}), methods (\ref{instanceMethods},  \ref{staticMethods}), getters  (\ref{getters}), setters  (\ref{setters}), constructors  (\ref{constructors}) and function literals  (\ref{functionExpressions}).
 
 \LMHash{}
 All functions have a signature and a body. The signature describes the formal parameters of the function, and possibly its name and return type.  A function body is either:
@@ -575,31 +575,31 @@
 \end{itemize}
 
 \LMHash{}
-A function is {\em asynchronous} if its body is marked with the \ASYNC{} or \ASYNC* modifier. Otherwise the function is {\em synchronous}. A function is a {\em generator} if its body is marked with the \SYNC* or \ASYNC* modifier.  
+A function is {\em asynchronous} if its body is marked with the \ASYNC{} or \ASYNC* modifier. Otherwise the function is {\em synchronous}. A function is a {\em generator} if its body is marked with the \SYNC* or \ASYNC* modifier.
 
 \commentary{
-Whether a function is synchronous or asynchronous is orthogonal to whether it is a generator or not. Generator functions are a sugar for functions that produce collections in a systematic way, by lazily applying a function that {\em generates} individual elements of a collection. Dart provides such a sugar in both the synchronous case, where one returns an iterable, and in the asynchronous case, where one returns a stream. Dart also allows both synchronous and asynchronous functions that produce a single value. 
+Whether a function is synchronous or asynchronous is orthogonal to whether it is a generator or not. Generator functions are a sugar for functions that produce collections in a systematic way, by lazily applying a function that {\em generates} individual elements of a collection. Dart provides such a sugar in both the synchronous case, where one returns an iterable, and in the asynchronous case, where one returns a stream. Dart also allows both synchronous and asynchronous functions that produce a single value.
 }
 
 \LMHash{}
 It is a compile-time error if an \ASYNC, \ASYNC* or \SYNC* modifier is attached to the body of a setter or constructor.
 
 \rationale{
-An asynchronous setter would be of little use, since setters can only be used in the context of an assignment (\ref{assignment}), and an assignment expression always evaluates to the value of the assignment's right hand side. If the setter actually did its work asynchronously, one might imagine that one would return a future that resolved to the assignment's right hand side after the setter did its work. However, this would require dynamic tests at every assignment, and so would be prohibitively expensive. 
+An asynchronous setter would be of little use, since setters can only be used in the context of an assignment (\ref{assignment}), and an assignment expression always evaluates to the value of the assignment's right hand side. If the setter actually did its work asynchronously, one might imagine that one would return a future that resolved to the assignment's right hand side after the setter did its work. However, this would require dynamic tests at every assignment, and so would be prohibitively expensive.
 
 An asynchronous constructor would, by definition, never return an instance of the class it purports to construct, but instead return a future. Calling such a beast via \NEW{} would be very confusing. If you need to produce an object asynchronously, use a method.
 
 One could allow modifiers for factories. A factory for \code{Future} could be modified by \ASYNC{}, a factory for \code{Stream} could be modified by \ASYNC* and a factory for \code{Iterable} could be modified by \SYNC*. No other scenario makes sense because the object returned by the factory would be of the wrong type. This situation is very unusual so it is not worth making an exception to the general rule for constructors in order to allow it.
 }
 \LMHash{}
-It is a static warning if the declared return type of a function marked \ASYNC{} may not be assigned to \code{Future}. It is a static warning if the declared return type of a function marked \SYNC* may not be assigned to \code{Iterable}. It is a static warning if the declared return type of  a function marked \ASYNC* may not be assigned to \code{Stream}.  
+It is a static warning if the declared return type of a function marked \ASYNC{} may not be assigned to \code{Future}. It is a static warning if the declared return type of a function marked \SYNC* may not be assigned to \code{Iterable}. It is a static warning if the declared return type of  a function marked \ASYNC* may not be assigned to \code{Stream}.
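
\commentary{
The following non-normative sketch (the function names are invented, and the \code{dart:async} library is assumed to be imported for \code{Future} and \code{Stream}) shows one function of each kind, with declared return types that avoid the static warnings above:
}

\begin{dartCode}
Future$<$int$>$ one() async \{ return 1; \}    // asynchronous, not a generator

Iterable$<$int$>$ naturals(int n) sync* \{     // synchronous generator
  for (var i = 0; i $<$ n; i++) yield i;
\}

Stream$<$int$>$ ticks(int n) async* \{         // asynchronous generator
  for (var i = 0; i $<$ n; i++) yield i;
\}
\end{dartCode}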
 
 \subsection{Function Declarations}
 \LMLabel{functionDeclarations}
 
 \LMHash{}
-A {\em function declaration} is a function that is neither a member of a class nor a function literal. Function declarations include {\em library functions}, which are function declarations 
-%(including getters and setters) 
+A {\em function declaration} is a function that is neither a member of a class nor a function literal. Function declarations include {\em library functions}, which are function declarations
+%(including getters and setters)
 at the top level of a library, and {\em local functions}, which are function declarations declared inside other functions. Library functions are often referred to simply as top-level functions.
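
\commentary{
As a brief non-normative sketch (the names are invented for illustration), \code{twice} below is a library function and \code{helper} is a local function declared inside it:
}

\begin{dartCode}
int twice(int x) \{             // library (top-level) function
  int helper(int y) =$>$ y + y;  // local function
  return helper(x);
\}
\end{dartCode}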
 
 \LMHash{}
@@ -608,7 +608,7 @@
 \LMHash{}
 The scope of a library function is the scope of the enclosing library. The scope of a local function is  described in section \ref{localFunctionDeclaration}. In both cases, the name of the function is in scope in its formal parameter scope (\ref{formalParameters}).
 
-%A function declaration of the form  $T_0$ $id(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k} = d_k])\{s\}$ is equivalent to a variable declaration of the form \code{\FINAL{} $F$ $id$ = $(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k}= d_k])\{s\}$}, where $F$ is the function type alias (\ref{typedef}) \code{\TYPEDEF{} $T_0$ $F(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}])$}.  Likewise,  a function declaration of the form  $id(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k} = d_k])\{s\}$ is equivalent to a variable declaration of the form \code{\FINAL{} $F$ $id$ = $(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k} = d_k])\{s\}$}, where $F$ is the function type alias \code{\TYPEDEF{}  $F(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}])$}. 
+%A function declaration of the form  $T_0$ $id(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k} = d_k])\{s\}$ is equivalent to a variable declaration of the form \code{\FINAL{} $F$ $id$ = $(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k}= d_k])\{s\}$}, where $F$ is the function type alias (\ref{typedef}) \code{\TYPEDEF{} $T_0$ $F(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}])$}.  Likewise,  a function declaration of the form  $id(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k} = d_k])\{s\}$ is equivalent to a variable declaration of the form \code{\FINAL{} $F$ $id$ = $(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots, T_{n+k}$ $x_{n+k} = d_k])\{s\}$}, where $F$ is the function type alias \code{\TYPEDEF{}  $F(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}])$}.
 
 %\Q{We need to cover library getters as well.}
 
@@ -646,9 +646,9 @@
 The body of a function introduces a new scope known as the function's {\em  body scope}. The body scope of a function $f$  is enclosed  in the scope introduced by the formal parameter scope of $f$.
 
 
-%The formal parameter scope of a function maps the name of each formal parameter $p$ to the value $p$ is bound to. 
+%The formal parameter scope of a function maps the name of each formal parameter $p$ to the value $p$ is bound to.
 
-% The formal parameters of a function are processed in the enclosing scope of the function. 
+% The formal parameters of a function are processed in the enclosing scope of the function.
 % \commentary{this means that the parameters themselves may not be referenced within the formal parameter list.}
 
 \LMHash{}
@@ -663,7 +663,7 @@
 %}
 
 %\begin{grammar}
-%formalParameterList:     
+%formalParameterList:
 %      '(' restFormalParameter? ')';
 %      '(' namedFormalParameters ')';
  %     '(' normalFormalParameters normalFormalParameterTail? ')'
@@ -685,12 +685,12 @@
     .
 \end{grammar}
 
-%Formal parameters are always \FINAL{}.  
+%Formal parameters are always \FINAL{}.
 %\Q{We're awaiting some data on whether enforcing this would cause widespread pain.}
 %A formal parameter is always considered to be initialized.  \rationale{This is because it will always be initialized by the call - even if it is optional.}
 
 
-\subsubsection{Required Formals} 
+\subsubsection{Required Formals}
 \LMLabel{requiredFormals}
 
 \LMHash{}
@@ -719,7 +719,7 @@
 %\subsubsection{Rest Formals}
 %\LMLabel{restFormals}
 
-%A rest formal $R$ must be the last parameter in a formal parameter list.  If a  type $T$ is specified for $R$, it signifies that the type of $R$ is $T[]$. 
+%A rest formal $R$ must be the last parameter in a formal parameter list.  If a  type $T$ is specified for $R$, it signifies that the type of $R$ is $T[]$.
 
 %\begin{grammar}
 %restFormalParameter:
@@ -735,11 +735,11 @@
 \begin{grammar}
 {\bf defaultFormalParameter:}
       normalFormalParameter ('=' expression)?
-    .   
-        
+    .
+
 {\bf defaultNamedParameter:}
       normalFormalParameter ( `{\escapegrammar :}' expression)?
-    .   
+    .
 \end{grammar}
 
 \LMHash{}
@@ -750,7 +750,7 @@
 
 \rationale{
 The need for this  restriction is a direct consequence of the fact that naming and privacy are not orthogonal.
-If we allowed named parameters to begin with an underscore, they would be considered private and inaccessible to callers from outside the library where it was defined. If a method outside the library overrode a method with a private optional name, it would not be a subtype of the original method. The static checker would of course flag such situations, but the consequence would be that adding a private named formal would break clients outside the library in a way they could not easily correct. 
+If we allowed named parameters to begin with an underscore, they would be considered private and inaccessible to callers from outside the library where it was defined. If a method outside the library overrode a method with a private optional name, it would not be a subtype of the original method. The static checker would of course flag such situations, but the consequence would be that adding a private named formal would break clients outside the library in a way they could not easily correct.
 }
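
\commentary{
As a non-normative sketch (the declarations are invented for illustration), \code{pad} below declares an optional positional parameter with a default value, and \code{greet} declares a named parameter; a named parameter whose name begins with an underscore would be rejected by the rule above:
}

\begin{dartCode}
String pad(String s, [int width = 8]) =$>$ s.padRight(width);

String greet(String name, \{String greeting: 'Hello'\}) =$>$
    greeting + ', ' + name + '!';
\end{dartCode}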
 
 \subsection{Type of a Function}
@@ -769,7 +769,7 @@
 Let $F$ be a function with required formal parameters $T_1$ $p_1 \ldots, T_n$ $p_n$, return type $T_0$ and named optional parameters $T_{n+1}$ $p_{n+1}, \ldots, T_{n+k}$ $ p_{n+k}$. Then the type of $F$ is $(T_1 ,\ldots, T_n, \{T_{n+1}$ $p_{n+1}, \ldots, T_{n+k}$  $p_{n+k}\}) \rightarrow T_0$.
 
 \LMHash{}
-The run time type of a function object always implements the class \cd{Function}. 
+The run time type of a function object always implements the class \cd{Function}.
 
 \commentary{
 One cannot assume, based on the above, that  given a function \cd{f}, \cd{f.runtimeType} will actually be \cd{Function}, or that any two distinct function objects necessarily have the same runtime type.
@@ -785,10 +785,10 @@
 \LMLabel{externalFunctions}
 
 \LMHash{}
-An {\em external function} is a function whose body is provided separately from its declaration. An external function may be a top-level function (\ref{librariesAndScripts}), a method (\ref{instanceMethods}, \ref{staticMethods}), a getter (\ref{getters}), a setter (\ref{setters}) or a non-redirecting constructor (\ref{generativeConstructors}, \ref{factories}). External functions are introduced via the built-in identifier \EXTERNAL{}  (\ref{identifierReference}) followed by the function signature.  
+An {\em external function} is a function whose body is provided separately from its declaration. An external function may be a top-level function (\ref{librariesAndScripts}), a method (\ref{instanceMethods}, \ref{staticMethods}), a getter (\ref{getters}), a setter (\ref{setters}) or a non-redirecting constructor (\ref{generativeConstructors}, \ref{factories}). External functions are introduced via the built-in identifier \EXTERNAL{}  (\ref{identifierReference}) followed by the function signature.
 
 \rationale{
-External functions allow us to introduce  type information for code that is not statically known to the Dart compiler. 
+External functions allow us to introduce  type information for code that is not statically known to the Dart compiler.
 }
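
\commentary{
As a non-normative sketch (the declaration is invented for illustration), an external function supplies only its signature; its body is expected to be provided elsewhere, for example by the embedder or by native code:
}

\begin{dartCode}
external double parseDegrees(String source);
\end{dartCode}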
 
 \commentary{
@@ -811,10 +811,10 @@
 {\bf classDefinition:}
 metadata \ABSTRACT{}?  \CLASS{} identifier typeParameters? (superclass mixins?)? interfaces? \\
        `\{' (metadata classMemberDefinition)* `\}';
-       
+
 metadata \ABSTRACT{}?  \CLASS{} mixinApplicationClass
     .
-    
+
 {\bf mixins:}
   \WITH{} typeList
   .
@@ -860,7 +860,7 @@
 
 A class has several scopes:
 \begin{itemize}
-\item A {\em type-parameter scope}, which is empty if the class is not generic (\ref{generics}).  The enclosing scope of the type-parameter scope of a class is the enclosing scope of the class declaration. 
+\item A {\em type-parameter scope}, which is empty if the class is not generic (\ref{generics}).  The enclosing scope of the type-parameter scope of a class is the enclosing scope of the class declaration.
 \item A {\em static scope}. The enclosing scope of the static scope of a  class is the type parameter scope (\ref{generics}) of the class.
 \item  An {\em instance scope}.
 The enclosing scope of a class' instance scope is the class' static scope.
@@ -873,14 +873,14 @@
 
 \LMHash{}
 Every class has a single superclass  except class \code{Object} which has no superclass. A class may implement a number of interfaces
-%, either 
+%, either
 by declaring them in its implements clause  (\ref{superinterfaces}).
 % or via interface injection declarations (\ref{interfaceInjection}) outside the class declaration
 
 
 \LMHash{}
-An {\em abstract class} is 
-%either 
+An {\em abstract class} is
+%either
 a class that is explicitly declared with the  \ABSTRACT{}  modifier, either by means of a class declaration or via a type alias (\ref{typedef}) for a mixin application (\ref{mixinApplication}). A {\em concrete class} is a class that is not abstract.
 %, or a class that declares at least one abstract method  (\ref{abstractInstanceMembers}).
 
@@ -900,7 +900,7 @@
 \LMHash{}
  It is a compile-time error if a class declares two members of the same name.
  %, except that a getter and a setter may be declared with the same name provided both are instance members or both are static members.
-It is a compile-time error if a class has an instance member and a static member  with the same name. 
+It is a compile-time error if a class has an instance member and a static member  with the same name.
 % It is a compile-time error if a generic (\ref{generics}) class declares a member with the same name as one of its type parameters.
 
 \commentary{Here are simple examples, that illustrate the difference between ``has a member'' and ``declares a member''. For example, \code{B} {\em declares} one member named \code{f}, but {\em has} two such members. The rules of inheritance determine what members a class has.
@@ -915,11 +915,11 @@
 
 \CLASS{} B \EXTENDS{} A \{
   int i = 1; //  getter i and setter i= override versions from A
-  \STATIC{} j; // compile-time error: static getter \& setter conflict with 
+  \STATIC{} j; // compile-time error: static getter \& setter conflict with
   //instance getter \& setter
-  
+
   /* compile-time error: static method conflicts with instance method */
-  \STATIC{} f(x) =$>$ 3; 
+  \STATIC{} f(x) =$>$ 3;
 \}
 \end{dartCode}
 
@@ -935,7 +935,7 @@
 %make these warnings if possible
 
 \LMHash{}
-It is a static warning if an instance method $m_1$ overrides  (\ref{inheritanceAndOverriding}) an instance member $m_2$ and  $m_1$ has a greater number of required parameters than $m_2$. It is a static warning if an instance method $m_1$ overrides  an instance member $m_2$ and  $m_1$ has fewer positional parameters than $m_2$.  It is a static warning if an instance method $m_1$ overrides  an instance member $m_2$ and  $m_1$ does not declare all the named parameters declared by $m_2$. 
+It is a static warning if an instance method $m_1$ overrides  (\ref{inheritanceAndOverriding}) an instance member $m_2$ and  $m_1$ has a greater number of required parameters than $m_2$. It is a static warning if an instance method $m_1$ overrides  an instance member $m_2$ and  $m_1$ has fewer positional parameters than $m_2$.  It is a static warning if an instance method $m_1$ overrides  an instance member $m_2$ and  $m_1$ does not declare all the named parameters declared by $m_2$.
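
\commentary{
The following non-normative sketch (the names are invented for illustration) overrides without triggering these warnings: the overriding methods in \code{B} declare no additional required parameters, at least as many positional parameters, and all of the named parameters of the overridden methods:
}

\begin{dartCode}
class A \{
  draw(x, [scale]) \{\}
  fill(color, \{alpha\}) \{\}
\}

class B extends A \{
  draw(x, [scale, angle]) \{\}      // extra optional positional is allowed
  fill(color, \{alpha, blend\}) \{\}  // extra named parameter is allowed
\}
\end{dartCode}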
 
 % not quite right. It should be ok to override a method that requires N parameters with one that requires M < N but accepts the others as optional.
 
@@ -949,13 +949,13 @@
 \LMLabel{operators}
 
 \LMHash{}
-{\em Operators} are instance methods with special names. 
+{\em Operators} are instance methods with special names.
 
 \begin{grammar}
 {\bf operatorSignature:}
        returnType? \OPERATOR{} operator formalParameterList
        .
-       
+
  {\bf operator:}`\~{}';
       binaryOperator;
       `[' `]' ;
@@ -979,10 +979,10 @@
 
 
 \LMHash{}
-It is a compile-time error if the arity of the user-declared operator \code{[]=} is not 2. It is a compile-time error if the arity of a user-declared operator with one of the names:  \code{ $<$, $>$, $<$=, $>$=, ==, -, +,  \~{}/, /, *, \%, $|$, \^{}, \&, $<<$, $>>$, []} is not 1. It is a compile-time error if the arity of the user-declared operator  \code{-} is not 0 or 1. 
+It is a compile-time error if the arity of the user-declared operator \code{[]=} is not 2. It is a compile-time error if the arity of a user-declared operator with one of the names:  \code{ $<$, $>$, $<$=, $>$=, ==, -, +,  \~{}/, /, *, \%, $|$, \^{}, \&, $<<$, $>>$, []} is not 1. It is a compile-time error if the arity of the user-declared operator  \code{-} is not 0 or 1.
 
 \commentary{
-The \code{-} operator is unique in that two overloaded versions are permitted. If the operator has no arguments, it denotes unary minus. If it has an argument, it denotes binary subtraction. 
+The \code{-} operator is unique in that two overloaded versions are permitted. If the operator has no arguments, it denotes unary minus. If it has an argument, it denotes binary subtraction.
 }
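
\commentary{
As a non-normative sketch (the class is invented for illustration), \code{Vec} below declares both forms of \code{-} as well as \code{+}, respecting the arity rules above:
}

\begin{dartCode}
class Vec \{
  final int x, y;
  Vec(this.x, this.y);

  Vec operator -() =$>$ new Vec(-x, -y);                // unary minus, arity 0
  Vec operator -(Vec o) =$>$ new Vec(x - o.x, y - o.y);  // subtraction, arity 1
  Vec operator +(Vec o) =$>$ new Vec(x + o.x, y + o.y);
\}
\end{dartCode}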
 
 \LMHash{}
@@ -993,7 +993,7 @@
 }
 
 \LMHash{}
-It is a compile-time error if the arity of the user-declared operator  \code{ \~{}} is not 0. 
+It is a compile-time error if the arity of the user-declared operator  \code{ \~{}} is not 0.
 
 \LMHash{}
 It is a compile-time error to declare an optional parameter in an operator.
@@ -1004,7 +1004,7 @@
 % add rationale: return in []= methods will have no effect, a the expression always returns its second argument (the RHS of the assignment, for consistency with assignment in general). So it's best to enforce this by declaring the method to be void, even though the expression that uses it returns an object with the type of the RHS, as described in \ref{assignment}.
 
 
-\subsection{Getters} 
+\subsection{Getters}
 \LMLabel{getters}
 
 \LMHash{}
@@ -1012,7 +1012,7 @@
 
 \begin{grammar}
 {\bf getterSignature:}
-       returnType? \GET{} identifier 
+       returnType? \GET{} identifier
 .
 \end{grammar}
 
@@ -1033,13 +1033,13 @@
 It is a compile-time error if a class has both a getter and a method with the same name. This restriction holds regardless of whether the getter is defined explicitly or implicitly, or whether the getter or the method are inherited or not.
 
 \commentary{
-This implies that a getter can never override a method, and a method can never override a getter or field. 
+This implies that a getter can never override a method, and a method can never override a getter or field.
 }
 
 \LMHash{}
 It is a static warning if the return type of a getter is \VOID.
-It is a static warning if a getter $m_1$ overrides  (\ref{inheritanceAndOverriding}) a getter 
-$m_2$ and the type of $m_1$ is not a subtype of the type of $m_2$.   
+It is a static warning if a getter $m_1$ overrides  (\ref{inheritanceAndOverriding}) a getter
+$m_2$ and the type of $m_1$ is not a subtype of the type of $m_2$.
 
 \LMHash{}
 It is a static warning if a class  declares a static getter named $v$ and also has a non-static setter named $v=$. It is a static warning if a class $C$ declares an instance getter named $v$ and an accessible static member named $v$ or $v=$ is declared in a superclass of $C$. These warnings must be issued regardless of whether the getters or setters are declared explicitly or implicitly.
@@ -1076,7 +1076,7 @@
 
 \LMHash{}
 It is a static warning if a setter declares a return type other than \VOID{}.
-It is a static warning if a setter $m_1$ overrides  (\ref{inheritanceAndOverriding}) a setter $m_2$ and the type of $m_1$ is not a subtype of the type of $m_2$. It is a static warning if a class has a setter named $v=$ with argument type $T$ and a getter named $v$ with return type $S$, and $T$ may not be assigned to $S$. 
+It is a static warning if a setter $m_1$ overrides  (\ref{inheritanceAndOverriding}) a setter $m_2$ and the type of $m_1$ is not a subtype of the type of $m_2$. It is a static warning if a class has a setter named $v=$ with argument type $T$ and a getter named $v$ with return type $S$, and $T$ may not be assigned to $S$.
 
 \LMHash{}
 It is a static warning if a class  declares a static setter named $v=$ and also has a non-static member named $v$. It is a static warning if a class $C$ declares an instance setter named $v=$ and an accessible static member named $v=$ or $v$ is declared in a superclass of $C$.
@@ -1114,7 +1114,7 @@
 \LMHash{}
 It is a static warning if an abstract member $m$ is declared or inherited in a concrete class $C$ unless:
 \begin{itemize}
-\item  $m$ overrides a concrete member, or 
+\item  $m$ overrides a concrete member, or
 \item $C$ has a \cd{noSuchMethod()} method distinct from the one declared in class \cd{Object}.
 \end{itemize}
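
\commentary{
As a non-normative sketch of the second escape hatch above (the names are invented for illustration), the concrete class \code{LoggingStub} inherits the abstract members of \code{Resource}, but no static warning is given because it declares its own \code{noSuchMethod()}:
}

\begin{dartCode}
abstract class Resource \{
  void open();   // abstract: no body
  void close();
\}

class LoggingStub extends Resource \{
  noSuchMethod(Invocation i) =$>$ null;  // invoked when open() or close() is called
\}
\end{dartCode}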
 
@@ -1150,20 +1150,20 @@
 The notion of a constant instance variable is subtle and confusing to programmers.
 An instance variable is intended to vary per instance. A constant instance variable would have the same value for all instances, and as such is already a dubious idea.
 
-The language could interpret const instance variable declarations as instance getters that return a constant.  However, a constant instance variable could not be treated as a true compile time constant, as its getter would be subject to overriding.   
+The language could interpret const instance variable declarations as instance getters that return a constant.  However, a constant instance variable could not be treated as a true compile time constant, as its getter would be subject to overriding.
 
 Given that the value does not depend on  the instance, it is better to use a static class variable.
 An instance getter for it can always be defined manually if desired.
 }
 
 
-%An instance variable declaration of one of the forms \code{$T$ $v$;}, \code{\FINAL{} $T$ $v$;} ,  \code{$T$ $v$ = $e$;} ,  \code{\CONST{} $T$ $v$ = $e$;} or \code{\FINAL{} $T$ $v$ = $e$;}  always induces an implicit getter function (\ref{getters}) with signature 
+%An instance variable declaration of one of the forms \code{$T$ $v$;}, \code{\FINAL{} $T$ $v$;} ,  \code{$T$ $v$ = $e$;} ,  \code{\CONST{} $T$ $v$ = $e$;} or \code{\FINAL{} $T$ $v$ = $e$;}  always induces an implicit getter function (\ref{getters}) with signature
 
 %$T$ \GET{} $v$
 
 %whose invocation evaluates to the value stored in $v$.
 
-%An instance variable declaration  of one of the forms \code{\VAR{} $v$;}, \code{\FINAL{} $v$;}, \code{\VAR{} $v$ = $e$;} ,  \code{\CONST{} $v$ = $e$;} or \code{\FINAL{} $v$ = $e$;}   always induces an implicit getter function with signature 
+%An instance variable declaration  of one of the forms \code{\VAR{} $v$;}, \code{\FINAL{} $v$;}, \code{\VAR{} $v$ = $e$;} ,  \code{\CONST{} $v$ = $e$;} or \code{\FINAL{} $v$ = $e$;}   always induces an implicit getter function with signature
 
 %\GET{} $v$
 
@@ -1171,13 +1171,13 @@
 
 %\commentary{Getters are introduced for all instance and static variables (\ref{staticVariables}), regardless of whether they are const/final or not.}
 
-%A non-final instance variable declaration  of the form \code{$T$ $v$;} or the form  \code{$T$ $v$ = $e$;}   always induces an implicit setter function (\ref{setters}) with signature 
+%A non-final instance variable declaration  of the form \code{$T$ $v$;} or the form  \code{$T$ $v$ = $e$;}   always induces an implicit setter function (\ref{setters}) with signature
 
 %\VOID{} \SET{} $v=(T$ $x)$
 
 %whose execution sets the value of $v$ to the incoming argument $x$.
 
-%A non-final instance variable declaration  of the form \code{\VAR{} $v$;} or the form  \code{\VAR{} $v$ = $e$;}   always induces an implicit setter function with signature 
+%A non-final instance variable declaration  of the form \code{\VAR{} $v$;} or the form  \code{\VAR{} $v$ = $e$;}   always induces an implicit setter function with signature
 
 %\SET{} $v=(x)$
 
@@ -1190,15 +1190,15 @@
 \LMLabel{constructors}
 
 \LMHash{}
-A {\em constructor} is a special function that is used in instance creation expressions (\ref{instanceCreation}) to produce objects. Constructors may be generative (\ref{generativeConstructors}) or they may be factories (\ref{factories}). 
+A {\em constructor} is a special function that is used in instance creation expressions (\ref{instanceCreation}) to produce objects. Constructors may be generative (\ref{generativeConstructors}) or they may be factories (\ref{factories}).
 
 \LMHash{}
-A {\em constructor name} always begins with the name of its immediately enclosing class, and may optionally be followed by a dot and an identifier $id$. It is a compile-time error if $id$ is the name of a member  declared in the immediately enclosing class. It is a compile-time error if the name of a  constructor is not a constructor name. 
+A {\em constructor name} always begins with the name of its immediately enclosing class, and may optionally be followed by a dot and an identifier $id$. It is a compile-time error if $id$ is the name of a member  declared in the immediately enclosing class. It is a compile-time error if the name of a  constructor is not a constructor name.
 
 
 % In what scope do constructors go? The simple names of named constructors go  in the static scope of the class. Unnamed ones go nowhere, but we use the class name to refer to them; the class name could also in the static scope of the class as well to prevent weird errors, or we could ban it explicitly and avoiding duplication. Similarly, the instance scope could contain the constructor names and class name, or we could have special rules to prevent collisions between instance members and constructors or the class.
 
-% The enclosing scope of a generative constructor is the instance scope of the class in which it is declared (but what about redirecting?) 
+% The enclosing scope of a generative constructor is the instance scope of the class in which it is declared (but what about redirecting?)
 
 \LMHash{}
 Iff no constructor is specified for a class $C$, it implicitly has a default constructor \code{C() : \SUPER{}() \{\}}, unless $C$ is class \code{Object}.
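
\commentary{
As a non-normative sketch (the names are invented for illustration), \code{Point} below declares an unnamed constructor and a named constructor \code{Point.origin}; a constructor named \code{Point.x} would be a compile-time error because \code{x} is the name of a member of \code{Point}:
}

\begin{dartCode}
class Point \{
  num x, y;
  Point(this.x, this.y);
  Point.origin() : x = 0, y = 0;
\}
\end{dartCode}
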
@@ -1216,7 +1216,7 @@
  \end{grammar}
 
 \LMHash{}
-A {\em constructor parameter list} is a parenthesized, comma-separated list of formal constructor parameters. A {\em formal constructor parameter} is either a formal parameter (\ref{formalParameters}) or an initializing formal. An {\em initializing formal} has the form \code{\THIS{}.id}, where \code{id} is the name of an instance variable of the immediately enclosing class.  It is a compile-time error if \code{id} is not an instance variable of the immediately enclosing class. It is a compile-time error if an initializing formal is used by a function other than a non-redirecting generative constructor. 
+A {\em constructor parameter list} is a parenthesized, comma-separated list of formal constructor parameters. A {\em formal constructor parameter} is either a formal parameter (\ref{formalParameters}) or an initializing formal. An {\em initializing formal} has the form \code{\THIS{}.id}, where \code{id} is the name of an instance variable of the immediately enclosing class.  It is a compile-time error if \code{id} is not an instance variable of the immediately enclosing class. It is a compile-time error if an initializing formal is used by a function other than a non-redirecting generative constructor.
 
 \LMHash{}
 If an explicit type is attached to the initializing formal, that is its static type. Otherwise, the type of an initializing formal named \code{id} is $T_{id}$, where $T_{id}$ is the type of the field named \code{id} in the immediately enclosing class. It is a static warning if the static type of \code{id} is not assignable to $T_{id}$.
@@ -1242,14 +1242,14 @@
 \commentary{is legal, and has the same effect as}
 
 \begin{dartCode}
-class A \{ 
+class A \{
   int x;
   A([int x]): this.x = x;
 \}
 \end{dartCode}
 
 \LMHash{}
-A {\em fresh instance} is an instance whose identity  is distinct from any previously allocated instance of its class. A generative constructor always operates on a fresh instance of its immediately enclosing class. 
+A {\em fresh instance} is an instance whose identity  is distinct from any previously allocated instance of its class. A generative constructor always operates on a fresh instance of its immediately enclosing class.
 
 \commentary{
 The above holds if the constructor is actually run, as it is by \NEW{}. If a constructor $c$ is referenced by \CONST{}, $c$ may not be run; instead, a canonical object may be looked up. See the section on instance creation (\ref{instanceCreation}).
@@ -1284,7 +1284,7 @@
 \begin{itemize}
 \item A {\em superinitializer} identifies a {\em superconstructor} - that is, a specific  constructor of the superclass.  Execution of the superinitializer causes the initializer list of the superconstructor to be executed.
 
-\item An {\em instance variable initializer} assigns a value to an individual instance variable. 
+\item An {\em instance variable initializer} assigns a value to an individual instance variable.
 \end{itemize}
 
 \begin{grammar}
@@ -1297,7 +1297,7 @@
       \SUPER{} `{\escapegrammar .}' identifier arguments;
      fieldInitializer
     .
-    
+
    {\bf  fieldInitializer:}
       (\THIS{} `{\escapegrammar .}')? identifier `=' conditionalExpression cascadeSection*
     .
@@ -1306,35 +1306,35 @@
 
 \LMHash{}
 Let $k$ be a generative constructor.  Then $k$ may include at most one  superinitializer in its initializer list or a compile-time error occurs. If no superinitializer is provided, an implicit superinitializer of the form \SUPER{}() is added at the end of $k$'s initializer list, unless the enclosing class is class \code{Object}. It is a compile-time error if more than one initializer corresponding to a given instance variable appears in $k$'s initializer list. It is a compile-time error if $k$'s initializer list contains an initializer for a variable that is initialized by means of an initializing formal of $k$. % It is a compile-time error if  $k$'s initializer list contains an initializer for a final variable $f$ whose declaration includes an initialization expression.
- 
+
 \LMHash{}
 Each final instance variable $f$ declared in the immediately enclosing class must have an initializer in $k$'s initializer list unless it has already been initialized by one of the following means:
  \begin{itemize}
  \item Initialization at the declaration of $f$.
  \item Initialization by  means of an initializing formal of $k$.
  \end{itemize}
- 
+
 or a static warning occurs. It is a compile-time error if $k$'s initializer list contains an initializer for a variable that is not an instance variable declared in the immediately surrounding class.
 
- 
-\commentary{The  initializer list may of course contain an initializer for any  instance variable declared by the immediately surrounding class, even if it is not final. 
+
+\commentary{The  initializer list may of course contain an initializer for any  instance variable declared by the immediately surrounding class, even if it is not final.
 }
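
\commentary{
As a non-normative sketch of these rules (the classes are invented for illustration), every final instance variable of \code{Circle} below is initialized at its declaration, by an initializing formal, or by an entry in the initializer list, and the list ends with an explicit superinitializer:
}

\begin{dartCode}
class Shape \{
  final String label;
  Shape(this.label);
\}

class Circle extends Shape \{
  final num scale = 1;   // initialized at its declaration
  final num radius;      // initialized by an initializing formal
  final num area;        // initialized in the initializer list
  Circle(this.radius)
      : area = 3.14159 * radius * radius,
        super('circle');
\}
\end{dartCode}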
 
 \LMHash{}
- It is a compile-time error if a  generative constructor of class \code{Object} includes a superinitializer. 
+ It is a compile-time error if a  generative constructor of class \code{Object} includes a superinitializer.
 
 \LMHash{}
-Execution of a generative constructor $k$ is always done with respect to a set of bindings for its formal parameters and with  \THIS{} bound to a fresh instance $i$ and the type parameters of the immediately enclosing class bound to a set of actual type arguments $V_1, \ldots , V_m$. 
+Execution of a generative constructor $k$ is always done with respect to a set of bindings for its formal parameters and with  \THIS{} bound to a fresh instance $i$ and the type parameters of the immediately enclosing class bound to a set of actual type arguments $V_1, \ldots , V_m$.
 
\commentary{These bindings are usually determined by the instance creation expression that invoked the constructor (directly or indirectly). However, they may also be determined by a reflective call.
 }
 
 \LMHash{}
-If $k$ is redirecting then its redirect clause has the form 
+If $k$ is redirecting then its redirect clause has the form
 
-\THIS{}$.g(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\THIS{}$.g(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
-where $g$ identifies another  generative constructor of the immediately surrounding class. Then execution of $k$ proceeds by evaluating the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$, and then executing $g$ with respect to the bindings resulting from the evaluation of $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ and with  \THIS{} bound to $i$ and the type parameters of the immediately enclosing class bound to $V_1, \ldots , V_m$. 
+where $g$ identifies another  generative constructor of the immediately surrounding class. Then execution of $k$ proceeds by evaluating the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$, and then executing $g$ with respect to the bindings resulting from the evaluation of $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ and with  \THIS{} bound to $i$ and the type parameters of the immediately enclosing class bound to $V_1, \ldots , V_m$.
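
\commentary{
As a non-normative sketch (the names are invented for illustration), the named constructor \code{Interval.empty} below is redirecting; running it evaluates its argument list and then executes the constructor it redirects to, on the same fresh instance:
}

\begin{dartCode}
class Interval \{
  final int start, end;
  Interval(this.start, this.end);
  Interval.empty(int at) : this(at, at);  // redirects to Interval(at, at)
\}
\end{dartCode}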
 
 \LMHash{}
 Otherwise, execution  proceeds as follows:
@@ -1342,7 +1342,7 @@
 \LMHash{}
 %First, a fresh instance (\ref{generativeConstructors}) $i$ of the immediately enclosing class is allocated.  Next, the instance variable declarations of the immediately enclosing class are visited in the order they appear in the program text. For each such declaration $d$, if $d$ has the form  \code{$finalConstVarOrType$ $v$ = $e$; } then the instance variable $v$ of $i$ is bound to the value of $e$ (which is necessarily a compile-time constant).
 %Next, a
-Any initializing formals declared in $k$'s parameter list are executed in the order they appear in the program text.  
+Any initializing formals declared in $k$'s parameter list are executed in the order they appear in the program text.
 % In fact, this order is unobservable; this could be done any time prior to running the body, since
 % these only effect \THIS{}.
 Then, $k$'s  initializers are executed in the order they appear in the program.
@@ -1353,7 +1353,7 @@
After all the initializers  have completed, the body of $k$ is executed  in a scope where \THIS{} is bound to $i$. Execution of the body begins with execution of the body of the superconstructor  with \THIS{} bound to $i$, the type parameters of the immediately enclosing class bound to a set of actual type arguments $V_1, \ldots , V_m$ and the formal parameter bindings determined by the argument list of the superinitializer of $k$.
 
 \rationale{
-This process ensures that no uninitialized final field is ever seen by code. Note that \THIS{} is not in scope on the right hand side of an initializer (see \ref{this}) so no instance method can execute during initialization: an instance method cannot be directly invoked, nor can  \THIS{} be passed into any other code being invoked in the initializer. 
+This process ensures that no uninitialized final field is ever seen by code. Note that \THIS{} is not in scope on the right hand side of an initializer (see \ref{this}) so no instance method can execute during initialization: an instance method cannot be directly invoked, nor can  \THIS{} be passed into any other code being invoked in the initializer.
 }
 
 \LMHash{}
@@ -1363,14 +1363,14 @@
 First, the expression $e$ is evaluated to an object $o$. Then, the instance variable $v$ of the object denoted by \THIS{} is bound to $o$, unless $v$ is a final variable that has already been initialized, in which case a runtime error occurs. In checked mode, it is a dynamic type error if $o$ is not \NULL{} and the interface of the class of $o$ is not a subtype of the actual type of the field $v$.
 
 \LMHash{}
-An initializer of the form \code{$v$ = $e$} is equivalent to an initializer of the form  \code{\THIS{}.$v$ = $e$}. 
+An initializer of the form \code{$v$ = $e$} is equivalent to an initializer of the form  \code{\THIS{}.$v$ = $e$}.
 
 \LMHash{}
-Execution of a superinitializer of the form 
+Execution of a superinitializer of the form
 
-\SUPER{}$(a_1, \ldots, a_n,  x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k})$ 
+\SUPER{}$(a_1, \ldots, a_n,  x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k})$
 
-(respectively  \SUPER{}$.id(a_1, \ldots, a_n, x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k})$ 
+(respectively  \SUPER{}$.id(a_1, \ldots, a_n, x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k})$)
 
 proceeds as follows:
 
@@ -1390,7 +1390,7 @@
 \LMLabel{factories}
 
 \LMHash{}
-A {\em factory} is a constructor prefaced by the built-in identifier  (\ref{identifierReference})   \FACTORY{}. 
+A {\em factory} is a constructor prefaced by the built-in identifier  (\ref{identifierReference})   \FACTORY{}.
 
 \begin{grammar}
 {\bf factoryConstructorSignature:}
@@ -1405,7 +1405,7 @@
The {\em return type} of a factory whose signature is of the form \FACTORY{} $M$ or the form \FACTORY{} $M.id$ is $M$ if $M$ is not a generic type; otherwise the return type is  $M <T_1, \ldots, T_n>$ where $T_1, \ldots, T_n$ are the type parameters of the enclosing class.
 
 \LMHash{}
-It is a compile-time error if $M$ is not the name of the immediately enclosing class. 
+It is a compile-time error if $M$ is not the name of the immediately enclosing class.
 
 \LMHash{}
 In checked mode, it is a dynamic type error if a factory returns a non-null object whose type is not a subtype of its actual (\ref{actualTypeOfADeclaration}) return type.
@@ -1413,7 +1413,7 @@
 \rationale{It seems useless to allow a factory to return null. But it is more uniform to allow it, as the rules currently do.}
 
 \rationale{Factories address classic weaknesses associated with constructors in other languages.
-Factories can produce instances that are not freshly allocated: they can come from a cache. Likewise, factories can return instances of different classes. 
+Factories can produce instances that are not freshly allocated: they can come from a cache. Likewise, factories can return instances of different classes.
 
 }
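
\commentary{
As a non-normative sketch of this rationale (the names are invented for illustration), the factory below returns a cached instance when one already exists for the given name, rather than a freshly allocated one:
}

\begin{dartCode}
class Logger \{
  final String name;
  static final Map$<$String, Logger$>$ cache = $<$String, Logger$>$\{\};

  factory Logger(String name) =$>$
      cache.putIfAbsent(name, () =$>$ new Logger.raw(name));

  Logger.raw(this.name);
\}
\end{dartCode}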
 
@@ -1430,7 +1430,7 @@
 \end{grammar}
 
 \LMHash{}
-Calling a redirecting factory constructor $k$ causes the constructor $k^\prime$ denoted by $type$ (respectively, $type.identifier$) to be called with the actual arguments passed to $k$, and returns the result of $k^\prime$ as the result of $k$.  The resulting constructor call is governed by the same rules as an instance creation expression using \NEW{} (\ref{instanceCreation}). 
+Calling a redirecting factory constructor $k$ causes the constructor $k^\prime$ denoted by $type$ (respectively, $type.identifier$) to be called with the actual arguments passed to $k$, and returns the result of $k^\prime$ as the result of $k$.  The resulting constructor call is governed by the same rules as an instance creation expression using \NEW{} (\ref{instanceCreation}).
 
 \commentary{
It follows that if $type$ or $type.id$ is not defined, or does not refer to a class or constructor, a dynamic error occurs, as with any other undefined constructor call. The same holds if $k$ is called with fewer required parameters or more positional parameters than $k^\prime$ expects, or if $k$  is called with a named parameter that is not declared by $k^\prime$.
@@ -1448,14 +1448,14 @@
 
 \rationale{
 If a redirecting factory $F_1$ redirects to another redirecting factory $F_2$ and $F_2$ then redirects to $F_1$, then both $F_1$ and $F_2$ are ill-defined. Such cycles are therefore illegal.
-} 
+}
 
 
 \LMHash{}
 It is a static warning if $type$ does not denote a class accessible in the current scope; if $type$ does denote such a class $C$ it is a static warning if the referenced constructor (be it $type$ or $type.id$) is not a constructor of $C$.
 
 \commentary{
-Note that it is not possible to modify the arguments being passed to  $k'$.  
+Note that it is not possible to modify the arguments being passed to  $k'$.
 }
 % but we have the same issue with other redirecting constructors, no?)
 \rationale{
@@ -1485,7 +1485,7 @@
 %}
 
 %\CLASS{} A$<$T$>${
-%  \FACTORY{} A.idw(w) $=>$ F$<$T$>$.idw(w); 
+%  \FACTORY{} A.idw(w) $=>$ F$<$T$>$.idw(w);
 %// illegal - cannot pass type parameter to static method
 %  \FACTORY{} A.idx(x) $=> \NEW{} $F$<$T$>$.idx(x); // works, but allocates a gratuitous instance of F
 %  \FACTORY{} A.idy(y) = Y$<$T$>$; // works
@@ -1511,7 +1511,7 @@
 \LMLabel{constantConstructors}
 
 \LMHash{}
-A {\em constant constructor} may be used to create compile-time constant  (\ref{constants}) objects. A constant constructor is prefixed by the reserved word \CONST{}. 
+A {\em constant constructor} may be used to create compile-time constant  (\ref{constants}) objects. A constant constructor is prefixed by the reserved word \CONST{}.
 
 \begin{grammar}
 {\bf constantConstructorSignature:}
@@ -1527,7 +1527,7 @@
 \commentary{All the work of a constant constructor must be handled via its initializers.}
 
 \LMHash{}
-It is a compile-time error if a constant constructor is declared by a class that has a non-final instance variable.  
+It is a compile-time error if a constant constructor is declared by a class that has a non-final instance variable.
 
 \commentary{
 The above refers to both locally declared and inherited instance variables.
@@ -1544,7 +1544,7 @@
 The superinitializer that appears, explicitly or implicitly, in the initializer list of a constant constructor must specify a constant constructor of the superclass of the immediately enclosing class or a compile-time error occurs.
 
 \LMHash{}
-Any expression that appears within the initializer list of a constant constructor must be a potentially constant expression, or a compile-time error occurs. 
+Any expression that appears within the initializer list of a constant constructor must be a potentially constant expression, or a compile-time error occurs.
 
 \LMHash{}
A {\em potentially constant expression} is an expression $e$ that would be a valid constant expression if all formal parameters of $e$'s immediately enclosing constant constructor were treated as compile-time constants that were guaranteed to evaluate to an integer, boolean or string value as required by their immediately enclosing superexpression, {\em and} where $e$ is also a valid expression if all the formal parameters are treated as non-constant variables.
@@ -1564,9 +1564,9 @@
 
 
 \commentary{
-The difference between a potentially constant expression and a compile-time constant expression (\ref{const}) deserves some explanation. 
+The difference between a potentially constant expression and a compile-time constant expression (\ref{const}) deserves some explanation.
 
-The key issue is whether one treats the formal parameters of a constructor as compile-time constants. 
+The key issue is whether one treats the formal parameters of a constructor as compile-time constants.
 
 If a constant constructor is invoked from a constant object expression, the actual arguments will be required to be compile-time constants. Therefore, if we were assured that constant constructors were always invoked from constant object expressions, we could assume that the formal parameters of a constructor were compile-time constants.
 
@@ -1579,7 +1579,7 @@
 \CLASS{} C \{
   \FINAL{} x; \FINAL{} y; \FINAL{} z;
   \CONST{} C(p, q): x = q, y = p + 100, z = p + q;
-  % what about 
+  % what about
  %  \CONST{} C(p, q): x = q, y = p + 100, z = p + 'foo';
  % perhaps moot. Current spec says that would be ok; type checker can worry, as can execution, which is at compile time anyway
 \}
@@ -1627,15 +1627,15 @@
 \rationale{
 Inheritance of static methods has little utility in Dart. Static methods cannot be overridden. Any required static function can be obtained from its declaring library, and there is no need to bring it into scope via inheritance. Experience shows that developers are confused by the idea of inherited methods that are not instance methods.
 
-Of course, the entire notion of static methods is debatable, but it is retained here because so many programmers are familiar with it. Dart static methods may be seen as functions of the enclosing library. 
+Of course, the entire notion of static methods is debatable, but it is retained here because so many programmers are familiar with it. Dart static methods may be seen as functions of the enclosing library.
 }
 
 \LMHash{}
-It is a static warning if a class $C$ declares a static method named $n$ and has a setter named $n=$. 
+It is a static warning if a class $C$ declares a static method named $n$ and has a setter named $n=$.
 %It is a static warning if a class has a static method with the same name as a static member of one of its superclasses.
 
 %\rationale{
-%This last restriction makes classes more brittle with respect to changes in the class hierarchy. It stems from a general observation that shadowing of names in the same scope is questionable and should elicit a warning. 
+%This last restriction makes classes more brittle with respect to changes in the class hierarchy. It stems from a general observation that shadowing of names in the same scope is questionable and should elicit a warning.
 %}
 %\commentary{
 %There is no hiding of static methods, or of static variables.
@@ -1648,26 +1648,26 @@
 \LMHash{}
 {\em Static variables} are variables whose declarations are immediately contained within a class declaration and that are declared \STATIC{}. The static variables of a class $C$ are those static variables declared by $C$.
 
-%A static variable declaration  of one of the forms \code{\STATIC{} $T$ $v$;},  \code{\STATIC{} $T$ $v$ = $e$;} ,  \code{\STATIC{} \CONST{} $T$ $v$ = $e$;}  or \code{\STATIC{} \FINAL{} $T$ $v$ = $e$;} always induces an implicit static getter function (\ref{getters}) with signature 
+%A static variable declaration  of one of the forms \code{\STATIC{} $T$ $v$;},  \code{\STATIC{} $T$ $v$ = $e$;} ,  \code{\STATIC{} \CONST{} $T$ $v$ = $e$;}  or \code{\STATIC{} \FINAL{} $T$ $v$ = $e$;} always induces an implicit static getter function (\ref{getters}) with signature
 
 %\STATIC{} $T$ \GET{} $v$
 
 %whose invocation evaluates as described below (\ref{evaluationOfStaticVariableGetters}).%to the value stored in $v$.
 
 
-%A static variable declaration  of one of the forms \code{\STATIC{} \VAR{} $v$;},  \code{\STATIC{} \VAR{} $v$ = $e$;} ,  \code{\STATIC{} \CONST{} $v$ = $e$;} or \code{\STATIC{} \FINAL{}  $v$ = $e$;}  always induces an implicit static getter function with signature 
+%A static variable declaration  of one of the forms \code{\STATIC{} \VAR{} $v$;},  \code{\STATIC{} \VAR{} $v$ = $e$;} ,  \code{\STATIC{} \CONST{} $v$ = $e$;} or \code{\STATIC{} \FINAL{}  $v$ = $e$;}  always induces an implicit static getter function with signature
 
 %\STATIC{} \GET{} $v$
 
 %whose  invocation evaluates as described below (\ref{evaluationOfStaticVariableGetters}).%to the value stored in $v$.
 
-%A non-final static variable declaration  of the form \code{\STATIC{} $T$ $v$;} or the form  \code{\STATIC{} $T$ $v$ = $e$;}   always induces an implicit static setter function (\ref{setters}) with signature 
+%A non-final static variable declaration  of the form \code{\STATIC{} $T$ $v$;} or the form  \code{\STATIC{} $T$ $v$ = $e$;}   always induces an implicit static setter function (\ref{setters}) with signature
 
 %\STATIC{} \VOID{} \SET{} $v=(T$ $x)$
 
 %whose execution sets the value of $v$ to the incoming argument $x$.
 
-%A static variable declaration  of the form \code{\STATIC{} \VAR{} $v$;} or the form  \code{\STATIC{} \VAR{} $v$ = $e$;}   always induces an implicit static setter function with signature 
+%A static variable declaration  of the form \code{\STATIC{} \VAR{} $v$;} or the form  \code{\STATIC{} \VAR{} $v$ = $e$;}   always induces an implicit static setter function with signature
 
 %\STATIC{} \SET{} $v=(x)$
 
@@ -1678,12 +1678,12 @@
 %\subsubsection{Evaluation of Implicit Static Variable Getters}
 %\LMLabel{evaluationOfStaticVariableGetters}
 
-%Let $d$ be the declaration of a static variable $v$. The implicit getter method of $v$ executes as follows: 
+%Let $d$ be the declaration of a static variable $v$. The implicit getter method of $v$ executes as follows:
 %\begin{itemize}
-%\item If $d$ is of one of the forms \code{\STATIC{} \VAR{} $v$ = $e$;} , \code{\STATIC{} $T$ $v$ = $e$; }, \code{\STATIC{} \FINAL{} $v$ = $e$; } or \code{\STATIC{} \FINAL{} $T$ $v$ = $e$;} and no value has yet been stored into $v$ then the initializer expression $e$ is evaluated. If, during the evaluation of $e$, the getter for $v$ is referenced, a \code{CyclicInitializationError} is thrown. If the evaluation succeeded yielding an object $o$, let $r = o$, otherwise let $r = \NULL{}$. In any case, $r$ is stored into $v$. The result of executing the getter is $r$. 
+%\item If $d$ is of one of the forms \code{\STATIC{} \VAR{} $v$ = $e$;} , \code{\STATIC{} $T$ $v$ = $e$; }, \code{\STATIC{} \FINAL{} $v$ = $e$; } or \code{\STATIC{} \FINAL{} $T$ $v$ = $e$;} and no value has yet been stored into $v$ then the initializer expression $e$ is evaluated. If, during the evaluation of $e$, the getter for $v$ is referenced, a \code{CyclicInitializationError} is thrown. If the evaluation succeeded yielding an object $o$, let $r = o$, otherwise let $r = \NULL{}$. In any case, $r$ is stored into $v$. The result of executing the getter is $r$.
 %\item  If $d$ is of one of the forms \code{\STATIC{} \CONST{} $v$ = $e$; } or \code{\STATIC{} \CONST{} $T$ $v$ = $e$;} the result of the getter is the value of the compile time constant $e$.
 %Otherwise
-%\item The result of executing the getter method is the value stored in $v$.  
+%\item The result of executing the getter method is the value stored in $v$.
 %\end{itemize}
 
 
@@ -1695,7 +1695,7 @@
 The superclass of a class $C$ that has a with clause \code{\WITH{} $M_1, \ldots, M_k$} and an extends clause \code{\EXTENDS{} S} is the application of mixin (\ref{mixins}) $M_k* \cdots * M_1$  to S.  If no \WITH{} clause is specified then  the \EXTENDS{} clause of a class $C$ specifies its superclass. If no \EXTENDS{} clause is specified, then either:
 \begin{itemize}
 \item $C$ is \code{Object}, which has no superclass. OR
-\item Class $C$ is  deemed to have an \EXTENDS{} clause of the form \code{\EXTENDS{} Object}, and the rules above apply. 
+\item Class $C$ is  deemed to have an \EXTENDS{} clause of the form \code{\EXTENDS{} Object}, and the rules above apply.
 \end{itemize}
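
\commentary{
As a non-normative sketch of the rule above (the names are invented for illustration), the superclass of \code{C} below is the application of the mixin composite $M_2 * M_1$ to \code{S}, that is, the class denoted by \code{S with M1, M2}:
}

\begin{dartCode}
class S \{\}
class M1 \{\}
class M2 \{\}

class C extends S with M1, M2 \{\}
\end{dartCode}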
 
 \LMHash{}
@@ -1710,15 +1710,15 @@
 %The superclass clause of a class C is processed within the enclosing scope of the static scope of C.
 %\commentary{
 %This means that in a generic class, the type parameters of the generic are available in the superclass clause.
-%} 
+%}
 
 \LMHash{}
 The scope of the \EXTENDS{} and \WITH{} clauses of a class $C$ is the type-parameter scope of $C$.
 
 \LMHash{}
-%It is a compile-time error if  the \EXTENDS{} clause of a class $C$ includes a type expression that does not denote a class available in the lexical scope of $C$. 
+%It is a compile-time error if  the \EXTENDS{} clause of a class $C$ includes a type expression that does not denote a class available in the lexical scope of $C$.
 It is a compile-time error if  the \EXTENDS{} clause of a class $C$ specifies an enumerated type (\ref{enums}), a malformed  type or a deferred type (\ref{staticTypes}) as a superclass.
-% too strict? Do we e want extends List<Undeclared> to work as List<dynamic>? 
+% too strict? Do we want extends List<Undeclared> to work as List<dynamic>?
 
 \commentary{ The type parameters of a generic class are available in the lexical scope of the superclass clause, potentially shadowing classes in the surrounding scope. The following code is therefore illegal and should cause a compile-time error:
 }
@@ -1727,7 +1727,7 @@
 class T \{\}
 
 /* Compilation error: Attempt to subclass a type parameter */
-class G$<$T$>$ extends T \{\} 
+class G$<$T$>$ extends T \{\}
 
 \end{dartCode}
 
@@ -1735,33 +1735,33 @@
 \LMHash{}
 A class $S$ is {\em a superclass} of a class $C$ iff either:
 \begin{itemize}
-\item $S$ is the superclass of $C$, or 
-\item $S$ is a superclass of a class $S^{\prime}$ and $S^{\prime}$ is a superclass of $C$. 
+\item $S$ is the superclass of $C$, or
+\item $S$ is a superclass of a class $S^{\prime}$ and $S^{\prime}$ is a superclass of $C$.
 \end{itemize}
 
 \LMHash{}
 It is a compile-time error if a class $C$ is a superclass of itself.
 
 
-    
- 
+
+
  \subsubsection{Inheritance and Overriding}
  \LMLabel{inheritanceAndOverriding}
 
 
-%A class $C$  {\em inherits} any accessible instance members of its superclass that are not overridden by members declared in $C$. 
+%A class $C$  {\em inherits} any accessible instance members of its superclass that are not overridden by members declared in $C$.
 
 \LMHash{}
 Let $C$ be a class,  let $A$ be a superclass of $C$, and let  $S_1 \ldots S_k$ be superclasses of $C$ that are also subclasses of $A$. $C$ {\em inherits} all accessible instance  members of  $A$ that have not been overridden by a declaration in $C$ or in at least one of $S_1 \ldots S_k$.
 
 \rationale {
 It would be  more attractive to give a purely local definition of inheritance, that depended only on the members of the direct superclass $S$. However, a class $C$ can inherit a member $m$ that  is not a member of its superclass  $S$. This can occur when the member $m$ is private
-to the library $L_1$ of $C$, whereas $S$ comes from a different library $L_2$, but 
+to the library $L_1$ of $C$, whereas $S$ comes from a different library $L_2$, but
 the superclass chain of $S$ includes a class declared in $L_1$.
 }
 
 \LMHash{}
-A class may override instance members that would otherwise have been inherited from its superclass. 
+A class may override instance members that would otherwise have been inherited from its superclass.
 
 \LMHash{}
 Let $C = S_0$ be a class declared in library $L$, and let $\{S_1 \ldots S_k\}$ be the set of all superclasses of $C$, where $S_i$ is the superclass of $S_{i-1}$ for $i \in 1 .. k$. Let $C$ declare a member $m$, and let  $m^\prime$ be a member of $S_j, j  \in 1 .. k$,  that has the same name as $m$, such that $m^\prime$ is accessible to $L$.  Then $m$ overrides $m^\prime$ if $m^\prime$ is not already overridden by a member of at least one of $S_1 \ldots S_{j-1}$ and neither $m$ nor $m^\prime$ are fields.
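
\commentary{
As a brief non-normative sketch (the names are invented for illustration), \code{Cat.speak} below overrides the inherited \code{Animal.speak}, whereas \code{Cat.purr} overrides nothing because \code{Animal} has no member of that name:
}

\begin{dartCode}
class Animal \{
  String speak() =$>$ '...';
\}

class Cat extends Animal \{
  String speak() =$>$ 'meow';  // overrides Animal.speak
  String purr() =$>$ 'prrr';   // overrides nothing
\}
\end{dartCode}
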
@@ -1774,10 +1774,10 @@
 }
 
 \LMHash{}
-Whether an override is legal or not is described elsewhere in this specification (see \ref{instanceMethods}, \ref{getters} and \ref{setters}). 
+Whether an override is legal or not is described elsewhere in this specification (see \ref{instanceMethods}, \ref{getters} and \ref{setters}).
 
 \commentary{For example getters may not legally override methods and vice versa. Setters never override methods or getters, and vice versa, because their names always differ.
-}  
+}
 
 \rationale{
 It is nevertheless convenient to define the override relation between members in this way, so that we can concisely describe the illegal cases.
@@ -1805,19 +1805,19 @@
 \item \label{typeSigAssignable}
If two members override each other, it is a static warning if their type signatures are not assignable to each other (\ref{instanceMethods}, \ref{getters}, \ref{setters}) (and since these are function types, this means the same as ``subtypes of each other'').
 \item \label{requiredParams}
-If two members override each other, it is a static warning if the overriding member has more required parameters  than the overridden one (\ref{instanceMethods}). 
+If two members override each other, it is a static warning if the overriding member has more required parameters  than the overridden one (\ref{instanceMethods}).
 \item  \label{optionalPositionals}
 If two members override each other, it is a static warning if the overriding member has fewer  positional parameters than the overridden one (\ref{instanceMethods}).
 \item  \label{namedParams}
 If two members override each other, it is a static warning if the overriding member does not have all the named parameters that the overridden one has (\ref{instanceMethods}).
 \item Setters, getters and operators never have optional parameters of any kind; it's a compile-time error (\ref{operators}, \ref{getters}, \ref{setters}).
 \item It is a compile-time error if a member has the same name as its enclosing class (\ref{classes}).
-\item A class has an implicit interface (\ref{classes}). 
+\item A class has an implicit interface (\ref{classes}).
 \item Superinterface members are not inherited by a class, but are inherited by its implicit interface. Interfaces have their own inheritance rules (\ref{interfaceInheritanceAndOverriding}).
 \item A member is abstract if it has no body and is not labeled \EXTERNAL{} (\ref{abstractInstanceMembers}, \ref{externalFunctions}).
 \item A class is abstract iff it is explicitly labeled \ABSTRACT{}.% or if it declares (not just inherits) an abstract member (\ref{classes}).
 \item It is a static warning if a concrete class has an abstract member (declared or inherited).
-\item It is a static warning and a dynamic error to call a non-factory constructor of an abstract class  (\ref{new}). 
+\item It is a static warning and a dynamic error to call a non-factory constructor of an abstract class  (\ref{new}).
 \item If a class defines an instance member named $m$, and any of its superinterfaces have a  member named $m$, the interface of the class overrides $m$.
 \item  An interface inherits all  members of its superinterfaces that are not overridden and not members of multiple superinterfaces.
 \item  If multiple superinterfaces of an interface define a member with the same name $m$, then at most one member is inherited. That member (if it exists) is the one whose type is a subtype of all the others. If there is no such member, then:
@@ -1826,7 +1826,7 @@
   \item  If possible the interface gets a member named $m$ that has the minimum number of required parameters among all the members in the superinterfaces, the maximal number of    positionals, and the superset of named parameters.  The types of these are all \DYNAMIC{}. If this is impossible then no member $m$ appears in the interface.
   \end{itemize}  (\ref{interfaceInheritanceAndOverriding})
 \item  Rule \ref{typeSigAssignable} applies to interfaces as well as classes  (\ref{interfaceInheritanceAndOverriding}).
-\item  It is a static warning if a concrete class does not have an implementation for a  method in any of its superinterfaces  unless it has a \cd{noSuchMethod} method (\ref{superinterfaces}). 
+\item  It is a static warning if a concrete class does not have an implementation for a  method in any of its superinterfaces  unless it has a \cd{noSuchMethod} method (\ref{superinterfaces}).
 \item The identifier of a named constructor cannot be the same as the name of a member declared (as opposed to inherited) in the same class (\ref{constructors}).
 \end{enumerate}
 }
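
 \commentary{
 A non-normative sketch of one of the illegal cases listed above: the overriding member below has more required parameters than the member it overrides, which is a static warning per rule \ref{requiredParams}:
 }

 \begin{dartCode}
 \CLASS{} A \{
   m(int x) \{ \}
 \}
 \CLASS{} B \EXTENDS{} A \{
   m(int x, int y) \{ \} // static warning: more required parameters than A.m
 \}
 \end{dartCode}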
@@ -1856,7 +1856,7 @@
 It is a compile-time error if the superclass of a class $C$ is specified as a superinterface of $C$.
 
 \rationale{
-One might argue that it is harmless to repeat a type in the superinterface list, so why make it an error? The issue is not so much that the situation described in program source is erroneous, but that it is pointless. As such, it is an indication that the programmer may very well have meant to say something else - and that is a mistake that should be called to her or his attention.  Nevertheless, we could simply issue a warning; and perhaps we should and will. That said, problems like these are local and easily corrected on the spot, so we feel justified in taking a harder line. 
+One might argue that it is harmless to repeat a type in the superinterface list, so why make it an error? The issue is not so much that the situation described in program source is erroneous, but that it is pointless. As such, it is an indication that the programmer may very well have meant to say something else - and that is a mistake that should be called to her or his attention.  Nevertheless, we could simply issue a warning; and perhaps we should and will. That said, problems like these are local and easily corrected on the spot, so we feel justified in taking a harder line.
 }
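
 \commentary{
 For instance (a non-normative sketch), the following declaration is rejected because \code{S} appears both as the superclass and as a superinterface of \code{C}:
 }

 \begin{dartCode}
 \CLASS{} S \{ \}
 \CLASS{} C \EXTENDS{} S \IMPLEMENTS{} S \{ \} // compile-time error
 \end{dartCode}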
 
 \LMHash{}
@@ -1864,7 +1864,7 @@
 
 \LMHash{}
 Let $C$ be a concrete class that does not have a \code{noSuchMethod()} method distinct from the one declared in class \cd{Object}.
-It is a static warning if the implicit interface of  $C$ includes an instance member $m$ of type $F$ and $C$ does not declare or inherit a corresponding non-abstract instance member $m$ of type $F'$ such that $F' <: F$. 
+It is a static warning if the implicit interface of  $C$ includes an instance member $m$ of type $F$ and $C$ does not declare or inherit a corresponding non-abstract instance member $m$ of type $F'$ such that $F' <: F$.
 
 \commentary{A class does not inherit members from its superinterfaces. However, its implicit interface does.
 }
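
 \commentary{
 A non-normative sketch: \code{C} below is concrete, declares no \code{noSuchMethod()} of its own, and neither declares nor inherits an implementation of \code{m}, so a static warning is issued:
 }

 \begin{dartCode}
 \ABSTRACT{} \CLASS{} I \{
   m();
 \}
 \CLASS{} C \IMPLEMENTS{} I \{ \} // static warning: no implementation of m
 \end{dartCode}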
@@ -1877,7 +1877,7 @@
 
 
 \LMHash{}
-It is a static warning if the implicit interface of  a class $C$ includes an instance member $m$ of type $F$ and $C$ declares or inherits a corresponding instance member $m$ of type $F'$ if  $F'$ is not a subtype of $F$. 
+It is a static warning if the implicit interface of  a class $C$ includes an instance member $m$ of type $F$ and $C$ declares or inherits a corresponding instance member $m$ of type $F'$ if  $F'$ is not a subtype of $F$.
 
 \rationale{
 However, if a class does explicitly declare a member that conflicts with its superinterface, this always yields a static warning.
@@ -1885,12 +1885,12 @@
 }
 %It is a static warning if an imported superinterface of a class $C$ declares private members.
 
-% Should we ignore unimplemented private members? 
+% Should we ignore unimplemented private members?
 
 %\rationale{This last rule is problematic. As code evolves in one library ($L_1$) it may add private members to a class $I_1$ implemented or inherited in another  library $L_2$ breaking $L_1$.  This is a direct result of coupling an interface based type system with library based privacy.  We are considering alternative semantics that might help resolve this issue.
 %}
 
-%\commentary{However, it is perfectly acceptable if a type mentioned in the implements clause is mentioned as a superinterface in an interface injection clause. 
+%\commentary{However, it is perfectly acceptable if a type mentioned in the implements clause is mentioned as a superinterface in an interface injection clause.
 %}
 
 %\rationale{We disallow repetition of a type in a given implements clause, as that is a localized mistake. However, separate clauses (that is the original class and various injections) may evolve separately over time, and we don't want to cause breakage. For example
@@ -1920,7 +1920,7 @@
 \LMLabel{interfaceSuperinterfaces}
 
 \LMHash{}
-An interface has a set of direct superinterfaces. 
+An interface has a set of direct superinterfaces.
 
 \LMHash{}
 An interface $J$ is a superinterface of an interface $I$ iff either $J$ is a direct superinterface of $I$ or $J$ is a superinterface of a direct superinterface of $I$.
@@ -1934,8 +1934,8 @@
 \LMHash{}
 Let $J$ be an interface and $K$ be a library. We define $inherited(J, K)$  to be the set of members $m$ such that   all of the following hold:
 \begin{itemize}
-\item $m$ is accessible to $K$ and 
-\item $A$ is a direct superinterface of $J$ and either 
+\item $m$ is accessible to $K$ and
+\item $A$ is a direct superinterface of $J$ and either
   \begin{itemize}
   \item $A$ declares a member $m$  or
   \item $m$ is a member of $inherited(A, K)$.
@@ -1949,17 +1949,17 @@
 \item $J$ is the implicit interface of a class $C$.
 \item  $C$ declares a member $m$.
 \item $m^\prime$ has the same name as $m$.
-\item $m^\prime$ is accessible to $K$.  
+\item $m^\prime$ is accessible to $K$.
 \item $A$ is a direct superinterface of $J$ and either
   \begin{itemize}
-  \item $A$ declares a member $m^\prime$ or 
+  \item $A$ declares a member $m^\prime$ or
   \item $m^\prime$ is a member of $inherited(A, K)$.
   \end{itemize}
 \end{itemize}
 
 
 \LMHash{}
-Let $I$ be the implicit interface of a class $C$ declared in library $L$.  $I$ {\em inherits} all members of $inherited(I, L)$ and $I$ {\em overrides} $m^\prime$ if  $m^\prime \in overrides(I, L)$. 
+Let $I$ be the implicit interface of a class $C$ declared in library $L$.  $I$ {\em inherits} all members of $inherited(I, L)$ and $I$ {\em overrides} $m^\prime$ if  $m^\prime \in overrides(I, L)$.
 
 \LMHash{}
 All the static warnings pertaining to the overriding of instance members given in section \ref{classes} above hold for overriding between interfaces as well.
@@ -1969,36 +1969,36 @@
 
 
 
-%Let $I = S_0$ be the implicit interface of a class $C$ declared in library $L$, and let $\{S_1 \ldots S_k\}$ be the set of all superinterfaces of $I$. 
+%Let $I = S_0$ be the implicit interface of a class $C$ declared in library $L$, and let $\{S_1 \ldots S_k\}$ be the set of all superinterfaces of $I$.
 
-%Let $I$ be the implicit interface of a class $C$.  $I$ inherits any instance members of its superinterfaces that are not overridden by members declared in $C$. 
+%Let $I$ be the implicit interface of a class $C$.  $I$ inherits any instance members of its superinterfaces that are not overridden by members declared in $C$.
 
 % tighten definition? do we need chain as for classes?  Definition for interface override?
 
 \LMHash{}
-However, if the above rules would cause multiple members $m_1, \ldots,  m_k$ with the same name $n$ to be inherited (because identically named members existed in several superinterfaces) then at most one member is inherited. 
+However, if the above rules would cause multiple members $m_1, \ldots,  m_k$ with the same name $n$ to be inherited (because identically named members existed in several superinterfaces) then at most one member is inherited.
 
 \LMHash{}
 If some but not all of the $m_i, 1 \le i \le k$ are getters, none of the $m_i$ are inherited, and a static warning is issued.
 
 \LMHash{}
 Otherwise, if the static types $T_1, \ldots,  T_k$ of the members $m_1, \ldots,  m_k$  are not identical, then there must be a member $m_x$ such that $T_x <: T_i, 1 \le x \le k$ for all  $i  \in 1..k$, or a static type warning occurs. The member that is inherited  is $m_x$, if it exists; otherwise:
- let $numberOfPositionals(f)$ denote the number of positional parameters of a function $f$, and let $numberOfRequiredParams(f)$ denote the number of required parameters of a function $f$. Furthermore, let $s$ denote the set of all named parameters of the $m_1, \ldots,  m_k$.  Then let 
+ let $numberOfPositionals(f)$ denote the number of positional parameters of a function $f$, and let $numberOfRequiredParams(f)$ denote the number of required parameters of a function $f$. Furthermore, let $s$ denote the set of all named parameters of the $m_1, \ldots,  m_k$.  Then let
 
 $h = max(numberOfPositionals(m_i)), $
 
-$r = min(numberOfRequiredParams(m_i)), i \in 1..k$. 
+$r = min(numberOfRequiredParams(m_i)), i \in 1..k$.
 
 \LMHash{}
-Then $I$ has a method named $n$, with $r$ required parameters of type \DYNAMIC{}, $h$  positional parameters of type \DYNAMIC{}, named parameters $s$ of type  \DYNAMIC{} and  return type  \DYNAMIC{}.  
+Then $I$ has a method named $n$, with $r$ required parameters of type \DYNAMIC{}, $h$  positional parameters of type \DYNAMIC{}, named parameters $s$ of type  \DYNAMIC{} and  return type  \DYNAMIC{}.
 
 
 
-\commentary{The only situation where the runtime would be concerned with this would be during reflection, if a mirror attempted to obtain the signature of an interface member. 
+\commentary{The only situation where the runtime would be concerned with this would be during reflection, if a mirror attempted to obtain the signature of an interface member.
 }
 
 \rationale{
-The current solution is a tad complex, but is robust in the face of type annotation changes.  Alternatives: (a) No member is inherited in case of conflict. (b) The first m is selected (based on order of superinterface list) (c) Inherited member chosen at random. 
+The current solution is a tad complex, but is robust in the face of type annotation changes.  Alternatives: (a) No member is inherited in case of conflict. (b) The first $m_i$ is selected (based on the order of the superinterface list). (c) The inherited member is chosen at random.
 
 (a) means that the presence of an inherited member of an interface varies depending on type signatures.  (b) is sensitive to irrelevant details of the declaration and (c) is liable to give unpredictable results between implementations or even between different compilation sessions.
 }
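
 \commentary{
 A non-normative sketch of the conflict rule (the names are arbitrary): neither signature of \code{m} below is a subtype of the other, so a static warning is issued and the interface of \code{C} gets a synthesized \code{m} with $r = 1$ required parameter, $h = 2$ positional parameters, no named parameters, and all types \DYNAMIC{}:
 }

 \begin{dartCode}
 \ABSTRACT{} \CLASS{} A \{
   m(int x);
 \}
 \ABSTRACT{} \CLASS{} B \{
   m(String x, [int y]);
 \}
 \ABSTRACT{} \CLASS{} C \IMPLEMENTS{} A, B \{ \}
 \end{dartCode}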
@@ -2006,14 +2006,14 @@
 % Need warnings if overrider conflicts with overriddee either because signatures are incompatible or because done is a method and one is a getter or setter.
 
 \section{Mixins}
-\LMLabel{mixins} 
+\LMLabel{mixins}
 
 
 \LMHash{}
-A mixin describes the difference between a class and its superclass. A mixin is always derived from an existing class declaration. 
+A mixin describes the difference between a class and its superclass. A mixin is always derived from an existing class declaration.
 
 \LMHash{}
-It is a compile-time error if a declared or derived mixin explicitly declares a constructor. 
+It is a compile-time error if a declared or derived mixin explicitly declares a constructor which is not a factory constructor.
 
 \rationale{
 This restriction is temporary.  We expect to remove it in later versions of Dart.
@@ -2031,25 +2031,25 @@
 \begin{grammar}
 {\bf  mixinApplicationClass:}
 	identifier typeParameters? `='  mixinApplication `{\escapegrammar ;}' .
-	
+
 {\bf mixinApplication:}
-     type mixins interfaces? 
+     type mixins interfaces?
     .
 \end{grammar}
 
 \LMHash{}
-A  mixin application of the form  \code{$S$ \WITH{} $M$;} defines a class  $C$ with superclass  $S$. 
+A  mixin application of the form  \code{$S$ \WITH{} $M$;} defines a class  $C$ with superclass  $S$.
 
 \LMHash{}
-A  mixin application of the form  \code{$S$ \WITH{} $M_1, \ldots, M_k$;} defines a class  $C$ whose superclass is the application of the mixin composition (\ref{mixinComposition}) $M_{k-1} * \ldots * M_1$ to $S$. 
+A  mixin application of the form  \code{$S$ \WITH{} $M_1, \ldots, M_k$;} defines a class  $C$ whose superclass is the application of the mixin composition (\ref{mixinComposition}) $M_{k-1} * \ldots * M_1$ to $S$.
 
 \LMHash{}
-In both cases above, $C$ declares the same instance members as $M$ (respectively, $M_k$). If any of the instance fields of $M$ (respectively, $M_k$) have initializers, they are executed in the scope of $M$ (respectively, $M_k$) to initialize the corresponding fields of $C$. 
+In both cases above, $C$ declares the same instance members as $M$ (respectively, $M_k$). If any of the instance fields of $M$ (respectively, $M_k$) have initializers, they are executed in the scope of $M$ (respectively, $M_k$) to initialize the corresponding fields of $C$.
 
 \LMHash{}
 Let $L_M$ be the library in which $M$ is declared.
 For each generative constructor named $q_i(T_{i1}$ $ a_{i1}, \ldots , T_{ik_i}$ $ a_{ik_i}), i \in 1..n$ of $S$ that is accessible to $L_M$, $C$ has an implicitly declared constructor named
-$q'_i = [C/S]q_i$ of the form 
+$q'_i = [C/S]q_i$ of the form
 
 $q'_i(a_{i1}, \ldots , a_{ik_i}):\SUPER(a_{i1}, \ldots , a_{ik_i});$.
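
 \commentary{
 A non-normative sketch (the names are arbitrary): the mixin application below forwards the generative constructors of its superclass under the name of the new class:
 }

 \begin{dartCode}
 \CLASS{} S \{
   S(x);
   S.named(x, y);
 \}
 \CLASS{} M \{
   m() \{ \RETURN{} 1; \}
 \}
 \CLASS{} C = S \WITH{} M;
 // C declares the instance members of M and implicitly declares
 // C(x) : \SUPER(x); and C.named(x, y) : \SUPER(x, y);
 \end{dartCode}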
 
@@ -2059,7 +2059,7 @@
 If the mixin application declares support for interfaces, the resulting class implements those interfaces.
 
 \LMHash{}
-It is a compile-time error if $S$ is an enumerated type (\ref{enums}) or a malformed type. It is a compile-time error if $M$ (respectively, any of $M_1, \ldots, M_k$) is an enumerated type (\ref{enums}) or a malformed type. It is a compile time error if a well formed mixin cannot be derived from $M$ (respectively, from each of $M_1, \ldots, M_k$). 
+It is a compile-time error if $S$ is an enumerated type (\ref{enums}) or a malformed type. It is a compile-time error if $M$ (respectively, any of $M_1, \ldots, M_k$) is an enumerated type (\ref{enums}) or a malformed type. It is a compile-time error if a well-formed mixin cannot be derived from $M$ (respectively, from each of $M_1, \ldots, M_k$).
 
 \LMHash{}
 Let $K$ be a class declaration  with the same constructors, superclass and interfaces as $C$,  and the instance members declared by $M$ (respectively $M_1, \ldots, M_k$). It is a static warning if the declaration of $K$ would cause a static warning.  It is a compile-time error if the declaration of $K$ would cause a compile-time error.
@@ -2070,15 +2070,15 @@
 }
 
 \LMHash{}
-The effect of a class definition of the form \code{\CLASS{} $C$ = $M$; } or the form 
+The effect of a class definition of the form \code{\CLASS{} $C$ = $M$; } or the form
  \code{\CLASS{} $C<T_1, \ldots, T_n>$ = $M$; } in library $L$  is to introduce the name $C$ into the scope of $L$, bound to the class (\ref{classes}) defined by the mixin application $M$. The name of the class is also set to $C$. Iff the  class is prefixed by the built-in identifier \ABSTRACT{}, the class being defined is an abstract class.
- 
+
  Let $M_A$ be a mixin derived from a class $M$ with direct superclass $S_{static}$.
 
 Let $A$ be an application of $M_A$. It is a static warning if the superclass of $A$ is not a subtype of $S_{static}$.
 
 Let $C$ be a class declaration that includes $M_A$ in a with clause. It is a static warning if $C$ does not implement, directly or indirectly, all the direct superinterfaces of $M$.
- 
+
 
 \subsection{Mixin Composition}
 \LMLabel{mixinComposition}
@@ -2088,25 +2088,25 @@
 }
 
 \LMHash{}
-The {\em composition of two mixins}, $M_1<T_1 \ldots T_{k_{M_1}}>$ and $M_2<U_1  \ldots U_{k_{M_2}}>$, written $M_1<T_1 \ldots T_{k_{M_1}}> * M_2<U_1  \ldots U_{k_{M_2}}>$ defines an anonymous mixin such that for any class $S<V_1 \ldots V_{k_S}>$, the application of 
+The {\em composition of two mixins}, $M_1<T_1 \ldots T_{k_{M_1}}>$ and $M_2<U_1  \ldots U_{k_{M_2}}>$, written $M_1<T_1 \ldots T_{k_{M_1}}> * M_2<U_1  \ldots U_{k_{M_2}}>$ defines an anonymous mixin such that for any class $S<V_1 \ldots V_{k_S}>$, the application of
 
-$M_1<T_1 \ldots T_{k_{M_1}}> * M_2<U_1  \ldots U_{k_{M_2}}>$  
+$M_1<T_1 \ldots T_{k_{M_1}}> * M_2<U_1  \ldots U_{k_{M_2}}>$
 
-to $S<V_1 \ldots V_{k_S}>$ is equivalent to 
+to $S<V_1 \ldots V_{k_S}>$ is equivalent to
 
 \begin{dartCode}
 \ABSTRACT{} \CLASS{} $Id_1<T_1  \ldots T_{k_{M_1}}, U_1  \ldots U_{k_{M_2}}, V_1  \ldots V_{k_S}> = $
       $Id_2<U_1  \ldots U_{k_{M_2}}, V_1  \ldots V_{k_S}>$ \WITH{} $M_1 <T_1  \ldots T_{k_{M_1}}>$;
 \end{dartCode}
 
-where $Id_2$ denotes 
+where $Id_2$ denotes
 
 \begin{dartCode}
 \ABSTRACT{}  \CLASS{} $Id_2<U_1 \ldots U_{k_{M_2}}, V_1 \ldots V_{k_S}> =$
-                         $S<V_1 \ldots V_{k_S}>$ \WITH{} $M_2<U_1  \ldots U_{k_{M_2}}>$; 
+                         $S<V_1 \ldots V_{k_S}>$ \WITH{} $M_2<U_1  \ldots U_{k_{M_2}}>$;
 \end{dartCode}
 
-and $Id_1$ and $Id_2$ are unique identifiers that do not exist anywhere in the program. 
+and $Id_1$ and $Id_2$ are unique identifiers that do not exist anywhere in the program.
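
 \commentary{
 For instance (non-normative, with arbitrary names), given a class \code{S} and mixins \code{M1} and \code{M2}, the application of the composition reads as if written:
 }

 \begin{dartCode}
 \CLASS{} C = S \WITH{} M1, M2;
 // is equivalent, up to the fresh intermediate name Id, to:
 //   \ABSTRACT{} \CLASS{} Id = S \WITH{} M1;
 //   \CLASS{} C = Id \WITH{} M2;
 \end{dartCode}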
 
 \rationale{
 The classes produced by mixin composition are regarded as abstract because they cannot be instantiated independently. They are only introduced as anonymous superclasses of ordinary class declarations and mixin applications. Consequently, no warning is given if a mixin composition includes abstract members, or incompletely implements an interface.
@@ -2157,8 +2157,8 @@
 \LMLabel{generics}
 
 \LMHash{}
-A class declaration (\ref{classes}) or type alias (\ref{typedef}) 
-$G$ may be {\em generic}, that is, $G$ may have formal type parameters declared. A generic declaration induces a family of declarations, one for each set of actual type parameters provided in the program. 
+A class declaration (\ref{classes}) or type alias (\ref{typedef})
+$G$ may be {\em generic}, that is, $G$ may have formal type parameters declared. A generic declaration induces a family of declarations, one for each set of actual type parameters provided in the program.
 
 \begin{grammar}
 {\bf typeParameter:}
@@ -2177,7 +2177,7 @@
 The type parameters of a generic $G$ are in scope in the bounds of all of the type parameters of $G$. The type parameters of a generic class declaration $G$ are also in scope in the \EXTENDS{} and \IMPLEMENTS{} clauses of $G$ (if these exist) and in the body of $G$.   However, a type parameter is considered to be a malformed type when referenced by a static member.
 
 \rationale{
-The restriction is necessary since a type variable has no meaning in the context of a static member, because statics are shared among all instantiations of a generic. However, a type variable may be referenced from an instance initializer, even though \THIS{} is not available. 
+The restriction is necessary since a type variable has no meaning in the context of a static member, because statics are shared among all instantiations of a generic. However, a type variable may be referenced from an instance initializer, even though \THIS{} is not available.
 }
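
 \commentary{
 A non-normative sketch (the class name is arbitrary): the type parameter \code{T} may be used in the body of the generic class, but not from a static member:
 }

 \begin{dartCode}
 \CLASS{} Box$<T>$ \{
   T contents;       // T is in scope in the body
   Box(\THIS.contents);
   \STATIC{} T zero; // T is a malformed type here: statics are shared by all instantiations of Box
 \}
 \end{dartCode}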
 
 \commentary{
@@ -2206,7 +2206,7 @@
 The normative versions of these  are given in the appropriate sections of this specification. Some of these restrictions may be lifted in the future.
 }
 
-%The {\em induced type set}, $S$, of a parameterized type $T$ is the set consisting of 
+%The {\em induced type set}, $S$, of a parameterized type $T$ is the set consisting of
 %\begin{itemize}
 %\item The supertypes of any type in $S$.
 %\item The type arguments of any parameterized type in $S$.
@@ -2264,13 +2264,13 @@
     %.
 %\end{grammar}
 
-%\rationale{Since subinterface relations can be tested dynamically via \IS{}, interface injection is not just a directive to the static checker. The dynamic relations implied must  hold regardless of whether a static typecheck has succeeded, or has been performed at all. This makes sense from the perspective of preserving programmer intent. The injection describes a nominal type relation that the programmer wishes to hold. Just as a supertype mentioned within a class declaration is considered a supertype even though type errors might arise among (say) overridden and overriding methods, so it must be that the relation implied by an injection holds regardless of type errors. 
+%\rationale{Since subinterface relations can be tested dynamically via \IS{}, interface injection is not just a directive to the static checker. The dynamic relations implied must  hold regardless of whether a static typecheck has succeeded, or has been performed at all. This makes sense from the perspective of preserving programmer intent. The injection describes a nominal type relation that the programmer wishes to hold. Just as a supertype mentioned within a class declaration is considered a supertype even though type errors might arise among (say) overridden and overriding methods, so it must be that the relation implied by an injection holds regardless of type errors.
 %In addition, this decision helps to produce meaningful and localized error messages. Any errors are reported at the point of injection rather than at program points that rely on the relation (a well known problem with structural subtyping in OO systems).
 %}
 
 %\Q{When does an interface injection take effect? When the containing library is loaded?
-%What is the scope of such a declaration? Is it global, or only in the scope of the containing library? The scope of such a declaration is global. 
-%An injection must be at top level. Who has the right to inject an interface $I$ into another class $C$? Anybody? But since this affects dynamic behavior, is this a weird security issue? 
+%What is the scope of such a declaration? Is it global, or only in the scope of the containing library? The scope of such a declaration is global.
+%An injection must be at top level. Who has the right to inject an interface $I$ into another class $C$? Anybody? But since this affects dynamic behavior, is this a weird security issue?
 %The current theory is that there is no security within an isolate, and one can never refer to a type from another isolate, so supposedly not an issue. This assumption (no mutually suspicious code in the same isolate) is suspect but it seems there is nothing to be done at this point.
 %If libs are first class, they get created dynamically in order, and new libs might modify the type relations among other libs types - but then it is clear when that happened and order is ok.
 %}
@@ -2282,7 +2282,7 @@
 \LMLabel{metadata}
 
 \LMHash{}
-Dart supports metadata which is used to attach user defined annotations to program structures.  
+Dart supports metadata, which is used to attach user-defined annotations to program structures.
 
 \begin{grammar}
 {\bf metadata:}
@@ -2316,7 +2316,7 @@
 Metadata can appear before a library, part header, class, typedef, type parameter, constructor, factory, function, field, parameter, or variable declaration and before an import, export or part directive.
 
 \LMHash{}
-The constant expression given in an annotation  is type checked and evaluated in the scope surrounding the declaration being annotated.  
+The constant expression given in an annotation  is type checked and evaluated in the scope surrounding the declaration being annotated.
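
 \commentary{
 A non-normative sketch (the names \code{todo} and \code{Widget} are made up): a constant variable can be used as an annotation on a declaration:
 }

 \begin{dartCode}
 \CONST{} todo = 'document this';
 @todo
 \CLASS{} Widget \{ \} // the metadata @todo is attached to the class declaration
 \end{dartCode}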
 
 
 \section{Expressions}
@@ -2330,19 +2330,19 @@
 
 {\bf expression:}assignableExpression assignmentOperator expression;
        conditionalExpression cascadeSection*;
-       throwExpression     
+       throwExpression
     .
-     
-    
+
+
 {\bf expressionWithoutCascade:}assignableExpression assignmentOperator expressionWithoutCascade;
       conditionalExpression;
-      throwExpressionWithoutCascade    
+      throwExpressionWithoutCascade
     .
 
 {\bf expressionList:}
       expression (`,' expression)* %should these be top level expressions?
     .
-\end{grammar}    
+\end{grammar}
 
 \begin{grammar}
 {\bf primary:}thisExpression;
@@ -2355,8 +2355,8 @@
       constObjectExpression;
       `(' expression `)'
     .
-    
-\end{grammar}   
+
+\end{grammar}
 
 \LMHash{}
 An expression $e$ may always be enclosed in parentheses, but this never has any semantic effect on $e$.
@@ -2367,7 +2367,7 @@
 
  \subsubsection{Object Identity}
  \LMLabel{objectIdentity}
- 
+
 \LMHash{}
 The predefined Dart function \cd{identical()} is defined such that \code{identical($c_1$, $c_2$)} evaluates to \TRUE{} iff:
  \begin{itemize}
@@ -2387,9 +2387,9 @@
  \item $c_1$ and $c_2$ are constant objects of the same class $C$ and each member field of $c_1$ is identical to the corresponding field of $c_2$. OR
 \item $c_1$ and $c_2$ are the same object.
 \end{itemize}
-    
+
 \commentary{
-The definition of \cd{identity} for doubles differs from that of equality in that a NaN is identical to itself, and that negative and positive zero are distinct.  
+The definition of \cd{identity} for doubles differs from that of equality in that a NaN is identical to itself, and that negative and positive zero are distinct.
 }
 
 \rationale{
@@ -2399,13 +2399,13 @@
 
 The rules for identity make it impossible for a Dart programmer to observe whether a boolean or numerical value is boxed or unboxed.
 }
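
 \commentary{
 A non-normative sketch of how these rules play out (assuming the \code{dart:core} constant \code{double.NAN}):
 }

 \begin{dartCode}
 main() \{
   print(identical(1, 1));                   // true: the same integer value
   print(identical(0.0, -0.0));              // false: the two zeros are distinct
   print(identical(double.NAN, double.NAN)); // true: a NaN is identical to itself
 \}
 \end{dartCode}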
-    
-    
+
+
 \subsection{Constants}
 \LMLabel{constants}
 
 \LMHash{}
-A {\em constant expression} is an expression whose value can never change, and that can be evaluated entirely at compile time. 
+A {\em constant expression} is an expression whose value can never change, and that can be evaluated entirely at compile time.
 
 \LMHash{}
 A constant expression is one of the following:
@@ -2416,14 +2416,14 @@
 \rationale{It would be tempting to allow string interpolation where the interpolated value is any compile-time constant.  However, this would require running the \code{toString()} method for constant objects, which could contain arbitrary code.}
 \item A literal symbol (\ref{symbols}).
 \item \NULL{} (\ref{null}).
-\item A qualified reference to a static constant variable (\ref{variables}) that is not qualified by a deferred prefix. 
+\item A qualified reference to a static constant variable (\ref{variables}) that is not qualified by a deferred prefix.
 \commentary {For example, if class C declares a constant static variable v, C.v is a constant. The same is true if C is accessed via a prefix p; p.C.v is a constant unless p is a deferred prefix.
 }
-\item An identifier expression that denotes a constant variable. 
-\item A simple or qualified identifier denoting a class or type alias that is not qualified by a deferred prefix. 
+\item An identifier expression that denotes a constant variable.
+\item A simple or qualified identifier denoting a class or type alias that is not qualified by a deferred prefix.
 \commentary {For example, if C is a class or typedef, C is a constant, and if C is imported with a prefix p, p.C is a constant unless p is a deferred prefix.
 }
-\item A constant constructor invocation (\ref{const}) that is not qualified by a deferred prefix.  
+\item A constant constructor invocation (\ref{const}) that is not qualified by a deferred prefix.
 \item A constant list literal (\ref{lists}).
 \item A constant map literal (\ref{maps}).
 \item A simple or qualified identifier denoting a top-level function (\ref{functions}) or a static method (\ref{staticMethods}) that is not qualified by a deferred prefix.
@@ -2444,7 +2444,7 @@
 % designed so constants do not depend on check diode being on or not.
 
 \LMHash{}
-It is a compile-time error if an expression is required to be a constant expression but its evaluation  would raise an exception. 
+It is a compile-time error if an expression is required to be a constant expression but its evaluation  would raise an exception.
 
 % so, checked mode? analyzers? editor/development compilers?
 \commentary{
@@ -2465,15 +2465,15 @@
     \IF{} (z) \{\RETURN{} x; \}
     \ELSE{} \{ \RETURN{} 2;\}
   \}
-  
+
   m2() \{
     \IF{} (\TRUE{}) \{\RETURN{} y; \}
     \ELSE{} \{ \RETURN{} 3;\}
-  \}  
+  \}
 \}
 \end{dartCode}
 
-\commentary{An implementation is free to immediately issue a compilation error for  \code{x}, but it is not required to do so.  It could defer errors if it does not immediately compile the declarations that reference \code{x}. For example, it could delay giving a compilation error about the method \code{m1} until the first invocation of \code{m1}. However, it could not choose to execute \code{m1},  see that the branch that refers to \code{x} is not taken and return 2 successfully. 
+\commentary{An implementation is free to immediately issue a compilation error for  \code{x}, but it is not required to do so.  It could defer errors if it does not immediately compile the declarations that reference \code{x}. For example, it could delay giving a compilation error about the method \code{m1} until the first invocation of \code{m1}. However, it could not choose to execute \code{m1},  see that the branch that refers to \code{x} is not taken and return 2 successfully.
 
 The situation with respect to an invocation \code{m2} is different. Because \code{y} is not a compile-time constant (even though its value is), one need not give a compile-time error upon compiling \code{m2}. An implementation may run the code, which will cause  the getter for \code{y} to be invoked. At that point, the initialization of \code{y} must take place, which requires the initializer to be compiled, which will cause a compilation error.
 }
@@ -2481,11 +2481,19 @@
 \rationale{
 The treatment of \NULL{} merits some discussion. Consider \code{\NULL{} + 2}.  This expression always causes an error. We could have chosen not to treat it as a constant expression (and in general, not to allow \NULL{} as a subexpression of numeric or boolean constant expressions).  There are two arguments for including it:
 \begin{enumerate}
-\item It is constant. We can evaluate it at compile-time. 
+\item It is constant. We can evaluate it at compile-time.
 \item It seems more useful to give the error stemming from the evaluation explicitly.
 \end{enumerate}
 }
 
+\rationale {
+One might reasonably ask why $e_1 ? e_2 : e_3$ and $e_1 ?? e_2$ have constant forms. For example, if $e_1$ is known statically, why do we need to test it?
+The answer is that there are contexts where $e_1$ is a variable. In particular, constant constructor initializers such as
+
+\code{\CONST{} C(foo) : \THIS.foo = foo ?? someDefaultValue;}
+}
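
 \commentary{
 A non-normative expansion of the constructor above into a complete class (the names are taken from the example above; the rest is made up):
 }

 \begin{dartCode}
 \CLASS{} C \{
   \STATIC{} \CONST{} someDefaultValue = 0;
   \FINAL{} foo;
   \CONST{} C(foo) : \THIS.foo = foo ?? someDefaultValue;
 \}
 \CONST{} c = \CONST{} C(\NULL{}); // c.foo is someDefaultValue, computed at compile time
 \end{dartCode}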
+
+
 \LMHash{}
 It is a compile-time error if the value of a compile-time constant expression depends on itself.
 
@@ -2494,7 +2502,7 @@
 }
 
 \begin{dartCode}
-\CLASS{} CircularConsts\{ 
+\CLASS{} CircularConsts\{
 // Illegal program - mutually recursive compile-time constants
   \STATIC{} \CONST{} i = j; // a compile-time constant
   \STATIC{} \CONST{} j = i; // a compile-time constant
@@ -2529,13 +2537,13 @@
 \end{grammar}
 
 \LMHash{}
-The null object is the sole instance of the built-in class \code{Null}. Attempting to instantiate \code{Null} causes a run-time error. It is a compile-time error for a class to attempt to extend, mix in or implement \code{Null}. 
+The null object is the sole instance of the built-in class \code{Null}. Attempting to instantiate \code{Null} causes a run-time error. It is a compile-time error for a class to attempt to extend, mix in or implement \code{Null}.
 Invoking a method on \NULL{}  yields a \code{NoSuchMethodError} unless the method is explicitly implemented by class \code{Null}.
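
 \commentary{
 A non-normative sketch:
 }

 \begin{dartCode}
 main() \{
   print(\NULL{}.toString()); // prints null: toString() is implemented by Null
   \NULL{}.foo();             // throws a NoSuchMethodError
 \}
 \end{dartCode}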
 
 \LMHash{}
 The static type of \NULL{} is $\bot$.
 
-\rationale{The decision to use $\bot$ instead of \code{Null} allows \NULL{} to be be assigned everywhere without complaint by the static checker. 
+\rationale{The decision to use $\bot$ instead of \code{Null} allows \NULL{} to be assigned everywhere without complaint by the static checker.
 }
 
 
@@ -2549,7 +2557,7 @@
 {\bf numericLiteral:}NUMBER;
       HEX\_NUMBER
     .
-    
+
   {\bf NUMBER:} DIGIT+ (`{\escapegrammar.}' DIGIT+)? EXPONENT?;
       {`\escapegrammar .}' DIGIT+ EXPONENT?
     .
@@ -2567,9 +2575,9 @@
       DIGIT
     .
  \end{grammar}
- 
+
 \LMHash{}
-If a numeric literal begins with the prefix `0x' or `0X', it denotes the hexadecimal integer represented by the part of the literal following `0x' (respectively `0X'). Otherwise, if the numeric literal does not include a decimal point  it denotes a decimal integer.  Otherwise, the numeric literal  denotes a 64 bit double precision floating point number as specified by the IEEE 754 standard. 
+If a numeric literal begins with the prefix `0x' or `0X', it denotes the hexadecimal integer represented by the part of the literal following `0x' (respectively `0X'). Otherwise, if the numeric literal does not include a decimal point, it denotes a decimal integer.  Otherwise, the numeric literal denotes a 64-bit double-precision floating point number as specified by the IEEE 754 standard.
 
 \LMHash{}
 In principle, the range of integers supported by a Dart implementation is unlimited. In practice, it is limited by available memory. Implementations may also be limited by other considerations.
@@ -2582,12 +2590,12 @@
 It is a compile-time error for a class to attempt to extend, mix in or implement \code{int}. It is a compile-time error for a class to attempt to extend, mix in or implement \code{double}. It is a compile-time error for any type other than the types \code{int} and \code{double} to attempt to extend, mix in or implement \code{num}.
 
 \LMHash{}
-An {\em integer literal} is either a hexadecimal integer literal or a  decimal integer literal. Invoking the getter \code{runtimeType} on an integer literal returns the \code{Type} object that is the value of the expression \code{int}. The static type of an integer literal is \code{int}. 
+An {\em integer literal} is either a hexadecimal integer literal or a  decimal integer literal. Invoking the getter \code{runtimeType} on an integer literal returns the \code{Type} object that is the value of the expression \code{int}. The static type of an integer literal is \code{int}.
 
 \LMHash{}
 A {\em literal double} is a numeric literal that is not an integer literal. Invoking the getter \code{runtimeType} on a literal double returns the \code{Type} object that is the value of the expression \code{double}.
 The static type of a literal double is \code{double}.
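
 \commentary{
 A non-normative sketch:
 }

 \begin{dartCode}
 main() \{
   print((0x1F).runtimeType); // int: a hexadecimal integer literal
   print((42).runtimeType);   // int: a decimal integer literal
   print((3.14).runtimeType); // double: a numeric literal with a decimal point
 \}
 \end{dartCode}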
-    
+
 \subsection{Booleans}
 \LMLabel{booleans}
 
@@ -2601,10 +2609,10 @@
 \end{grammar}
 
 \LMHash{}
-Both  \TRUE{} and \FALSE{} implement the built-in class \code{bool}.  It is a compile-time error for a class to attempt to extend, mix in or implement\code{ bool}. 
+Both  \TRUE{} and \FALSE{} implement the built-in class \code{bool}.  It is a compile-time error for a class to attempt to extend, mix in or implement \code{bool}.
 
 \commentary{
-It follows that the two boolean literals are the only two instances of \code{bool}. 
+It follows that the two boolean literals are the only two instances of \code{bool}.
 }
 
 \LMHash{}
@@ -2625,21 +2633,21 @@
 \end{dartCode}
 
 \rationale{
-Boolean conversion is used as part of control-flow constructs and boolean expressions.  Ideally, one would simply insist that control-flow decisions be based exclusively on booleans.  This is straightforward in a statically typed setting. In a dynamically typed language, it requires a dynamic check. Sophisticated virtual machines can minimize the penalty involved. Alas, Dart must be compiled into Javascript. Boolean conversion allows this to be done efficiently. 
+Boolean conversion is used as part of control-flow constructs and boolean expressions.  Ideally, one would simply insist that control-flow decisions be based exclusively on booleans.  This is straightforward in a statically typed setting. In a dynamically typed language, it requires a dynamic check. Sophisticated virtual machines can minimize the penalty involved. Alas, Dart must be compiled into Javascript. Boolean conversion allows this to be done efficiently.
 
 At the same time, this formulation differs radically from Javascript, where most numbers and objects are interpreted as \TRUE{}.  Dart's approach prevents usages such as \code{\IF{} (a-b) ...;} because it does not agree with the low level conventions whereby non-null objects or non-zero numbers are treated as \TRUE{}. Indeed, there is no way to derive \TRUE{} from a non-boolean object via boolean conversion, so this kind of low level hackery is nipped in the bud.
 
-Dart also avoids the strange behaviors that can arise due to the interaction of boolean conversion with autoboxing in Javascript. A notorious example is the situation where \FALSE{} can be interpreted as \TRUE{}. In Javascript, booleans are not objects, and instead are autoboxed into objects where ``needed''.  If \FALSE{} gets autoboxed into an object, that object can be coerced into \TRUE{} (as it is a non-null object). 
+Dart also avoids the strange behaviors that can arise due to the interaction of boolean conversion with autoboxing in Javascript. A notorious example is the situation where \FALSE{} can be interpreted as \TRUE{}. In Javascript, booleans are not objects, and instead are autoboxed into objects where ``needed''.  If \FALSE{} gets autoboxed into an object, that object can be coerced into \TRUE{} (as it is a non-null object).
 }
 
 \commentary{Because boolean conversion requires its parameter to be a boolean, any construct that makes use of boolean conversion will cause a dynamic type error in checked mode if the value to be converted is not a boolean.
  }
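
 \commentary{
 A non-normative sketch: in checked mode the condition below causes a dynamic type error, because \NULL{} is not a boolean; in production mode boolean conversion treats it as \FALSE{} and the else branch runs.
 }

 \begin{dartCode}
 main() \{
   \IF{} (\NULL{}) \{
     print('then');
   \} \ELSE{} \{
     print('else');
   \}
 \}
 \end{dartCode}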
-    
+
 \subsection{Strings}
 \LMLabel{strings}
 
 \LMHash{}
-A {\em string} is a sequence of UTF-16 code units. 
+A {\em string} is a sequence of UTF-16 code units.
 
 \rationale{
 This decision was made for compatibility with web browsers and Javascript. Earlier versions of the specification required a string to be a sequence of valid Unicode code points.  Programmers should not depend on this distinction.
@@ -2651,7 +2659,7 @@
 \end{grammar}
 
 \LMHash{}
-A string can be either a sequence of single line strings or a multiline string. 
+A string can be either a sequence of single line strings or a multiline string.
 
 \begin{grammar}
  {\bf singleLineString:}`{\escapegrammar \code{"}}' stringContentDQ* `{\escapegrammar \code{"}}';
@@ -2662,7 +2670,7 @@
 \end{grammar}
 
 \LMHash{}
-A single line string is delimited by either matching single quotes or matching double quotes.  
+A single line string is delimited by either matching single quotes or matching double quotes.
 
 \commentary{
 Hence, `abc' and ``abc'' are both legal strings, as are `He said ``To be or not to be'' did he not?'  and ``He said `To be or not to be' didn't he''. However  ``This ` is not a valid string, nor is `this''.
@@ -2672,8 +2680,8 @@
 }
 
 \LMHash{}
-Adjacent 
-%single line 
+Adjacent
+%single line
 strings are implicitly concatenated to form a single string literal.
 %, and so are adjacent multiline strings, but the two forms may not be mixed.
 
@@ -2684,7 +2692,7 @@
 print("A string" "and then another"); // prints: A stringand then another
 \end{dartCode}
 
-\rationale{Dart also supports the operator + for string concatenation. 
+\rationale{Dart also supports the operator + for string concatenation.
 
 The + operator on Strings requires a String argument. It does not coerce its argument into a string. This helps avoid puzzlers such as
 }
@@ -2725,8 +2733,8 @@
       `r' `{\escapegrammar \texttt{"""}}'  (\~{} `{\escapegrammar \texttt{"""}}')*   `{\escapegrammar \texttt{"""}}';
       `r' `{\escapegrammar \code{'}\code{'}\code{'}}' (\~{} `{\escapegrammar \code{'}\code{'}\code{'}}')* `{\escapegrammar \code{'}\code{'}\code{'}}'
     .
-    
-    
+
+
     {\bf ESCAPE\_SEQUENCE:} `$\backslash$ n';
     `$\backslash$ r';
    `$\backslash$ f';
@@ -2737,21 +2745,21 @@
    `$\backslash$ u' HEX\_DIGIT HEX\_DIGIT HEX\_DIGIT HEX\_DIGIT;
    `$\backslash$ u\{' HEX\_DIGIT\_SEQUENCE `\}'
   .
-  
+
 {\bf HEX\_DIGIT\_SEQUENCE:}
-     HEX\_DIGIT HEX\_DIGIT? HEX\_DIGIT? HEX\_DIGIT? HEX\_DIGIT? HEX\_DIGIT? 
+     HEX\_DIGIT HEX\_DIGIT? HEX\_DIGIT? HEX\_DIGIT? HEX\_DIGIT? HEX\_DIGIT?
     .
-   
+
 \end{grammar}
 
 \LMHash{}
 Multiline strings are delimited by either matching triples of single quotes or matching triples of double quotes. If the first line of a multiline string consists solely of the whitespace characters defined by the production {\em WHITESPACE} (\ref{lexicalRules}), possibly prefixed by $\backslash$, then that line is ignored, including the new line at its end.
 
- 
+
  \rationale{
  The idea is to ignore whitespace, where whitespace is defined as tabs, spaces and newlines. These can be represented directly, but since for most characters prefixing by backslash is an identity, we allow those forms as well.
  }
- 
+
  % could be clearer. Is the first line in  """\t
  %    """ ignored not. It depends if we mean whitespace before escapes are interpreted,
  % or after, or both.  See https://code.google.com/p/dart/issues/detail?id=23020
@@ -2765,7 +2773,7 @@
 \item $\backslash$b for backspace, equivalent to $\backslash$x08.
 \item $\backslash$t for tab, equivalent to $\backslash$x09.
 \item $\backslash$v for vertical tab, equivalent to $\backslash$x0B
-\item $\backslash$x $HEX\_DIGIT_1$ $HEX\_DIGIT_2$, equivalent to 
+\item $\backslash$x $HEX\_DIGIT_1$ $HEX\_DIGIT_2$, equivalent to
 
 $\backslash$u\{$HEX\_DIGIT_1$ $HEX\_DIGIT_2$\}.
 \item $\backslash$u $HEX\_DIGIT_1$ $HEX\_DIGIT_2$ $HEX\_DIGIT_3$ $HEX\_DIGIT_4$, equivalent to $\backslash$u\{$HEX\_DIGIT_1$ $HEX\_DIGIT_2$ $HEX\_DIGIT_3$ $HEX\_DIGIT_4$\}.
@@ -2793,21 +2801,21 @@
      stringInterpolation
     .
 
-    
+
  {\bf stringContentTDQ:}\~{}( `$\backslash$' $|$  `{\escapegrammar \texttt{"""}}' $|$ `\$');
      stringInterpolation
-    .    
+    .
 
  {\bf stringContentTSQ:}\~{}( `$\backslash$' $|$ `{\escapegrammar \code{'}\code{'}\code{'}}' $|$ `\$');
      stringInterpolation
     .
-    
+
 {\bf NEWLINE:}$\backslash$ n;
       $\backslash$ r
     .
 
  \end{grammar}
- 
+
 \LMHash{}
 All string literals implement the built-in class \code{String}. It is a compile-time error for a class to attempt to extend, mix in or implement \code{String}. Invoking the getter \code{runtimeType} on a string literal returns the \code{Type} object that is the value of the expression \code{String}. The static type of a string literal is \code{String}.
 
@@ -2822,14 +2830,14 @@
       `\$' `\{' expression `\}' % could be top level expression, no?
     .
  \end{grammar}
- 
-\commentary{The reader will note that the expression inside the interpolation could itself include strings, which could again be interpolated recursively. 
+
+\commentary{The reader will note that the expression inside the interpolation could itself include strings, which could again be interpolated recursively.
 }
 
 \LMHash{}
 An unescaped \$ character in a string signifies the beginning of an interpolated expression.  The \$ sign may be followed by either:
 \begin{itemize}
-\item A single identifier $id$ that must not contain the \$ character. 
+\item A single identifier $id$ that must not contain the \$ character.
 \item An expression $e$ delimited by curly braces.
 \end{itemize}
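
 \commentary{
 A non-normative sketch of both forms:
 }

 \begin{dartCode}
 main() \{
   int x = 3;
   print('x = \$x');             // interpolation of the single identifier x
   print('x + 1 = \$\{x + 1\}'); // interpolation of an expression in curly braces
 \}
 \end{dartCode}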
 
@@ -2840,7 +2848,7 @@
 \LMLabel{symbols}
 
 \LMHash{}
-A {\em symbol literal} denotes the name of a declaration in a Dart program. 
+A {\em symbol literal} denotes the name of a declaration in a Dart program.
 
 \begin{grammar}
 {\bf symbolLiteral:}
@@ -2848,13 +2856,13 @@
 \end{grammar}
 
 \LMHash{}
-A symbol literal \code{\#id} where \code{id} does not begin with an underscore ('\code{\_}')  is equivalent to the expression \code{\CONST{} Symbol('id')}.  
+A symbol literal \code{\#id} where \code{id} does not begin with an underscore ('\code{\_}')  is equivalent to the expression \code{\CONST{} Symbol('id')}.
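
 \commentary{
 For instance (non-normative):
 }

 \begin{dartCode}
 \CONST{} s = \#foo; // equivalent to: \CONST{} Symbol('foo')
 \end{dartCode}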
 
 \LMHash{}
 A symbol literal \code{\#\_id} evaluates to the object that would be returned by the call \code{mirror.getPrivateSymbol('id')} where mirror is an instance of the class \code{LibraryMirror} defined in the library \code{dart:mirrors}, reflecting the current library.
 
 \rationale{
-One may well ask what is the motivation for introducing literal symbols? In some languages, symbols are canonicalized whereas strings are not. However literal strings are already canonicalized in Dart.  Symbols are slightly easier to type compared to strings and their use can become strangely addictive, but this is not nearly sufficient justification for adding a literal form to the language. The primary motivation is related to the use of reflection and a web specific practice known as minification. 
+One may well ask what is the motivation for introducing literal symbols? In some languages, symbols are canonicalized whereas strings are not. However literal strings are already canonicalized in Dart.  Symbols are slightly easier to type compared to strings and their use can become strangely addictive, but this is not nearly sufficient justification for adding a literal form to the language. The primary motivation is related to the use of reflection and a web specific practice known as minification.
 
 Minification compresses identifiers consistently throughout a program in order to reduce download size.  This practice poses difficulties for reflective programs that refer to program declarations via strings. A string will refer to an identifier in the source, but the identifier will no longer be used in the minified code, and reflective code using these would fail.  Therefore, Dart reflection uses  objects of type \code{Symbol} rather than strings. Instances of \code{Symbol} are guaranteed to be stable with respect to minification. Providing a literal form for symbols makes reflective code easier to read and write. The fact that symbols are easy to type and can often act as convenient substitutes for enums are secondary benefits.
 }
@@ -2866,7 +2874,7 @@
 \LMLabel{lists}
 
 \LMHash{}
-A {\em list literal} denotes a list, which is an integer indexed collection of objects. 
+A {\em list literal} denotes a list, which is an integer indexed collection of objects.
 
 \begin{grammar}
 {\bf listLiteral:}
@@ -2890,7 +2898,7 @@
 The value of a constant list literal  \CONST{} $<E>[e_1\ldots e_n]$ is an object $a$ whose class implements the built-in class $List<E>$. The $i$th element of $a$ is $v_{i+1}$, where $v_i$ is the value of the compile-time expression $e_i$.  The value of a constant list literal  \CONST{} $[e_1 \ldots e_n]$ is defined as the value of the constant list literal \CONST{}$ < \DYNAMIC{}>[e_1\ldots e_n]$.
 
 \LMHash{}
-Let $list_1 =$ \CONST{} $<V>[e_{11} \ldots e_{1n}]$ and $list_2 =$  \CONST{} $<U>[e_{21} \ldots e_{2n}]$ be two constant list literals and let the  elements of $list_1$ and $list_2$  evaluate to  $o_{11} \ldots o_{1n}$ and $o_{21} \ldots o_{2n}$ respectively. Iff \code{identical($o_{1i}$, $o_{2i}$)} for $i \in 1.. n$ and $V = U$ then \code{identical($list_1$, $list_2$)}. 
+Let $list_1 =$ \CONST{} $<V>[e_{11} \ldots e_{1n}]$ and $list_2 =$  \CONST{} $<U>[e_{21} \ldots e_{2n}]$ be two constant list literals and let the  elements of $list_1$ and $list_2$  evaluate to  $o_{11} \ldots o_{1n}$ and $o_{21} \ldots o_{2n}$ respectively. Iff \code{identical($o_{1i}$, $o_{2i}$)} for $i \in 1.. n$ and $V = U$ then \code{identical($list_1$, $list_2$)}.
 
 \commentary{In other words, constant list literals are canonicalized.}
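
 \commentary{
 For instance (non-normative):
 }

 \begin{dartCode}
 \CONST{} a = \CONST{} [1, 2, 3];
 \CONST{} b = \CONST{} [1, 2, 3];
 // identical(a, b) is true: the two constant list literals are canonicalized
 \end{dartCode}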
 
@@ -2899,10 +2907,10 @@
 \begin{itemize}
 \item
 First, the expressions $e_1 \ldots e_n$ are evaluated in order they appear in the program, yielding objects $o_1 \ldots o_n$.
-\item A fresh instance  (\ref{generativeConstructors}) $a$, of size $n$,  whose class implements the built-in class $List<E>$ is allocated. 
+\item A fresh instance  (\ref{generativeConstructors}) $a$, of size $n$,  whose class implements the built-in class $List<E>$ is allocated.
 \item
 The operator \code{[]=} is invoked on $a$ with  first  argument $i$ and second argument
-%The $i$th element of $a$ is set to 
+%The $i$th element of $a$ is set to
 $o_{i+1}, 0 \le i < n$.
 \item
 The result of the evaluation is $a$.
@@ -2910,7 +2918,7 @@
 
 
 \commentary{
-Note that this document does not specify an order in which the elements are set. This allows for parallel assignments into the list if an implementation so desires.  The order can only be observed in checked mode (and may not be relied upon): if element $i$ is not a subtype of the element type of the list, a dynamic type error will occur when $a[i]$ is assigned $o_{i-1}$. 
+Note that this document does not specify an order in which the elements are set. This allows for parallel assignments into the list if an implementation so desires.  The order can only be observed in checked mode (and may not be relied upon): if element $i$ is not a subtype of the element type of the list, a dynamic type error will occur when $a[i]$ is assigned $o_{i+1}$.
 }
 
 \LMHash{}
@@ -2919,7 +2927,7 @@
 
 \commentary{
 There is no restriction precluding nesting of list literals. It follows from the rules above that
-$<List<int>>[[1, 2, 3], [4, 5, 6]]$ is a list with type parameter $List<int>$, containing two lists with type parameter  \DYNAMIC{}. 
+$<List<int>>[[1, 2, 3], [4, 5, 6]]$ is a list with type parameter $List<int>$, containing two lists with type parameter  \DYNAMIC{}.
 }
 
 \LMHash{}
@@ -2931,12 +2939,12 @@
 
 %Invoking the getter \code{runtimeType} on a list literal returns the \code{Type} object that is the value of the expression \code{List}. The static type of a list literal is \code{List}.
 % what about generics?
-    
+
 \subsection{Maps}
 \LMLabel{maps}
 
 \LMHash{}
-A {\em map literal} denotes a map object. 
+A {\em map literal} denotes a map object.
 
 \begin{grammar}
 {\bf mapLiteral:}
@@ -2945,28 +2953,28 @@
 
 {\bf mapLiteralEntry:}
         % identifier `{\escapegrammar :}' expression;
-	expression `{\escapegrammar :}' expression 
+	expression `{\escapegrammar :}' expression
     .
 \end{grammar}
 
 \LMHash{}
-A {\em map literal} consists of zero or more entries. Each entry has a {\em key} and a {\em value}.  Each key and each value is denoted by an expression. 
- 
+A {\em map literal} consists of zero or more entries. Each entry has a {\em key} and a {\em value}.  Each key and each value is denoted by an expression.
+
 \LMHash{}
 If a map literal begins with the reserved word \CONST{}, it is a {\em constant map literal} which is a compile-time constant (\ref{constants}) and therefore evaluated at compile-time. Otherwise, it is a {\em run-time map literal} and it is evaluated at run-time. Only run-time map literals can be mutated
 after they are created. Attempting to mutate a constant map literal will result in a dynamic error.
 
 \LMHash{}
-It is a compile-time error if either a key or a value of an entry in a constant map literal is not a compile-time constant. It is a compile-time error if the key of an entry in a constant map literal is an instance of a class that implements the operator $==$ unless the key is a 
-%symbol, 
-string, an integer, a literal symbol or the result of invoking a constant constructor of class \cd{Symbol}. 
-It is a compile-time error if the type arguments of a constant map literal include a type parameter. 
+It is a compile-time error if either a key or a value of an entry in a constant map literal is not a compile-time constant. It is a compile-time error if the key of an entry in a constant map literal is an instance of a class that implements the operator $==$ unless the key is a
+%symbol,
+string, an integer, a literal symbol or the result of invoking a constant constructor of class \cd{Symbol}.
+It is a compile-time error if the type arguments of a constant map literal include a type parameter.
 
 \LMHash{}
 The value of a constant map literal  \CONST{}$ <K, V>\{k_1:e_1\ldots k_n :e_n\}$ is an object $m$ whose class implements the built-in class $Map<K, V>$. The entries of $m$ are $u_i:v_i, i \in 1 .. n$, where $u_i$ is the value of the compile-time expression $k_i$ and $v_i$ is the value of the compile-time expression $e_i$.  The value of a constant map literal  \CONST{} $\{k_1:e_1\ldots k_n :e_n\}$ is defined as the value of a constant map literal \CONST{} $<\DYNAMIC{}, \DYNAMIC{}>\{k_1:e_1\ldots k_n :e_n\}$.
 
 \LMHash{}
-Let $map_1 =$ \CONST{}$ <K, V>\{k_{11}:e_{11} \ldots k_{1n} :e_{1n}\}$ and  $map_2 =$  \CONST{}$ <J, U>\{k_{21}:e_{21} \ldots k_{2n} :e_{2n}\}$ be two constant map literals. Let the keys of $map_1$ and $map_2$ evaluate to  $s_{11} \ldots  s_{1n}$  and   $s_{21} \ldots  s_{2n}$ respectively, and let the elements of $map_1$ and $map_2$ evaluate to $o_{11} \ldots  o_{1n}$ and $o_{21} \ldots  o_{2n}$ respectively. Iff \code{identical($o_{1i}$, $o_{2i}$)}  and \code{identical($s_{1i}$, $s_{2i}$)} for $i \in 1.. n$, and $K = J, V = U$ then \code{identical($map_1$, $map_2$)}. 
+Let $map_1 =$ \CONST{}$ <K, V>\{k_{11}:e_{11} \ldots k_{1n} :e_{1n}\}$ and  $map_2 =$  \CONST{}$ <J, U>\{k_{21}:e_{21} \ldots k_{2n} :e_{2n}\}$ be two constant map literals. Let the keys of $map_1$ and $map_2$ evaluate to  $s_{11} \ldots  s_{1n}$  and   $s_{21} \ldots  s_{2n}$ respectively, and let the elements of $map_1$ and $map_2$ evaluate to $o_{11} \ldots  o_{1n}$ and $o_{21} \ldots  o_{2n}$ respectively. Iff \code{identical($o_{1i}$, $o_{2i}$)}  and \code{identical($s_{1i}$, $s_{2i}$)} for $i \in 1.. n$, and $K = J, V = U$ then \code{identical($map_1$, $map_2$)}.
 
 \commentary{In other words, constant map literals are canonicalized.}
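
 \commentary{
 For instance (non-normative):
 }

 \begin{dartCode}
 \CONST{} m1 = \CONST{} \{'a': 1, 'b': 2\};
 \CONST{} m2 = \CONST{} \{'a': 1, 'b': 2\};
 // identical(m1, m2) is true: the two constant map literals are canonicalized
 \end{dartCode}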
 
@@ -2976,8 +2984,8 @@
 \item
 First, the expression $k_i$ is evaluated yielding object $u_i$, then $e_i$ is evaluated yielding object $o_i$, for $i \in 1..n$ in left to right order, yielding objects $u_1, o_1\ldots u_n, o_n$.
 \item  A fresh instance (\ref{generativeConstructors}) $m$ whose class implements the built-in class
- 
- $Map<K, V>$ is allocated. 
+
+ $Map<K, V>$ is allocated.
 \item
 The operator \code{[]=} is invoked on $m$ with  first  argument $u_i$ and second argument $o_i,  i \in 1.. n$.
 \item
@@ -2986,7 +2994,7 @@
 
 
 \LMHash{}
-A runtime map literal  $\{k_1:e_1\ldots k_n :e_n\}$ is evaluated as  
+A runtime map literal  $\{k_1:e_1\ldots k_n :e_n\}$ is evaluated as
 
 $<\DYNAMIC{},  \DYNAMIC{}>\{k_1:e_1\ldots k_n :e_n\}$.
 
@@ -2994,15 +3002,15 @@
 Iff all the keys in a map literal are compile-time constants, it is a static warning if the values of any two keys in a map literal are equal.
 
 \LMHash{}
-A map literal is ordered: iterating over the keys and/or values of the maps always happens in the 
+A map literal is ordered: iterating over the keys and/or values of the map always happens in the
  order the keys appeared in the source code.
 
 \commentary{
 Of course, if a key repeats, the order is defined by first occurrence, but the value is defined by the last.
-} 
+}
 
 \LMHash{}
-The static type of a map literal of the form  \CONST{}$ <K, V>\{k_1:e_1\ldots k_n :e_n\}$ or the form $<K, V>\{k_1:e_1\ldots k_n :e_n\}$ is $Map<K, V>$. The static type a map literal of the form  \CONST{}$\{k_1:e_1\ldots k_n :e_n\}$ or the form $\{k_1:e_1\ldots k_n :e_n\}$ is $Map<\DYNAMIC{},  \DYNAMIC{}>$. 
+The static type of a map literal of the form  \CONST{}$ <K, V>\{k_1:e_1\ldots k_n :e_n\}$ or the form $<K, V>\{k_1:e_1\ldots k_n :e_n\}$ is $Map<K, V>$. The static type of a map literal of the form  \CONST{}$\{k_1:e_1\ldots k_n :e_n\}$ or the form $\{k_1:e_1\ldots k_n :e_n\}$ is $Map<\DYNAMIC{},  \DYNAMIC{}>$.
 
 
 \subsection{Throw}
@@ -3013,23 +3021,23 @@
 
  \begin{grammar}
 {\bf throwExpression:}
-     \THROW{} expression 
+     \THROW{} expression
     .
-    
+
    {\bf throwExpressionWithoutCascade:}
-     \THROW{} expressionWithoutCascade 
+     \THROW{} expressionWithoutCascade
     .
- 
+
  \end{grammar}
- 
+
 \LMHash{}
- The {\em current exception} is the last exception raised and not subsequently caught at a given moment during runtime. 
+ The {\em current exception} is the last exception raised and not subsequently caught at a given moment during runtime.
 
 \LMHash{}
  Evaluation of a throw expression of the form  \code{\THROW{} $e$;} proceeds as follows:
- 
+
 \LMHash{}
-The expression $e$ is evaluated yielding a value $v$. 
+The expression $e$ is evaluated yielding a value $v$.
 
 \commentary{
 There is no requirement that the expression $e$ evaluate to a special kind of exception or error object.
@@ -3038,11 +3046,11 @@
 \LMHash{}
 If $e$ evaluates to \NULL{} (\ref{null}), then a \code{NullThrownError} is thrown. Otherwise the current exception is set to $v$ and the current return value (\ref{return}) becomes undefined.
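
 \commentary{
 As noted above, the thrown object need not be an exception or error; a non-normative example (\code{checkBalance} is a hypothetical function):
 }

 \begin{dartCode}
 \VOID{} checkBalance(int balance) \{
   \IF{} (balance $<$ 0) \THROW{} 'balance may not be negative';
 \}
 \end{dartCode}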
 
-\rationale{The current exception and the current return value must never be simultaneously defined, as they represent mutually exclusive options for exiting the current function. 
+\rationale{The current exception and the current return value must never be simultaneously defined, as they represent mutually exclusive options for exiting the current function.
 }
 
 \LMHash{}
-Let $f$ be the immediately enclosing function. 
+Let $f$ be the immediately enclosing function.
 
 \LMHash{}
 If $f$ is synchronous (\ref{functions}), control is transferred to the nearest dynamically enclosing exception handler.
@@ -3069,16 +3077,16 @@
 \LMLabel{functionExpressions}
 
 \LMHash{}
-A {\em function literal} is an object that encapsulates an executable unit of code. 
+A {\em function literal} is an object that encapsulates an executable unit of code.
 
 \begin{grammar}
 {\bf functionExpression:}
     formalParameterList functionBody
     .
- \end{grammar}   
- 
+ \end{grammar}
+
 \LMHash{}
-The class of a function literal implements the built-in class \code{Function}.   
+The class of a function literal implements the built-in class \code{Function}.
 %Invoking the getter \code{runtimeType} on a function literal returns the \code{Type} object that is the value of the expression \code{Function}.
 % not necessarily
 
@@ -3086,35 +3094,35 @@
 %Q{Can anyone implement it? Then we should define things via call}
 
 \LMHash{}
-The static type of a function literal of the form 
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k} = d_k]) => e$ 
-is 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k} = d_k]) => e$
+is
 
-$(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow T_0$, where $T_0$ is the static type of $e$. 
+$(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow T_0$, where $T_0$ is the static type of $e$.
 
 \LMHash{}
-The static type of a function literal of the form 
+The static type of a function literal of the form
 
 $(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k} = d_k])$ \ASYNC{} $=> e$
 is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Future<flatten(T_0)>$, where $T_0$ is the static type of $e$ and  $flatten(T)$ is defined as follows:
 
- If $T = Future<S>$ then $flatten(T) = flatten(S)$. 
- 
- Otherwise if $T <: Future$ then let $S$ be a type such that $T << Future<S>$ and for all $R$, if $T << Future<R>$ then $S << R$.  
+ If $T = Future<S>$ then $flatten(T) = flatten(S)$.
+
+ Otherwise if $T <: Future$ then let $S$ be a type such that $T << Future<S>$ and for all $R$, if $T << Future<R>$ then $S << R$.
 
 \rationale{
 This ensures that $Future<S>$ is the most specific instantiation of \cd{Future} that is a super type of $T$.
 }
 
-Then $flatten(T) =  S$.  
+Then $flatten(T) =  S$.
 
 In any other circumstance, $flatten(T) = T$.
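
 \commentary{
 A non-normative illustration of $flatten$ (the variable \code{f} is hypothetical):
 }

 \begin{dartCode}
 \IMPORT{} 'dart:async';

 \VAR{} f = (Future$<$int$>$ x) \ASYNC{} =$>$ x;

 // The body has static type Future$<$int$>$, and
 // flatten(Future$<$int$>$) = int, so the static type of the
 // literal is (Future$<$int$>$) $\rightarrow$ Future$<$int$>$.
 \end{dartCode}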
 
 
 
 \rationale{
-We collapse multiple layers of futures into one. If $e$ evaluates to a future $f$, the future will not invoke its \code{then()} callback until f completes to a non-future value, and so the result of an await is never a future, and the result of an async function will never have type \code{Future$<X>$} where $X$ itself is an invocation of \code{Future}. 
+We collapse multiple layers of futures into one. If $e$ evaluates to a future $f$, the future will not invoke its \code{then()} callback until $f$ completes to a non-future value, and so the result of an await is never a future, and the result of an async function will never have type \code{Future$<X>$} where $X$ itself is an invocation of \code{Future}.
 
 The  exception to that would be a type $X$ that extended or implemented \code{Future}. In that case, only one unwrapping takes place. As an example of why this is done, consider
 
@@ -3126,73 +3134,73 @@
 
 
 \LMHash{}
-The static type of a function literal of the form 
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, \{T_{n+1}$ $x_{n+1} : d_1, \ldots,  T_{n+k}$ $x_{n+k} : d_k\}) => e$ 
-is 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, \{T_{n+1}$ $x_{n+1} : d_1, \ldots,  T_{n+k}$ $x_{n+k} : d_k\}) => e$
+is
 
-$(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow T_0$, where $T_0$ is the static type of $e$. 
+$(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow T_0$, where $T_0$ is the static type of $e$.
 
 \LMHash{}
-The static type of a function literal of the form 
+The static type of a function literal of the form
 
 $(T_1$ $a_1, \ldots, T_n$ $a_n, \{T_{n+1}$ $x_{n+1} : d_1, \ldots,  T_{n+k}$ $x_{n+k} : d_k\})$ \ASYNC{}  $=> e$
 
-is $(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow Future<flatten(T_0)>$, where $T_0$ is the static type of $e$. 
+is $(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow Future<flatten(T_0)>$, where $T_0$ is the static type of $e$.
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])\{s\}$ 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])\{s\}$
 
 is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow  \DYNAMIC{}$.
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])$ $ \ASYNC{}$ $\{s\}$ 
-is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Future$. 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])$ $ \ASYNC{}$ $\{s\}$
+is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Future$.
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])$ $ \ASYNC*{}$ $\{s\}$ 
-is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Stream$. 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])$ $ \ASYNC*{}$ $\{s\}$
+is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Stream$.
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])$ $ \SYNC*{}$ $\{s\}$ 
-is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Iterable$. 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])$ $ \SYNC*{}$ $\{s\}$
+is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow Iterable$.
 
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
-$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])\{s\}$ 
+$(T_1$ $a_1, \ldots, T_n$ $a_n, [T_{n+1}$ $x_{n+1} = d_1, \ldots,  T_{n+k}$ $x_{n+k}= d_k])\{s\}$
 
-is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow  \DYNAMIC{}$. 
+is $(T_1 \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \rightarrow  \DYNAMIC{}$.
 
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
 $(T_1$ $a_1, \ldots, T_n$ $a_n, \{T_{n+1}$ $x_{n+1} : d_1, \ldots,  T_{n+k}$ $x_{n+k} : d_k\})$ $\ASYNC{}$ $\{s\}$
- 
+
 is $(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow  Future{}$.
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
 $(T_1$ $a_1, \ldots, T_n$ $a_n, \{T_{n+1}$ $x_{n+1} : d_1, \ldots,  T_{n+k}$ $x_{n+k} : d_k\})$ $\ASYNC*{}$ $\{s\}$
- 
+
 is $(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow  Stream{}$.
 
 \LMHash{}
-The static type of a function literal of the form  
+The static type of a function literal of the form
 
 $(T_1$ $a_1, \ldots, T_n$ $a_n, \{T_{n+1}$ $x_{n+1} : d_1, \ldots,  T_{n+k}$ $x_{n+k} : d_k\})$ $\SYNC*{}$ $\{s\}$
- 
+
 is $(T_1 \ldots, T_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \rightarrow  Iterable{}$.
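
 \commentary{
 Non-normative examples of the preceding rules (the variables are hypothetical):
 }

 \begin{dartCode}
 \VAR{} naturals = (int n) \SYNC* \{
   \FOR{} (\VAR{} i = 0; i $<$ n; i++) \YIELD{} i;
 \};
 // static type (int) $\rightarrow$ Iterable

 \VAR{} ticks = (int n) \ASYNC* \{
   \FOR{} (\VAR{} i = 0; i $<$ n; i++) \YIELD{} i;
 \};
 // static type (int) $\rightarrow$ Stream
 \end{dartCode}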
 
 \LMHash{}
@@ -3215,44 +3223,44 @@
 The static type of \THIS{} is the interface of the immediately enclosing class.
 
 \commentary{
-We do not support self-types at this point. 
+We do not support self-types at this point.
 }
 
 \LMHash{}
-It is a compile-time error if \THIS{} appears, implicitly or explicitly,  in a top-level function or variable initializer,  in a factory constructor, or in a static method or variable initializer, or in the initializer of an instance variable. 
+It is a compile-time error if \THIS{} appears, implicitly or explicitly,  in a top-level function or variable initializer,  in a factory constructor, or in a static method or variable initializer, or in the initializer of an instance variable.
 
 \subsection{ Instance Creation}
 \LMLabel{instanceCreation}
 
 \LMHash{}
-Instance creation expressions invoke constructors to produce instances. 
+Instance creation expressions invoke constructors to produce instances.
 
-%It is a compile-time error if any of the type arguments to a constructor of a generic type invoked by a new expression or a constant object expression do not denote types in the enclosing lexical scope. 
+%It is a compile-time error if any of the type arguments to a constructor of a generic type invoked by a new expression or a constant object expression do not denote types in the enclosing lexical scope.
 
 %It is a compile-time error if a constructor of a non-generic type invoked by a new expression or a constant object expression is passed any type arguments. It is a compile-time error if a constructor of a generic type with $n$ type parameters invoked by a new expression or a constant object expression is passed $m$ type arguments where $m \ne n$, or if any of its type arguments is  misconstructed (\ref{parameterizedTypes}).
 
 \LMHash{}
 It is a static type warning if
-the type $T$ in an instance creation expression of one of  the forms 
+the type $T$ in an instance creation expression of one of  the forms
 
-\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,  
+\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
 \NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
-\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,  
+\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
 \CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is malformed (\ref{dynamicTypeSystem}) or malbounded (\ref{parameterizedTypes}).
 
 \LMHash{}
-It is a compile-time error if the type $T$ in an instance creation expression of one of the forms 
+It is a compile-time error if the type $T$ in an instance creation expression of one of the forms
 
-\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,  
+\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
 \NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
-\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,  
+\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
-\CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
 is an enumerated type (\ref{enums}).
 %any of the type arguments to a constructor of a generic type $G$ invoked by a new expression or a constant object expression are not subtypes of the bounds of the corresponding formal type parameters of $G$.
@@ -3271,49 +3279,49 @@
 \end{grammar}
 
 \LMHash{}
-Let $e$ be a new expression of the form  
+Let $e$ be a new expression of the form
 
-\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ or the form  
+\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ or the form
 
-\NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$. 
+\NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
 
 %It is a runtime type error if
 %the type $T$  is malformed.
 %It is a static warning if $T$ is a malformed type.
 
-%not a class accessible in the current scope,  optionally followed by type arguments. 
+%not a class accessible in the current scope,  optionally followed by type arguments.
 
 \LMHash{}
 If $T$ is  a class or parameterized type accessible in the current scope then:
 \begin{itemize}
 \item
-If $e$ is of the form \NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a static warning if $T.id$ is not the name of a constructor declared by the type $T$. If $e$ is of the form  \NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a static warning if the type $T$ does not declare a constructor with the same name as the declaration of $T$. 
+If $e$ is of the form \NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a static warning if $T.id$ is not the name of a constructor declared by the type $T$. If $e$ is of the form  \NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a static warning if the type $T$ does not declare a constructor with the same name as the declaration of $T$.
 \end{itemize}
 
 \LMHash{}
-If $T$ is a parameterized type (\ref{parameterizedTypes}) $S<U_1,  \ldots, U_m>$, let $R = S$.  
-%It is a 
+If $T$ is a parameterized type (\ref{parameterizedTypes}) $S<U_1,  \ldots, U_m>$, let $R = S$.
+%It is a
 %compile-time CHANGED
 %runtime type
-%error if $S$ is not a generic (\ref{generics}) type with $m$ type parameters. 
+%error if $S$ is not a generic (\ref{generics}) type with $m$ type parameters.
 If $T$ is not a parameterized type, let $R = T$.
-Furthermore, if $e$ is of the form \NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ then let  $q$ be the constructor  $T.id$, otherwise let $q$ be the constructor $T$. 
+Furthermore, if $e$ is of the form \NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ then let  $q$ be the constructor  $T.id$, otherwise let $q$ be the constructor $T$.
 
 \LMHash{}
 If $R$ is a generic with $l = m$ type parameters then
 \begin{itemize}
 \item  If $T$ is not a parameterized type, then for $ i \in 1 .. l$, let $V_i =  \DYNAMIC{}$.
-\item  If $T$ is  a parameterized type then let $V_i = U_i$ for $ i \in 1 .. m$.  
+\item  If $T$ is  a parameterized type then let $V_i = U_i$ for $ i \in 1 .. m$.
 \end{itemize}
 
 \LMHash{}
-If $R$ is a generic with $l \ne m$ type parameters then for $ i \in 1 .. l$, let $V_i =  \DYNAMIC{}$. In any other case, let $V_i = U_i$ for $ i \in 1 .. m$.  
+If $R$ is a generic with $l \ne m$ type parameters then for $ i \in 1 .. l$, let $V_i =  \DYNAMIC{}$. In any other case, let $V_i = U_i$ for $ i \in 1 .. m$.
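
 \commentary{
 For example (non-normative):
 }

 \begin{dartCode}
 \VAR{} a = \NEW{} Map$<$String, int$>$();  // type arguments String, int
 \VAR{} b = \NEW{} Map();  // T is not parameterized, so both type
                           // arguments are taken to be \DYNAMIC{}
 \end{dartCode}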
 
 \LMHash{}
 Evaluation of $e$ proceeds as follows:
 
 \LMHash{}
-First, the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is evaluated. 
+First, the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is evaluated.
 
 \LMHash{}
 If $T$ is a deferred type with prefix $p$, then if $p$ has not been successfully loaded, a dynamic error occurs.
@@ -3344,7 +3352,7 @@
 Otherwise, $q$ is a factory constructor (\ref{factories}). Then:
 
 \LMHash{}
-If $q$ is a redirecting factory constructor of the form $T(p_1, \ldots, p_{n+k}) = c;$ or of the form  $T.id(p_1, \ldots, p_{n+k}) = c;$ then the result of the evaluation of $e$ is equivalent to evaluating the expression 
+If $q$ is a redirecting factory constructor of the form $T(p_1, \ldots, p_{n+k}) = c;$ or of the form  $T.id(p_1, \ldots, p_{n+k}) = c;$ then the result of the evaluation of $e$ is equivalent to evaluating the expression
 
 $[V_1,  \ldots, V_m/T_1,  \ldots, T_m]($\code{\NEW{} $c(a_1, \ldots, a_n, x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k}))$}.  If evaluation of $q$ causes $q$ to be re-evaluated cyclically, a runtime error occurs.
 
@@ -3355,23 +3363,23 @@
 \LMHash{}
 It is a static warning if $q$ is a constructor of an abstract class and $q$ is not a factory constructor.
 
-\commentary{The above gives precise meaning to the idea that instantiating an abstract class leads to a warning. 
+\commentary{The above gives precise meaning to the idea that instantiating an abstract class leads to a warning.
 A similar clause applies to constant object creation in the next section.
-} 
+}
 
 \rationale{In particular, a factory constructor can be declared in an abstract class and used safely, as it will either produce a valid instance or lead to a warning inside its own declaration.
 }
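
 \commentary{
 A non-normative illustration (\code{Shape} and \code{Circle} are hypothetical classes):
 }

 \begin{dartCode}
 \ABSTRACT{} \CLASS{} Shape \{
   \FACTORY{} Shape() = Circle;  // a redirecting factory constructor
 \}

 \CLASS{} Circle \IMPLEMENTS{} Shape \{ \}

 \VAR{} s = \NEW{} Shape();  // no warning: Shape() is a factory constructor
 \end{dartCode}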
 
 \LMHash{}
-The static type of an instance creation expression of either the form 
+The static type of an instance creation expression of either the form
 
-\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
-or the form 
+or the form
 
-\NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
-is $T$.  
+is $T$.
 It is a static warning if the static type of $a_i, 1 \le i \le n+ k$ may not be assigned to the type of the corresponding formal parameter of the constructor $T.id$ (respectively $T$).
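
 \commentary{
 For example (non-normative, with a hypothetical class \code{Point}):
 }

 \begin{dartCode}
 \CLASS{} Point \{
   \FINAL{} num x, y;
   Point(this.x, this.y);
   Point.origin() : x = 0, y = 0;
 \}

 Point p = \NEW{} Point(2, 3);  // static type Point
 Point q = \NEW{} Point.origin();  // static type Point

 // \NEW{} Point('a', 'b') would provoke a static warning, since
 // String may not be assigned to num.
 \end{dartCode}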
 
 
@@ -3380,7 +3388,7 @@
 \LMLabel{const}
 
 \LMHash{}
-A {\em constant object expression} invokes a constant constructor (\ref{constantConstructors}). 
+A {\em constant object expression} invokes a constant constructor (\ref{constantConstructors}).
 
 \begin{grammar}
 {\bf constObjectExpression:}
@@ -3389,9 +3397,9 @@
 \end{grammar}
 
 \LMHash{}
-Let $e$ be a constant object expression of the form  
+Let $e$ be a constant object expression of the form
 
-\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
 or the form  \CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$. It is a compile-time error if $T$ does not denote a class accessible in the current scope.  It is a compile-time error if $T$ is a deferred type (\ref{staticTypes}).
 
@@ -3401,40 +3409,40 @@
 If $T$ is a parameterized type, it is a compile-time error if $T$ includes a type variable among its type arguments.
 
 \LMHash{}
-If $e$ is of the form \CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a compile-time error if $T.id$ is not the name of a constant constructor declared by the type $T$. If $e$ is of the form  \CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a compile-time error if the type $T$ does not declare a constant constructor with the same name as the declaration of $T$. 
+If $e$ is of the form \CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a compile-time error if $T.id$ is not the name of a constant constructor declared by the type $T$. If $e$ is of the form  \CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ it is a compile-time error if the type $T$ does not declare a constant constructor with the same name as the declaration of $T$.
 
 \LMHash{}
 In all of the above cases, it is a compile-time error if $a_i,  i\in 1 .. n + k$, is not a compile-time constant expression.
 
 %If $T$ is a parameterized type (\ref{parameterizedTypes}) $S<U_1,  \ldots, U_m>$, let $R = S$.  It is a compile-time error if $T$ is is malformed. If $T$ is not a parameterized type, let $R = T$.
- %Finally, 
-% If $T$ is a generic with $l$ retype parameters, then for all $ i \in 1 .. l$, let $V_i =  \DYNAMIC{}$.  
+ %Finally,
+% If $T$ is a generic with $l$ retype parameters, then for all $ i \in 1 .. l$, let $V_i =  \DYNAMIC{}$.
 
 \LMHash{}
 Evaluation of $e$ proceeds as follows:
 
 \LMHash{}
-First, if $e$ is of the form 
+First, if $e$ is of the form
 
-\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
-then let $i$ be the value of the expression 
+then let $i$ be the value of the expression
 
-\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$. 
+\NEW{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
 
 \LMHash{}
-Otherwise, $e$ must be of the form  
+Otherwise, $e$ must be of the form
 
-\CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$, 
+\CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
-in which case let $i$ be the result of evaluating 
+in which case let $i$ be the result of evaluating
 
-\NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$. 
+\NEW{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
 
 \LMHash{}
 Then:
 \begin{itemize}
-\item If during execution of the program, a constant object expression has already evaluated to an instance $j$ of class $R$ with type arguments $V_i, 1 \le i \le m$, then: 
+\item If during execution of the program, a constant object expression has already evaluated to an instance $j$ of class $R$ with type arguments $V_i, 1 \le i \le m$, then:
 \begin{itemize}
 \item For each instance variable $f$ of $i$, let $v_{if}$ be the value of the field $f$ in $i$, and let $v_{jf}$ be the value of the field $f$ in $j$. If  \code{identical($v_{if}$, $v_{jf}$)} for all fields $f$ in $i$, then the value of $e$ is $j$, otherwise the value of $e$ is $i$.
 \end{itemize}
@@ -3446,13 +3454,13 @@
 }
 
 \LMHash{}
-The static type of a constant object expression of either the form 
+The static type of a constant object expression of either the form
 
-\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\CONST{} $T.id(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
-or the form  
+or the form
 
-\CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+\CONST{} $T(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
 is $T$. It is a static warning if the static type of $a_i, 1 \le i \le n+ k$ may not be assigned to the type of the corresponding formal parameter of the constructor $T.id$ (respectively $T$).
 
@@ -3484,12 +3492,12 @@
 \end{dartCode}
 
 \commentary{
-Due to the rules governing constant constructors, evaluating the constructor \code{A()} with the argument \code{''x''} or the argument \code{\CONST{} IntPair(1, 2)} would cause it to throw an exception, resulting in a compile-time error.
+Due to the rules governing constant constructors, evaluating the constructor \code{A()} with the argument \code{"x"} or the argument \code{\CONST{} IntPair(1, 2)} would cause it to throw an exception, resulting in a compile-time error.
 }
 
 
 \LMHash{}
-Given an instance creation expression of the form \CONST{} $q(a_1, \ldots , a_n)$ it is a static warning if $q$ is a constructor of an abstract class  (\ref{abstractInstanceMembers}) but $q$ is not a factory constructor.  
+Given an instance creation expression of the form \CONST{} $q(a_1, \ldots , a_n)$ it is a static warning if $q$ is a constructor of an abstract class  (\ref{abstractInstanceMembers}) but $q$ is not a factory constructor.
 
 
 \subsection{ Spawning an Isolate}
@@ -3509,27 +3517,27 @@
 
 \subsection{ Function Invocation}
 \LMLabel{functionInvocation}
- 
-\LMHash{}
-Function invocation occurs in the following cases: when a function expression  (\ref{functionExpressions}) is invoked (\ref{functionExpressionInvocation}), when a method (\ref{methodInvocation}), getter (\ref{topLevelGetterInvocation}, \ref{propertyExtraction}) or setter (\ref{assignment}) is invoked or when a constructor is invoked (either via instance creation (\ref{instanceCreation}), constructor redirection (\ref{redirectingConstructors}) or super initialization). The various kinds of function invocation differ as to how the function to be invoked, $f$,  is determined, as well as whether \THIS{} (\ref{this}) is bound. Once $f$ has been determined, the formal parameters of $f$ are bound to corresponding actual arguments. When the body of $f$ is executed it will be executed with the aforementioned bindings. 
 
 \LMHash{}
-If $f$ is marked \ASYNC{} (\ref{functions}), then a fresh instance (\ref{generativeConstructors}) $o$ implementing the built-in class \code{Future} is associated with the invocation and immediately returned to the caller. The body of $f$ is scheduled for execution at some future time. The future $o$ will complete when $f$ terminates. The value used to complete $o$ is the current return value (\ref{return}), if it is defined, and the current exception (\ref{throw}) otherwise. 
+Function invocation occurs in the following cases: when a function expression  (\ref{functionExpressions}) is invoked (\ref{functionExpressionInvocation}), when a method (\ref{methodInvocation}), getter (\ref{topLevelGetterInvocation}, \ref{propertyExtraction}) or setter (\ref{assignment}) is invoked or when a constructor is invoked (either via instance creation (\ref{instanceCreation}), constructor redirection (\ref{redirectingConstructors}) or super initialization). The various kinds of function invocation differ as to how the function to be invoked, $f$,  is determined, as well as whether \THIS{} (\ref{this}) is bound. Once $f$ has been determined, the formal parameters of $f$ are bound to corresponding actual arguments. When the body of $f$ is executed it will be executed with the aforementioned bindings.
+
+\LMHash{}
+If $f$ is marked \ASYNC{} (\ref{functions}), then a fresh instance (\ref{generativeConstructors}) $o$ implementing the built-in class \code{Future} is associated with the invocation and immediately returned to the caller. The body of $f$ is scheduled for execution at some future time. The future $o$ will complete when $f$ terminates. The value used to complete $o$ is the current return value (\ref{return}), if it is defined, and the current exception (\ref{throw}) otherwise.
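
 \commentary{
 A non-normative sketch (\code{answer} is a hypothetical function):
 }

 \begin{dartCode}
 \IMPORT{} 'dart:async';

 Future$<$int$>$ answer() \ASYNC{} \{
   \AWAIT{} \NEW{} Future.delayed(\CONST{} Duration(milliseconds: 10));
   \RETURN{} 42;
 \}

 // A call to answer() immediately returns a Future$<$int$>$;
 // that future completes with 42 once the body has finished executing.
 \end{dartCode}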
 
 \LMHash{}
 If $f$ is marked \ASYNC* (\ref{functions}), then a fresh instance $s$ implementing the built-in class \code{Stream} is associated with the invocation and immediately returned. When $s$ is listened to, execution of the body of $f$ will begin.  When $f$ terminates:
 \begin{itemize}
-\item If the current return value is defined then, if $s$ has been canceled then its cancellation future is completed with \NULL{} (\ref{null}). 
+\item If the current return value is defined then, if $s$ has been canceled then its cancellation future is completed with \NULL{} (\ref{null}).
 \item If the current exception $x$ is defined:
   \begin{itemize}
-  \item $x$ is added to $s$. 
+  \item $x$ is added to $s$.
   \item If $s$ has been canceled then its cancellation future is completed with $x$ as an error.
   \end{itemize}
 \item $s$ is closed.
 \end{itemize}
 
 \rationale{
-When an asynchronous generator's stream has been canceled, cleanup will occur in the \FINALLY{} clauses (\ref{try}) inside the generator. We choose to direct any exceptions that occur at this time to the cancellation future rather than have them be lost. 
+When an asynchronous generator's stream has been canceled, cleanup will occur in the \FINALLY{} clauses (\ref{try}) inside the generator. We choose to direct any exceptions that occur at this time to the cancellation future rather than have them be lost.
 }
 
 \LMHash{}
@@ -3542,11 +3550,11 @@
 %When a stream is canceled, the implementation must wait for the cancelation future returned by \cd{cancell()} to complete before proceeding.
 
 \LMHash{}
-If $f$ is marked \SYNC* (\ref{functions}), then a fresh instance $i$ implementing the built-in class \code{Iterable} is associated with the invocation and immediately returned.  
+If $f$ is marked \SYNC* (\ref{functions}), then a fresh instance $i$ implementing the built-in class \code{Iterable} is associated with the invocation and immediately returned.
 
 
 \commentary{
-A Dart implementation will need to provide a specific implementation of \code{Iterable} that will be returned by \SYNC* methods. A typical strategy would be to produce an instance of a subclass of class \code{IterableBase} defined in \code{dart:core}. The only method that needs to be added by the Dart implementation in that case is \code{iterator}.  
+A Dart implementation will need to provide a specific implementation of \code{Iterable} that will be returned by \SYNC* methods. A typical strategy would be to produce an instance of a subclass of class \code{IterableBase} defined in \code{dart:core}. The only method that needs to be added by the Dart implementation in that case is \code{iterator}.
 }
 
 \LMHash{}
@@ -3559,7 +3567,7 @@
 \LMHash{}
 When iteration over the iterable is started, by getting an iterator $j$ from the iterable and calling \code{moveNext()}, execution of the body of $f$ will begin. When $f$ terminates, $j$ is positioned after its last element, so that its current value is \NULL{} and the current call to \code{moveNext()} on $j$ returns false, as will all further calls.
 
-Each iterator starts a separate computation. If the \SYNC* function is impure, the sequence of values yielded by each iterator may differ. 
+Each iterator starts a separate computation. If the \SYNC* function is impure, the sequence of values yielded by each iterator may differ.
 
 \commentary{
 One can derive more than one iterator from a given iterable.   Note that operations on the iterable itself can create distinct iterators. An example would be \code{length}.  It is conceivable that different iterators might yield sequences of different length. The same care needs to be taken when writing \SYNC* functions as when
@@ -3570,7 +3578,7 @@
 }
 
 \LMHash{}
-Each iterator runs with its own shallow copies of all local variables; in particular, each iterator has the same initial arguments, even if their bindings are modified by the function. 
+Each iterator runs with its own shallow copies of all local variables; in particular, each iterator has the same initial arguments, even if their bindings are modified by the function.
 \commentary{
 Two executions of an iterator interact only via state outside the function.
 }
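
 \commentary{
 A non-normative illustration (\code{countdown} is a hypothetical function):
 }

 \begin{dartCode}
 Iterable$<$int$>$ countdown(int n) \SYNC* \{
   \WHILE{} (n $>$ 0) \YIELD{} n--;
 \}

 main() \{
   \VAR{} it = countdown(3);
   print(it.toList());  // [3, 2, 1]
   print(it.toList());  // [3, 2, 1] again: the second iterator starts
                        // from its own copy of n
 \}
 \end{dartCode}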
@@ -3584,7 +3592,7 @@
 \LMHash{}
 Execution of $f$ terminates when the first of the following occurs:
 \begin{itemize}
-\item An exception is thrown and not caught within the current function activation. 
+\item An exception is thrown and not caught within the current function activation.
 \item A return statement (\ref{return}) immediately nested in the body of $f$ is executed and not intercepted in a \FINALLY{} (\ref{try}) clause.
 \item The last statement of the body completes execution.
 \end{itemize}
@@ -3615,9 +3623,9 @@
  \end{grammar}
 
 \LMHash{}
-Evaluation of an actual argument list of the form 
+Evaluation of an actual argument list of the form
 
-$(a_1, \ldots, a_m, q_1: a_{m+1}, \ldots, q_l: a_{m+l})$ 
+$(a_1, \ldots, a_m, q_1: a_{m+1}, \ldots, q_l: a_{m+l})$
 
 proceeds as follows:
 
@@ -3647,7 +3655,7 @@
 
 
 \LMHash{}
-If  $m < h$, or $m > n$, a \cd{NoSuchMethodError} is thrown. Furthermore, each $q_i, 1 \le i \le l$,  must have a corresponding named parameter in the set $\{p_{n+1}, \ldots, p_{n +k}\}$ or a \cd{NoSuchMethodError} is thrown. Then $p_i$ is bound to $o_i, i \in 1.. m$, and $q_j$  is bound to $o_{m+j}, j \in 1.. l$.  All remaining formal parameters of $f$  are bound to their default values. 
+If  $m < h$, or $m > n$, a \cd{NoSuchMethodError} is thrown. Furthermore, each $q_i, 1 \le i \le l$,  must have a corresponding named parameter in the set $\{p_{n+1}, \ldots, p_{n +k}\}$ or a \cd{NoSuchMethodError} is thrown. Then $p_i$ is bound to $o_i, i \in 1.. m$, and $q_j$  is bound to $o_{m+j}, j \in 1.. l$.  All remaining formal parameters of $f$  are bound to their default values.
 
 \commentary{All of these remaining parameters are necessarily optional and thus have default values.}
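
 \commentary{
 For example (non-normative; \code{greet} is a hypothetical function):
 }

 \begin{dartCode}
 String greet(String name, \{String greeting: 'Hello'\}) =$>$
     greeting + ', ' + name + '!';

 // greet('Ada') evaluates to 'Hello, Ada!';
 //   greeting is bound to its default value.
 // greet('Ada', greeting: 'Hi') evaluates to 'Hi, Ada!'.
 // greet('Ada', 'Hi') throws a NoSuchMethodError, since there are
 //   two positional arguments but only one positional parameter.
 \end{dartCode}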
 
@@ -3664,16 +3672,16 @@
 \LMLabel{unqualifiedInvocation}
 
 \LMHash{}
-An unqualified function invocation $i$ has the form 
+An unqualified function invocation $i$ has the form
 
-$id(a_1, \ldots, a_n, x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k})$, 
+$id(a_1, \ldots, a_n, x_{n+1}: a_{n+1}, \ldots, x_{n+k}: a_{n+k})$,
 
-where $id$ is an identifier. 
+where $id$ is an identifier.
 
 \LMHash{}
 If there exists a lexically visible declaration named $id$, let $f_{id}$ be the innermost such declaration. Then:
 \begin{itemize}
-\item 
+\item
 If $f_{id}$ is a prefix object, a compile-time error occurs.
 \item
  If $f_{id}$ is a local function, a library function, a library or static getter or a variable then $i$ is interpreted as a function expression invocation (\ref{functionExpressionInvocation}).
@@ -3700,11 +3708,11 @@
 \LMLabel{functionExpressionInvocation}
 
 \LMHash{}
-A function expression invocation $i$ has the form 
+A function expression invocation $i$ has the form
 
-$e_f(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$, 
+$e_f(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$,
 
-where $e_f$ is an expression. If $e_f$ is an identifier $id$, then $id$ must necessarily denote a local function, a library function, a library or static getter or a variable as described above, or $i$ is not considered a function expression invocation. If $e_f$ is a property extraction expression (\ref{propertyExtraction}), then $i$ is is not a function expression invocation and is instead recognized as an ordinary method invocation (\ref{ordinaryInvocation}). 
+where $e_f$ is an expression. If $e_f$ is an identifier $id$, then $id$ must necessarily denote a local function, a library function, a library or static getter or a variable as described above, or $i$ is not considered a function expression invocation. If $e_f$ is a property extraction expression (\ref{propertyExtraction}), then $i$ is not a function expression invocation and is instead recognized as an ordinary method invocation (\ref{ordinaryInvocation}).
 
 \commentary{
 \code{$a.b(x)$} is parsed as a method invocation of method \code{$b()$} on object \code{$a$}, not as an invocation of getter \code{$b$} on \code{$a$} followed by a function call \code{$(a.b)(x)$}.  If a method or getter \code{$b$} exists, the two will be equivalent. However, if \code{$b$} is not defined on \code{$a$}, the resulting invocation of \code{noSuchMethod()} would differ.  The \code{Invocation} passed to \code{noSuchMethod()} would describe a call to a method \code{$b$} with argument \code{$x$} in the former case, and a call to a getter \code{$b$} (with no arguments) in the latter.
@@ -3720,8 +3728,8 @@
 }
 
 \LMHash{}
-It is a static warning if the static type $F$ of $e_f$ may not be assigned to a function type.  If $F$ is not a function type, the static type of $i$ is \DYNAMIC{}. Otherwise 
-the static type of $i$ is the declared return type of  $F$.  
+It is a static warning if the static type $F$ of $e_f$ may not be assigned to a function type.  If $F$ is not a function type, the static type of $i$ is \DYNAMIC{}. Otherwise
+the static type of $i$ is the declared return type of  $F$.
 %\item Let $T_i$ be the static type of $a_i, i \in 1 .. n+k$. It is a static warning if $F$ is not a supertype of  $(T_1, \ldots, T_n, [T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}]) \to \bot$.
 %\end{itemize}
 
@@ -3739,7 +3747,7 @@
 If $C$ declares a concrete instance method named $m$ that is accessible to $L$,  then that method is the result of the lookup, and we say that the method was {\em looked up in $C$}. Otherwise, if $C$ has a superclass $S$, then the result of the lookup is the result of looking up $m$  in $S$ with respect to $L$. Otherwise, we say that the method lookup has failed.
 
 \rationale {
-The motivation for skipping abstract members during lookup is largely to allow smoother mixin composition. 
+The motivation for skipping abstract members during lookup is largely to allow smoother mixin composition.
 }
 
 
@@ -3754,7 +3762,7 @@
 If $C$ declares a concrete instance getter (respectively setter) named $m$  that is accessible to $L$,  then that getter (respectively setter) is the result of the lookup, and we say that the getter (respectively setter) was {\em looked up in $C$}. Otherwise, if $C$ has a superclass $S$, then the result of the lookup is the result of looking up getter (respectively setter) $m$ in $S$ with respect to $L$. Otherwise, we say that the lookup has failed.
 
 \rationale {
-The motivation for skipping abstract members during lookup is largely to allow smoother mixin composition. 
+The motivation for skipping abstract members during lookup is largely to allow smoother mixin composition.
 }
 
 
@@ -3767,7 +3775,7 @@
 \LMHash{}
 The getter function $m$ is invoked. The value of $i$ is the result returned by the call to the getter function.
 \commentary{
-Note that the invocation is always defined. Per the rules for identifier references, an identifier will not be treated as a top-level getter invocation unless the getter $i$ is defined. 
+Note that the invocation is always defined. Per the rules for identifier references, an identifier will not be treated as a top-level getter invocation unless the getter $i$ is defined.
 }
 
 \LMHash{}
@@ -3777,7 +3785,7 @@
 \LMLabel{methodInvocation}
 
 \LMHash{}
-Method invocation can take several forms as specified below. 
+Method invocation can take several forms as specified below.
 
 \subsubsection{Ordinary Invocation}
 \LMLabel{ordinaryInvocation}
@@ -3786,16 +3794,16 @@
 An ordinary method invocation can be {\em conditional} or {\em unconditional}.
 
 \LMHash{}
-Evaluation of a {\em conditional ordinary method invocation} $e$ of the form 
+Evaluation of a {\em conditional ordinary method invocation} $e$ of the form
 
 \LMHash{}
-$o?.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$  
+$o?.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
 \LMHash{}
-is equivalent to the evaluation of the expression  
+is equivalent to the evaluation of the expression
 
 \LMHash{}
-$((x) => x == \NULL ? \NULL : x.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k}))(o)$. 
+$((x) => x == \NULL ? \NULL : x.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k}))(o)$,
 
 unless $o$ is  a type literal, in which case it is equivalent to $o.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
 
@@ -3803,25 +3811,25 @@
 The static type of $e$ is the same as the static type of $o.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$. Exactly the same static warnings that would be caused by $o.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ are also generated in the case of $o?.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
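
 \commentary{
 For example (non-normative; \code{shout} is a hypothetical function):
 }

 \begin{dartCode}
 String shout(String s) =$>$ s?.toUpperCase();

 // shout('hi') evaluates to 'HI';
 // shout(\NULL{}) evaluates to \NULL{} rather than throwing.
 \end{dartCode}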
 
 \LMHash{}
-An {\em unconditional ordinary method invocation} $i$ has the form 
+An {\em unconditional ordinary method invocation} $i$ has the form
 
 $o.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
 
 \LMHash{}
-Evaluation of an unconditional ordinary method invocation $i$ of the form 
+Evaluation of an unconditional ordinary method invocation $i$ of the form
 
-$o.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ 
+$o.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$
 
 proceeds as follows:
 
 \LMHash{}
-First, the expression $o$ is evaluated to a value $v_o$. Next, the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is evaluated yielding actual argument objects $o_1, \ldots , o_{n+k}$. Let $f$ be the result of looking up (\ref{methodLookup}) method $m$  in $v_o$ with respect to the current library $L$. 
+First, the expression $o$ is evaluated to a value $v_o$. Next, the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is evaluated yielding actual argument objects $o_1, \ldots , o_{n+k}$. Let $f$ be the result of looking up (\ref{methodLookup}) method $m$  in $v_o$ with respect to the current library $L$.
 
 \LMHash{}
 Let $p_1 \ldots p_h$ be the required parameters of $f$,  let $p_1 \ldots p_m$ be the positional parameters of $f$ and let $p_{h+1}, \ldots, p_{h+l}$ be the optional parameters declared by $f$.
 
 \commentary{
-We have an argument list consisting of $n$ positional arguments and $k$ named arguments. We have a function with $h$ required parameters and $l$ optional parameters. The number of positional arguments must be at least as large as the number of required parameters, and no larger than the number of positional parameters. All named arguments must have a corresponding named parameter. 
+We have an argument list consisting of $n$ positional arguments and $k$ named arguments. We have a function with $h$ required parameters and $l$ optional parameters. The number of positional arguments must be at least as large as the number of required parameters, and no larger than the number of positional parameters. All named arguments must have a corresponding named parameter.
 }
 
 \LMHash{}
@@ -3831,9 +3839,9 @@
 If the method lookup succeeded, the body of $f$ is executed with respect to the bindings that resulted from the evaluation of the argument list, and with \THIS{} bound to $v_o$. The value of $i$ is the value returned after $f$ is executed.
 
 \LMHash{}
-If the method lookup has failed, then let $g$ be the result of looking up getter (\ref{getterAndSetterLookup}) $m$ in $v_o$ with respect to $L$. 
+If the method lookup has failed, then let $g$ be the result of looking up getter (\ref{getterAndSetterLookup}) $m$ in $v_o$ with respect to $L$.
 If $v_o$ is an instance of \code{Type} but $o$ is not a constant type literal, then if $g$ is a getter that forwards to a static getter, getter lookup fails.
-If the getter lookup succeeded, let $v_g$ be the value of the getter invocation $o.m$. Then the value of $i$ is the result of invoking 
+If the getter lookup succeeded, let $v_g$ be the value of the getter invocation $o.m$. Then the value of $i$ is the result of invoking
 the static method \code{Function.apply()} with arguments $v_g, [o_1, \ldots , o_n], \{x_{n+1}: o_{n+1}, \ldots , x_{n+k}: o_{n+k}\}$.
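
 \commentary{
 A non-normative illustration (\code{Box} is a hypothetical class):
 }

 \begin{dartCode}
 \CLASS{} Box \{
   Function \GET{} run =$>$ (int x) =$>$ x * 2;
 \}

 // \NEW{} Box().run(21) finds no method named run, but the getter
 // lookup succeeds; the closure returned by the getter is then
 // invoked via Function.apply(), so the invocation evaluates to 42.
 \end{dartCode}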
 
 \LMHash{}
@@ -3864,7 +3872,7 @@
     noSuchMethod(x,y) =$>$ x + y;
 \}
 
-\NEW{} Perverse.unknownMethod(); 
+\NEW{} Perverse.unknownMethod();
 \end{code}
 
 \commentary{Notice that the wording carefully avoids re-evaluating the receiver $o$ and the arguments $a_i$. }
@@ -3875,12 +3883,12 @@
 \item
 $T$ or a superinterface of $T$ is annotated with an annotation denoting a constant identical to the constant \code{@proxy} defined in \code{dart:core}.  Or
 \item  $T$ is \code{Type}, $e$ is a constant type literal and the class corresponding to $e$ has a static getter named $m$.
-\item $T$ is \code{Function} and $m$ is \CALL. \rationale {The type \code{Function} is treated as if it has a \code{call} method for any possible signature of \CALL. The expectation is that any concrete subclass of \code{Function} will implement \CALL. Note that a warning will be issue if this is not the case. Furthermore, any use of \CALL{} on a subclass of \code{Function} that fails to implement \CALL{} will also provoke a a warning, as this exemption is limited to type \code{Function}, and does not apply to its subtypes. 
+\item $T$ is \code{Function} and $m$ is \CALL. \rationale {The type \code{Function} is treated as if it has a \code{call} method for any possible signature of \CALL. The expectation is that any concrete subclass of \code{Function} will implement \CALL. Note that a warning will be issued if this is not the case. Furthermore, any use of \CALL{} on a subclass of \code{Function} that fails to implement \CALL{} will also provoke a warning, as this exemption is limited to type \code{Function}, and does not apply to its subtypes.
 }
 \end{itemize}
 
 \LMHash{}
-If $T.m$ exists, it  is a static type warning if the type $F$ of $T.m$ may not be assigned to a function type. If $T.m$ does not exist, or if $F$ is not a function type, the static type of $i$ is \DYNAMIC{}; otherwise the static type of $i$ is the declared return type of  $F$.  
+If $T.m$ exists, it  is a static type warning if the type $F$ of $T.m$ may not be assigned to a function type. If $T.m$ does not exist, or if $F$ is not a function type, the static type of $i$ is \DYNAMIC{}; otherwise the static type of $i$ is the declared return type of  $F$.
 
 \LMHash{}
 It is a compile-time error to invoke any of the methods of class \cd{Object} on a prefix object (\ref{imports}) or on a constant type literal that is  immediately followed by the token `.'.
@@ -3897,7 +3905,7 @@
 {\bf cascadeSection:}
       `{\escapegrammar ..}' (cascadeSelector arguments*) (assignableSelector arguments*)* (assignmentOperator expressionWithoutCascade)?
       .
-     
+
 {\bf cascadeSelector:}`['  expression `]';
       identifier
       .
@@ -3916,7 +3924,7 @@
 \LMLabel{superInvocation}
 
 \LMHash{}
-A super method invocation $i$ has the form 
+A super method invocation $i$ has the form
 
 $\SUPER{}.m(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$.
 
@@ -3924,7 +3932,7 @@
 Evaluation of $i$ proceeds as follows:
 
 \LMHash{}
-First, the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is evaluated  yielding actual argument objects $o_1, \ldots , o_{n+k}$. Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up (\ref{methodLookup}). Let $S_{dynamic}$ be the superclass of $C$, and let $f$ be the result of looking up method (\ref{methodLookup})  $m$ in $S_{dynamic}$  with respect to the current library $L$. 
+First, the argument list $(a_1, \ldots , a_n, x_{n+1}: a_{n+1}, \ldots , x_{n+k}: a_{n+k})$ is evaluated  yielding actual argument objects $o_1, \ldots , o_{n+k}$. Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up (\ref{methodLookup}). Let $S_{dynamic}$ be the superclass of $C$, and let $f$ be the result of looking up method (\ref{methodLookup})  $m$ in $S_{dynamic}$  with respect to the current library $L$.
 Let $p_1 \ldots p_h$ be the required parameters of $f$,  let $p_1 \ldots p_m$ be the positional parameters of $f$ and let $p_{h+1}, \ldots, p_{h+l}$ be the optional parameters declared by $f$.
 
 \LMHash{}
@@ -3934,9 +3942,9 @@
 If the method lookup succeeded, the body of $f$ is executed with respect to the bindings that resulted from the evaluation of the argument list, and with \THIS{} bound to the current value of \THIS{}. The value of $i$ is the value returned after $f$ is executed.
 
 \LMHash{}
-If the method lookup has failed, then let $g$ be the result of looking up getter (\ref{getterAndSetterLookup}) $m$ in $S_{dynamic}$ with respect to $L$. If the getter lookup succeeded, let $v_g$ be the value of the getter invocation $\SUPER{}.m$. Then the value of $i$ is the result of invoking 
+If the method lookup has failed, then let $g$ be the result of looking up getter (\ref{getterAndSetterLookup}) $m$ in $S_{dynamic}$ with respect to $L$. If the getter lookup succeeded, let $v_g$ be the value of the getter invocation $\SUPER{}.m$. Then the value of $i$ is the result of invoking
 the static method \code{Function.apply()} with arguments $v_g, [o_1, \ldots , o_n], \{x_{n+1}: o_{n+1}, \ldots , x_{n+k}: o_{n+k}\}$.
- 
+
 \LMHash{}
 If  getter lookup has also failed, then a new instance $im$  of the predefined class  \code{Invocation}  is created, such that:
 \begin{itemize}
@@ -3960,7 +3968,7 @@
 It is a compile-time error if a super method invocation occurs in a top-level function or variable initializer, in an instance variable initializer or initializer list, in class \code{Object}, in a factory constructor or in a static method or variable initializer.
 
 \LMHash{}
-Let $S_{static}$ be the superclass of the immediately enclosing class. It is a static type warning if $S_{static}$ does not have an accessible (\ref{privacy}) instance member named $m$ unless $S_{static}$ or a superinterface of $S_{static}$ is annotated with an annotation denoting a constant identical to the constant \code{@proxy} defined in \code{dart:core}. If $S_{static}.m$ exists, it  is a static type warning if the type $F$ of $S_{static}.m$ may not be assigned to a function type. If $S_{static}.m$ does not exist, or if $F$ is not a function type, the static type of $i$ is \DYNAMIC{}; otherwise the static type of $i$ is the declared return type of  $F$.  
+Let $S_{static}$ be the superclass of the immediately enclosing class. It is a static type warning if $S_{static}$ does not have an accessible (\ref{privacy}) instance member named $m$ unless $S_{static}$ or a superinterface of $S_{static}$ is annotated with an annotation denoting a constant identical to the constant \code{@proxy} defined in \code{dart:core}. If $S_{static}.m$ exists, it  is a static type warning if the type $F$ of $S_{static}.m$ may not be assigned to a function type. If $S_{static}.m$ does not exist, or if $F$ is not a function type, the static type of $i$ is \DYNAMIC{}; otherwise the static type of $i$ is the declared return type of  $F$.
 % The following is not needed because it is specified in 'Binding Actuals to Formals"
 %Let $T_i$ be the static type of $a_i, i \in 1 .. n+k$. It is a static warning if $F$ is not a supertype of  $(T_1, \ldots, t_n, \{T_{n+1}$ $x_{n+1}, \ldots, T_{n+k}$ $x_{n+k}\}) \to \bot$.
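
 \commentary{
 A non-normative illustration (\code{Base} and \code{Derived} are hypothetical classes):
 }

 \begin{dartCode}
 \CLASS{} Base \{
   String describe() =$>$ 'base';
 \}

 \CLASS{} Derived \EXTENDS{} Base \{
   String describe() =$>$ \SUPER{}.describe() + ' refined';
 \}

 // \NEW{} Derived().describe() evaluates to 'base refined':
 // \SUPER{}.describe() is looked up starting in Base, the superclass of
 // the class in which the enclosing method was looked up.
 \end{dartCode}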
 
@@ -3971,7 +3979,7 @@
 \LMLabel{sendingMessages}
 
 \LMHash{}
-Messages are the sole means of communication among isolates. Messages are sent by invoking specific  methods in the Dart libraries; there is no specific syntax for sending a message. 
+Messages are the sole means of communication among isolates. Messages are sent by invoking specific  methods in the Dart libraries; there is no specific syntax for sending a message.
 
 \commentary{In other words, the methods supporting sending messages embody primitives of Dart that are not accessible to ordinary code, much like the methods that spawn isolates.
 }
@@ -3998,7 +4006,7 @@
 Tear-offs using the \cd{ x\#id}  syntax cannot be conditional at this time; this is inconsistent, and is likely to be addressed in the near future, perhaps via  notation such as  \cd{ x?\#id} . As indicated in section \ref{ecmaConformance}, experimentation in this area is allowed.
 }
 
-Evaluation of a {\em conditional property extraction expression} $e$ of the form $e_1?.id$  is equivalent to the evaluation of the expression  $((x) => x == \NULL ? \NULL : x.id)(e_1)$. 
+Evaluation of a {\em conditional property extraction expression} $e$ of the form $e_1?.id$  is equivalent to the evaluation of the expression  $((x) => x == \NULL ? \NULL : x.id)(e_1)$,
 unless $e_1$ is  a type literal, in which case it is equivalent to $e_1.id$.
 
 The static type of $e$ is the same as the static type of $e_1.id$. Let $T$ be the static type of $e_1$ and let $y$ be a fresh variable of type $T$. Exactly the same static warnings that would be caused by $y.id$ are also generated in the case of $e_1?.id$.
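
 \commentary{
 For example (non-normative; \code{safeLength} is a hypothetical function):
 }

 \begin{dartCode}
 int safeLength(String s) =$>$ s?.length;

 // safeLength('abc') evaluates to 3;
 // safeLength(\NULL{}) evaluates to \NULL{}.
 \end{dartCode}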
@@ -4013,7 +4021,7 @@
 Evaluation of a property extraction $i$ of the form $e.m$ proceeds as follows:
 
 \LMHash{}
-First, the expression $e$ is evaluated to an object $o$. Let $f$ be the result of looking up (\ref{methodLookup}) method  (\ref{instanceMethods}) $m$ in $o$ with respect to the current library $L$.  If $o$ is an instance of \code{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards (\ref{functionDeclarations}) to a static method,  method lookup fails. If method lookup succeeds then $i$ evaluates to the closurization of method $f$ on object $o$ (\ref{ordinaryMemberClosurization}).  
+First, the expression $e$ is evaluated to an object $o$. Let $f$ be the result of looking up (\ref{methodLookup}) method  (\ref{instanceMethods}) $m$ in $o$ with respect to the current library $L$.  If $o$ is an instance of \code{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards (\ref{functionDeclarations}) to a static method,  method lookup fails. If method lookup succeeds then $i$ evaluates to the closurization of method $f$ on object $o$ (\ref{ordinaryMemberClosurization}).
 
 \commentary {
 Note that $f$ is never an abstract method, because method lookup skips abstract methods. Hence, if $m$ refers to an abstract method, we will continue to the next step. However, since methods and getters never override each other, getter lookup will necessarily fail as well, and \cd{noSuchMethod()} will ultimately be invoked. The regrettable implication is that the error will refer to a missing getter rather than an attempt to closurize an abstract method.
@@ -4021,7 +4029,7 @@
 
 \LMHash{}
 Otherwise, $i$ is a getter invocation.  Let $f$ be the result of looking up
-(\ref{getterAndSetterLookup}) getter (\ref{getters}) $m$ in $o$  with respect to $L$. If $o$ is an instance of \code{Type} but $e$ is not a constant type literal, then if $f$ is a getter that forwards  to a static getter,  getter lookup fails. Otherwise, the body of $f$ is executed with \THIS{} bound to $o$.  The value of $i$ is the result returned by the call to the getter function. 
+(\ref{getterAndSetterLookup}) getter (\ref{getters}) $m$ in $o$  with respect to $L$. If $o$ is an instance of \code{Type} but $e$ is not a constant type literal, then if $f$ is a getter that forwards  to a static getter,  getter lookup fails. Otherwise, the body of $f$ is executed with \THIS{} bound to $o$.  The value of $i$ is the result returned by the call to the getter function.
 
 \LMHash{}
 If the getter lookup has failed, then a new instance $im$  of the predefined class  \code{Invocation}  is created, such that:
@@ -4063,7 +4071,7 @@
 \item The declared return type of $m$, if  $T$ is \code{Type}, $e$ is a constant type literal and the class corresponding to $e$ declares an accessible static getter named $m$.
 \item The static type of function $T.m$ if $T$ has an accessible instance method named $m$.
 \item The static type of function $m$, if  $T$ is \code{Type}, $e$ is a constant type literal and the class corresponding to $e$ declares an accessible static method named $m$.
-\item  The type \DYNAMIC{} otherwise.  
+\item  The type \DYNAMIC{} otherwise.
 \end{itemize}
 
 
@@ -4074,10 +4082,10 @@
 Evaluation of a property extraction $i$ of the form $\SUPER.m$ proceeds as follows:
 
 \LMHash{}
-Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up.  Let $S_{dynamic}$ be the superclass of $C$. Let $f$ be the result of looking up method $m$ in $S_{dynamic}$ with respect to the current library $L$.  If method lookup succeeds then $i$ evaluates to the closurization of method $f$ with respect to superclass $S_{dynamic}$ (\ref{superClosurization}).  
- 
+Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up.  Let $S_{dynamic}$ be the superclass of $C$. Let $f$ be the result of looking up method $m$ in $S_{dynamic}$ with respect to the current library $L$.  If method lookup succeeds then $i$ evaluates to the closurization of method $f$ with respect to superclass $S_{dynamic}$ (\ref{superClosurization}).
+
 \LMHash{}
- Otherwise, $i$ is a getter invocation.  Let $f$ be the result of  looking up getter $m$ in $S_{dynamic}$  with respect to $L$.  The body of $f$  is executed with \THIS{} bound to the current value of  \THIS{}.  The value of $i$ is the result returned by the call to the getter function. 
+ Otherwise, $i$ is a getter invocation.  Let $f$ be the result of  looking up getter $m$ in $S_{dynamic}$  with respect to $L$.  The body of $f$  is executed with \THIS{} bound to the current value of  \THIS{}.  The value of $i$ is the result returned by the call to the getter function.
 
 \LMHash{}
 If the getter lookup has failed, then a new instance $im$  of the predefined class  \code{Invocation}  is created, such that :
@@ -4097,13 +4105,13 @@
 and the result of this latter invocation is the result of evaluating $i$.
 
 \LMHash{}
-Let $S_{static}$ be the superclass of the immediately enclosing class. It is a static type warning if $S_{static}$ does not have an accessible instance method or getter named $m$.  
+Let $S_{static}$ be the superclass of the immediately enclosing class. It is a static type warning if $S_{static}$ does not have an accessible instance method or getter named $m$.
 
 The static type of $i$ is:
 \begin{itemize}
 \item The declared return type of $S_{static}.m$, if $S_{static}$ has an accessible instance getter named $m$.
 \item The static type of function $S_{static}.m$ if $S_{static}$ has an accessible instance method named $m$.
-\item The type  \DYNAMIC{} otherwise. 
+\item The type  \DYNAMIC{} otherwise.
 \end{itemize}
 
 
@@ -4117,23 +4125,23 @@
 First, the expression $e$ is evaluated to an object $o$.  Then:
 
 \LMHash{}
- if $m$ is a setter name, let $f$ be the result of looking up setter $m$ in $o$ with respect to the current library $L$.   If $o$ is an instance of \cd{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards to a static setter, setter lookup fails. If setter lookup succeeds then $i$ evaluates to the closurization of setter $f$ on object $o$ (\ref{ordinaryMemberClosurization}). 
+ if $m$ is a setter name, let $f$ be the result of looking up setter $m$ in $o$ with respect to the current library $L$.   If $o$ is an instance of \cd{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards to a static setter, setter lookup fails. If setter lookup succeeds then $i$ evaluates to the closurization of setter $f$ on object $o$ (\ref{ordinaryMemberClosurization}).
  If setter lookup failed, a \cd{NoSuchMethodError} is thrown.
- 
+
   \rationale {
 It would be more in keeping with the rules of Dart to invoke \cd{noSuchMethod} in this and similar cases below. However,  current implementations of \cd{noSuchMethod} cannot distinguish between an invocation of a closurization and an actual call.  It is likely that future versions of Dart will provide a mechanism to detect whether \cd{noSuchMethod} is invoked in response to a closurization, say by means of a getter like \cd{isTearOff}. By being conservative at this stage and insisting on failure, we can ensure that no functioning code will break when/if this functionality is introduced.
  }
 
- 
+
  \LMHash{}
-If $m$ is not a setter name, let $f$ be the result of looking up method $m$ in $o$ with respect to the current library $L$.   If $o$ is an instance of \cd{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards to a static method, method lookup fails. If method lookup succeeds then $i$ evaluates to the closurization of method $f$ on object $o$ (\ref{ordinaryMemberClosurization}).  
- 
+If $m$ is not a setter name, let $f$ be the result of looking up method $m$ in $o$ with respect to the current library $L$.   If $o$ is an instance of \cd{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards to a static method, method lookup fails. If method lookup succeeds then $i$ evaluates to the closurization of method $f$ on object $o$ (\ref{ordinaryMemberClosurization}).
+
 \LMHash{}
-If method lookup failed, let $f$ be the result of looking up getter $m$ in $o$ with respect to the current library $L$.   If $o$ is an instance of \cd{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards to a static getter, getter lookup fails. If getter lookup succeeds then $i$ evaluates to the closurization of getter $f$ on object $o$ (\ref{ordinaryMemberClosurization}).  
+If method lookup failed, let $f$ be the result of looking up getter $m$ in $o$ with respect to the current library $L$.   If $o$ is an instance of \cd{Type} but $e$ is not a constant type literal, then if $f$ is a method that forwards to a static getter, getter lookup fails. If getter lookup succeeds then $i$ evaluates to the closurization of getter $f$ on object $o$ (\ref{ordinaryMemberClosurization}).
  If getter lookup failed, a \cd{NoSuchMethodError} is thrown.
- 
-  
- 
+
+
+
 
 %\LMHash{}
 %Otherwise,  a new instance $im$  of the predefined class  \code{Invocation}  is created, such that :
@@ -4153,7 +4161,7 @@
 %and the result of this latter invocation is the result of evaluating $i$.
 
 \LMHash{}
-It is a compile-time error if $e$ is a prefix object (\ref{imports}) and $m$ refers to a type or a member of class \cd{Object}.
+It is a compile-time error if $e$ is a prefix object $p$ (\ref{imports}) and $m$ refers to a type accessible via $p$ or to a member of class \cd{Object}.
 
 \commentary{
 This restriction is in line with other limitations on the use of prefixes as objects. The only permitted uses of $p\#m$ are closurizing top level methods and getters imported via the prefix $p$. Top level methods are directly available by their qualified names: $p.m$. However, getters and setters are not, and allowing their closurization is the whole point of the $e\#m$ syntax.
@@ -4172,7 +4180,7 @@
 \item The static type of function $T.m$, if $T$ has an accessible instance member named $m$.
 \item The static type of function $T.m$, if $T$ is \cd{Type}, $e$ is a constant type literal and the class corresponding to $e$ declares an accessible static member or constructor named $m$.
 \item \code{Function} if $T$ is \code{Function} and $m$ is \CALL.
-\item The type  \DYNAMIC{} otherwise. 
+\item The type  \DYNAMIC{} otherwise.
 \end{itemize}
 
 \subsubsection{Named Constructor Extraction}
@@ -4189,6 +4197,8 @@
 \LMHash{}
 The static type of $i$ is the type of the constructor function, if $T$ denotes a class in the surrounding scope with an accessible constructor $f$ named $m$. Otherwise the static type of $i$ is \DYNAMIC{}.
 
+It is a compile-time error if $T$ is an enumerated type (\ref{enums}).
+
 \subsubsection{Anonymous Constructor Extraction}
 \LMLabel{anonymousConstructorExtraction}
 
@@ -4203,6 +4213,9 @@
 \LMHash{}
 The static type of $i$ is the type of the constructor function $T()$, if $T$ denotes a class in the surrounding scope with an anonymous constructor $T()$. Otherwise the static type of $i$ is \DYNAMIC{}.
 
+It is a compile-time error if $T$ is an enumerated type (\ref{enums}).
+
+
 \subsubsection{General Super Property Extraction}
 \LMLabel{generalSuperPropertyExtraction}
 
@@ -4211,7 +4224,7 @@
 Evaluation of a property extraction $i$ of the form \SUPER$\#m$ proceeds as follows:
 
  \LMHash{}
-Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up.  Let $S_{dynamic}$ be the superclass of $C$. 
+Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up.  Let $S_{dynamic}$ be the superclass of $C$.
 
  \LMHash{}
 If $m$ is a setter name, let $f$ be the result of looking up setter $m$ in $S_{dynamic}$ with respect to the current library $L$. If setter lookup succeeds then $i$ evaluates to the closurization of setter $f$  with respect to superclass $S_{dynamic}$  (\ref{superClosurization}).  If setter lookup failed, a \cd{NoSuchMethodError} is thrown.
@@ -4241,14 +4254,14 @@
 \item $() \{\RETURN{}$ \~{} $u;$\} if $f$ is named \~{}.
 \item $(a) \{\RETURN{}$ $u[a];$\} if $f$ is named $[]$.
 \item $(a, b) \{\RETURN{}$ $u[a] = b;$\} if $f$ is named $[]=$.
-\item  
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, \{p_1 : d_1, \ldots , p_k : d_k\})$ \{
   \RETURN{} $ u.m(r_1, \ldots, r_n, p_1: p_1, \ldots, p_k: p_k);$
-\} 
+\}
 \end{dartCode}
 if $f$ is named $m$ and has required parameters $r_1, \ldots, r_n$, and named parameters $p_1, \ldots, p_k$ with defaults $d_1, \ldots, d_k$.
-\item 
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, [p_1 = d_1, \ldots , p_k = d_k])$\{
   \RETURN{} $u.m(r_1, \ldots, r_n, p_1, \ldots, p_k)$;
@@ -4291,15 +4304,15 @@
 \LMHash{}
 The {\em closurization of constructor $f$ of type $T$} is defined to be equivalent to:
 \begin{itemize}
-\item  
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, \{p_1 : d_1, \ldots , p_k : d_k\})$ \{
   \RETURN{} \NEW{} $T.m(r_1, \ldots, r_n, p_1: p_1, \ldots, p_k: p_k);$
-\} 
+\}
 \end{dartCode}
 
 if $f$ is a named constructor with name $m$ that has required parameters $r_1, \ldots, r_n$, and named parameters $p_1, \ldots, p_k$ with defaults $d_1, \ldots, d_k$.
-\item 
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, [p_1 = d_1, \ldots , p_k = d_k])$\{
   \RETURN{} \NEW{} $T.m(r_1, \ldots, r_n, p_1, \ldots, p_k)$;
@@ -4312,7 +4325,7 @@
 \LMHash{}
 Except that iff  \code{identical($T_1, T_2$)}  then  \cd{\NEW{} $T_1\#m$ == \NEW{} $T_2\#m$}.
 
-\commentary{ 
+\commentary{
 The above implies that for non-parameterized types, one can rely on the equality of closures resulting from closurization on the ``same'' type. For parameterized types, one cannot, since there is no requirement to canonicalize them.
 }
 
@@ -4322,15 +4335,15 @@
 \LMHash{}
 The {\em closurization of anonymous constructor $f$ of type $T$} is defined to be equivalent to:
 \begin{itemize}
-\item  
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, \{p_1 : d_1, \ldots , p_k : d_k\})$ \{
   \RETURN{} \NEW{} $T(r_1, \ldots, r_n, p_1: p_1, \ldots, p_k: p_k);$
-\} 
+\}
 \end{dartCode}
 
 if $f$ is an anonymous constructor that has required parameters $r_1, \ldots, r_n$, and named parameters $p_1, \ldots, p_k$ with defaults $d_1, \ldots, d_k$.
-\item 
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, [p_1 = d_1, \ldots , p_k = d_k])$\{
   \RETURN{} \NEW{} $T(r_1, \ldots, r_n, p_1, \ldots, p_k)$;
@@ -4356,14 +4369,14 @@
 \item $() \{\RETURN{}$ \~{}\SUPER;\} if $f$ is named \~{}.
 \item $(a) \{\RETURN{}$ $\SUPER[a];$\} if $f$ is named $[]$.
 \item $(a, b) \{\RETURN{}$ $\SUPER[a] = b;$\} if $f$ is named $[]=$.
-\item  
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, \{p_1 : d_1, \ldots , p_k : d_k\})$ \{
   \RETURN{} \SUPER$.m(r_1, \ldots, r_n, p_1: p_1, \ldots, p_k: p_k);$
-\} 
+\}
 \end{dartCode}
 if $f$ is named $m$ and has required parameters $r_1, \ldots, r_n$, and named parameters $p_1, \ldots, p_k$ with defaults $d_1, \ldots, d_k$.
-\item 
+\item
 \begin{dartCode}
 $(r_1, \ldots, r_n, [p_1 = d_1, \ldots , p_k = d_k])$\{
   \RETURN{} \SUPER$.m(r_1, \ldots, r_n, p_1, \ldots, p_k)$;
@@ -4403,11 +4416,11 @@
 
 %If there is neither a local variable declaration with name $v$ nor a setter declaration with name $v=$ in the lexical scope enclosing $a$, then:
 %\begin{itemize}
-% \item If  $a$ occurs inside a top level or static function (be it function, method, getter,  or setter) or variable initializer, evaluation of $a$ causes $e$ to be evaluated, after which a \code{NoSuchMethodError} is thrown. 
-% \item Otherwise, the assignment is equivalent to the assignment \code{ \THIS{}.$v$ = $e$}. 
+% \item If  $a$ occurs inside a top level or static function (be it function, method, getter,  or setter) or variable initializer, evaluation of $a$ causes $e$ to be evaluated, after which a \code{NoSuchMethodError} is thrown.
+% \item Otherwise, the assignment is equivalent to the assignment \code{ \THIS{}.$v$ = $e$}.
 % \end{itemize}
- 
-%Otherwise 
+
+%Otherwise
 
 \LMHash{}
 Let $d$ be the innermost declaration whose name is $v$ or $v=$, if it exists.
@@ -4415,21 +4428,21 @@
 
 \LMHash{}
 If $d$ is the declaration of a local variable, the expression $e$ is evaluated to an object $o$. Then, the variable $v$ is bound to $o$ unless $v$ is \FINAL{} or \CONST{}, in which case a dynamic error occurs.
-If no error occurs, the value of the assignment expression is $o$.  
+If no error occurs, the value of the assignment expression is $o$.
 
 % add local functions per bug 23218
 
 \LMHash{}
-If $d$ is the declaration of a library variable, top level getter or top level setter, the expression $e$ is evaluated to an object $o$. Then the setter $v=$ is invoked with its formal parameter bound to $o$. The value of the assignment expression is $o$.  
+If $d$ is the declaration of a library variable, top level getter or top level setter, the expression $e$ is evaluated to an object $o$. Then the setter $v=$ is invoked with its formal parameter bound to $o$. The value of the assignment expression is $o$.
 
 \LMHash{}
 Otherwise, if $d$ is the declaration of a static variable, static getter or static setter in class $C$, then the assignment is equivalent to the assignment \code{$C.v$ = $e$}.
 
 \LMHash{}
-Otherwise, If  $a$ occurs inside a top level or static function (be it function, method, getter,  or setter) or variable initializer, evaluation of $a$ causes $e$ to be evaluated, after which a \code{NoSuchMethodError} is thrown. 
+Otherwise, if $a$ occurs inside a top level or static function (be it function, method, getter, or setter) or variable initializer, evaluation of $a$ causes $e$ to be evaluated, after which a \code{NoSuchMethodError} is thrown.
 
 \LMHash{}
-Otherwise, the assignment is equivalent to the assignment \code{ \THIS{}.$v$ = $e$}. 
+Otherwise, the assignment is equivalent to the assignment \code{ \THIS{}.$v$ = $e$}.
 
 \LMHash{}
 In checked mode, it is a dynamic type error if $o$ is not \NULL{} and the interface of the class of $o$ is not a subtype of the actual type (\ref{actualTypeOfADeclaration}) of $v$.
@@ -4446,7 +4459,7 @@
 Evaluation of an assignment of the form $e_1.v$ \code{=} $e_2$ proceeds as follows:
 
 \LMHash{}
-The expression $e_1$ is evaluated to an object $o_1$. Then, the expression $e_2$  is evaluated to an object $o_2$. Then, the setter $v=$ is looked up (\ref{getterAndSetterLookup}) in $o_1$ with respect to the current library.  If $o_1$ is an instance of \code{Type} but $e_1$ is not a constant type literal, then if $v=$ is a setter that forwards (\ref{functionDeclarations}) to a static setter, setter lookup fails. Otherwise, the body  of $v=$ is executed with its formal parameter bound to $o_2$ and \THIS{} bound to $o_1$. 
+The expression $e_1$ is evaluated to an object $o_1$. Then, the expression $e_2$  is evaluated to an object $o_2$. Then, the setter $v=$ is looked up (\ref{getterAndSetterLookup}) in $o_1$ with respect to the current library.  If $o_1$ is an instance of \code{Type} but $e_1$ is not a constant type literal, then if $v=$ is a setter that forwards (\ref{functionDeclarations}) to a static setter, setter lookup fails. Otherwise, the body  of $v=$ is executed with its formal parameter bound to $o_2$ and \THIS{} bound to $o_1$.
 
 \LMHash{}
 If the setter lookup has failed, then a new instance $im$  of the predefined class  \code{Invocation}  is created, such that :
@@ -4458,7 +4471,7 @@
 \end{itemize}
 
 \LMHash{}
-Then the method \code{noSuchMethod()} is looked up in $o_1$ and invoked  with argument $im$. 
+Then the method \code{noSuchMethod()} is looked up in $o_1$ and invoked  with argument $im$.
 However, if the implementation found cannot be invoked with a single positional argument, the implementation  of \code{noSuchMethod()} in class \code{Object} is invoked on $o_1$ with argument $im'$, where $im'$ is an instance of \code{Invocation} such that :
 \begin{itemize}
 \item  \code{im'.isMethod} evaluates to \code{\TRUE{}}.
@@ -4477,7 +4490,7 @@
 Let $T$ be the static type of $e_1$. It is a static type warning if $T$ does not have an accessible instance setter named $v=$ unless either:
 \begin{itemize}
 \item $T$ or a superinterface of $T$ is annotated with an annotation denoting a constant identical to the constant \code{@proxy} defined in \code{dart:core}. Or
-\item $T$ is \code{Type}, $e_1$ is a constant type literal and the class corresponding to $e_1$ has a static setter named $v=$. 
+\item $T$ is \code{Type}, $e_1$ is a constant type literal and the class corresponding to $e_1$ has a static setter named $v=$.
 \end{itemize}
 
 
@@ -4489,8 +4502,8 @@
 Evaluation of an assignment of the form $\SUPER.v$ \code{=} $e$ proceeds as follows:
 
 \LMHash{}
-Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up.  Let $S_{dynamic}$ be the superclass of $C$. 
-The expression $e$ is evaluated to an object $o$.  Then, the setter $v=$ is looked up (\ref{getterAndSetterLookup}) in $S_{dynamic}$ with respect to the current library.  The body  of $v=$ is executed with its formal parameter bound to $o$ and \THIS{} bound to \THIS{}. 
+Let $g$ be the method currently executing, and let $C$ be the class in which $g$ was looked up.  Let $S_{dynamic}$ be the superclass of $C$.
+The expression $e$ is evaluated to an object $o$.  Then, the setter $v=$ is looked up (\ref{getterAndSetterLookup}) in $S_{dynamic}$ with respect to the current library.  The body of $v=$ is executed with its formal parameter bound to $o$ and \THIS{} bound to the current value of \THIS{}.
 
 \LMHash{}
 If the setter lookup has failed, then a new instance $im$  of the predefined class  \code{Invocation}  is created, such that :
@@ -4502,7 +4515,7 @@
 \end{itemize}
 
 \LMHash{}
-Then the method \code{noSuchMethod()} is looked up in $S_{dynamic}$ and invoked  with argument $im$. 
+Then the method \code{noSuchMethod()} is looked up in $S_{dynamic}$ and invoked  with argument $im$.
 However, if the implementation found cannot be invoked with a single positional argument, the implementation  of \code{noSuchMethod()} in class \code{Object} is invoked on \THIS{} with argument $im'$, where $im'$ is an instance of \code{Invocation} such that :
 \begin{itemize}
 \item  \code{im'.isMethod} evaluates to \code{\TRUE{}}.
@@ -4518,7 +4531,7 @@
 In checked mode, it is a dynamic type error if $o$ is not \NULL{} and the interface of the class of $o$ is not a subtype of the actual type of $S.v$.
 
 \LMHash{}
-Let $S_{static}$ be the superclass of the immediately enclosing class. It is a static type warning if $S_{static}$ does not have an accessible instance setter named $v=$ unless $S_{static}$ or a superinterface of $S_{static}$ is annotated with an annotation denoting a constant identical to the constant \code{@proxy} defined in \code{dart:core}. 
+Let $S_{static}$ be the superclass of the immediately enclosing class. It is a static type warning if $S_{static}$ does not have an accessible instance setter named $v=$ unless $S_{static}$ or a superinterface of $S_{static}$ is annotated with an annotation denoting a constant identical to the constant \code{@proxy} defined in \code{dart:core}.
 
 \LMHash{}
 It is a static type warning if the static type of $e$ may not be assigned to the static type of the formal parameter of the setter $v=$.   The static type of the expression $\SUPER.v$ \code{=} $e$ is the static type of $e$.
@@ -4549,27 +4562,27 @@
 \LMLabel{compoundAssignment}
 
 \LMHash{}
-Evaluation of a compound assignment of the form $v$ {\em ??=} $e$ is equivalent to the evaluation of the expression  $((x) => x == \NULL{}$ ?  $v=e : x)(v)$ where $x$ is a fresh variable that is not used in $e$. 
+Evaluation of a compound assignment of the form $v$ {\em ??=} $e$ is equivalent to the evaluation of the expression  $((x) => x == \NULL{}$ ?  $v=e : x)(v)$ where $x$ is a fresh variable that is not used in $e$.
 
 \LMHash{}
-Evaluation of a compound assignment of the form $C.v$ {\em ??=} $e$, where $C$ is a type literal, is equivalent to the evaluation of the expression  $((x) => x == \NULL{}$?  $C.v=e: x)(C.v)$ where $x$ is a fresh variable that is not used in $e$. 
+Evaluation of a compound assignment of the form $C.v$ {\em ??=} $e$, where $C$ is a type literal, is equivalent to the evaluation of the expression  $((x) => x == \NULL{}$?  $C.v=e: x)(C.v)$ where $x$ is a fresh variable that is not used in $e$.
 
 \commentary {
 The two rules above also apply when the variable $v$ or the type $C$ is prefixed.
 }
 
 \LMHash{}
-Evaluation of a compound assignment of the form $e_1.v$ {\em ??=} $e_2$ is equivalent to the evaluation of the expression  $((x) =>((y) => y == \NULL{}$ ? $ x.v = e_2: y)(x.v))(e_1)$ where $x$ and $y$ are distinct fresh variables that are not used in $e_2$. 
+Evaluation of a compound assignment of the form $e_1.v$ {\em ??=} $e_2$ is equivalent to the evaluation of the expression  $((x) =>((y) => y == \NULL{}$ ? $ x.v = e_2: y)(x.v))(e_1)$ where $x$ and $y$ are distinct fresh variables that are not used in $e_2$.
 
 \LMHash{}
-Evaluation of a compound assignment of the form  $e_1[e_2]$  {\em ??=} $e_3$ is equivalent to the evaluation of the expression  
-$((a, i) => ((x) => x == \NULL{}$ ?  $a[i] = e_3: x)(a[i]))(e_1, e_2)$ where $x$, $a$ and $i$ are distinct fresh variables that are not used in $e_3$. 
+Evaluation of a compound assignment of the form  $e_1[e_2]$  {\em ??=} $e_3$ is equivalent to the evaluation of the expression
+$((a, i) => ((x) => x == \NULL{}$ ?  $a[i] = e_3: x)(a[i]))(e_1, e_2)$ where $x$, $a$ and $i$ are distinct fresh variables that are not used in $e_3$.
 
 \LMHash{}
 Evaluation of a compound assignment of the form $\SUPER.v$  {\em ??=} $e$ is equivalent to the evaluation of the expression  $((x) => x == \NULL{}$ ? $\SUPER.v = e: x)(\SUPER.v)$ where $x$ is a fresh variable that is not used in $e$.
 
 \LMHash{}
-Evaluation of a compound assignment of the form $e_1?.v$  {\em ??=} $e_2$ is equivalent to the evaluation of the expression \code{((x) $=>$ x == \NULL{} ?  \NULL: $x.v ??=  e_2$)($e_1$)} where $x$ is a variable that is not used in $e_2$. 
+Evaluation of a compound assignment of the form $e_1?.v$  {\em ??=} $e_2$ is equivalent to the evaluation of the expression \code{((x) $=>$ x == \NULL{} ?  \NULL: $x.v ??=  e_2$)($e_1$)} where $x$ is a variable that is not used in $e_2$.
 % But what about C?.v ??= e
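+
+\commentary{
+For example, assuming the statements below occur inside a function body, the equivalences above imply:
+}
+\begin{dartCode}
+var a;           // a is null
+a ??= 1;         // a was null, so 1 is assigned; a is now 1
+a ??= 2;         // a is not null, so a stays 1 and 2 is never assigned
+var list = [null, 5];
+list[0] ??= 7;   // list is now [7, 5]
+list[1] ??= 9;   // list[1] stays 5
+\end{dartCode}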
 
 \LMHash{}
@@ -4592,15 +4605,15 @@
 The static type of a compound assignment of the form $\SUPER.v$  {\em ??=} $e$  is the least upper bound of the static type of $\SUPER.v$ and the static type of $e$. Exactly the same static warnings that would be caused by $\SUPER.v = e$ are also generated in the case of $\SUPER.v$  {\em ??=} $e$.
 
 \LMHash{}
-For any other valid operator $op$, a compound assignment of the form $v$ $op\code{=} e$ is equivalent to $v \code{=} v$ $op$ $e$. A compound assignment of the form $C.v$ $op \code{=} e$ is equivalent to $C.v \code{=} C.v$ $op$ $e$. A compound assignment of the form $e_1.v$ $op = e_2$ is equivalent to \code{((x) $=>$ x.v = x.v $op$ $e_2$)($e_1$)} where $x$ is a variable that is not used in $e_2$. A compound assignment of the form  $e_1[e_2]$ $op\code{=} e_3$ is equivalent to 
-\code{((a, i) $=>$ a[i] = a[i] $op$ $e_3$)($e_1, e_2$)} where $a$ and $i$ are a variables that are not used in $e_3$. 
+For any other valid operator $op$, a compound assignment of the form $v$ $op\code{=} e$ is equivalent to $v \code{=} v$ $op$ $e$. A compound assignment of the form $C.v$ $op \code{=} e$ is equivalent to $C.v \code{=} C.v$ $op$ $e$. A compound assignment of the form $e_1.v$ $op = e_2$ is equivalent to \code{((x) $=>$ x.v = x.v $op$ $e_2$)($e_1$)} where $x$ is a variable that is not used in $e_2$. A compound assignment of the form  $e_1[e_2]$ $op\code{=} e_3$ is equivalent to
+\code{((a, i) $=>$ a[i] = a[i] $op$ $e_3$)($e_1, e_2$)} where $a$ and $i$ are variables that are not used in $e_3$.
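+
+\commentary{
+For instance, the \code{$op$=} forms behave as follows (assuming the statements occur inside a function body):
+}
+\begin{dartCode}
+var i = 1;
+i += 2;        // equivalent to i = i + 2; i is now 3
+i *= 4;        // i is now 12
+var xs = [1, 2, 3];
+xs[0] -= 1;    // xs and the index 0 are each evaluated once; xs is now [0, 2, 3]
+\end{dartCode}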
 
 \LMHash{}
 Evaluation of a compound assignment of the form $e_1?.v$ $op = e_2$ is equivalent to \code{((x) $=>$ x?.v = x.v $op$ $e_2$)($e_1$)} where $x$ is a variable that is not used in $e_2$. The static type of $e_1?.v$ $op = e_2$ is the static type of $e_1.v$ $op$ $e_2$. Exactly the same static warnings that would be caused by $e_1.v$ $op = e_2$ are also generated in the case of $e_1?.v$ $op = e_2$.
 
 \LMHash{}
-A compound assignment of the form $C?.v$ $op = e_2$ is equivalent to the expression 
-$C.v$ $op = e_2$. 
+A compound assignment of the form $C?.v$ $op = e_2$ is equivalent to the expression
+$C.v$ $op = e_2$.
 
 \begin{grammar}
 {\bf compoundAssignmentOperator:}`*=';
@@ -4618,7 +4631,7 @@
     .
 \end{grammar}
 
-    
+
 \subsection{ Conditional}
 \LMLabel{conditional}
 
@@ -4635,7 +4648,7 @@
 Evaluation of a conditional expression $c$ of the form $e_1 ? e_2 : e_3$ proceeds as follows:
 
 \LMHash{}
-First, $e_1$ is evaluated to an object $o_1$.  Then, $o_1$ is  subjected to boolean conversion (\ref{booleanConversion}) producing an object $r$.  If $r$ is \TRUE, then the value of $c$ is the result of evaluating the expression $e_2$. Otherwise the value of $c$ is the result of evaluating the expression $e_3$. 
+First, $e_1$ is evaluated to an object $o_1$.  Then, $o_1$ is  subjected to boolean conversion (\ref{booleanConversion}) producing an object $r$.  If $r$ is \TRUE, then the value of $c$ is the result of evaluating the expression $e_2$. Otherwise the value of $c$ is the result of evaluating the expression $e_3$.
 
 \LMHash{}
 If all of the following hold:
@@ -4650,11 +4663,11 @@
 
 \LMHash{}
  It is a static type warning if the static type of $e_1$ may not be assigned to \code{bool}.  The static type of $c$ is the least upper bound (\ref{leastUpperBounds}) of the static type of $e_2$ and the static type of $e_3$.
- 
-   
+
+
  \subsection{If-null Expressions}
  \label{ifNull}
- 
+
  \LMHash{}
  An {\em if-null expression} evaluates an expression and, if the result is \NULL, evaluates another.
 
@@ -4662,10 +4675,10 @@
 {\bf ifNullExpression:}
   logicalOrExpression (`??' logicalOrExpression)*
 \end{grammar}
-  
+
 \LMHash{}
 Evaluation of an if-null expression $e$ of the form $e_1 ?? e_2$ is equivalent to the evaluation of the expression $((x) => x == \NULL? e_2: x)(e_1)$. The static type of $e$ is the least upper bound (\ref{leastUpperBounds}) of the static type of $e_1$ and the static type of $e_2$.
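+
+\commentary{
+For example, assuming the statements below occur inside a function body:
+}
+\begin{dartCode}
+var name;
+print(name ?? 'unknown');   // prints "unknown"
+name = 'Ada';
+print(name ?? 'unknown');   // prints "Ada"; the second operand is not evaluated
+\end{dartCode}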
-  
+
 
 \subsection{ Logical Boolean Expressions}
 \LMLabel{logicalBooleanExpressions}
@@ -4684,18 +4697,18 @@
 %      bitwiseOrExpression (`\&\&' bitwiseOrExpression)*
     .
  \end{grammar}
- 
+
 \LMHash{}
 A {\em logical boolean expression} is either an equality expression (\ref{equality}), or an invocation of a logical boolean operator on an expression $e_1$ with argument $e_2$.
- 
-\LMHash{}
-Evaluation of a logical boolean expression $b$ of the form $e_1 || e_2$ causes the evaluation of $e_1$ which is then  subjected to boolean conversion, yielding an object $o_1$; if $o_1$ is \TRUE, the result of evaluating $b$ is \TRUE, otherwise $e_2$ is evaluated to an object $o_2$, which is then subjected to boolean conversion (\ref{booleanConversion}) producing an object $r$, which is the value of $b$. 
 
 \LMHash{}
-Evaluation of a logical boolean expression $b$ of the form $e_1 \&\& e_2$ causes the evaluation of $e_1$ which is then subjected to boolean conversion, yielding an object $o_1$; if $o_1$ is not  \TRUE, the result of evaluating $b$ is \FALSE, otherwise $e_2$ is evaluated to an object $o_2$, which is then subjected to boolean conversion producing an object $r$, which is the value of $b$. 
+Evaluation of a logical boolean expression $b$ of the form $e_1 || e_2$ causes the evaluation of $e_1$ which is then  subjected to boolean conversion, yielding an object $o_1$; if $o_1$ is \TRUE, the result of evaluating $b$ is \TRUE, otherwise $e_2$ is evaluated to an object $o_2$, which is then subjected to boolean conversion (\ref{booleanConversion}) producing an object $r$, which is the value of $b$.
 
 \LMHash{}
-A logical boolean expression $b$ of the form $e_1 \&\& e_2$ shows that a variable $v$ has type 
+Evaluation of a logical boolean expression $b$ of the form $e_1 \&\& e_2$ causes the evaluation of $e_1$ which is then subjected to boolean conversion, yielding an object $o_1$; if $o_1$ is not  \TRUE, the result of evaluating $b$ is \FALSE, otherwise $e_2$ is evaluated to an object $o_2$, which is then subjected to boolean conversion producing an object $r$, which is the value of $b$.
+
+\LMHash{}
+A logical boolean expression $b$ of the form $e_1 \&\& e_2$ shows that a variable $v$ has type
 $T$ if all of the following conditions hold:
 \begin{itemize}
 \item Either $e_1$ shows that $v$ has type $T$ or $e_2$ shows that $v$ has type $T$.
@@ -4708,17 +4721,17 @@
 \begin{itemize}
 \item $e_1$ shows that $v$ has type $T$.
 \item $v$ is not mutated in either $e_1$, $e_2$ or within a closure.
-\item If the variable $v$ is accessed by a closure in $e_2$ then the variable $v$ is not potentially mutated anywhere in the scope of $v$. 
+\item If the variable $v$ is accessed by a closure in $e_2$ then the variable $v$ is not potentially mutated anywhere in the scope of $v$.
 \end{itemize}
 then the type of $v$ is known to be $T$ in $e_2$.
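+
+\commentary{
+For example, in the non-normative sketch below, \code{o} is a local variable that is not mutated, so its type is known to be \code{String} in the right operand and \code{o.length} causes no static warning:
+}
+\begin{dartCode}
+Object o = 'hello';
+var b = o is String && o.length > 3;   // b is true
+\end{dartCode}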
 
 \LMHash{}
 It is a static warning if the static type of $e_1$ may not be assigned to \cd{bool} or if the static type of $e_2$ may not be assigned to \cd{bool}. The static type of a logical boolean expression is \code{bool}.
 
-     
+
  \subsection{ Equality}
  \LMLabel{equality}
- 
+
 \LMHash{}
 Equality expressions test objects for equality.
 
@@ -4731,7 +4744,7 @@
       `!='
     .
  \end{grammar}
- 
+
 \LMHash{}
 An {\em equality expression} is either a relational expression (\ref{relationalExpressions}), or an invocation of an equality operator on either \SUPER{} or an expression $e_1$, with argument $e_2$.
 
@@ -4755,24 +4768,24 @@
 \end{itemize}
 
 \commentary{As a result of the above definition, user defined \code{==} methods can assume that their argument is non-null, and avoid the standard boiler-plate prelude:
- 
+
  \code{if (identical(\NULL{}, arg)) return \FALSE{};}
 
-Another implication is that there is never a need to use \code{identical()} to test against \NULL{}, nor should anyone ever worry about whether to write \NULL{} == $e$ or $e$ == \NULL{}. 
+Another implication is that there is never a need to use \code{identical()} to test against \NULL{}, nor should anyone ever worry about whether to write \NULL{} == $e$ or $e$ == \NULL{}.
 }
 
 \LMHash{}
 An equality expression of the form \code{$e_1$ != $e_2$}  is equivalent to the expression \code{!($e_1$ == $e_2$)}. An equality expression of the form \code{\SUPER{} != $e$} is equivalent to the expression \code{!(\SUPER{} == $e$)}.
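+
+\commentary{
+For example, the rules above imply that a null operand never reaches a user-defined \code{==} method:
+}
+\begin{dartCode}
+print(null == 'a');    // prints "false"; the == method of String is not invoked
+print('a' == null);    // prints "false"
+print(null == null);   // prints "true"
+print('a' != 'a');     // prints "false"; equivalent to !('a' == 'a')
+\end{dartCode}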
 
 
- 
- %The expression $e_1$ is evaluated to an object $o_1$; then the expression $e_2$ is evaluated to an object $o_2$.  Next, if $o_1$ and $o_2$ are the same object, then $ee$ evaluates to \TRUE{}, otherwise $ee$ evaluates to \FALSE{}. 
- 
+
+ %The expression $e_1$ is evaluated to an object $o_1$; then the expression $e_2$ is evaluated to an object $o_2$.  Next, if $o_1$ and $o_2$ are the same object, then $ee$ evaluates to \TRUE{}, otherwise $ee$ evaluates to \FALSE{}.
+
 
 \LMHash{}
  The static type of an equality expression is \code{bool}.
- 
- 
+
+
 \subsection{ Relational Expressions}
 \LMLabel{relationalExpressions}
 
@@ -4791,12 +4804,12 @@
       `{\escapegrammar \lt}'
     .
  \end{grammar}
- 
+
 \LMHash{}
 A {\em relational expression} is either a bitwise expression (\ref{bitwiseExpressions}), or an invocation of a relational operator on either \SUPER{} or an expression $e_1$, with argument $e_2$.
- 
+
 \LMHash{}
-A relational expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. A relational expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}. 
+A relational expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. A relational expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}.
 
 \subsection{ Bitwise Expressions}
 \LMLabel{bitwiseExpressions}
@@ -4822,10 +4835,10 @@
       `$|$'
     .
  \end{grammar}
- 
+
 \LMHash{}
 A {\em bitwise expression} is either a shift expression (\ref{shift}), or an invocation of a bitwise operator on either \SUPER{} or an expression $e_1$, with argument $e_2$.
- 
+
 \LMHash{}
  A bitwise expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation $e_1.op(e_2)$.
 A bitwise expression of the form  \code{\SUPER{} $op$ $e_2$} is equivalent to the method invocation \code{\SUPER{}.op($e_2$)}.
@@ -4834,7 +4847,7 @@
 It should be obvious that the static type rules for these expressions are defined by the equivalence above - ergo, by the type rules for method invocation and the signatures of the operators on the type $e_1$. The same holds in similar situations throughout this specification.
 }
 
- 
+
 \subsection{ Shift}
 \LMLabel{shift}
 
@@ -4847,25 +4860,25 @@
     .
 
 {\bf shiftOperator:}`{\escapegrammar \lt\lt'};
-       `{\escapegrammar \gt \gt}' 
+       `{\escapegrammar \gt \gt}'
     .
  \end{grammar}
- 
+
 \LMHash{}
 A {\em shift expression} is either an additive expression (\ref{additiveExpressions}), or an invocation of a shift operator on either \SUPER{} or an expression $e_1$, with argument $e_2$.
- 
+
 \LMHash{}
- A shift expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. A shift expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}. 
+ A shift expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. A shift expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}.
 
 \commentary{
-Note that this definition implies left-to-right evaluation order among shift expressions: 
+Note that this definition implies left-to-right evaluation order among shift expressions:
 
 $e_1 << e_2 << e_3$
 
 is evaluated as  $(e_1 << e_2 ).<< (e_3)$  which is equivalent to $(e_1 << e_2) << e_3$.
 The same holds for additive and multiplicative expressions.
 }
-     
+
  \subsection{ Additive Expressions}
 \LMLabel{additiveExpressions}
 
@@ -4881,17 +4894,17 @@
       `-'
     .
  \end{grammar}
- 
+
 \LMHash{}
 An {\em additive expression} is either a multiplicative expression (\ref{multiplicativeExpressions}), or an invocation of an additive operator on either \SUPER{} or an expression $e_1$, with argument $e_2$.
 
 \LMHash{}
-An additive expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. An additive expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}. 
+An additive expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. An additive expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}.
 
 \LMHash{}
 The static type of an additive expression is usually determined by the signature given in the declaration of the operator used. However, invocations of the operators \cd{+}  and \cd{-} of class \cd{int} are treated specially by the typechecker. The static type of an expression $e_1 + e_2$ where $e_1$ has static type \cd{int} is \cd{int} if the static type of $e_2$ is \cd{int}, and \cd{double} if the static type of $e_2$ is \cd{double}. The static type of an expression $e_1 - e_2$ where $e_1$ has static type \cd{int} is \cd{int} if the static type of $e_2$ is \cd{int}, and \cd{double} if the static type of $e_2$ is \cd{double}.
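+
+\commentary{
+For example, given the local declarations below:
+}
+\begin{dartCode}
+var a = 1 + 2;       // static type int
+var b = 1 + 2.5;     // static type double
+var c = 1.5 - 0.5;   // static type double
+print(a + b + c);    // prints "7.5"
+\end{dartCode}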
- 
- 
+
+
 \subsection{ Multiplicative Expressions}
 \LMLabel{multiplicativeExpressions}
 
@@ -4908,18 +4921,18 @@
       `\%';
       `\~{}/'
     .
-    
+
  \end{grammar}
-    
+
 \LMHash{}
  A {\em multiplicative expression} is either a unary expression (\ref{unaryExpressions}), or an invocation of a multiplicative operator on either \SUPER{} or an expression $e_1$, with argument $e_2$.
- 
+
 \LMHash{}
- A multiplicative expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. A multiplicative expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}.   
- 
+ A multiplicative expression of the form  $e_1$ $op$ $e_2$ is equivalent to the method invocation \code{$e_1$.$op$($e_2$)}. A multiplicative expression of the form  \SUPER{} $op$ $e_2$ is equivalent to the method invocation \code{\SUPER{}.$op$($e_2$)}.
+
 \LMHash{}
 The static type of a multiplicative expression is usually determined by the signature given in the declaration of the operator used. However, invocations of the operators \cd{*}, \cd{\%}  and \cd{\~{}/} of class \cd{int} are treated specially by the typechecker. The static type of an expression $e_1 * e_2$ where $e_1$ has static type \cd{int} is \cd{int} if the static type of $e_2$ is \cd{int}, and \cd{double} if the static type of $e_2$ is \cd{double}. The static type of an expression $e_1 \% e_2$ where $e_1$ has static type \cd{int} is \cd{int} if the static type of $e_2$ is \cd{int}, and \cd{double} if the static type of $e_2$ is \cd{double}.  The static type of an expression \cd{$e_1$ \~{}/ $e_2$} where $e_1$ has static type \cd{int} is \cd{int} if the static type of $e_2$ is \cd{int}.
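+
+\commentary{
+Similarly, for the multiplicative operators of \cd{int}:
+}
+\begin{dartCode}
+var q = 7 ~/ 2;    // static type int, value 3
+var r = 7 % 2;     // static type int, value 1
+var s = 7 * 0.5;   // static type double, value 3.5
+\end{dartCode}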
- 
+
 \subsection{ Unary Expressions}
 \LMLabel{unaryExpressions}
 
@@ -4933,33 +4946,33 @@
       (minusOperator $|$ tildeOperator) \SUPER{};
       incrementOperator assignableExpression
     .
- 
+
  {\bf prefixOperator:}minusOperator;
       negationOperator;
       tildeOperator
     .
-       
-       
+
+
   {\bf minusOperator:}`-';  .
 
 
     {\bf negationOperator:}`!' ;
       .
-      
+
     {\bf tildeOperator:}  `\~{}'
     .
-    
-    
+
+
 \end{grammar}
 
 \LMHash{}
 A {\em unary expression} is either a postfix expression  (\ref{postfixExpressions}), an await expression (\ref{awaitExpressions}) or an invocation of a prefix operator on an expression or an invocation of a unary operator on either \SUPER{} or an expression $e$.
 
 \LMHash{}
-The expression $!e$ is equivalent to the expression $e?$ $ \FALSE{} :\TRUE{}$. 
+The expression $!e$ is equivalent to the expression $e?$ $ \FALSE{} :\TRUE{}$.
 
 \LMHash{}
-Evaluation of an expression of the form \code{++$e$} is equivalent to \code{$e$ += 1}.  Evaluation of an expression of the form \code{-{}-$e$} is equivalent to \code{$e$ -= 1}. 
+Evaluation of an expression of the form \code{++$e$} is equivalent to \code{$e$ += 1}.  Evaluation of an expression of the form \code{-{}-$e$} is equivalent to \code{$e$ -= 1}.
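+
+\commentary{
+For example, assuming the statements below occur inside a function body:
+}
+\begin{dartCode}
+var i = 0;
+var j = ++i;     // equivalent to i += 1; both i and j are now 1
+var k = --i;     // equivalent to i -= 1; both i and k are now 0
+print(!false);   // prints "true"
+\end{dartCode}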
 
 %The expression $-e$ is equivalent to the method invocation \code{$e$.-()}.  The expression \code{-\SUPER{}} is equivalent  to the method invocation \code{\SUPER{}.-()}.
 
@@ -4970,7 +4983,7 @@
 \LMLabel{awaitExpressions}
 
 \LMHash{}
-An {\em await expression} allows code to yield control until an asynchronous operation (\ref{functions}) completes. 
+An {\em await expression} allows code to yield control until an asynchronous operation (\ref{functions}) completes.
 
  \begin{grammar}
 {\bf awaitExpression:}
@@ -4982,7 +4995,7 @@
 First, the expression $e$ is evaluated. Next:
 
 \LMHash{}
-If $e$ raises an exception $x$, then an instance $f$ of class \code{Future} is allocated and later completed with $x$. Otherwise, if $e$ evaluates to an object $o$ that is not an instance of \code{Future}, then let $f$ be the result of calling \code{Future.value()} with $o$ as its argument; otherwise let $f$ be the result of evaluating $e$. 
+If $e$ raises an exception $x$, then an instance $f$ of class \code{Future} is allocated and later completed with $x$. Otherwise, if $e$ evaluates to an object $o$ that is not an instance of \code{Future}, then let $f$ be the result of calling \code{Future.value()} with $o$ as its argument; otherwise let $f$ be the result of evaluating $e$.
 
 \LMHash{}
 Next,  execution of the function $m$ immediately enclosing $a$ is suspended until after $f$ completes. The stream associated with the innermost enclosing asynchronous for loop (\ref{asynchronousFor-in}), if any, is paused. At some time after $f$ is completed, control returns to the current invocation. The stream associated with the innermost enclosing asynchronous for loop  (\ref{asynchronousFor-in}), if any, is resumed. If $f$ has completed with an exception $x$, $a$ raises $x$. If $f$ completes with a value $v$, $a$ evaluates to $v$.
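+
+\commentary{
+For example, the following non-normative sketch (where \code{fetch} is a hypothetical asynchronous function) illustrates both the \code{Future} and the non-\code{Future} case:
+}
+\begin{dartCode}
+fetch() async => 42;
+
+main() async \{
+  var a = await fetch();   // suspends until the returned future completes; a is 42
+  var b = await 7;         // a non-Future operand is wrapped via Future.value(); b is 7
+  print(a + b);            // prints "49"
+\}
+\end{dartCode}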
@@ -5005,7 +5018,7 @@
 The static type of $a$ is $flatten(T)$ (the $flatten$ function is defined in section \ref{functionExpressions}) where $T$ is the static type of $e$.
 
 
-     
+
 \subsection{ Postfix Expressions}
 \LMLabel{postfixExpressions}
 
@@ -5028,9 +5041,9 @@
 {\bf incrementOperator:}`++';
       `-{}-'
     .
-        
+
  \end{grammar}
- 
+
 \LMHash{}
  A {\em postfix expression} is either a primary expression, a function, method or getter invocation, or an invocation of a postfix operator on an expression $e$.
 
@@ -5041,11 +5054,11 @@
 The static type of such an expression is the static type of $v$.
 
 
-\rationale{The above ensures that if $v$ is a field, the getter gets called exactly once. Likewise in the cases below. 
+\rationale{The above ensures that if $v$ is a field, the getter gets called exactly once. Likewise in the cases below.
 }
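+
+\commentary{
+For example, assuming the statements below occur inside a function body:
+}
+\begin{dartCode}
+var i = 5;
+var r = i++;   // r is 5, the value of i before the increment; i is now 6
+var s = i--;   // s is 6; i is back to 5
+\end{dartCode}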
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$C.v$ ++} is equivalent to executing 
+Execution of a postfix expression of the form \code{$C.v$ ++} is equivalent to executing
 
 \code{()\{\VAR{} r = $C.v$; $C.v$ = r + 1; \RETURN{} r\}()}.
 
@@ -5054,7 +5067,7 @@
 
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$e_1.v$++} is equivalent to executing 
+Execution of a postfix expression of the form \code{$e_1.v$++} is equivalent to executing
 
 \code{(x)\{\VAR{} r = x.v; x.v = r + 1; \RETURN{} r\}($e_1$)}.
 
@@ -5063,7 +5076,7 @@
 
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$e_1[e_2]$++},  is equivalent to executing 
+Execution of a postfix expression of the form \code{$e_1[e_2]$++},  is equivalent to executing
 
 \code{(a, i)\{\VAR{} r = a[i]; a[i] = r + 1; \RETURN{} r\}($e_1$, $e_2$)}.
 
@@ -5072,7 +5085,7 @@
 
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$v$-{}-}, where $v$ is an identifier, is equivalent to executing 
+Execution of a postfix expression of the form \code{$v$-{}-}, where $v$ is an identifier, is equivalent to executing
 
 \code{()\{\VAR{} r = $v$; $v$ = r - 1; \RETURN{} r\}()}.
 
@@ -5081,7 +5094,7 @@
 
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$C.v$-{}-} is equivalent to executing 
+Execution of a postfix expression of the form \code{$C.v$-{}-} is equivalent to executing
 
 \code{()\{\VAR{} r = $C.v$; $C.v$ = r - 1; \RETURN{} r\}()}.
 
@@ -5090,7 +5103,7 @@
 
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$e_1.v$-{}-} is equivalent to executing 
+Execution of a postfix expression of the form \code{$e_1.v$-{}-} is equivalent to executing
 
 \code{(x)\{\VAR{} r = x.v; x.v = r - 1; \RETURN{} r\}($e_1$)}.
 
@@ -5099,7 +5112,7 @@
 
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$e_1[e_2]$-{}-},  is equivalent to executing 
+Execution of a postfix expression of the form \code{$e_1[e_2]$-{}-},  is equivalent to executing
 
 \code{(a, i)\{\VAR{} r = a[i]; a[i] = r - 1; \RETURN{} r\}($e_1$, $e_2$)}.
 
@@ -5107,7 +5120,7 @@
 The static type of such an expression is the static type of $e_1[e_2]$.
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$e_1?.v$++} is equivalent to executing 
+Execution of a postfix expression of the form \code{$e_1?.v$++} is equivalent to executing
 
 \code{((x) =$>$ x == \NULL? \NULL : x.v++)($e_1$)}
 unless $e_1$ is a type literal, in which case it is equivalent to \code{$e_1.v$++}
@@ -5117,7 +5130,7 @@
 The static type of such an expression is the static type of $e_1.v$.
 
 \LMHash{}
-Execution of a postfix expression of the form \code{$e_1?.v$-{}-} is equivalent to executing 
+Execution of a postfix expression of the form \code{$e_1?.v$-{}-} is equivalent to executing
 
 \code{((x) =$>$ x == \NULL? \NULL : x.v-{}-)($e_1$)}
 unless $e_1$ is a type literal, in which case it is equivalent to \code{$e_1.v$-{}-}
@@ -5146,7 +5159,7 @@
       \SUPER{} unconditionalAssignableSelector;
       identifier
     .
-    
+
 {\bf unconditionalAssignableSelector:}`[' expression `]'; % again, could be top level
          `{\escapegrammar .}' identifier
     .
@@ -5162,7 +5175,7 @@
 \LMHash{}
 An {\em assignable expression} is either:
 \begin{itemize}
- \item An identifier. 
+ \item An identifier.
 \item An invocation (possibly conditional) of a getter (\ref{getters}) or list access operator on an expression $e$.
 \item An invocation of a getter or list access operator on  \SUPER{}.
 \end{itemize}
@@ -5195,8 +5208,8 @@
 {\bf identifier:}
      IDENTIFIER
      .
-     
-         
+
+
  {\bf IDENTIFIER\_NO\_DOLLAR:}
       IDENTIFIER\_START\_NO\_DOLLAR IDENTIFIER\_PART\_NO\_DOLLAR*
     .
@@ -5243,8 +5256,8 @@
 
 
 {\bf qualified:}
-      identifier (`{\escapegrammar .}' identifier)?  
-      . 
+      identifier (`{\escapegrammar .}' identifier)?
+      .
 \end{grammar}
 
 \LMHash{}
@@ -5266,27 +5279,27 @@
 
 
 \LMHash{}
-Let $d$ be the innermost declaration in the enclosing lexical scope whose name is $id$ or $id=$.  If no such declaration exists in the lexical scope, let $d$ be the declaration of the inherited member named $id$ if it exists. 
+Let $d$ be the innermost declaration in the enclosing lexical scope whose name is $id$ or $id=$.  If no such declaration exists in the lexical scope, let $d$ be the declaration of the inherited member named $id$ if it exists.
 %If no such member exists, let $d$ be the declaration of the static member name $id$ declared in a superclass of the current class, if it exists.
 
 \begin{itemize}
-\item if $d$ is a prefix $p$, a compile-time error occurs unless the token immediately following $d$ is \code{'.'}.
+\item If $d$ is a prefix $p$, a compile-time error occurs unless the token immediately following $d$ is \code{'.'} or \code{'\#'}.
 \item If $d$ is a class or type alias $T$, the value of $e$ is an instance of  class \code{Type} (or a subclass thereof) reifying $T$.
 \item If $d$ is a type parameter $T$, then the value of $e$ is the value of the actual type argument corresponding to $T$ that was  passed to the generative constructor that created the current binding of \THIS{}. If, however, $e$ occurs inside a static member, a compile-time error occurs.
 
 %\commentary{ We are assured that \THIS{} is well defined, because if we were in a static member the reference to $T$ is a compile-time error (\ref{generics}.)}
 %\item If $d$ is a library variable then:
 %  \begin{itemize}
-%  \item If $d$ is of one of the forms \code{\VAR{} $v$ = $e_i$;} , \code{$T$ $v$ = $e_i$;} , \code{\FINAL{} $v$ = $e_i$;}  or \code{\FINAL{} $T$ $v$ = $e_i$;} and no value has yet been stored into $v$ then the initializer expression $e_i$ is evaluated. If, during the evaluation of $e_i$, the getter for $v$ is referenced, a \code{CyclicInitializationError} is thrown. If the evaluation succeeded yielding an object $o$, let $r = o$, otherwise let $r = \NULL{}$. In any case, $r$ is stored into $v$. The value of $e$ is $r$. 
+%  \item If $d$ is of one of the forms \code{\VAR{} $v$ = $e_i$;} , \code{$T$ $v$ = $e_i$;} , \code{\FINAL{} $v$ = $e_i$;}  or \code{\FINAL{} $T$ $v$ = $e_i$;} and no value has yet been stored into $v$ then the initializer expression $e_i$ is evaluated. If, during the evaluation of $e_i$, the getter for $v$ is referenced, a \code{CyclicInitializationError} is thrown. If the evaluation succeeded yielding an object $o$, let $r = o$, otherwise let $r = \NULL{}$. In any case, $r$ is stored into $v$. The value of $e$ is $r$.
 \item  If $d$ is a constant variable of one of the forms  \code{\CONST{} $v$ = $e$;} or \code{\CONST{} $T$ $v$ = $e$;} then the value of $id$ is the value of the compile-time constant $e$.
 %  Otherwise
-%  \item $e$ evaluates to the current binding of $id$.  
+%  \item $e$ evaluates to the current binding of $id$.
 %  \end{itemize}
-\item If $d$ is a local variable or formal parameter then $e$ evaluates to the current binding of $id$. 
+\item If $d$ is a local variable or formal parameter then $e$ evaluates to the current binding of $id$.
 %\item If $d$ is a library variable, local variable, or formal parameter, then $e$ evaluates to the current binding of $id$. \commentary{This case also applies if d is a library or local function declaration, as these are equivalent to function-valued variable declarations.}
 \item If $d$ is a static method, top-level function or local function then $e$ evaluates to the function defined by $d$.
-\item If $d$ is the declaration of a static variable, static getter or static setter declared in class $C$, then $e$ is equivalent to the property extraction (\ref{propertyExtraction}) $C.id$. 
-\item If $d$ is the declaration of a library variable, top-level getter or top-level setter, then $e$ is equivalent to the top level getter invocation (\ref{topLevelGetterInvocation}) $id$. 
+\item If $d$ is the declaration of a static variable, static getter or static setter declared in class $C$, then $e$ is equivalent to the property extraction (\ref{propertyExtraction}) $C.id$.
+\item If $d$ is the declaration of a library variable, top-level getter or top-level setter, then $e$ is equivalent to the top level getter invocation (\ref{topLevelGetterInvocation}) $id$.
 \item Otherwise, if $e$ occurs inside a top level or static function (be it function, method, getter, or setter) or variable initializer, evaluation of $e$ causes a \code{NoSuchMethodError} to be thrown.
 \item Otherwise, $e$ is equivalent to the property extraction (\ref{propertyExtraction}) \THIS{}.$id$.
 % This implies that referring to an undefined static getter by simple name is an error, whereas doing so by qualified name is only a warning. Same with assignments.  Revise?
@@ -5297,20 +5310,20 @@
 
 \begin{itemize}
 \item If $d$ is a class, type alias or type parameter the static type of $e$ is \code{Type}.
-\item If $d$ is a local variable or formal parameter the static type of $e$ is the type of the variable $id$, unless $id$ is known to have some type $T$, in which case the static type of $e$ is $T$, provided that $T$ is more specific than any other type $S$ such that $v$ is known to have type $S$. 
+\item If $d$ is a local variable or formal parameter the static type of $e$ is the type of the variable $id$, unless $id$ is known to have some type $T$, in which case the static type of $e$ is $T$, provided that $T$ is more specific than any other type $S$ such that $v$ is known to have type $S$.
 \item If $d$ is a static method, top-level function or local function the static type of $e$ is the function type defined by $d$.
 \item If $d$ is the declaration of a static variable, static getter or static setter declared in class $C$, the static type of $e$ is the static type of the getter invocation (\ref{propertyExtraction}) $C.id$.
-\item If $d$ is the declaration of a library variable, top-level getter or top-level setter, the static type of $e$  is the static type of the top level getter invocation $id$. 
+\item If $d$ is the declaration of a library variable, top-level getter or top-level setter, the static type of $e$  is the static type of the top level getter invocation $id$.
 \item Otherwise, if $e$ occurs inside a top level or static function (be it function, method, getter,  or setter) or variable initializer, the static type of $e$ is \DYNAMIC{}.
 \item Otherwise, the static type of $e$ is the type of the property extraction (\ref{propertyExtraction}) \THIS{}.$id$.
 \end{itemize}
 
  \commentary{Note that if one declares a setter, we bind to the corresponding getter even if it does not exist.}
- 
+
  \rationale{
  This prevents situations where one uses uncorrelated setters and getters. The intent is to prevent errors when a  getter in a surrounding scope is used accidentally.
  }
- 
+
 \LMHash{}
 It is a static warning if an identifier expression $id$ occurs inside a top level or static function (be it function, method, getter, or setter) or variable initializer and there is no declaration $d$ with name $id$ in the lexical scope enclosing the expression.
 
@@ -5324,20 +5337,20 @@
  {\bf typeTest:}
  isOperator type
  .
- 
- 
+
+
 {\bf isOperator:}
 \IS{} `!'?
     .
  \end{grammar}
- 
+
 \LMHash{}
  Evaluation of the is-expression \code{$e$ \IS{} $T$} proceeds as follows:
 
 \LMHash{}
 The expression $e$ is evaluated to a value $v$. Then, if $T$ is a malformed or deferred type (\ref{staticTypes}), a dynamic error occurs. Otherwise, if the interface of the class of $v$ is a subtype of $T$, the is-expression evaluates to \TRUE. Otherwise it evaluates to \FALSE.
 
-\commentary{It follows that \code{$e$ \IS{} Object} is always true. This makes sense in a language where everything is an object. 
+\commentary{It follows that \code{$e$ \IS{} Object} is always true. This makes sense in a language where everything is an object.
 
 Also note that \code{\NULL{} \IS{} $T$} is false unless $T = \code{Object}$, $T = \code{\DYNAMIC{}}$ or $T = \code{Null}$.  The former two are useless, as is anything of the form \code{$e$ \IS{} Object} or \code{$e$ \IS{} \DYNAMIC{}}.  Users should test for a null value directly rather than via type tests.
 }
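+
+\commentary{
+For example:
+}
+\begin{dartCode}
+print(3 is int);        // prints "true"
+print(3 is String);     // prints "false"
+print(null is Object);  // prints "true"
+print(null is int);     // prints "false"
+\end{dartCode}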
@@ -5362,7 +5375,7 @@
 }
 
 \LMHash{}
-The static type of an is-expression is \code{bool}. 
+The static type of an is-expression is \code{bool}.
 
 
 \subsection{ Type Cast}
@@ -5375,22 +5388,22 @@
  {\bf typeCast:}
  asOperator type
  .
- 
- 
+
+
 {\bf asOperator:}
 \AS{}
     .
  \end{grammar}
- 
+
 \LMHash{}
  Evaluation of the cast expression \code{$e$ \AS{} $T$} proceeds as follows:
 
 \LMHash{}
-The expression $e$ is evaluated to a value $v$. Then, if $T$ is a malformed or deferred type (\ref{staticTypes}), a dynamic error occurs. Otherwise, if the interface of the class of $v$ is a subtype of $T$, the cast expression evaluates to $v$. Otherwise, if $v$ is \NULL{}, the cast expression evaluates to $v$. 
+The expression $e$ is evaluated to a value $v$. Then, if $T$ is a malformed or deferred type (\ref{staticTypes}), a dynamic error occurs. Otherwise, if the interface of the class of $v$ is a subtype of $T$, the cast expression evaluates to $v$. Otherwise, if $v$ is \NULL{}, the cast expression evaluates to $v$.
 In all other cases,  a \code{CastError} is thrown.
- 
+
 \LMHash{}
-The static type of a cast expression  \code{$e$ \AS{} $T$}  is $T$. 
+The static type of a cast expression  \code{$e$ \AS{} $T$}  is $T$.
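+
+\commentary{
+For example, assuming the statements below occur inside a function body:
+}
+\begin{dartCode}
+num n = 3;
+int i = n as int;         // succeeds; the value of n is an int
+var x = null as String;   // succeeds and evaluates to null
+var y = 'abc' as int;     // throws a CastError
+\end{dartCode}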
 
 
 \section{Statements}
@@ -5406,7 +5419,7 @@
       label* nonLabelledStatement
     .
 
-{\bf nonLabelledStatement:}block; 
+{\bf nonLabelledStatement:}block;
       localVariableDeclaration;
       forStatement;
       whileStatement;
@@ -5425,10 +5438,10 @@
       localFunctionDeclaration
     .
  \end{grammar}
- 
+
  \subsection{Blocks}
  \LMLabel{blocks}
- 
+
 \LMHash{}
  A {\em block statement} supports sequencing of code.
 
@@ -5445,9 +5458,9 @@
 
  \subsection{Expression Statements}
  \LMLabel{expressionStatements}
- 
+
 \LMHash{}
-An {\em expression statement} consists of an expression other than a non-constant map literal (\ref{maps}) that has no explicit type arguments. 
+An {\em expression statement} consists of an expression other than a non-constant map literal (\ref{maps}) that has no explicit type arguments.
 
 \rationale{
 The restriction on maps  is designed to resolve an ambiguity in the grammar, when a statement begins with \{.
@@ -5458,9 +5471,9 @@
   expression? `{\escapegrammar ;}'
   .
  \end{grammar}
- 
+
 \LMHash{}
-Execution of an expression statement \code{$e$;} proceeds by evaluating $e$. 
+Execution of an expression statement \code{$e$;} proceeds by evaluating $e$.
 
 \LMHash{}
 It is a compile-time error if a non-constant map literal that has no explicit type arguments appears in a place where a statement is expected.
@@ -5477,15 +5490,15 @@
     initializedVariableDeclaration {\escapegrammar';'}
   .
  \end{grammar}
- 
+
 \LMHash{}
  Executing a variable declaration statement of one of the forms  \VAR{} $v = e;$, $T$ $v = e; $, \CONST{}  $v = e;$, \CONST{} $T$ $v = e;$, \FINAL{}  $v = e;$ or \FINAL{} $T$ $v = e;$ proceeds as follows:
- 
+
 \LMHash{}
  The expression $e$ is evaluated to an object $o$. Then, the variable $v$ is set to $o$.
- 
+
 \LMHash{}
- A variable declaration statement of the form \VAR{} $v;$ is equivalent to \VAR{} $v = \NULL{};$. A variable declaration statement of the form $T$ $v;$ is equivalent to $T$ $v = \NULL{};$. 
+ A variable declaration statement of the form \VAR{} $v;$ is equivalent to \VAR{} $v = \NULL{};$. A variable declaration statement of the form $T$ $v;$ is equivalent to $T$ $v = \NULL{};$.
 
 \commentary{
 This holds regardless of the type $T$. For example, \code{int i;} does not cause \code{i} to be initialized to zero. Instead, \code{i} is initialized to \NULL{}, just as if we had written \VAR{} \code{i;} or \code{Object i;} or \code{Collection$<$String$>$ i;}.
@@ -5495,13 +5508,13 @@
 To do otherwise would undermine the optionally typed nature of Dart, causing type annotations to modify program behavior.
 }
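 \commentary{
 As a non-normative illustration of the rule above:
 }
 \begin{dartCode}
 \VOID{} main() \{
   int i;
   print(i);  // prints null, not 0: the type annotation does not affect initialization
 \}
 \end{dartCode}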
 
-%A variable declaration statement of one of the forms $T$ $v;$, $T$ $v = e;$, \CONST{} $T$ $v = e;$,  or \FINAL{} $T$ $v = e;$ causes a new getter named $v$ with static return type $T$ to be added to the innermost enclosing scope at the point following the variable declaration statement. The result of executing this getter is the value stored in $v$. 
+%A variable declaration statement of one of the forms $T$ $v;$, $T$ $v = e;$, \CONST{} $T$ $v = e;$,  or \FINAL{} $T$ $v = e;$ causes a new getter named $v$ with static return type $T$ to be added to the innermost enclosing scope at the point following the variable declaration statement. The result of executing this getter is the value stored in $v$.
 
-%A variable declaration statement \VAR{} $v;$, \VAR{} $v = e;$, \CONST{}  $v = e;$ or  \FINAL{}  $v = e;$ causes a new getter named $v$ with static return type  \DYNAMIC{} to be added to the innermost enclosing scope at the point following the variable declaration statement. The result of executing this getter  is the value stored in $v$. 
+%A variable declaration statement \VAR{} $v;$, \VAR{} $v = e;$, \CONST{}  $v = e;$ or  \FINAL{}  $v = e;$ causes a new getter named $v$ with static return type  \DYNAMIC{} to be added to the innermost enclosing scope at the point following the variable declaration statement. The result of executing this getter  is the value stored in $v$.
 
-%A variable declaration statement of one of the forms $T$ $v;$, or $T$ $v = e;$ causes a new setter named $v=$ with argument type $T$ to be added to the innermost enclosing scope at the point following the variable declaration statement. The effect of executing this setter  is to store its argument in $v$. 
+%A variable declaration statement of one of the forms $T$ $v;$, or $T$ $v = e;$ causes a new setter named $v=$ with argument type $T$ to be added to the innermost enclosing scope at the point following the variable declaration statement. The effect of executing this setter  is to store its argument in $v$.
 
-%A variable declaration statement \VAR{} $v;$, \VAR{} $v = e;$, \CONST{}  $v = e;$ or  \FINAL{}  $v = e;$ causes a new setter named $v=$ with  argument type  \DYNAMIC{} to be added to the innermost enclosing scope at the point following the variable declaration statement. The effect of executing this setter  is to store its argument in $v$. 
+%A variable declaration statement \VAR{} $v;$, \VAR{} $v = e;$, \CONST{}  $v = e;$ or  \FINAL{}  $v = e;$ causes a new setter named $v=$ with  argument type  \DYNAMIC{} to be added to the innermost enclosing scope at the point following the variable declaration statement. The effect of executing this setter  is to store its argument in $v$.
 
 %\rationale{
  %The use of getters and setters here is a device to help make the specification more uniform. Introducing getters and setters for local variables has no performance consequences, since they can never be overridden, and so can always be optimized away.  It is not possible to declare a local getter or setter explicitly, since there is little reason to ever do so.
@@ -5512,16 +5525,16 @@
 \LMLabel{localFunctionDeclaration}
 
 \LMHash{}
-A function declaration statement declares a new local function (\ref{functionDeclarations}). 
+A function declaration statement declares a new local function (\ref{functionDeclarations}).
 
  \begin{grammar}
 {\bf localFunctionDeclaration:}
     functionSignature functionBody
   .
  \end{grammar}
- 
+
 \LMHash{}
-A function declaration statement of one of the forms $id$ $signature$ $\{ statements \}$ or $T$ $id$ $signature$ $\{ statements \}$ causes a new function named $id$ to be added to the innermost enclosing scope. It is a compile-time error to reference a local function before its declaration. 
+A function declaration statement of one of the forms $id$ $signature$ $\{ statements \}$ or $T$ $id$ $signature$ $\{ statements \}$ causes a new function named $id$ to be added to the innermost enclosing scope. It is a compile-time error to reference a local function before its declaration.
 
 
 \commentary{ This implies that local functions can be directly recursive, but not mutually recursive. Consider these examples:
@@ -5535,12 +5548,12 @@
   g1(x) $=>$ h(x, 1); // error: h is not declared yet
   h(x, n) $=>$ x $>$ 1? h(x-1, n*x): n; // again, recursion is fine
   g2(x) $=>$ h(x, 1); // legal
-  
+
   p1(x) $=>$ q(x,x); // illegal
   q1(a, b) $=>$ a $>$ 0 ? p1(a-1): b; // fine
 
   q2(a, b) $=>$ a $>$ 0 ? p2(a-1): b; // illegal
-  p1(x) $=>$ q2(x,x); // fine  
+  p2(x) $=>$ q2(x,x); // fine
 \}
 \end{dartCode}
 
@@ -5550,9 +5563,9 @@
 
 \begin{dartCode}
 top2() \{ // a top level function
- \VAR{} p, q;  
-  p = (x) $=>$ q(x,x); 
-  q = (a, b) $=>$ a $>$ 0 ? p(a-1): b; 
+ \VAR{} p, q;
+  p = (x) $=>$ q(x,x);
+  q = (a, b) $=>$ a $>$ 0 ? p(a-1): b;
 
 \}
 \end{dartCode}
@@ -5574,24 +5587,24 @@
       \IF{} `(' expression `)' statement ( \ELSE{} statement)? % we could allow top level expression
     .
  \end{grammar}
- 
+
 Execution of an if statement of the form  \code {\IF{} (}$b$\code{)}$s_1$ \code{\ELSE{} } $s_2$ proceeds as follows:
- 
+
 \LMHash{}
  First, the expression $b$ is evaluated to an object $o$. Then, $o$ is  subjected to boolean conversion (\ref{booleanConversion}), producing an object $r$. If $r$ is \TRUE{}, then the statement $\{s_1\}$ is executed, otherwise statement $\{s_2\}$ is executed.
 
- 
+
  \commentary {
  Put another way, \code {\IF{} (}$b$\code{)}$s_1$ \code{\ELSE{} } $s_2$ is equivalent to
  \code {\IF{} (}$b$\code{)}$\{s_1\}$ \code{\ELSE{} } $\{s_2\}$
  }
- 
+
  \rationale {
  The reason for this equivalence is to catch errors such as
  }
  \begin{dartCode}
 \VOID{} main() \{
-  \IF{} (somePredicate) 
+  \IF{} (somePredicate)
     \VAR{} v = 2;
   print(v);
 \}
@@ -5600,26 +5613,26 @@
 \rationale {
 Under reasonable scope rules  such code is problematic. If we assume that \code{v} is declared in the scope of the method \code{main()}, then when \code{somePredicate} is false, \code{v} will be uninitialized when accessed.  The cleanest approach would be to require a block following the test, rather than an arbitrary statement. However, this goes against long standing custom, undermining Dart's goal of familiarity.  Instead, we choose to insert a block, introducing a scope,  around the statement following the predicate (and similarly for \ELSE{} and loops). This will cause both a warning and a runtime error in the case above.  Of course, if there is a declaration of \code{v} in the surrounding scope, programmers might still be surprised. We expect tools to highlight cases of shadowing to help avoid such situations.
  }
- 
+
 \LMHash{}
-  It is a static type warning if the type of the expression $b$ may not be assigned to \code{bool}.  
-  
+  It is a static type warning if the type of the expression $b$ may not be assigned to \code{bool}.
+
 \LMHash{}
 If:
 \begin{itemize}
 \item  $b$ shows that a variable $v$ has type $T$.
 \item  $v$ is not potentially mutated in $s_1$ or within a closure.
-\item If the variable $v$ is accessed by a closure in $s_1$ then the variable $v$ is not potentially mutated anywhere in the scope of $v$. 
+\item If the variable $v$ is accessed by a closure in $s_1$ then the variable $v$ is not potentially mutated anywhere in the scope of $v$.
 \end{itemize}
-then the type of $v$ is known to be $T$ in $s_1$.  
+then the type of $v$ is known to be $T$ in $s_1$.
 
 \LMHash{}
- An if statement of the form  \code {\IF{} (}$b$\code{)}$s_1$ is equivalent to the if statement  
- 
- \code {\IF{} (}$b$\code{)}$s_1$ \code{\ELSE{} \{\}}.
-     
+ An if statement of the form  \code {\IF{} (}$b$\code{)}$s_1$ is equivalent to the if statement
 
-    
+ \code {\IF{} (}$b$\code{)}$s_1$ \code{\ELSE{} \{\}}.
+
+
+
 \subsection{For}
 \LMLabel{for}
 
@@ -5640,7 +5653,7 @@
       expression? `{\escapegrammar ;}'
     .
  \end{grammar}
- 
+
 \LMHash{}
  The for statement has three forms - the traditional for loop and two forms of the for-in statement - synchronous and asynchronous.
 
@@ -5661,7 +5674,7 @@
 \label{beginFor}
 If this is the first iteration of the for loop, let $v^\prime$ be $v$. Otherwise,  let $v^\prime$ be the variable $v^{\prime\prime}$ created in the previous execution of step \ref{allocateFreshVar}.
 \item
-The expression $[v^\prime/v]c$ is evaluated and subjected to boolean conversion (\ref{booleans}). If the result is \FALSE{}, the for loop completes. Otherwise, execution continues at step    
+The expression $[v^\prime/v]c$ is evaluated and subjected to boolean conversion (\ref{booleans}). If the result is \FALSE{}, the for loop completes. Otherwise, execution continues at step
 \ref{beginIteration}.
 \item
 \label{beginIteration}
@@ -5699,19 +5712,19 @@
 \LMLabel{for-in}
 
 \LMHash{}
-A for statement of the form \code{ \FOR{} ($finalConstVarOrType?$ id \IN{} $e$) $s$} is equivalent to the following code: 
+A for statement of the form \code{ \FOR{} ($finalConstVarOrType?$ id \IN{} $e$) $s$} is equivalent to the following code:
 
 \begin{dartCode}
 var n0 = $e$.iterator;
 \WHILE{} (n0.moveNext()) \{
    $finalConstVarOrType?$ id = n0.current;
-   $s$ 
+   $s$
 \}
 \end{dartCode}
 where \code{n0} is an identifier that does not occur anywhere in the program, except that for purposes of static typechecking, it is checked under the assumption that $n0$ is declared to be of type $T$, where $T$ is the static type of $e.iterator$.
 
- 
- 
+
+
 \subsubsection{Asynchronous For-in}
 \LMLabel{asynchronousFor-in}
 
@@ -5739,7 +5752,7 @@
 
 \rationale{An asynchronous loop would make no sense within a synchronous function, for the same reasons that an await expression makes no sense in a synchronous function.}
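 \commentary{
 As a non-normative illustration (assuming the usual \code{dart:async} declarations of \code{Future} and \code{Stream}), the sketch below consumes a stream with an asynchronous for-in loop inside a function marked \ASYNC{}:
 }
 \begin{dartCode}
 \IMPORT{} `dart:async';
 
 Future$<$int$>$ sum(Stream$<$int$>$ stream) \ASYNC{} \{
   \VAR{} total = 0;
   \AWAIT{} \FOR{} (\VAR{} x \IN{} stream) \{  // suspends until the stream delivers each element
     total += x;
   \}
   \RETURN{} total;
 \}
 \end{dartCode}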
 
- 
+
 \subsection{While}
 \LMLabel{while}
 
@@ -5751,16 +5764,16 @@
       \WHILE{} `(' expression `)' statement  % could do top level here, and in for
 .
  \end{grammar}
- 
+
 \LMHash{}
- Execution of a while statement of the form \code{\WHILE{} ($e$) $s$;} proceeds as follows: 
+ Execution of a while statement of the form \code{\WHILE{} ($e$) $s$;} proceeds as follows:
 
 \LMHash{}
 The expression $e$ is evaluated to an object $o$. Then, $o$ is  subjected to boolean conversion (\ref{booleanConversion}), producing an object $r$.  If $r$ is \TRUE{}, then the statement $\{s\}$ is executed and then the while statement is re-executed recursively. If $r$ is \FALSE{}, execution of the while statement is complete.
 
 \LMHash{}
 It is a static type warning if the static type of $e$ may not be assigned to \code{bool}.
-    
+
 
 \subsection{Do}
 \LMLabel{do}
@@ -5774,15 +5787,15 @@
       .
  \end{grammar}
 
-    
-\LMHash{}
-Execution of a do statement of the form \code{\DO{} $s$ \WHILE{} ($e$);} proceeds as follows: 
 
 \LMHash{}
-The statement $\{s\}$ is executed. Then, the expression $e$ is evaluated to an object $o$. Then, $o$ is  subjected to boolean conversion (\ref{booleanConversion}), producing an object $r$. If $r$ is \FALSE{}, execution of the do statement is complete. If $r$ is \TRUE{}, then the do statement is re-executed recursively. 
+Execution of a do statement of the form \code{\DO{} $s$ \WHILE{} ($e$);} proceeds as follows:
 
 \LMHash{}
-It is a static type warning if the static type of $e$ may not be assigned to \code{bool}. 
+The statement $\{s\}$ is executed. Then, the expression $e$ is evaluated to an object $o$. Then, $o$ is  subjected to boolean conversion (\ref{booleanConversion}), producing an object $r$. If $r$ is \FALSE{}, execution of the do statement is complete. If $r$ is \TRUE{}, then the do statement is re-executed recursively.
+
+\LMHash{}
+It is a static type warning if the static type of $e$ may not be assigned to \code{bool}.
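 \commentary{
 As a non-normative illustration, the body of a do statement runs before the condition is first tested, so it executes at least once:
 }
 \begin{dartCode}
 \VOID{} main() \{
   \VAR{} i = 5;
   \DO{} \{
     print(i);  // prints 5 exactly once
     i++;
   \} \WHILE{} (i $<$ 5);  // the condition is tested only after the body has run
 \}
 \end{dartCode}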
 
 \subsection{Switch}
 \LMLabel{switch}
@@ -5804,10 +5817,10 @@
       label*  \DEFAULT{} `{\escapegrammar :}' statements
     .
  \end{grammar}
- 
+
 \LMHash{}
- Given a switch statement of the form 
- 
+ Given a switch statement of the form
+
 \begin{dartCode}
 \SWITCH{} ($e$) \{
    $label_{11} \ldots label_{1j_1}$ \CASE{} $e_1: s_1$
@@ -5816,9 +5829,9 @@
    $label_{(n+1)1} \ldots label_{(n+1)j_{n+1}}$ \DEFAULT{}: $s_{n+1}$
 \}
 \end{dartCode}
- 
- or the form 
- 
+
+ or the form
+
 \begin{dartCode}
 \SWITCH{} ($e$) \{
    $label_{11} \ldots label_{1j_1}$ \CASE{} $e_1: s_1$
@@ -5826,27 +5839,27 @@
    $label_{n1} \ldots label_{nj_n}$ \CASE{} $e_n: s_n$
 \}
 \end{dartCode}
- 
+
  it is a compile-time error if the expressions $e_k$ are not compile-time constants for all  $k \in 1..n$.  It is a compile-time error if the values of the expressions $e_k$ are not either:
  \begin{itemize}
- \item instances of the same class $C$, for all $k \in 1..n$,  or 
- \item instances of a class that implements \cd{int}, for all $k \in 1..n$,  or 
- \item instances of a class that implements \cd{String}, for all $k \in 1..n$. 
+ \item instances of the same class $C$, for all $k \in 1..n$,  or
+ \item instances of a class that implements \cd{int}, for all $k \in 1..n$,  or
+ \item instances of a class that implements \cd{String}, for all $k \in 1..n$.
  \end{itemize}
- 
+
 \commentary{In other words,  all the expressions in the cases evaluate to constants of the exact same user defined class or are of certain known types.  Note that the values of the expressions are known at compile-time, and are independent of any static type annotations.
 }
 
 \LMHash{}
 It is a compile-time error if the class $C$ has an implementation of the operator $==$ other than the one inherited from \code{Object} unless the value of the expression is a string, an integer, literal symbol or the result of invoking a constant constructor of class \cd{Symbol}.
- 
+
  \rationale{
  The prohibition on user defined equality allows us to implement the switch efficiently for user defined types. We could formulate matching in terms of identity instead with the same efficiency. However, if a type defines an equality operator, programmers would find it quite surprising that equal objects did not match.
- 
+
  }
 
 \commentary{
-The \SWITCH{}  statement should only be used in very limited situations (e.g., interpreters or scanners).  
+The \SWITCH{}  statement should only be used in very limited situations (e.g., interpreters or scanners).
 }
 
 \LMHash{}
@@ -5860,9 +5873,9 @@
    $label_{(n+1)1} \ldots label_{(n+1)j_{n+1}}$ \DEFAULT{}: $s_{n+1}$
 \}
 \end{dartCode}
- 
-or the form 
- 
+
+or the form
+
 \begin{dartCode}
 \SWITCH{} ($e$) \{
    $label_{11} \ldots label_{1j_1}$ \CASE{} $e_1: s_1$
@@ -5874,7 +5887,7 @@
 proceeds as follows:
 
 \LMHash{}
-The statement \code{\VAR{} id = $e$;} is evaluated, where \code{id} is a variable whose name is distinct from any other variable in the program. In checked mode, it is a run time error if the value of $e$ is not an instance of the same class as the constants $e_1 \ldots e_n$. 
+The statement \code{\VAR{} id = $e$;} is evaluated, where \code{id} is a variable whose name is distinct from any other variable in the program. In checked mode, it is a run time error if the value of $e$ is not an instance of the same class as the constants $e_1 \ldots e_n$.
 
 \commentary{Note that if there are no case clauses ($n = 0$), the type of $e$ does not matter.}
 
@@ -5885,7 +5898,7 @@
 A case clause introduces a new scope, nested in the lexically surrounding scope. The scope of a case clause ends immediately after the case clause's statement list.
 
 \LMHash{}
-Execution of a \CASE{} clause \CASE{} $e_{k}: s_{k}$ of a  switch statement  
+Execution of a \CASE{} clause \CASE{} $e_{k}: s_{k}$ of a  switch statement
 
 \begin{dartCode}
 \SWITCH{} ($e$) \{
@@ -5899,13 +5912,13 @@
 proceeds as follows:
 
 \LMHash{}
-The expression \code{$e_k$ == id} is evaluated to an object $o$ which is then subjected to boolean conversion yielding a value $v$. 
+The expression \code{$e_k$ == id} is evaluated to an object $o$ which is then subjected to boolean conversion yielding a value $v$.
 If $v$ is not \TRUE{}, the following case, \CASE{} $e_{k+1}: s_{k+1}$, is executed if it exists. If \CASE{} $e_{k+1}: s_{k+1}$ does not exist, then the \DEFAULT{} clause is executed by executing $s_{n+1}$.
 If $v$ is \TRUE{},   let $h$ be the smallest number such that $h \ge k$ and $s_h$ is non-empty. If no such $h$ exists, let $h = n + 1$. The  sequence of statements $s_h$ is then executed.
 If execution reaches the point after $s_h$  then  a runtime error occurs, unless $h = n+1$.
 
 \LMHash{}
-Execution of a \CASE{} clause \CASE{} $e_{k}: s_{k}$ of a  switch statement  
+Execution of a \CASE{} clause \CASE{} $e_{k}: s_{k}$ of a  switch statement
 
 \begin{dartCode}
 \SWITCH{} ($e$) \{
@@ -5918,8 +5931,8 @@
 proceeds as follows:
 
 \LMHash{}
-The expression \code{$e_k$ == id} is evaluated to an object $o$ which is then subjected to boolean conversion yielding a value $v$. 
-If $v$ is not  \TRUE{} the following case,  \CASE{} $e_{k+1}: s_{k+1}$ is executed if it exists. 
+The expression \code{$e_k$ == id} is evaluated to an object $o$ which is then subjected to boolean conversion yielding a value $v$.
+If $v$ is not \TRUE{}, the following case, \CASE{} $e_{k+1}: s_{k+1}$, is executed if it exists.
 If $v$ is \TRUE{},   let $h$ be the smallest integer such that $h \ge k$ and $s_h$ is non-empty. The  sequence of statements $s_h$ is  executed if it exists.
 If execution reaches the point after $s_h$  then  a runtime error occurs, unless $h = n$.
 
@@ -5946,7 +5959,7 @@
 \rationale{
   Very elaborate code in a case clause is probably bad style in any case, and such code can always be refactored.
 }
- 
+
 \LMHash{}
  It is a static warning if all of the following conditions hold:
  \begin{itemize}
@@ -5959,7 +5972,7 @@
 In other words, a warning will be issued if a switch statement over an enum is not exhaustive.
 }
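 \commentary{
 As a non-normative illustration, the sketch below uses constant cases, an empty case that continues with the next non-empty case, and the \BREAK{} statements needed to avoid the run-time error that occurs when execution reaches the end of a non-empty case that is not the last one:
 }
 \begin{dartCode}
 \VOID{} describe(int n) \{
   \SWITCH{} (n) \{
     \CASE{} 0:
       print(`zero');
       \BREAK{};      // required: falling out of a non-empty case is a run-time error
     \CASE{} 1:        // empty case: execution continues with the next non-empty case
     \CASE{} 2:
       print(`one or two');
       \BREAK{};
     \DEFAULT{}:
       print(`something else');
   \}
 \}
 \end{dartCode}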
 
- 
+
 \subsection{ Rethrow}
 \LMLabel{rethrow}
 
@@ -5972,7 +5985,7 @@
      \RETHROW{}  `{\escapegrammar ;}'
     .
  \end{grammar}
- 
+
 \LMHash{}
 Execution of a \code{\RETHROW{}} statement proceeds as follows:
 
@@ -5999,7 +6012,7 @@
 \commentary{The change in control may result in multiple functions terminating if these functions do not catch the exception via a \CATCH{} or \FINALLY{} clause, both of which introduce a dynamically enclosing exception handler.}
 
 \LMHash{}
-It is a compile-time error if a  \code{\RETHROW{}} statement is not enclosed within an \ON-\CATCH{} clause. 
+It is a compile-time error if a  \code{\RETHROW{}} statement is not enclosed within an \ON-\CATCH{} clause.
 
 
 
@@ -6013,27 +6026,27 @@
 {\bf tryStatement:}
       \TRY{} block (onPart+ finallyPart? $|$ finallyPart)
     .
-    
+
  {\bf onPart:}catchPart block;
     \ON{} type catchPart? block
    .
 
 {\bf catchPart:}
-      \CATCH{} `(' identifier (`,' identifier)? `)' 
+      \CATCH{} `(' identifier (`,' identifier)? `)'
     .
 
 {\bf finallyPart:}
       \FINALLY{} block
     .
  \end{grammar}
- 
+
 \LMHash{}
  A try statement consists of a block statement, followed by at least one of:
  \begin{enumerate}
  \item
 A set of \ON{}-\CATCH{} clauses, each of which specifies  (either explicitly or implicitly) the type of exception object to be handled, one or two exception parameters and a block statement.
 \item
-A \FINALLY{} clause, which consists of a block statement. 
+A \FINALLY{} clause, which consists of a block statement.
 \end{enumerate}
 
 \rationale{
@@ -6052,7 +6065,7 @@
 
 
 \LMHash{}
-An \ON{}-\CATCH{} clause of the form  \code{\ON{} $T$ \CATCH{} ($p_1$) $s$} is equivalent to an \ON{}-\CATCH{} clause  \code{\ON{} $T$ \CATCH{} ($p_1, p_2$) $s$} where $p_2$ is an identifier that does not occur anywhere else in the program. 
+An \ON{}-\CATCH{} clause of the form  \code{\ON{} $T$ \CATCH{} ($p_1$) $s$} is equivalent to an \ON{}-\CATCH{} clause  \code{\ON{} $T$ \CATCH{} ($p_1, p_2$) $s$} where $p_2$ is an identifier that does not occur anywhere else in the program.
 
 
 \LMHash{}
@@ -6065,31 +6078,31 @@
 The {\em active stack trace} is an object whose \code{toString()} method produces a string that is a record of exactly those function activations within the current isolate that had not completed execution at the point where the current exception (\ref{throw}) was thrown.
 %\begin{enumerate}
 %\item Started execution after the currently executing function.
-%\item Had not completed execution at the point where the exception caught by the currently executing  \ON{}-\CATCH{} clause was initially thrown. 
+%\item Had not completed execution at the point where the exception caught by the currently executing  \ON{}-\CATCH{} clause was initially thrown.
 %\commentary{The active stack trace contains the frames between the exception handling code and the original point when an exception is thrown, not where it was rethrown.}
 %\end{enumerate}
 
  \commentary{
-This implies that no synthetic function activations may be added to the trace, nor may any source level activations be omitted. 
+This implies that no synthetic function activations may be added to the trace, nor may any source level activations be omitted.
 This means, for example, that any inlining of functions done as an optimization must not be visible in the trace. Similarly, any synthetic routines used by the implementation must not appear in the trace.
 
 Nothing is said about how any native function calls may be represented in the trace.
  }
- 
+
 \commentary{
 Note that we say nothing about the identity of the stack trace, or what notion of equality is defined for stack traces.
 }
- 
+
 % Sadly, the info below cannot be computed efficiently. It would need to be computed at the throw point, since at later points it might be destroyed. Native code in calling frames executes relative to the stack pointer, which therefore needs to be reset as each frame is unwound.  This means that the
 % OS kernel can dispose of this stack memory - it is not reliably preserved. And such code must execute if only to test if the exception should be caught or sent onward.
 
 % For each such function activation, the active stack trace includes the name of the function, the bindings of all its formal parameters, local variables and \THIS{}, and the position at which the function was executing.
- 
+
  % Is this controversial? We were thinking of viewing the trace as a List<Invocation>,
  % but that won't capture the receiver or the locals. More generally, we need a standard interface that describes these traces, so one can type the stack trace variable in the catch.
- 
+
  \commentary{The term position should not be interpreted as a line number, but rather as a precise position - the exact character index of the  expression that raised  the exception. }
- 
+
  % A position can be represented via a Token. If we make that part of the core reflection facility, we can state this here.
 
 \LMHash{}
@@ -6103,7 +6116,7 @@
 A finally clause \FINALLY{} $s$ defines an exception handler $h$ that executes as follows:
 
 \LMHash{}
-Let $r$ be the current return value (\ref{return}). Then the current return value becomes undefined. Any open streams associated with any asynchronous for loops (\ref{asynchronousFor-in}) and yield-each (\ref{yieldEach}) statements executing within the dynamic scope of $h$ are canceled, in the order of their nesting, innermost first. 
+Let $r$ be the current return value (\ref{return}). Then the current return value becomes undefined. Any open streams associated with any asynchronous for loops (\ref{asynchronousFor-in}) and yield-each (\ref{yieldEach}) statements executing within the dynamic scope of $h$ are canceled, in the order of their nesting, innermost first.
 
 \rationale{
 Streams left open by for loops that were escaped for whatever reason would be canceled at function termination, but it is best to cancel them as soon as possible.
@@ -6115,7 +6128,7 @@
 \item
  if there is a dynamically enclosing error handler $g$ defined by a \FINALLY{} clause in $m$, control is transferred to $g$.
  \item
-Otherwise $m$ terminates. 
+Otherwise $m$ terminates.
 \end{itemize}
 
 Otherwise, execution resumes at the end of the try statement.
@@ -6124,7 +6137,7 @@
 Execution of an \ON{}-\CATCH{} clause \code{\ON{} $T$ \CATCH{} ($p_1$, $p_2$)} $s$ of a try statement $t$ proceeds as follows: The statement $s$ is executed in the dynamic scope of the exception handler defined by the finally clause of $t$. Then, the current exception and active stack trace both become undefined.
 
 \LMHash{}
-Execution of a \FINALLY{} clause \FINALLY{} $s$ of a try statement proceeds as follows: 
+Execution of a \FINALLY{} clause \FINALLY{} $s$ of a try statement proceeds as follows:
 
 \LMHash{}
 Let $x$ be the current exception and let $t$ be the active stack trace. Then the current exception and the active stack trace both become undefined. The statement $s$ is executed. Then, if $x$ is defined,  it is rethrown as if by a rethrow statement (\ref{rethrow}) enclosed in a \CATCH{} clause of the form \code{\CATCH{} ($v_x$, $v_t$)} where $v_x$ and $v_t$ are fresh variables bound to $x$ and $t$ respectively.
@@ -6137,20 +6150,20 @@
 The statement $s_1$ is executed in the dynamic scope of the exception handler defined by the try statement. Then, the \FINALLY{} clause is executed.
 
 \commentary{
-Whether any of the \ON{}-\CATCH{} clauses is executed depends on whether a matching exception has been raised by $s_1$ (see the specification of the throw statement). 
+Whether any of the \ON{}-\CATCH{} clauses is executed depends on whether a matching exception has been raised by $s_1$ (see the specification of the throw statement).
 
-If $s_1$ has raised an exception, it will transfer control to the try statement's handler, which will examine the catch clauses in order for a match as specified above. If no matches are found, the handler will execute the \FINALLY{} clause. 
+If $s_1$ has raised an exception, it will transfer control to the try statement's handler, which will examine the catch clauses in order for a match as specified above. If no matches are found, the handler will execute the \FINALLY{} clause.
 
-If a matching \ON{}-\CATCH{} was found, it will execute first, and then the \FINALLY{} clause will be executed. 
+If a matching \ON{}-\CATCH{} was found, it will execute first, and then the \FINALLY{} clause will be executed.
 
-If an exception is thrown during execution of an \ON{}-\CATCH{} clause, this will transfer control to the handler for the \FINALLY{} clause, causing the \FINALLY{} clause to execute in this case as well. 
+If an exception is thrown during execution of an \ON{}-\CATCH{} clause, this will transfer control to the handler for the \FINALLY{} clause, causing the \FINALLY{} clause to execute in this case as well.
 
-If no exception was raised, the \FINALLY{} clause is also executed. Execution of the \FINALLY{} clause could also raise an exception, which will cause transfer of control to the next enclosing handler. 
+If no exception was raised, the \FINALLY{} clause is also executed. Execution of the \FINALLY{} clause could also raise an exception, which will cause transfer of control to the next enclosing handler.
 }
 
 \LMHash{}
 A try statement of the form \code{\TRY{} $s_1$ $on-catch_1 \ldots on-catch_n$;} is equivalent to the statement \code{\TRY{} $s_1$ $on-catch_1 \ldots on-catch_n$ \FINALLY{} $\{\}$}.
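 \commentary{
 As a non-normative illustration, the sketch below combines \ON{}-\CATCH{} clauses having one and two exception parameters with a \FINALLY{} clause; the \FINALLY{} block runs whether or not one of the clauses matches:
 }
 \begin{dartCode}
 \VOID{} main() \{
   \TRY{} \{
     \THROW{} \NEW{} FormatException(`bad input');
   \} \ON{} FormatException \CATCH{} (e) \{
     print(e);              // matches: the thrown object implements FormatException
   \} \ON{} Error \CATCH{} (e, st) \{
     print(st);             // st is bound to the active stack trace
   \} \FINALLY{} \{
     print(`cleaning up');  // executed in every case
   \}
 \}
 \end{dartCode}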
- 
+
 
 \subsection{ Return}
 \LMLabel{return}
@@ -6164,24 +6177,24 @@
     \RETURN{} expression? `{\escapegrammar ;}' % could do top level here
     .
  \end{grammar}
- 
+
  \commentary{
- Due to \FINALLY{} clauses, the precise behavior of \RETURN{} is a little more involved. Whether the value a return statement is supposed to return is actually returned depends on the behavior of any \FINALLY{} clauses in effect when executing the return. A \FINALLY{} clause may choose to return another value, or throw an exception, or even redirect control flow leading to other returns or throws. All a return statement really does is set a value that is intended to be returned when the function terminates. 
+ Due to \FINALLY{} clauses, the precise behavior of \RETURN{} is a little more involved. Whether the value a return statement is supposed to return is actually returned depends on the behavior of any \FINALLY{} clauses in effect when executing the return. A \FINALLY{} clause may choose to return another value, or throw an exception, or even redirect control flow leading to other returns or throws. All a return statement really does is set a value that is intended to be returned when the function terminates.
  }
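 \commentary{
 As a non-normative illustration, a \FINALLY{} clause that executes its own return statement replaces the current return value set in the \TRY{} block:
 }
 \begin{dartCode}
 int f() \{
   \TRY{} \{
     \RETURN{} 1;  // sets the current return value to 1
   \} \FINALLY{} \{
     \RETURN{} 2;  // overrides it: f() returns 2
   \}
 \}
 \end{dartCode}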
 
 \LMHash{}
 The {\em current return value} is a unique value specific to a given function activation. It is undefined unless explicitly set in this specification.
-    
+
 \LMHash{}
 Executing a return statement \code{\RETURN{} $e$;} proceeds as follows:
 
 \LMHash{}
 First the expression $e$ is evaluated, producing an object $o$. Next:
 \begin{itemize}
-\item  
+\item
 The current return value is set to $o$ and the current exception (\ref{throw}) and active stack trace (\ref{try}) become undefined.
 \item
-Let $c$ be the \FINALLY{} clause of the innermost enclosing try-finally statement (\ref{try}), if any. If $c$ is defined, let $h$ be the handler induced by $c$. If $h$ is defined, control is transferred to $h$. 
+Let $c$ be the \FINALLY{} clause of the innermost enclosing try-finally statement (\ref{try}), if any. If $c$ is defined, let $h$ be the handler induced by $c$. If $h$ is defined, control is transferred to $h$.
 \item
 Otherwise execution of the current method terminates.
 \end{itemize}
@@ -6192,15 +6205,15 @@
 }
 
 \LMHash{}
-Let $T$ be the static type of $e$ and let $f$ be the immediately enclosing function.  
+Let $T$ be the static type of $e$ and let $f$ be the immediately enclosing function.
 
 \LMHash{}
-It is a static type warning if the body of $f$ is marked \ASYNC{} and the type \code{Future$<$flatten(T)$>$} (\ref{functionExpressions}) may not be assigned to the declared return type of $f$.    Otherwise, it is a static type warning if $T$ may not be assigned to the declared return type of $f$. 
+It is a static type warning if the body of $f$ is marked \ASYNC{} and the type \code{Future$<$flatten(T)$>$} (\ref{functionExpressions}) may not be assigned to the declared return type of $f$.    Otherwise, it is a static type warning if $T$ may not be assigned to the declared return type of $f$.
 
 \LMHash{}
 Let $S$ be the runtime type of $o$. In checked mode:
 \begin{itemize}
-\item  If the body of $f$ is marked \ASYNC{} (\ref{functions}) it is a dynamic type error if $o$ is not \NULL{} (\ref{null}) and \code{Future$<$S$>$} is not a subtype of the actual return type  (\ref{actualTypeOfADeclaration}) of $f$.
+\item  If the body of $f$ is marked \ASYNC{} (\ref{functions}) it is a dynamic type error if $o$ is not \NULL{} (\ref{null}) and \code{Future$<$flatten(S)$>$} is not a subtype of the actual return type  (\ref{actualTypeOfADeclaration}) of $f$.
 \item Otherwise, it is a dynamic type error if $o$ is not \NULL{} and the runtime type of $o$ is not a subtype of the actual return type of $f$.
 \end{itemize}
 
@@ -6215,14 +6228,14 @@
 It is a compile-time error if a return statement of the form \code{\RETURN{} $e$;} appears in a generator function.
 
 \rationale{
-In the case of a generator function, the value returned by the function is the iterable or stream associated with it, and individual elements are added to that iterable using yield statements, and so returning a value makes no sense. 
+In the case of a generator function, the value returned by the function is the iterable or stream associated with it, and individual elements are added to that iterable using yield statements, and so returning a value makes no sense.
 }
 
 \LMHash{}
 Let $f$ be the function immediately enclosing a return statement of the form \RETURN{}; It is a static warning if $f$ is neither a generator nor a generative constructor and either:
 \begin{itemize}
-\item  $f$ is synchronous and the return type of $f$ may not be assigned to \VOID{} (\ref{typeVoid}) or, 
-\item  $f$ is asynchronous and the return type of $f$ may not be assigned to \code{Future$<$Null$>$}.  
+\item  $f$ is synchronous and the return type of $f$ may not be assigned to \VOID{} (\ref{typeVoid}) or,
+\item  $f$ is asynchronous and the return type of $f$ may not be assigned to \code{Future$<$Null$>$}.
 \end{itemize}
 
  \commentary{
@@ -6240,17 +6253,17 @@
 \begin{itemize}
 \item
 The current return value is set to \NULL{}.
-\item 
+\item
 Let $c$ be the \FINALLY{} clause of the innermost enclosing try-finally statement, if any. If $c$ is defined,  let $h$ be the handler induced by $c$. If $h$ is defined, control is transferred to $h$.
 \item
-Otherwise, execution of the current method terminates. 
+Otherwise, execution of the current method terminates.
 \end{itemize}
 
 \LMHash{}
 Otherwise the return statement is executed by executing the statement  \code{\RETURN{} \NULL{};} if it occurs inside a method, getter, setter or factory; otherwise, the return statement necessarily occurs inside a generative constructor, in which case it is executed by executing  \code{\RETURN{} \THIS{};}.
 
-\commentary{Despite the fact that \code{\RETURN{};} is executed as if by a \code{\RETURN{} $e$;}, it is important to understand that it is not a static warning to include a statement of the form \code{\RETURN{};} 
-%in a \VOID{} function; neither is it illegal 
+\commentary{Despite the fact that \code{\RETURN{};} is executed as if by a \code{\RETURN{} $e$;}, it is important to understand that it is not a static warning to include a statement of the form \code{\RETURN{};}
+%in a \VOID{} function; neither is it illegal
 in a generative constructor. The rules relate only to the specific syntactic form \code{\RETURN{} $e$;}.
 }
 
@@ -6275,14 +6288,14 @@
 
 \rationale{The sole role of labels is to provide targets for the break (\ref{break}) and continue (\ref{continue}) statements.}
 
-%\Q{Are labels in a separate namespace? Bug 49774299} 
+%\Q{Are labels in a separate namespace? Bug 49774299}
 
  \begin{grammar}
 {\bf label:}
       identifier `{\escapegrammar :}'
     .
  \end{grammar}
- 
+
 \LMHash{}
  The semantics of a labeled statement $L: s$ are identical to those of the statement $s$. The namespace of labels is distinct from the one used for types, functions and variables.
 
@@ -6292,24 +6305,24 @@
 \rationale{Labels should be avoided by programmers at all costs. The motivation for including labels in the language is primarily making Dart a better target for code generation.
 }
 
- 
+
 \subsection{ Break}
 \LMLabel{break}
 
 \LMHash{}
-The {\em break statement} consists of the reserved word \BREAK{} and an optional label (\ref{labels}). 
+The {\em break statement} consists of the reserved word \BREAK{} and an optional label (\ref{labels}).
 
 \begin{grammar}
 {\bf breakStatement:}
      \BREAK{} identifier? `{\escapegrammar ;}'
     .
  \end{grammar}
- 
-\LMHash{}
-Let $s_b$ be a \BREAK{} statement. If $s_b$ is of the form  \code{\BREAK{} $L$;}, then let $s_E$ be the innermost labeled statement with label $L$ enclosing $s_b$. If $s_b$ is of the form \code{\BREAK{};},  then let $s_E$ be the innermost  \DO{} (\ref{do}), \FOR{} (\ref{for}), \SWITCH{} (\ref{switch}) or \WHILE{} (\ref{while}) statement enclosing  $s_b$. It is a compile-time error if no such statement $s_E$ exists within the innermost function in which  $s_b$ occurs.  Furthermore, let $s_1, \ldots, s_n$ be those \TRY{} statements that are both enclosed in $s_E$ and that enclose  $s_b$, and that have a \FINALLY{} clause. Lastly, let $f_j$ be the \FINALLY{} clause of $s_j, 1 \le j \le n$.   Executing  $s_b$ first executes $f_1, \ldots,  f_n$ in innermost-clause-first  order and then terminates $s_E$. 
 
 \LMHash{}
-If $s_E$ is an asynchronous for loop (\ref{asynchronousFor-in}), its associated stream subscription is canceled. Furthermore, let $a_k$ be the set of asynchronous for loops  and yield-each statements (\ref{yieldEach}) enclosing $s_b$ that are enclosed in $s_E , 1 \le k \le m$, where $a_k$ is enclosed in $a_{k+1}$.   The stream subscriptions associated with $a_j$ are canceled, $1 \le j \le m$, innermost first, so that $a_j$ is canceled before $a_{j+1}$. 
+Let $s_b$ be a \BREAK{} statement. If $s_b$ is of the form  \code{\BREAK{} $L$;}, then let $s_E$ be the innermost labeled statement with label $L$ enclosing $s_b$. If $s_b$ is of the form \code{\BREAK{};},  then let $s_E$ be the innermost  \DO{} (\ref{do}), \FOR{} (\ref{for}), \SWITCH{} (\ref{switch}) or \WHILE{} (\ref{while}) statement enclosing  $s_b$. It is a compile-time error if no such statement $s_E$ exists within the innermost function in which  $s_b$ occurs.  Furthermore, let $s_1, \ldots, s_n$ be those \TRY{} statements that are both enclosed in $s_E$ and that enclose  $s_b$, and that have a \FINALLY{} clause. Lastly, let $f_j$ be the \FINALLY{} clause of $s_j, 1 \le j \le n$.   Executing  $s_b$ first executes $f_1, \ldots,  f_n$ in innermost-clause-first  order and then terminates $s_E$.
+
+\LMHash{}
+If $s_E$ is an asynchronous for loop (\ref{asynchronousFor-in}), its associated stream subscription is canceled. Furthermore, let $a_k$ be the set of asynchronous for loops  and yield-each statements (\ref{yieldEach}) enclosing $s_b$ that are enclosed in $s_E , 1 \le k \le m$, where $a_k$ is enclosed in $a_{k+1}$.   The stream subscriptions associated with $a_j$ are canceled, $1 \le j \le m$, innermost first, so that $a_j$ is canceled before $a_{j+1}$.
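 \commentary{
 As a non-normative illustration, a label on an enclosing statement lets \BREAK{} terminate that statement rather than only the innermost loop:
 }
 \begin{dartCode}
 \VOID{} main() \{
   outer: \FOR{} (\VAR{} i = 0; i $<$ 3; i++) \{
     \FOR{} (\VAR{} j = 0; j $<$ 3; j++) \{
       \IF{} (j == 1) \BREAK{} outer;  // terminates the loop labeled outer
       print(j);                       // prints 0 exactly once
     \}
   \}
 \}
 \end{dartCode}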
 
 
 
@@ -6317,30 +6330,30 @@
 \LMLabel{continue}
 
 \LMHash{}
-The {\em continue statement} consists of the reserved word \CONTINUE{} and an optional label (\ref{labels}). 
+The {\em continue statement} consists of the reserved word \CONTINUE{} and an optional label (\ref{labels}).
 
 \begin{grammar}
 {\bf continueStatement:}
     \CONTINUE{} identifier? `{\escapegrammar ;}'
         .
- \end{grammar}      
- 
+ \end{grammar}
+
 \LMHash{}
  Let $s_c$ be a \CONTINUE{} statement. If $s_c$ is of the form  \code{\CONTINUE{} $L$;}, then let $s_E$ be the innermost labeled \DO{} (\ref{do}), \FOR{} (\ref{for}) or \WHILE{} (\ref{while}) statement or case clause with label $L$ enclosing $s_c$. If $s_c$ is of the form \code{\CONTINUE{};}  then let $s_E$ be the innermost  \DO{} (\ref{do}), \FOR{} (\ref{for}) or \WHILE{} (\ref{while}) statement enclosing  $s_c$. It is a compile-time error if no such statement or case clause $s_E$ exists within the innermost function in which  $s_c$ occurs.  Furthermore, let $s_1, \ldots, s_n$ be those \TRY{} statements that are both enclosed in $s_E$ and that enclose  $s_c$, and that have a \FINALLY{} clause. Lastly, let $f_j$ be the \FINALLY{} clause of $s_j, 1 \le j \le n$.   Executing  $s_c$ first executes $f_1, \ldots,  f_n$ in innermost-clause-first  order. Then, if $s_E$ is a case clause, control is transferred to the case clause. Otherwise, $s_E$ is necessarily a loop and execution resumes after the last statement in the loop body.
- 
+
  \commentary{
  In a while loop, that would be the boolean expression before the body. In a do loop, it would be the boolean expression after the body. In a for loop, it would be the increment clause.  In other words, execution continues to the next iteration of the loop.
  }
- 
+
 \LMHash{}
- If $s_E$ is an asynchronous for loop (\ref{asynchronousFor-in}), let $a_k$ be the set of asynchronous for loops and yield-each statements (\ref{yieldEach}) enclosing $s_c$ that are enclosed in $s_E , 1 \le k \le m$, where $a_k$ is enclosed in $a_{k+1}$.   The stream subscriptions associated with $a_j$ are canceled, $1 \le j \le m$, innermost first, so that $a_j$ is canceled before $a_{j+1}$. 
- 
+ If $s_E$ is an asynchronous for loop (\ref{asynchronousFor-in}), let $a_k$ be the set of asynchronous for loops and yield-each statements (\ref{yieldEach}) enclosing $s_c$ that are enclosed in $s_E , 1 \le k \le m$, where $a_k$ is enclosed in $a_{k+1}$.   The stream subscriptions associated with $a_j$ are canceled, $1 \le j \le m$, innermost first, so that $a_j$ is canceled before $a_{j+1}$.
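 \commentary{
 As a non-normative illustration, a \CONTINUE{} with a label may transfer control to a labeled case clause of an enclosing switch statement (the parameter \code{handle} is introduced here only for the sketch):
 }
 \begin{dartCode}
 \VOID{} dispatch(String command, \VOID{} handle(String s)) \{
   \SWITCH{} (command) \{
     simple: \CASE{} `doIt':
       handle(`doIt');
       \BREAK{};
     \CASE{} `doItTwice':
       handle(`doIt');
       \CONTINUE{} simple;  // control is transferred to the case labeled simple
   \}
 \}
 \end{dartCode}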
+
  \subsection{ Yield and Yield-Each}
  \LMLabel{yieldAndYieldEach}
- 
+
  \subsubsection{ Yield}
  \LMLabel{yield}
- 
+
 \LMHash{}
  The {\em yield statement} adds an element to the result of a generator function (\ref{functions}).
 
@@ -6357,7 +6370,7 @@
 First, the expression $e$ is evaluated to an object $o$. If the enclosing function $m$ is marked \ASYNC* (\ref{functions}) and the stream $u$ associated with $m$ has been paused,  then execution of $m$ is suspended until $u$ is resumed or canceled.
 
 \LMHash{}
-Next, $o$ is added to the iterable or stream associated with the immediately enclosing function. 
+Next, $o$ is added to the iterable or stream associated with the immediately enclosing function.
 
 \LMHash{}
 If the enclosing function $m$ is marked \ASYNC* and the stream $u$ associated with $m$ has been canceled, then let $c$ be the \FINALLY{} clause (\ref{try}) of the innermost enclosing try-finally statement, if any. If $c$ is defined, let $h$ be the handler induced by $c$. If $h$ is defined, control is transferred to $h$. If $h$ is undefined, the immediately enclosing function terminates.
@@ -6378,7 +6391,7 @@
 If the enclosing function $m$ is marked \SYNC* (\ref{functions}) then:
 \begin{itemize}
 \item
-Execution of the function $m$ immediately enclosing $s$ is suspended until the nullary method \code{moveNext()} is invoked upon the iterator used to initiate the current invocation of $m$. 
+Execution of the function $m$ immediately enclosing $s$ is suspended until the nullary method \code{moveNext()} is invoked upon the iterator used to initiate the current invocation of $m$.
 \item
 The current call to \code{moveNext()} returns \TRUE.
 \end{itemize}
@@ -6390,18 +6403,18 @@
 Let $T$ be the static type of $e$ and let $f$ be the immediately enclosing function.  It is a static type warning if either:
 \begin{itemize}
 \item
- the body of $f$ is marked \ASYNC* and the type \code{Stream$<$T$>$} may not be assigned to the declared return type of $f$. 
+ the body of $f$ is marked \ASYNC* and the type \code{Stream$<$T$>$} may not be assigned to the declared return type of $f$.
  \item
  the body of $f$ is marked \SYNC* and the type \code{Iterable$<$T$>$} may not be assigned to the declared return type of $f$.
- \end{itemize} 
+ \end{itemize}
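 \commentary{
 As a non-normative illustration, each \YIELD{} in the generator below adds one element to the iterable returned by the function, and execution of the body is suspended until the consumer requests the next element:
 }
 \begin{dartCode}
 Iterable$<$int$>$ naturalsTo(int n) \SYNC* \{
   \VAR{} k = 0;
   \WHILE{} (k $<$ n) \{
     \YIELD{} k++;  // suspends here until moveNext() is called again
   \}
 \}
 
 \VOID{} main() \{
   print(naturalsTo(3).toList());  // prints [0, 1, 2]
 \}
 \end{dartCode}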
 
- 
+
  \subsubsection{ Yield-Each}
  \LMLabel{yieldEach}
- 
+
 \LMHash{}
  The {\em yield-each statement} adds a series of values to the result of a generator function (\ref{functions}).
- 
+
  \begin{grammar}
 {\bf yieldEachStatement:}
    \YIELD* expression `{\escapegrammar ;}'
@@ -6412,16 +6425,16 @@
 Execution of a statement $s$ of the form \code{\YIELD* $e$;}  proceeds as follows:
 
 \LMHash{}
-First, the expression $e$ is evaluated to an object $o$. 
+First, the expression $e$ is evaluated to an object $o$.
 
 \LMHash{}
 If the immediately enclosing function $m$ is marked \SYNC* (\ref{functions}), then:
 \begin{enumerate}
 \item It is a dynamic error if the class of $o$ does not implement \code{Iterable}.  Otherwise
-\item The method \cd{iterator} is invoked upon $o$ returning an object $i$. 
+\item The method \cd{iterator} is invoked upon $o$ returning an object $i$.
 \item \label{moveNext} The \cd{moveNext} method of $i$ is invoked on it with no arguments. If \cd{moveNext} returns \FALSE{} execution of $s$ is complete. Otherwise
 \item The getter \cd{current} is invoked on $i$. If the invocation raises an exception $ex$, execution of $s$ throws $ex$. Otherwise, the result $x$ of the getter invocation is added to the iterable associated with $m$.
-Execution of the function $m$ immediately enclosing $s$ is suspended until the nullary method \code{moveNext()} is invoked upon the iterator used to initiate the current invocation of $m$, at which point execution of $s$ continues at \ref{moveNext}. 
+Execution of the function $m$ immediately enclosing $s$ is suspended until the nullary method \code{moveNext()} is invoked upon the iterator used to initiate the current invocation of $m$, at which point execution of $s$ continues at \ref{moveNext}.
 \item
 The current call to \code{moveNext()} returns \TRUE.
 \end{enumerate}
@@ -6439,7 +6452,7 @@
 \item
 Otherwise,  $x$ is added to the stream associated with $m$ in the order it appears in $o$.  The function $m$ may suspend.
 \end{itemize}
-\item If the stream $o$ is done, execution of $s$ is complete. 
+\item If the stream $o$ is done, execution of $s$ is complete.
 \end{itemize}
 
 
@@ -6447,7 +6460,7 @@
 It is a compile-time error if a yield-each statement appears in a function that is not a generator function.
 
 \LMHash{}
-Let $T$ be the static type of $e$ and let $f$ be the immediately enclosing function.  It is a static type warning if $T$ may not be assigned to the declared return type of $f$.  If $f$ is synchronous it is a static  type warning if $T$ may not be assigned to \code{Iterable}.  If $f$ is asynchronous it is a static  type warning if $T$ may not be assigned to \code{Stream}. 
+Let $T$ be the static type of $e$ and let $f$ be the immediately enclosing function.  It is a static type warning if $T$ may not be assigned to the declared return type of $f$.  If $f$ is synchronous it is a static  type warning if $T$ may not be assigned to \code{Iterable}.  If $f$ is asynchronous it is a static  type warning if $T$ may not be assigned to \code{Stream}.
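 \commentary{
 As a non-normative illustration, \YIELD* delegates to another iterable (or, in an \ASYNC* function, to another stream), adding each of its elements in order:
 }
 \begin{dartCode}
 Iterable$<$int$>$ countDown(int n) \SYNC* \{
   \IF{} (n $>$ 0) \{
     \YIELD{} n;
     \YIELD* countDown(n - 1);  // adds the elements of the recursive invocation
   \}
 \}
 \end{dartCode}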
 
 
 \subsection{ Assert}
@@ -6461,30 +6474,30 @@
    assert `(' conditionalExpression `)' `{\escapegrammar ;}'
       .
 \end{grammar}
-      
+
 \LMHash{}
 The assert statement has no effect in production mode. In checked mode, execution of an assert statement \code{\ASSERT{}($e$);} proceeds as follows:
 
 \LMHash{}
-The conditional expression $e$ is evaluated to an object $o$. If the class of $o$ is a subtype of \code{Function} then let $r$ be the result of invoking $o$ with no arguments. Otherwise, let $r$ be $o$. 
+The conditional expression $e$ is evaluated to an object $o$. If the class of $o$ is a subtype of \code{Function} then let $r$ be the result of invoking $o$ with no arguments. Otherwise, let $r$ be $o$.
 It is a dynamic type error if $o$ is not of type \code{bool} or of type \code{Function}, or if $r$ is not of type \code{bool}.  If $r$ is \FALSE{}, we say that the assertion failed. If $r$ is \TRUE{}, we say that the assertion succeeded. If the assertion succeeded, execution of the assert statement is complete. If the assertion failed, an \code{AssertionError} is thrown.
 
 %\Q{Might be cleaner to define it as \code{if (!$e$) \{\THROW{} \NEW{} AssertionError();\}} (in checked mode only).
 %What about an error message as part of the assert?}
 
 \LMHash{}
- It is a static type warning if the type of $e$ may not be assigned to either  \code{bool} or $() \rightarrow$ \code{bool}.  
+ It is a static type warning if the type of $e$ may not be assigned to either  \code{bool} or $() \rightarrow$ \code{bool}.
 
 \rationale{Why is this a statement, not a built-in function call? Because it is handled magically so it has no effect and no overhead in production mode. Also, in the absence of final methods, one could not prevent it being overridden (though there is no real harm in that).  It cannot be viewed as a function call that is being optimized away because the argument might have side effects.
 }
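 \commentary{
 As a non-normative illustration, the condition of an assert statement may be a boolean expression or a nullary function returning a boolean; in checked mode a failed assertion throws an \code{AssertionError}:
 }
 \begin{dartCode}
 int next(int n) \{
   \ASSERT{}(n $>$ 0);             // checked mode only: fails unless n is positive
   \ASSERT{}(() $=>$ n $<$ 1000);  // the closure is invoked and its result is tested
   \RETURN{} n + 1;
 \}
 \end{dartCode}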
 
-%If a lexically visible declaration named \code{assert} is in scope, an assert statement 
+%If a lexically visible declaration named \code{assert} is in scope, an assert statement
 %\code{\ASSERT{} (e); }
 %is interpreted as an expression statement \code{(assert(e));} .
 
 %\rationale{
 %Since \ASSERT{} is a built-in identifier, one might define a function or method with this name.
-%It is impossible to distinguish as \ASSERT{} statement from a method invocation in such a situation. 
+%It is impossible to distinguish an \ASSERT{} statement from a method invocation in such a situation.
 %One could choose to always interpret such code as an \ASSERT{} statement. Or we could choose to give priority to any lexically visible user defined function.  The former can cause rather puzzling situations, e.g.,}
 
 %\begin{dartCode}
@@ -6505,7 +6518,7 @@
 \LMLabel{librariesAndScripts}
 
 \LMHash{}
-A Dart program consists of one or more libraries, and may be built out of one or more {\em compilation units}. A compilation unit may be a library or a part (\ref{parts}). 
+A Dart program consists of one or more libraries, and may be built out of one or more {\em compilation units}. A compilation unit may be a library or a part (\ref{parts}).
 
 \LMHash{}
 A library consists of (a possibly empty) set of imports, a set of exports,  and a set of top-level declarations. A top-level declaration is either a class (\ref{classes}), a type alias declaration (\ref{typedef}), a function (\ref{functions}) or a variable declaration (\ref{variables}). The members of a library $L$ are those top level declarations given within $L$.
@@ -6519,14 +6532,14 @@
       typeAlias;
       \EXTERNAL{}? functionSignature `{\escapegrammar ;}';
       \EXTERNAL{}? getterSignature `{\escapegrammar ;}';
-      \EXTERNAL{}? setterSignature `{\escapegrammar ;}';      
+      \EXTERNAL{}? setterSignature `{\escapegrammar ;}';
       functionSignature functionBody;
       returnType? \GET{} identifier functionBody;
       returnType? \SET{} identifier formalParameterList functionBody;
       (\FINAL{} $|$ \CONST{}) type? staticFinalDeclarationList `{\escapegrammar ;}';
       variableDeclaration `{\escapegrammar ;}'
     .
-    
+
     {\bf getOrSet:} \GET{};
       \SET{}
     .
@@ -6545,47 +6558,47 @@
 %      library '\{' libraryBody '\}'
       scriptTag? libraryName? importOrExport* partDirective* topLevelDefinition*
     .
-    
+
     {\bf     scriptTag:}
    `\#!' {\escapegrammar (\~{}NEWLINE)*} NEWLINE
- .    
+ .
 
 {\bf libraryName:}
    metadata \LIBRARY{} identifier (`{\escapegrammar .}' identifier)* `{\escapegrammar ;}'
    .
 
 {\bf importOrExport:}libraryImport ;
-  libraryExport   
+  libraryExport
  \end{grammar}
- 
+
 \LMHash{}
  Libraries may be {\em explicitly named} or {\em implicitly named}. An explicitly named library begins with  the  word \LIBRARY{} (possibly prefaced with any applicable metadata annotations), followed by a qualified identifier that gives the name of the library.
- 
+
  \commentary{
  Technically, each dot and identifier is a separate token and so spaces between them are acceptable. However, the actual library name is the concatenation of the simple identifiers and dots and contains no spaces.
  }
- 
+
 \LMHash{}
-An implicitly named library has the empty string as its name. 
+An implicitly named library has the empty string as its name.
 
  \rationale{
 The name of a library is used to tie it to separately compiled parts of the library (called parts) and  can be used for printing and, more generally, reflection. The name may be relevant for further language evolution.
  }
- 
+
  \commentary{
 Libraries intended for widespread use should avoid name collisions.  Dart's \code{pub} package management system provides a mechanism for doing so.  Each pub package is guaranteed a unique name, effectively enforcing a global namespace.
  }
- 
+
 \LMHash{}
  A library may optionally begin with a {\em script tag}. Script tags are intended for use with scripts (\ref{scripts}).  A script tag can be used to identify the interpreter of the script to whatever computing environment the script is embedded in. The script tag must appear before any whitespace or comments.  A script  tag begins with the characters \#! and ends at the end of the line. Any characters that follow \#!  in the script tag are ignored by the Dart implementation.
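 \commentary{
 As a non-normative illustration, a script might begin with a script tag followed by an explicit library name (the interpreter path after \#! is hypothetical):
 }
 \begin{dartCode}
 \#!/usr/bin/env dart
 \LIBRARY{} my.example.tool;  // an explicitly named library
 
 \VOID{} main() \{
   print(`running');
 \}
 \end{dartCode}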
- 
+
 \LMHash{}
 Libraries are units of privacy. A private declaration declared within a library $L$ can only be accessed by code within $L$. Any attempt to access a private member declaration from outside $L$ will cause a method, getter or setter lookup failure.
 
 \commentary{Since top level privates are not imported, using the top level privates of another library is never possible. }
 
 \LMHash{}
-The {\em public namespace} of library $L$ is the mapping that maps the simple name of each public top-level member $m$ of $L$ to $m$. 
+The {\em public namespace} of library $L$ is the mapping that maps the simple name of each public top-level member $m$ of $L$ to $m$.
 The scope of a library $L$ consists of the names introduced by all top-level declarations declared in $L$, and the names added by $L$'s imports (\ref{imports}).
 
 
@@ -6593,28 +6606,28 @@
 \LMLabel{imports}
 
 \LMHash{}
-An {\em import} specifies a library to be used in the scope of another library. 
+An {\em import} specifies a library to be used in the scope of another library.
 \begin{grammar}
 {\bf libraryImport:}
    metadata importSpecification
     .
- 
+
  {\bf importSpecification:}
     \IMPORT{}  uri (\AS{} identifier)?  combinator* `{\escapegrammar ;}';
      \IMPORT{}  uri \DEFERRED{} \AS{} identifier  combinator* `{\escapegrammar ;}'
     .
-       
+
 {\bf combinator:}\SHOW{} identifierList;
 \HIDE{} identifierList
     .
-    
+
     {\bf identifierList:}
       identifier (, identifier)*
  \end{grammar}
- 
+
 
 \LMHash{}
-An import specifies a URI $x$ where the declaration of an imported library is to be found. 
+An import specifies a URI $x$ where the declaration of an imported library is to be found.
 
 \LMHash{}
 Imports may be {\em deferred} or {\em immediate}. A deferred import is distinguished by the appearance of the built-in identifier \DEFERRED{} after the URI. Any import that is not deferred is immediate.
@@ -6629,10 +6642,10 @@
  One cannot detect the problem at compile time because compilation often occurs during execution and  one does not know what the URI refers to.  However the development environment should detect the problem.
  }
 
- 
+
 \LMHash{}
 The {\em current library} is the library currently being compiled. The import modifies the  namespace of the current library in a manner that is determined by the imported library and by the optional elements of  the import.
-     
+
 \LMHash{}
 An immediate import directive $I$ may optionally include a prefix clause of the form \AS{} \code{Id} used to prefix names imported by $I$. A deferred import must include a prefix clause or a compile time error occurs. It is a compile-time error if a prefix used in a deferred import is used in another import clause.
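 \commentary{
 As a non-normative illustration, the imports below use a prefix, \SHOW{} and \HIDE{} combinators, and a deferred import, which must carry a prefix; \code{package:foo/foo.dart} and its member \code{run} are hypothetical and used only for the sketch:
 }
 \begin{dartCode}
 \IMPORT{} `dart:math' \AS{} math \SHOW{} max;            // only max is imported, used as math.max
 \IMPORT{} `dart:collection' \HIDE{} HashMap;             // everything except HashMap
 \IMPORT{} `package:foo/foo.dart' \DEFERRED{} \AS{} foo;  // deferred: loaded on demand
 
 main() \ASYNC{} \{
   print(math.max(1, 2));
   \AWAIT{} foo.loadLibrary();  // the deferred library must be loaded before use
   foo.run();                   // hypothetical member of the hypothetical library
 \}
 \end{dartCode}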
 
@@ -6668,7 +6681,7 @@
 The effect of a repeated call to \code{$p$.loadLibrary} is as follows:
 \begin{itemize}
 \item
-If another call to \code{$p$.loadLibrary} has already succeeded, the repeated call also succeeds. 
+If another call to \code{$p$.loadLibrary} has already succeeded, the repeated call also succeeds.
 Otherwise,
 \item
 If another call to \code{$p$.loadLibrary} has failed:
@@ -6700,23 +6713,23 @@
 \LMHash{}
 Let $NS_0$ be the exported namespace (\ref{exports}) of $B$. Then, for each combinator clause $C_i, i \in 1..n$ in $I$:
 \begin{itemize}
-\item If $C_i$ is of the form 
+\item If $C_i$ is of the form
 
-\code{\SHOW{} $id_1, \ldots, id_k$} 
+\code{\SHOW{} $id_1, \ldots, id_k$}
 
 then let $NS_i = \SHOW{}([id_1, \ldots, id_k], NS_{i-1}$)
 
 where $show(l,n)$ takes a list of identifiers $l$ and a namespace $n$, and produces a namespace that maps each name in $l$ to the same element that $n$ does. Furthermore, for each name $x$ in $l$, if $n$ defines the name  $x=$ then the new namespace maps $x=$ to the same element that $n$ does. Otherwise the resulting mapping is undefined.
-  
-\item If $C_i$ is of the form 
 
-\code{\HIDE{} $id_1, \ldots, id_k$} 
+\item If $C_i$ is of the form
 
-then let $NS_i = \HIDE{}([id_1, \ldots, id_k], NS_{i-1}$) 
+\code{\HIDE{} $id_1, \ldots, id_k$}
 
-where $hide(l, n)$ takes a list of identifiers $l$ and a namespace $n$, and produces a namespace that is identical to $n$ except that for each name $k$ in $l$, $k$ and $k=$ are undefined. 
+then let $NS_i = \HIDE{}([id_1, \ldots, id_k], NS_{i-1}$)
+
+where $hide(l, n)$ takes a list of identifiers $l$ and a namespace $n$, and produces a namespace that is identical to $n$ except that for each name $k$ in $l$, $k$ and $k=$ are undefined.
 \end{itemize}
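 \commentary{
 For example (non-normative; the library URIs and names below are hypothetical), combinators restrict which names an import introduces:
 }

 \begin{dartCode}
 \IMPORT{} 'collections.dart' \SHOW{} Stack, Queue; // only Stack and Queue are imported
 \IMPORT{} 'helpers.dart' \HIDE{} internalHelper;   // everything except internalHelper
 \end{dartCode}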
- 
+
 \LMHash{}
 Next, if $I$ includes a prefix clause of the form \AS{} $p$, let $NS =  NS_n \cup \{p: prefixObject(NS_n)\}$ where $prefixObject(NS_n)$ is a {\em prefix object} for the namespace $NS_n$, which is an object that has the following members:
 
@@ -6748,7 +6761,7 @@
 \rationale{This greatly increases the chance that a member can be added to a library without breaking its importers.}
 
 \LMHash{}
-A {\em system library} is a library that is part of the Dart implementation. Any other library is a {\em non-system library}. If a name $N$ is referenced by a library $L$ and $N$ would be introduced into the top level scope of $L$ by 
+A {\em system library} is a library that is part of the Dart implementation. Any other library is a {\em non-system library}. If a name $N$ is referenced by a library $L$ and $N$ would be introduced into the top level scope of $L$ by
 imports of two libraries, $L_1$ and $L_2$, and the exported namespace of $L_1$ binds $N$ to a declaration originating in a system library:
 
 %an import of a  system library and an import of a non-system library:
@@ -6767,7 +6780,7 @@
 If a name $N$ is referenced by a library $L$ and $N$ is  introduced into  the top level scope of $L$ by more than one import, and not all the imports denote the same declaration, then:
 \begin{itemize}
 \item A static warning occurs.
-\item If $N$ is referenced as a function, getter or setter, a \code{NoSuchMethodError} is thrown. 
+\item If $N$ is referenced as a function, getter or setter, a \code{NoSuchMethodError} is thrown.
 \item  If $N$ is referenced as a type, it is treated as a malformed type.
 
 \end{itemize}
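 \commentary{
 A non-normative sketch (the library names are hypothetical): if both \code{a.dart} and \code{b.dart} declare a top-level \code{max}, a client can resolve the conflict with a combinator or a prefix:
 }

 \begin{dartCode}
 \IMPORT{} 'a.dart' \HIDE{} max;
 \IMPORT{} 'b.dart';           // only the max from b.dart is now in scope
 \end{dartCode}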
@@ -6780,7 +6793,7 @@
 }
 
 \rationale{
-The policy above makes libraries more robust in the face of additions made to their imports.  
+The policy above makes libraries more robust in the face of additions made to their imports.
 
 A clear distinction needs to be made between this approach and seemingly similar policies with respect to classes or interfaces.  The use of a class or interface, and of its members, is separate from its declaration. The usage and declaration may occur in widely separated places in the code, and may in fact be authored by different people or organizations.  It is important that errors are given at the offending declaration so that the party that receives the error can respond to it in a meaningful way.
 
@@ -6797,14 +6810,14 @@
 }
 
 \commentary{Note that no errors or warnings are given if one hides or shows a name that is not in a namespace.}
-\rationale{  
+\rationale{
 This prevents situations where removing a name from a library would cause breakage of a client library.
 }
 
 \LMHash{}
 The Dart core library \code{dart:core} is implicitly imported into every Dart library other than itself via an import clause of the form
 
-\code{\IMPORT{}  `dart:core';} 
+\code{\IMPORT{}  `dart:core';}
 
 unless the importing library explicitly imports \code{dart:core}.
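 \commentary{
 For example (non-normative), a library that declares its own \code{print} can import \code{dart:core} explicitly and hide the conflicting core declaration:
 }

 \begin{dartCode}
 \IMPORT{} 'dart:core' \HIDE{} print;

 \VOID{} print(Object o) \{
   // a custom print; the core declaration is hidden above
 \}
 \end{dartCode}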
 
@@ -6813,7 +6826,7 @@
 }
 
 \rationale{
-It would be nice if there was nothing special about \code{dart:core}. However, its use is pervasive, which leads to the decision to import it automatically.  However, some library $L$  may wish to define entities with names used by \code{dart:core} (which it can easily do, as the names declared by a library take precedence). Other libraries may wish to use $L$ and may want to use members of $L$ that conflict with the core library without having to use a prefix and without encountering warnings. The above rule makes this possible, essentially canceling \code{dart:core}'s special treatment by means of yet another special rule. 
+It would be nice if there were nothing special about \code{dart:core}. However, its use is pervasive, which leads to the decision to import it automatically. Nevertheless, some library $L$ may wish to define entities with names used by \code{dart:core} (which it can easily do, as the names declared by a library take precedence). Other libraries may wish to use $L$ and may want to use members of $L$ that conflict with the core library without having to use a prefix and without encountering warnings. The above rule makes this possible, essentially canceling \code{dart:core}'s special treatment by means of yet another special rule.
 }
 
 \subsection{Exports}
@@ -6827,9 +6840,9 @@
    metadata \EXPORT{}  uri  combinator* `{\escapegrammar ;}'
     .
  \end{grammar}
- 
+
 \LMHash{}
- An export specifies a URI $x$ where the declaration of an exported library is to be found.  It is a compile-time error if  the specified URI does not refer to a library declaration.  
+ An export specifies a URI $x$ where the declaration of an exported library is to be found.  It is a compile-time error if  the specified URI does not refer to a library declaration.
 
 \LMHash{}
 We say that a name {\em is exported by a library} (or equivalently, that a library {\em exports a name}) if the name is in the library's exported namespace. We say that a declaration {\em is exported by a library} (or equivalently, that a library {\em exports a declaration}) if the declaration is in the library's exported namespace.
@@ -6853,17 +6866,17 @@
 \LMHash{}
 Let $NS_0$ be the exported namespace of $B$. Then, for each combinator clause $C_i, i \in 1..n$ in $E$:
 \begin{itemize}
-\item If $C_i$ is of the form \code{\SHOW{} $id_1, \ldots, id_k$} then let 
+\item If $C_i$ is of the form \code{\SHOW{} $id_1, \ldots, id_k$} then let
 
-$NS_i = \SHOW{}([id_1, \ldots, id_k], NS_{i-1}$).  
-\item If $C_i$ is of the form \code{\HIDE{} $id_1, \ldots, id_k$} 
+$NS_i = \SHOW{}([id_1, \ldots, id_k], NS_{i-1}$).
+\item If $C_i$ is of the form \code{\HIDE{} $id_1, \ldots, id_k$}
 
-then let $NS_i = \HIDE{}([id_1, \ldots, id_k], NS_{i-1}$). 
+then let $NS_i = \HIDE{}([id_1, \ldots, id_k], NS_{i-1}$).
 \end{itemize}
 
 \LMHash{}
 For each
-entry mapping key $k$ to declaration $d$ in $NS_n$ an entry mapping $k$ to $d$ is added to the exported namespace of $L$ unless a  top-level declaration with the name $k$ exists in $L$.  
+entry mapping key $k$ to declaration $d$ in $NS_n$, an entry mapping $k$ to $d$ is added to the exported namespace of $L$ unless a top-level declaration with the name $k$ exists in $L$.
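 \commentary{
 As a non-normative sketch (the URIs and names are hypothetical), a library can re-export a subset of another library's namespace:
 }

 \begin{dartCode}
 \EXPORT{} 'src/circle.dart' \SHOW{} Circle;
 \EXPORT{} 'src/helpers.dart' \HIDE{} internalHelper;
 \end{dartCode}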
 
 \LMHash{}
 If a name $N$ is referenced by a library $L$ and $N$ would be introduced into the exported namespace of $L$ by exports of two libraries, $L_1$ and $L_2$, and the exported namespace of $L_1$ binds $N$ to a declaration originating in a system library:
@@ -6889,14 +6902,14 @@
 \LMLabel{parts}
 
 \LMHash{}
-A library may be divided into {\em parts}, each of which can be stored in a separate location. A library identifies its parts by listing them via \PART{} directives. 
+A library may be divided into {\em parts}, each of which can be stored in a separate location. A library identifies its parts by listing them via \PART{} directives.
 
 \LMHash{}
 A {\em part directive} specifies a URI where a Dart compilation unit that should be incorporated into the current library may be found.
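 \commentary{
 For example (non-normative; the library and file names are hypothetical), a library and one of its parts are connected as follows:
 }

 \begin{dartCode}
 // shapes.dart
 \LIBRARY{} shapes;
 \PART{} 'src/circle.dart';

 // src/circle.dart
 \PART{} \OF{} shapes;
 \end{dartCode}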
 
 \begin{grammar}
 {\bf partDirective:}
-   metadata \PART{}   uri `{\escapegrammar ;}' 
+   metadata \PART{}   uri `{\escapegrammar ;}'
     .
 
 {\bf partHeader:}
@@ -6925,22 +6938,22 @@
 First, $S$ is compiled as a library as specified above. Then, the top-level function \code{main} that is in the exported namespace of $S$ is invoked. If \code{main} has no positional parameters, it is invoked with no arguments. Otherwise if \code{main} has exactly one positional parameter, it is invoked with a single actual argument whose runtime type implements \code{List$<$String$>$}.  Otherwise \code{main} is invoked with the following two actual arguments:
 \begin{enumerate}
 \item An object whose runtime type implements \code{List$<$String$>$}.
-\item The initial message of the current isolate $i$ as determined by the invocation of \code{Isolate.spawnUri} that spawned $i$. 
+\item The initial message of the current isolate $i$ as determined by the invocation of \code{Isolate.spawnUri} that spawned $i$.
 \end{enumerate}
 
 \LMHash{}
 It is a run time error if $S$ does not declare or export either:
 \begin{itemize}
-\item  A top-level function named  \code{main}, or 
+\item  A top-level function named  \code{main}, or
 \item A top-level getter named  \code{main} that returns a function.
 \end{itemize}
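 \commentary{
 As a non-normative illustration, each of the following top-level declarations would satisfy this requirement:
 }

 \begin{dartCode}
 main() \{\}                      // no positional parameters
 main(List$<$String$>$ args) \{\} // one positional parameter
 main(args, message) \{\}         // two positional parameters
 \end{dartCode}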
 
 \commentary {
-Note that if \code{main} requires more than two arguments, a run time error will occur. 
+Note that if \code{main} requires more than two arguments, a run time error will occur.
 }
 
 \rationale{
-The names of scripts are optional, in the interests of interactive, informal use. However, any script of long term value should be given a name as a matter of good practice. 
+The names of scripts are optional, in the interests of interactive, informal use. However, any script of long term value should be given a name as a matter of good practice.
 }
 
 \commentary {
@@ -6961,26 +6974,26 @@
 
 \LMHash{}
 It is a compile-time error if  the string literal $x$ that describes a URI is not a compile-time constant, or if $x$ involves string interpolation.
- 
+
 \LMHash{}
-This specification does not discuss the interpretation of URIs, with the following exceptions. 
- 
+This specification does not discuss the interpretation of URIs, with the following exceptions.
+
  \rationale{
  The interpretation of URIs is mostly left to the surrounding computing environment. For example, if Dart is running in a web browser, that browser will likely interpret some URIs. While it might seem attractive to specify, say, that URIs are interpreted with respect to a standard such as IETF RFC 3986, in practice this will usually depend on the browser and cannot be relied upon.
  }
- 
+
 \LMHash{}
-A URI of the form \code{dart:$s$} is interpreted as a reference to a system library (\ref{imports}) $s$. 
- 
+A URI of the form \code{dart:$s$} is interpreted as a reference to a system library (\ref{imports}) $s$.
+
 \LMHash{}
 A URI of the form \code{package:$s$} is interpreted in an implementation specific manner.
 
 \rationale{
-The intent is that, during development, Dart programmers can rely on a package manager to find elements of their program. 
+The intent is that, during development, Dart programmers can rely on a package manager to find elements of their program.
 }
 
 \LMHash{}
-Otherwise, any relative URI is interpreted as relative to the location of the current library. All further interpretation of URIs is implementation dependent. 
+Otherwise, any relative URI is interpreted as relative to the location of the current library. All further interpretation of URIs is implementation dependent.
 
 \commentary{This means it is dependent on the embedder.}
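 \commentary{
 For example (non-normative; the package and path names are hypothetical), the three kinds of URI might appear as:
 }

 \begin{dartCode}
 \IMPORT{} 'dart:math';                   // a system library
 \IMPORT{} 'package:quiver/quiver.dart';  // resolved by the package manager
 \IMPORT{} 'src/util.dart';               // relative to the current library
 \end{dartCode}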
 
@@ -7006,7 +7019,7 @@
     .
 
 {\bf typeName:}
-      qualified 
+      qualified
     .
 
 {\bf typeArguments:}
@@ -7021,7 +7034,7 @@
 \LMHash{}
 A Dart implementation must provide a static checker that detects and reports exactly those situations this specification identifies as static warnings and only those situations. However:
 \begin{itemize}
-\item Running  the static checker on a program $P$ is not required for compiling and running $P$.  
+\item Running  the static checker on a program $P$ is not required for compiling and running $P$.
 \item Running the static checker on a program $P$ must not prevent successful compilation of $P$ nor may it prevent the execution of $P$, regardless of whether any static warnings occur.
 \end{itemize}
 
@@ -7035,17 +7048,17 @@
 \begin{itemize}
 \item $T$ has the form $id$ or the form $prefix.id$, and in the enclosing lexical scope, the name $id$ (respectively $prefix.id$) does not denote a type.
 \item $T$ denotes a type variable in the enclosing lexical scope, but occurs in the signature or body of a static member.
-\item $T$ is a parameterized type of the form $G<S_1, \ldots , S_n>$, and  $G$ is malformed. 
+\item $T$ is a parameterized type of the form $G<S_1, \ldots , S_n>$, and  $G$ is malformed.
 \item $T$ denotes declarations that were imported from multiple import clauses.
-%Either $G$ or $S_i,  i \in 1.. n$ are malformed. 
+%Either $G$ or $S_i,  i \in 1.. n$ are malformed.
  % \item  $G$ is not a generic type with $n$ type parameters.
-%  \item Let $T_i$ be the type parameters of $G$ (if any) and let $B_i$ be the bound of $T_i,  i \in 1.. n$, and $S_i$ is not a subtype of $[S_1,  \ldots, S_n/T_1, \ldots, T_n]B_i,   i \in 1.. n$. 
+%  \item Let $T_i$ be the type parameters of $G$ (if any) and let $B_i$ be the bound of $T_i,  i \in 1.. n$, and $S_i$ is not a subtype of $[S_1,  \ldots, S_n/T_1, \ldots, T_n]B_i,   i \in 1.. n$.
 %  \end{itemize}
 \end{itemize}
 
 \LMHash{}
  Any use of a malformed  type gives rise to a static warning. A malformed type is then interpreted as \DYNAMIC{} by the static type checker and the runtime unless explicitly specified otherwise.
-  
+
  \rationale{
 This ensures that the developer is spared a series of cascading warnings as the malformed type interacts with other types.
 }
@@ -7057,11 +7070,11 @@
 % Now, when passed to a generic, p.T also has to be treated as dynamic - otherwise we have to fail immediately. Where do we say that? And how does this fit with idea that as a type object it fails? Should we say that the accessor on p returns dynamic instead of failing? Do we distinguish its use in a constructor vs its use in an annotation? It's not that we evaluate type objects in constructor args - these cannot represent parameterized types.
 
 
-\subsubsection{Type Promotion}   
+\subsubsection{Type Promotion}
 \LMLabel{typePromotion}
 
 \LMHash{}
-The static type system ascribes a static type to every expression.  In some cases, the types of local variables and formal parameters may be promoted from their declared types based on control flow. 
+The static type system ascribes a static type to every expression.  In some cases, the types of local variables and formal parameters may be promoted from their declared types based on control flow.
 
 \LMHash{}
 We say that a variable $v$ is known to have type $T$ whenever we allow the type of $v$ to be promoted. The exact circumstances when type promotion is allowed are given in the relevant sections of the specification (\ref{logicalBooleanExpressions}, \ref{conditional} and \ref{if}).
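 \commentary{
 A non-normative sketch: in the function below, the type of \code{o} may be promoted from \code{Object} to \code{String} in the true branch of the \code{if} statement:
 }

 \begin{dartCode}
 f(Object o) \{
   \IF{} (o \IS{} String) \{
     \RETURN{} o.length; // o is known to have type String here
   \}
   \RETURN{} 0;
 \}
 \end{dartCode}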
@@ -7088,11 +7101,11 @@
 %It is a run-time type error to access an undeclared type outside .
 
 \LMHash{}
-%It is a dynamic type error if a malformed type is used in a subtype test.  
-In checked mode, it is a dynamic type error if a deferred, malformed or malbounded (\ref{parameterizedTypes}) 
-type is used in a subtype test.  
+%It is a dynamic type error if a malformed type is used in a subtype test.
+In checked mode, it is a dynamic type error if a deferred, malformed or malbounded (\ref{parameterizedTypes})
+type is used in a subtype test.
 
-%In production mode, an undeclared type is treated as an instance of type \DYNAMIC{}. 
+%In production mode, an undeclared type is treated as an instance of type \DYNAMIC{}.
 
 \commentary{Consider the following program}
 
@@ -7108,7 +7121,7 @@
 
 \commentary{
 The type of the formal parameter of $f$ is $foo$, which is undeclared in the lexical scope. This will lead to a static type warning. At runtime the program will print \cd{yoyoma}, because $foo$ is treated as \DYNAMIC{}.
-%fail when executing the type test on the first line of $main()$ because it leads to a subtype comparison involving a malformed type ($foo$). 
+%fail when executing the type test on the first line of $main()$ because it leads to a subtype comparison involving a malformed type ($foo$).
 
 As another example take}
 
@@ -7116,7 +7129,7 @@
 \VAR{} i;
 i  j; //  a variable j of type i (supposedly)
 main() \{
-     j =  'I am not an i'; 
+     j =  'I am not an i';
 \}
 \end{dartCode}
 
@@ -7135,13 +7148,13 @@
 
 \CLASS{} A$<$T$>$ \IMPLEMENTS{} J,  I$<$T$>$ // type warning: T is not a subtype of num
 \{ ...
-\} 
+\}
 \end{dartCode}
 
 \commentary{Given the declarations above, the following}
 
 \begin{dartCode}
-I x = \NEW{} A$<$String$>$(); 
+I x = \NEW{} A$<$String$>$();
 \end{dartCode}
 
 \commentary{
@@ -7149,11 +7162,11 @@
 }
 
 \begin{dartCode}
-J x = \NEW{} A$<$String$>$(); 
+J x = \NEW{} A$<$String$>$();
 \end{dartCode}
 
 \commentary{
-does not cause a dynamic error, as there is no need to test against \code{I$<$String$>$} in this case. 
+does not cause a dynamic error, as there is no need to test against \code{I$<$String$>$} in this case.
 Similarly, in production mode
 }
 
@@ -7183,7 +7196,7 @@
 {\bf typeAlias:}
  	metadata  \TYPEDEF{} typeAliasBody
 	.
-	
+
 {\bf typeAliasBody:}
 	functionTypeAlias
 	.
@@ -7191,22 +7204,22 @@
 {\bf functionTypeAlias:}
        functionPrefix  typeParameters? formalParameterList '{\escapegrammar ;}'
     .
-    
+
     {\bf functionPrefix:}
     returnType? identifier
     .
-    
+
  \end{grammar}
- 
+
 \LMHash{}
 The effect of a type alias of the form \code{\TYPEDEF{} $T$ $id (T_1$ $p_1, \ldots, T_n$ $p_n, [T_{n+1}$ $p_{n+1}, \ldots, T_{n+k}$ $p_{n+k}])$} declared in a library $L$ is to introduce the name $id$ into the scope of $L$, bound to the function type $(T_1, \ldots, T_n, [T_{n+1}$ $p_{n+1}, \ldots, T_{n+k}$ $p_{n+k}]) \rightarrow T$. The effect of a type alias of the form \code{\TYPEDEF{} $T$ $id (T_1$ $p_1, \ldots, T_n$ $p_n, \{T_{n+1}$ $p_{n+1}, \ldots, T_{n+k}$ $p_{n+k}\})$} declared in a library $L$ is to introduce the name $id$ into the scope of $L$, bound to the function type $(T_1, \ldots, T_n, \{T_{n+1}$ $p_{n+1}, \ldots, T_{n+k}$ $p_{n+k}\}) \rightarrow T$. In either case, if no return type is specified, it is taken to be \DYNAMIC{}. Likewise, if a type annotation is omitted on a formal parameter, it is taken to be \DYNAMIC{}.
- 
+
 \LMHash{}
-It is a compile-time error if any default values are specified in the signature of a function type alias. 
-%A typedef may only refer to itself  via the bounds of its generic parameters. 
+It is a compile-time error if any default values are specified in the signature of a function type alias.
+%A typedef may only refer to itself  via the bounds of its generic parameters.
 Any self reference in a typedef, either directly or recursively via another typedef, is a compile-time error.
-%via a chain of references that does not include a class declaration.  
- 
+%via a chain of references that does not include a class declaration.
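 \commentary{
 As a non-normative illustration, the first declaration below introduces the name \code{Predicate} for a function type, while the second is a compile-time error because it refers to itself:
 }

 \begin{dartCode}
 \TYPEDEF{} bool Predicate(Object o);
 \TYPEDEF{} Bad Bad(Bad b); // compile-time error: self reference
 \end{dartCode}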
+
 
 
 \subsection{Interface Types}
@@ -7267,9 +7280,9 @@
 \commentary{The supertypes of an interface are its direct supertypes and their supertypes. }
 
 \LMHash{}
-An interface type $T$ may be assigned to a type $S$, written  $T \Longleftrightarrow S$, iff either $T <: S$ or $S <: T$. 
+An interface type $T$ may be assigned to a type $S$, written  $T \Longleftrightarrow S$, iff either $T <: S$ or $S <: T$.
 
-\rationale{This rule may surprise readers accustomed to conventional typechecking. The intent of the $\Longleftrightarrow$ relation is not to ensure that an assignment is correct. Instead, it aims to only flag assignments that are almost certain to be erroneous, without precluding assignments that may work. 
+\rationale{This rule may surprise readers accustomed to conventional typechecking. The intent of the $\Longleftrightarrow$ relation is not to ensure that an assignment is correct. Instead, it aims to only flag assignments that are almost certain to be erroneous, without precluding assignments that may work.
 
 For example, assigning a value of static type Object to a variable with static type String, while not guaranteed to be correct, might be fine if the runtime value happens to be a string.
 }
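 \commentary{
 A non-normative sketch of the example above: the assignment below is not flagged statically, and succeeds at run time because the value happens to be a string.
 }

 \begin{dartCode}
 Object o = 'a string';
 String s = o; // no static warning: Object may be assigned to String
 \end{dartCode}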
@@ -7278,7 +7291,7 @@
 \LMLabel{functionTypes}
 
 \LMHash{}
-Function types come in two variants: 
+Function types come in two variants:
 \begin{enumerate}
 \item
 The types of functions that only have positional parameters.  These have the general form $(T_1, \ldots, T_n, [T_{n+1}, \ldots, T_{n+k}]) \rightarrow T$.
@@ -7297,7 +7310,7 @@
 %\end{enumerate}
 
 \LMHash{}
-%A function type $(T_1, \ldots T_n, [T_{n+1}  \ldots, T_{n+k}]) \rightarrow T$ is a subtype of the 
+%A function type $(T_1, \ldots T_n, [T_{n+1}  \ldots, T_{n+k}]) \rightarrow T$ is a subtype of the
 % the line below revises the rule to be more liberal
 A function type $(T_1, \ldots, T_{k}, [T_{k+1}, \ldots, T_{n+m}]) \rightarrow T$ is a subtype of the
 function type $(S_1, \ldots, S_{k+j}, [S_{k+j+1}, \ldots, S_{n}]) \rightarrow S$, if all of the following conditions are met:
@@ -7322,7 +7335,7 @@
 \item $\forall i \in 1 .. n, T_i \Longleftrightarrow S_i$.
 \item $k \ge m$ and $y_i \in \{x_1,  \ldots, x_k\}, i \in 1 .. m$.
 %\{x_1,  \ldots, x_k\}$ is a superset of $\{y_1,  \ldots, y_m\}$.
-\item For all $y_i \in \{y_1,  \ldots, y_m\}, y_i = x_j \Rightarrow T_j \Longleftrightarrow S_i$
+\item For all $y_i \in \{y_1,  \ldots, y_m\}, y_i = x_j \Rightarrow T_{x_j} \Longleftrightarrow S_{y_i}$
 \end{enumerate}
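 \commentary{
 As a non-normative illustration (the names \code{F} and \code{g} are hypothetical), a function with an additional optional positional parameter may be used where a function type with fewer parameters is expected:
 }

 \begin{dartCode}
 \TYPEDEF{} int F(int x);

 int g(int x, [int step]) => x + 1;

 F f = g; // ok: the type of g is a subtype of F
 \end{dartCode}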
 
 %In addition, a function type $(T_1, \ldots, Tn, [T_{n+1} x_{n+1}, \ldots, T_{n+k} x_{n+k}]) \rightarrow T$ is a subtype of the function type $(T_1, \ldots, T_n, T_{n+1} , [T_{n+2} x_{n+2}, \ldots, T_{n+k} x_{n+k}]) \rightarrow T$.
@@ -7332,7 +7345,7 @@
 %We write $(T_1, \ldots, T_n) \rightarrow T$ as a shorthand for the type $(T_1, \ldots, T_n, []) \rightarrow T$.
 
 %The rules above need to be sanity checked, but the intent is that we view functions with rest parameters as having type $(T_1, ..., T_n, [\_{Tn+1}[] \_]) \rightarrow T$, where \_ is some magical identifier. Then the rules above may cover everything.
-% This is wrong - from the outside, the type takes an unbounded sequence of types, not a list. This can be modeled as $(T_1, \ldots, T_n, [T_{n+1}, \_ \ldots, T_{n+k} \_]) \rightarrow T$ for some finite $k$. 
+% This is wrong - from the outside, the type takes an unbounded sequence of types, not a list. This can be modeled as $(T_1, \ldots, T_n, [T_{n+1}, \_ \ldots, T_{n+k} \_]) \rightarrow T$ for some finite $k$.
 
 \LMHash{}
 In addition, the following subtype rules apply:
@@ -7351,7 +7364,7 @@
 }
 
 \LMHash{}
-A function type $T$ may be assigned to a function type $S$, written  $T \Longleftrightarrow S$, iff  $T <: S$. 
+A function type $T$ may be assigned to a function type $S$, written  $T \Longleftrightarrow S$, iff  $T <: S$.
 
 \LMHash{}
 % ensure that Object  and dynamic may be assign dot a function type
@@ -7398,7 +7411,7 @@
 \LMLabel{typeDynamic}
 
 \LMHash{}
-The type  \DYNAMIC{}  denotes the unknown type. 
+The type  \DYNAMIC{}  denotes the unknown type.
 
 \LMHash{}
 If no static type annotation has been provided, the type system assumes the declaration has the unknown type. If a generic type is used but type arguments are not provided, then the type arguments default to the unknown type.
@@ -7410,11 +7423,11 @@
 Type  \DYNAMIC{} has methods for every possible identifier and arity, with every possible combination of named parameters. These methods all have  \DYNAMIC{} as their return type, and their formal parameters all have type  \DYNAMIC{}.
 Type  \DYNAMIC{} has properties for every possible identifier. These properties all have type  \DYNAMIC{}.
 
-\rationale{From a usability perspective, we want to ensure that the checker does not issue errors everywhere an unknown type is used. The definitions above ensure that no secondary errors are reported when accessing an unknown type. 
+\rationale{From a usability perspective, we want to ensure that the checker does not issue errors everywhere an unknown type is used. The definitions above ensure that no secondary errors are reported when accessing an unknown type.
 
 The current rules say that missing type arguments are treated as if they were the type  \DYNAMIC{}.  An alternative is to consider them as meaning \code{Object}.  This would lead to earlier error detection in checked mode, and more aggressive errors during static typechecking. For example:
 
-(1)  \code{typedAPI(G\lt{String}\gt g)\{...\}} 
+(1)  \code{typedAPI(G\lt{String}\gt g)\{...\}}
 
 
 (2)  \code{typedAPI(new G()); }
@@ -7433,19 +7446,19 @@
 The name \DYNAMIC{} denotes a \cd{Type} object even though \DYNAMIC{} is not a class.
 
 %\rationale {
-%Type objects reify the runtime types of instances. No instance ever has type \DYNAMIC{}. 
+%Type objects reify the runtime types of instances. No instance ever has type \DYNAMIC{}.
 %}
 
 \subsection{Type Void}
 \LMLabel{typeVoid}
 
 \LMHash{}
-The special type \VOID{} may only be used as the return type of a function: it is a compile-time error to use \VOID{} in any other context. 
+The special type \VOID{} may only be used as the return type of a function: it is a compile-time error to use \VOID{} in any other context.
 
 \commentary{
 For example, as a type argument, or as the type of a variable or parameter
 
-Void is not an interface type. 
+Void is not an interface type.
 
 The only subtype relations that pertain to void are therefore:
 \begin{itemize}
@@ -7468,31 +7481,31 @@
 
 \rationale {
 It is syntactically illegal to use \VOID{} as an expression, and it would make no sense to do so.
-Type objects reify the runtime types of instances. No instance ever has type \VOID{}.  
+Type objects reify the runtime types of instances. No instance ever has type \VOID{}.
 }
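 \commentary{
 A non-normative sketch: \VOID{} may appear as a return type, but nowhere else:
 }

 \begin{dartCode}
 \VOID{} log(String message) \{ print(message); \}

 // \VOID{} x; // compile-time error: \VOID{} used as the type of a variable
 \end{dartCode}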
 
 
 
 \subsection{Parameterized Types}
-\LMLabel{parameterizedTypes} 
+\LMLabel{parameterizedTypes}
 
 \LMHash{}
 A {\em parameterized type} is an invocation of a generic type declaration.
 
 \LMHash{}
-Let $T$ be a parameterized type  $G<S_1,  \ldots, S_n>$. If $G$ is not a generic type, the type arguments $S_i$, $1 \le i \le n$ are discarded. If $G$ has $m \ne n$ type parameters, $T$ is treated as as a parameterized type with $m$ arguments, all of which are \DYNAMIC{}. 
+Let $T$ be a parameterized type $G<S_1, \ldots, S_n>$. If $G$ is not a generic type, the type arguments $S_i$, $1 \le i \le n$, are discarded. If $G$ has $m \ne n$ type parameters, $T$ is treated as a parameterized type with $m$ arguments, all of which are \DYNAMIC{}.
 
-\commentary{In short, any arity mismatch results in all type arguments being dropped, and replaced with the correct number of type arguments, all set to \DYNAMIC{}. Of course, a static warning will be issued. 
+\commentary{In short, any arity mismatch results in all type arguments being dropped, and replaced with the correct number of type arguments, all set to \DYNAMIC{}. Of course, a static warning will be issued.
 }
 
 \LMHash{}
 Otherwise, let
- $T_i$ be the type parameters of $G$ and let $B_i$ be the bound of $T_i,  i \in 1.. n$,. $T$ is {\em malbounded} iff either $S_i$ is malbounded  or $S_i$ is not a subtype of $[S_1,  \ldots, S_n/T_1, \ldots, T_n]B_i,   i \in 1.. n$. 
- 
+ $T_i$ be the type parameters of $G$ and let $B_i$ be the bound of $T_i, i \in 1..n$. $T$ is {\em malbounded} iff either $S_i$ is malbounded or $S_i$ is not a subtype of $[S_1, \ldots, S_n/T_1, \ldots, T_n]B_i, i \in 1..n$.
+
 \commentary{
 Note that, in checked mode, it is a dynamic type error if a malbounded type is used in a type test, as specified in \ref{dynamicTypeSystem}.
 }
- 
+
 \LMHash{}
 Any use of a malbounded type gives rise to a static warning.
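 \commentary{
 For example (non-normative; the class is hypothetical), a type argument that violates the declared bound yields a malbounded type:
 }

 \begin{dartCode}
 \CLASS{} Box$<$T \EXTENDS{} num$>$ \{\}

 Box$<$String$>$ b; // static warning: String violates the bound num, so the type is malbounded
 \end{dartCode}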
 
@@ -7526,14 +7539,14 @@
 
 \LMHash{}
 % does this diverge in some cases?
-Given two interfaces $I$ and $J$, let $S_I$ be the set of superinterfaces of $I$,  let $S_J$ be the set of superinterfaces of $J$ and let $S =  (I \cup S_I) \cap (J \cup S_J)$.  Furthermore, we define $S_n = \{T | T \in S  \wedge depth(T) =n\}$ for any finite $n$ %, and $k=max(depth(T_1), \ldots, depth(T_m)), T_i \in S, i \in 1..m$, 
+Given two interfaces $I$ and $J$, let $S_I$ be the set of superinterfaces of $I$,  let $S_J$ be the set of superinterfaces of $J$ and let $S =  (I \cup S_I) \cap (J \cup S_J)$.  Furthermore, we define $S_n = \{T | T \in S  \wedge depth(T) =n\}$ for any finite $n$ %, and $k=max(depth(T_1), \ldots, depth(T_m)), T_i \in S, i \in 1..m$,
 where $depth(T)$ is the number of steps in the longest inheritance path from $T$ to \code{Object}. Let $q$ be the largest number such that $S_q$ has cardinality one. The least upper bound of $I$ and $J$ is the sole element of  $S_q$.
 
 \LMHash{}
 The least upper bound of \DYNAMIC{} and any type $T$ is \DYNAMIC{}.
 The least upper bound of \VOID{} and any type $T \ne \DYNAMIC{}$ is \VOID{}.
 The least upper bound of $\bot$ and any type $T$ is $T$.
-Let $U$ be a type variable with upper bound $B$. The least upper bound of $U$ and a type $T \ne \bot$ is the least upper bound of $B$ and $T$. 
+Let $U$ be a type variable with upper bound $B$. The least upper bound of $U$ and a type $T \ne \bot$ is the least upper bound of $B$ and $T$.
 
 \LMHash{}
 The least upper bound relation is symmetric and reflexive.
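 \commentary{
 A non-normative sketch of one place where least upper bounds arise: the static type of a conditional expression is the least upper bound of the static types of its branches.
 }

 \begin{dartCode}
 num pick(bool b, int i, double d) \{
   \VAR{} x = b ? i : d; // the static type of the conditional is num, the least upper bound of int and double
   \RETURN{} x;
 \}
 \end{dartCode}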
@@ -7544,41 +7557,41 @@
 The least upper bound of a function type and an interface type $T$ is the least upper bound of \cd{Function} and $T$.
 Let $F$ and $G$ be function types. If $F$ and $G$ differ in their number of required parameters, then the least upper bound of $F$ and $G$ is \cd{Function}.  Otherwise:
 \begin{itemize}
-\item If 
-
-$F= (T_1 \ldots T_r, [T_{r+1}, \ldots, T_n]) \longrightarrow T_0$, 
-
-$G= (S_1 \ldots S_r, [S_{r+1}, \ldots, S_k]) \longrightarrow S_0$ 
-
-where $k \le n$ then the least upper bound of $F$ and $G$ is 
-
-$(L_1 \ldots L_r, [L_{r+1}, \ldots, L_k]) \longrightarrow L_0$ 
-
-where $L_i$ is the least upper bound of $T_i$ and $S_i, i \in 0..k$.
-\item If 
+\item If
 
 $F= (T_1 \ldots T_r, [T_{r+1}, \ldots, T_n]) \longrightarrow T_0$,
 
-$G= (S_1 \ldots S_r, \{ \ldots \}) \longrightarrow S_0$ 
+$G= (S_1 \ldots S_r, [S_{r+1}, \ldots, S_k]) \longrightarrow S_0$
 
-then the least upper bound of $F$ and $G$ is 
+where $k \le n$ then the least upper bound of $F$ and $G$ is
 
-$(L_1 \ldots L_r) \longrightarrow L_0$ 
+$(L_1 \ldots L_r, [L_{r+1}, \ldots, L_k]) \longrightarrow L_0$
 
-where $L_i$ 
+where $L_i$ is the least upper bound of $T_i$ and $S_i, i \in 0..k$.
+\item If
+
+$F= (T_1 \ldots T_r, [T_{r+1}, \ldots, T_n]) \longrightarrow T_0$,
+
+$G= (S_1 \ldots S_r, \{ \ldots \}) \longrightarrow S_0$
+
+then the least upper bound of $F$ and $G$ is
+
+$(L_1 \ldots L_r) \longrightarrow L_0$
+
+where $L_i$
 is the least upper bound of $T_i$ and $S_i, i \in 0..r$.
-\item If 
+\item If
 
-$F= (T_1 \ldots T_r, \{T_{r+1}$  $p_{r+1}, \ldots, T_f$ $p_f\}) \longrightarrow T_0$,  
+$F= (T_1 \ldots T_r, \{T_{r+1}$  $p_{r+1}, \ldots, T_f$ $p_f\}) \longrightarrow T_0$,
 
-$G= (S_1 \ldots S_r, \{ S_{r+1}$  $q_{r+1}, \ldots, S_g$ $q_g\}) \longrightarrow S_0$ 
+$G= (S_1 \ldots S_r, \{ S_{r+1}$  $q_{r+1}, \ldots, S_g$ $q_g\}) \longrightarrow S_0$
 
 then let $\{x_m, \ldots x_n\}  = \{p_{r+1}, \ldots, p_f\} \cap \{q_{r+1}, \ldots, q_g\}$ and let $X_j$ be the least upper bound of the types of $x_j$ in $F$ and $G, j \in m..n$. Then
 the least upper bound of $F$ and $G$ is
 
-$(L_1 \ldots L_r, \{ X_m$ $x_m, \ldots, X_n$ $x_n\}) \longrightarrow L_0$ 
+$(L_1 \ldots L_r, \{ X_m$ $x_m, \ldots, X_n$ $x_n\}) \longrightarrow L_0$
 
-where $L_i$ is the least upper bound of $T_i$ and $S_i, i \in 0..r$ 
+where $L_i$ is the least upper bound of $T_i$ and $S_i, i \in 0..r$.
 \end{itemize}
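 \commentary{
 A non-normative worked example of the first case above: let $F = (\cd{int}, [\cd{int}, \cd{int}]) \longrightarrow \cd{int}$ and $G = (\cd{int}, [\cd{int}]) \longrightarrow \cd{int}$. Then $r = 1$, $n = 3$ and $k = 2$, so the least upper bound of $F$ and $G$ is $(\cd{int}, [\cd{int}]) \longrightarrow \cd{int}$, where each $L_i$ is simply \cd{int}.
 }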
 
 
@@ -7591,7 +7604,7 @@
 \LMHash{}
 Dart source text is represented as a sequence of Unicode code points.  This sequence is first converted into a sequence of tokens according to the lexical rules given in this specification.  At any point in the tokenization process, the longest possible token is recognized.
 
-\subsubsection{Reserved Words} 
+\subsubsection{Reserved Words}
 \LMLabel{reservedWords}
 
 \LMHash{}
@@ -7631,15 +7644,15 @@
       `/*' (MULTI\_LINE\_COMMENT $|$ \~{} `*/')* `*/'
     .
  \end{grammar}
-  
-\LMHash{}
-Dart supports both single-line and multi-line comments. A {\em single line comment} begins with the token \code{//}. Everything between \code{//} and the end of line must be ignored by the Dart compiler unless the comment is a documentation comment. . 
 
 \LMHash{}
-A {\em multi-line comment} begins with the token \code{/*} and ends with the token \code{*/}.  Everything between \code{/}* and \code{*}/ must be ignored by the Dart compiler unless the comment is a documentation comment. Comments may nest. 
+Dart supports both single-line and multi-line comments. A {\em single-line comment} begins with the token \code{//}. Everything between \code{//} and the end of the line must be ignored by the Dart compiler unless the comment is a documentation comment.
 
 \LMHash{}
-{\em Documentation comments} are comments that begin with the tokens  \code{///} or  \code{/**}. Documentation comments are intended to be processed by a tool that produces human readable documentation. 
+A {\em multi-line comment} begins with the token \code{/*} and ends with the token \code{*/}.  Everything between \code{/}* and \code{*}/ must be ignored by the Dart compiler unless the comment is a documentation comment. Comments may nest.
+
+\LMHash{}
+{\em Documentation comments} are comments that begin with the tokens  \code{///} or  \code{/**}. Documentation comments are intended to be processed by a tool that produces human readable documentation.
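 \commentary{
 Non-normative examples of the three kinds of comment:
 }

 \begin{dartCode}
 // a single-line comment
 /* a multi-line comment /* which may nest */ */
 /// a documentation comment
 \end{dartCode}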
 
 
 \LMHash{}
@@ -7665,7 +7678,7 @@
 
 \begin{tabular}{| r | r | r | r |}
 \hline
-Description &  Operator & Associativity & Precedence \\ 
+Description &  Operator & Associativity & Precedence \\
 \hline
 Unary postfix &  ., ?., e++, e--, e1[e2], e1() , () & None & 16 \\
 \hline
@@ -7674,7 +7687,7 @@
 Multiplicative & *, /, \~/,  \%  & Left & 14\\
 \hline
 Additive & +, - & Left & 13\\
-\hline 
+\hline
 Shift &  $<<$, $>>$&  Left & 12\\
 \hline
 Bitwise AND & \& & Left & 11\\
@@ -7710,7 +7723,7 @@
 \LMLabel{namingConventions}
 
 \commentary{
-The following naming conventions are customary in Dart programs. 
+The following naming conventions are customary in Dart programs.
 \begin{itemize}
 \item The names of compile time constant variables never use lowercase letters. If they consist of multiple words, those words are separated by underscores. Examples: PI, I\_AM\_A\_CONSTANT.
 \item The names of functions (including getters, setters, methods and local or library functions) and non-constant variables begin with a lowercase letter. If the name consists of multiple words, each word (except the first) begins with an uppercase letter.  No other uppercase letters are used. Examples: camelCase, dart4TheWorld.
diff --git a/pkg/analysis_server/benchmark/perf/benchmark_local.dart b/pkg/analysis_server/benchmark/perf/benchmark_local.dart
new file mode 100644
index 0000000..8cbdb89
--- /dev/null
+++ b/pkg/analysis_server/benchmark/perf/benchmark_local.dart
@@ -0,0 +1,272 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library server.performance.local;
+
+import 'dart:async';
+
+import 'package:analysis_server/plugin/protocol/protocol.dart';
+
+import 'benchmark_scenario.dart';
+import 'memory_tests.dart';
+
+main(List<String> args) async {
+  int length = args.length;
+  if (length < 1) {
+    print(
+        'Usage: dart benchmark_local.dart path_to_sdk_checkout [path_to_flutter_checkout]');
+    return;
+  } else if (length == 1) {
+    paths = new PathHolder(sdkPath: args[0]);
+  } else {
+    paths = new PathHolder(sdkPath: args[0], flutterPath: args[1]);
+  }
+  String now = new DateTime.now().toUtc().toIso8601String();
+  print('Benchmark started: $now');
+  print('');
+  print('');
+  await run_local_initialAnalysis_1();
+  await run_local_initialAnalysis_2();
+  await run_local_initialAnalysis_3();
+  await run_local_change_1();
+  await run_local_change_2();
+  await run_local_completion_1();
+  await run_local_completion_2();
+  await run_local_completion_3();
+  await run_local_completion_4();
+  await run_local_refactoring_1();
+
+  await run_memory_initialAnalysis_1();
+  await run_memory_initialAnalysis_2();
+}
+
+PathHolder paths;
+
+Future run_local_change_1() async {
+  String id = 'local-change-1';
+  String description = r'''
+1. Open 'analyzer'.
+2. Change a method body in src/task/dart.dart.
+3. Measure the time to finish analysis.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario().waitAnalyze_change_analyze(
+      roots: [paths.analyzer],
+      file: '${paths.analyzer}/lib/src/task/dart.dart',
+      fileChange: new FileChange(
+          afterStr: 'if (hasDirectiveChange) {', insertStr: 'print(12345);'),
+      numOfRepeats: 10);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_change_2() async {
+  String id = 'local-change-2';
+  String description = r'''
+1. Open 'analyzer'.
+2. Change the name of a public method in src/task/dart.dart.
+3. Measure the time to finish analysis.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario().waitAnalyze_change_analyze(
+      roots: [paths.analyzer],
+      file: '${paths.analyzer}/lib/src/task/dart.dart',
+      fileChange: new FileChange(
+          afterStr: 'resolveDirective(An',
+          afterStrBack: 3,
+          insertStr: 'NewName'),
+      numOfRepeats: 5);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_completion_1() async {
+  String id = 'local-completion-1';
+  String description = r'''
+1. Open 'analyzer'.
+2. Change a method body in src/task/dart.dart.
+3. Request code completion in this method and measure time to get results.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario()
+      .waitAnalyze_change_getCompletion(
+          roots: [paths.analyzer],
+          file: '${paths.analyzer}/lib/src/task/dart.dart',
+          fileChange: new FileChange(
+              afterStr: 'if (hasDirectiveChange) {',
+              insertStr: 'print(12345);'),
+          completeAfterStr: 'print(12345);',
+          numOfRepeats: 10);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_completion_2() async {
+  String id = 'local-completion-2';
+  String description = r'''
+1. Open 'analyzer'.
+2. Change the name of a public method in src/task/dart.dart.
+3. Request code completion in this method and measure time to get results.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario()
+      .waitAnalyze_change_getCompletion(
+          roots: [paths.analyzer],
+          file: '${paths.analyzer}/lib/src/task/dart.dart',
+          fileChange: new FileChange(
+              afterStr: 'DeltaResult validate(In',
+              afterStrBack: 3,
+              insertStr: 'NewName'),
+          completeAfterStr: 'if (hasDirectiveChange) {',
+          numOfRepeats: 5);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_completion_3() async {
+  String id = 'local-completion-3';
+  String description = r'''
+1. Open 'analysis_server' and 'analyzer'.
+2. Change a method body in src/task/dart.dart.
+3. Request code completion in this method and measure time to get results.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario()
+      .waitAnalyze_change_getCompletion(
+          roots: [paths.analysisServer, paths.analyzer],
+          file: '${paths.analyzer}/lib/src/task/dart.dart',
+          fileChange: new FileChange(
+              afterStr: 'if (hasDirectiveChange) {',
+              insertStr: 'print(12345);'),
+          completeAfterStr: 'print(12345);',
+          numOfRepeats: 10);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_completion_4() async {
+  String id = 'local-completion-4';
+  String description = r'''
+1. Open 'analysis_server' and 'analyzer'.
+2. Change the name of a public method in src/task/dart.dart.
+3. Request code completion in this method and measure time to get results.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario()
+      .waitAnalyze_change_getCompletion(
+          roots: [paths.analysisServer, paths.analyzer],
+          file: '${paths.analyzer}/lib/src/task/dart.dart',
+          fileChange: new FileChange(
+              afterStr: 'DeltaResult validate(In',
+              afterStrBack: 3,
+              insertStr: 'NewName'),
+          completeAfterStr: 'if (hasDirectiveChange) {',
+          numOfRepeats: 5);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_initialAnalysis_1() async {
+  String id = 'local-initialAnalysis-1';
+  String description = r'''
+1. Start server, set 'analyzer' analysis root.
+2. Measure the time to finish initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> times = await BenchmarkScenario.start_waitInitialAnalysis_shutdown(
+      roots: [paths.analyzer], numOfRepeats: 3);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_initialAnalysis_2() async {
+  String id = 'local-initialAnalysis-2';
+  String description = r'''
+1. Start server, set 'analyzer' and 'analysis_server' analysis roots.
+2. Measure the time to finish initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> times = await BenchmarkScenario.start_waitInitialAnalysis_shutdown(
+      roots: [paths.analyzer, paths.analysisServer], numOfRepeats: 3);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_initialAnalysis_3() async {
+  String id = 'local-initialAnalysis-3';
+  String description = r'''
+1. Start server, set 'hello_world' and 'stocks' analysis roots.
+2. Measure the time to finish initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> times = await BenchmarkScenario.start_waitInitialAnalysis_shutdown(
+      roots: [paths.flutterHelloWorld, paths.flutterStocks], numOfRepeats: 3);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_local_refactoring_1() async {
+  String id = 'local-refactoring-1';
+  String description = r'''
+1. Open 'analyzer'.
+2. Change the name of a public method in src/context/cache.dart.
+3. Request rename refactoring for `getSourcesWithFullName` and measure time to get results.
+4. Rollback changes to the file and wait for analysis.
+5. Go to (2).
+''';
+  List<int> times = await new BenchmarkScenario()
+      .waitAnalyze_change_getRefactoring(
+          roots: [paths.analyzer],
+          file: '${paths.analyzer}/lib/src/context/cache.dart',
+          fileChange: new FileChange(
+              afterStr: 'getState(An', afterStrBack: 3, insertStr: 'NewName'),
+          refactoringAtStr: 'getSourcesWithFullName(String path)',
+          refactoringKind: RefactoringKind.RENAME,
+          refactoringOptions: new RenameOptions('getSourcesWithFullName2'),
+          numOfRepeats: 5);
+  printBenchmarkResults(id, description, times);
+}
+
+Future run_memory_initialAnalysis_1() async {
+  String id = 'memory-initialAnalysis-1';
+  String description = r'''
+1. Start server, set 'analyzer' and 'analysis_server' analysis roots.
+2. Measure the memory usage after finishing initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> sizes = await AnalysisServerMemoryUsageTest
+      .start_waitInitialAnalysis_shutdown(
+          roots: <String>[paths.analyzer], numOfRepeats: 3);
+  printMemoryResults(id, description, sizes);
+}
+
+Future run_memory_initialAnalysis_2() async {
+  String id = 'memory-initialAnalysis-2';
+  String description = r'''
+1. Start server, set 'analyzer' and 'analysis_server' analysis roots.
+2. Measure the memory usage after finishing initial analysis.
+3. Shutdown the server.
+4. Go to (1).
+''';
+  List<int> sizes = await AnalysisServerMemoryUsageTest
+      .start_waitInitialAnalysis_shutdown(
+          roots: <String>[paths.analyzer, paths.analysisServer],
+          numOfRepeats: 3);
+  printMemoryResults(id, description, sizes);
+}
+
+class PathHolder {
+  String analysisServer;
+  String analyzer;
+  String flutterHelloWorld;
+  String flutterStocks;
+
+  PathHolder({String sdkPath, String flutterPath}) {
+    analysisServer = '$sdkPath/pkg/analysis_server';
+    analyzer = '$sdkPath/pkg/analyzer';
+    flutterHelloWorld = '$flutterPath/examples/hello_world';
+    flutterStocks = '$flutterPath/examples/stocks';
+  }
+}
diff --git a/pkg/analysis_server/benchmark/perf/benchmark_scenario.dart b/pkg/analysis_server/benchmark/perf/benchmark_scenario.dart
new file mode 100644
index 0000000..aac844b
--- /dev/null
+++ b/pkg/analysis_server/benchmark/perf/benchmark_scenario.dart
@@ -0,0 +1,287 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library server.performance.scenarios;
+
+import 'dart:async';
+import 'dart:io';
+
+import 'package:analysis_server/plugin/protocol/protocol.dart';
+import 'package:unittest/unittest.dart';
+
+import 'performance_tests.dart';
+
+void printBenchmarkResults(String id, String description, List<int> times) {
+  String now = new DateTime.now().toUtc().toIso8601String();
+  print('$now ========== $id');
+  print('times: $times');
+  print(description.trim());
+  print('--------------------');
+  print('');
+  print('');
+}
+
+class BenchmarkScenario extends AbstractTimingTest {
+  /**
+   * Init.
+   *  - Start Analysis Server.
+   *  - Set the analysis [roots].
+   *  - Wait for analysis to complete.
+   *  - Make [file] the priority file.
+   *
+   * Measurement.
+   *  - Change the [file] according to the [fileChange].
+   *  - Record the time to finish analysis.
+   *
+   * Repeat.
+   *  - Undo changes to the [file].
+   *  - Repeat measurement [numOfRepeats] times.
+   */
+  Future<List<int>> waitAnalyze_change_analyze(
+      {List<String> roots,
+      String file,
+      FileChange fileChange,
+      int numOfRepeats}) async {
+    expect(roots, isNotNull, reason: 'roots');
+    expect(file, isNotNull, reason: 'file');
+    expect(fileChange, isNotNull, reason: 'fileChange');
+    expect(numOfRepeats, isNotNull, reason: 'numOfRepeats');
+    // Initialize Analysis Server.
+    await super.setUp();
+    await subscribeToStatusNotifications();
+    // Set roots and analyze.
+    await sendAnalysisSetAnalysisRoots(roots, []);
+    await analysisFinished;
+    // Make the file priority.
+    await sendAnalysisSetPriorityFiles([file]);
+    // Repeat.
+    List<int> times = <int>[];
+    for (int i = 0; i < numOfRepeats; i++) {
+      // Update and wait for analysis.
+      Stopwatch stopwatch = new Stopwatch()..start();
+      await _applyFileChange(file, fileChange);
+      await analysisFinished;
+      times.add(stopwatch.elapsed.inMilliseconds);
+      // Remove the overlay and analyze.
+      await sendAnalysisUpdateContent({file: new RemoveContentOverlay()});
+      await analysisFinished;
+    }
+    // Done.
+    await shutdown();
+    return times;
+  }
+
+  /**
+   * Init.
+   * 1. Start Analysis Server.
+   * 2. Set the analysis [roots].
+   * 3. Wait for analysis to complete.
+   * 4. Make [file] the priority file.
+   *
+   * Measurement.
+   * 5. Change the [file] according to the [fileChange].
+   * 6. Request [completeAfterStr] in the updated file content.
+   * 7. Record the time to get completion results.
+   * 8. Undo changes to the [file] and analyze.
+   * 9. Go to (5).
+   */
+  Future<List<int>> waitAnalyze_change_getCompletion(
+      {List<String> roots,
+      String file,
+      FileChange fileChange,
+      String completeAfterStr,
+      int numOfRepeats}) async {
+    expect(roots, isNotNull, reason: 'roots');
+    expect(file, isNotNull, reason: 'file');
+    expect(fileChange, isNotNull, reason: 'fileChange');
+    expect(completeAfterStr, isNotNull, reason: 'completeAfterStr');
+    expect(numOfRepeats, isNotNull, reason: 'numOfRepeats');
+    // Initialize Analysis Server.
+    await super.setUp();
+    await subscribeToStatusNotifications();
+    // Set roots and analyze.
+    await sendAnalysisSetAnalysisRoots(roots, []);
+    await analysisFinished;
+    // Make the file priority.
+    await sendAnalysisSetPriorityFiles([file]);
+    // Repeat.
+    List<int> times = <int>[];
+    for (int i = 0; i < numOfRepeats; i++) {
+      String updatedContent = await _applyFileChange(file, fileChange);
+      // Measure completion time.
+      int completionOffset =
+          _indexOfEnd(file, updatedContent, completeAfterStr);
+      Duration completionDuration =
+          await _measureCompletionTime(file, completionOffset);
+      times.add(completionDuration.inMilliseconds);
+      // Remove the overlay and analyze.
+      await sendAnalysisUpdateContent({file: new RemoveContentOverlay()});
+      await analysisFinished;
+    }
+    // Done.
+    await shutdown();
+    return times;
+  }
+
+  /**
+   * Init.
+   * 1. Start Analysis Server.
+   * 2. Set the analysis [roots].
+   * 3. Wait for analysis to complete.
+   * 4. Make [file] the priority file.
+   *
+   * Measurement.
+   * 5. Change the [file] according to the [fileChange].
+   * 6. Request [refactoringAtStr] in the updated file content.
+   * 7. Record the time to get refactoring.
+   * 8. Undo changes to the [file] and analyze.
+   * 9. Go to (5).
+   */
+  Future<List<int>> waitAnalyze_change_getRefactoring(
+      {List<String> roots,
+      String file,
+      FileChange fileChange,
+      String refactoringAtStr,
+      RefactoringKind refactoringKind,
+      RefactoringOptions refactoringOptions,
+      int numOfRepeats}) async {
+    expect(roots, isNotNull, reason: 'roots');
+    expect(file, isNotNull, reason: 'file');
+    expect(fileChange, isNotNull, reason: 'fileChange');
+    expect(refactoringAtStr, isNotNull, reason: 'refactoringAtStr');
+    expect(refactoringKind, isNotNull, reason: 'refactoringKind');
+    expect(refactoringOptions, isNotNull, reason: 'refactoringOptions');
+    expect(numOfRepeats, isNotNull, reason: 'numOfRepeats');
+    // Initialize Analysis Server.
+    await super.setUp();
+    await subscribeToStatusNotifications();
+    // Set roots and analyze.
+    await sendAnalysisSetAnalysisRoots(roots, []);
+    await analysisFinished;
+    // Make the file priority.
+    await sendAnalysisSetPriorityFiles([file]);
+    // Repeat.
+    List<int> times = <int>[];
+    for (int i = 0; i < numOfRepeats; i++) {
+      String updatedContent = await _applyFileChange(file, fileChange);
+      // Measure time to get refactoring.
+      int refactoringOffset = _indexOf(file, updatedContent, refactoringAtStr);
+      Duration refactoringDuration = await _measureRefactoringTime(
+          file, refactoringOffset, refactoringKind, refactoringOptions);
+      times.add(refactoringDuration.inMilliseconds);
+      // Remove the overlay and analyze.
+      await sendAnalysisUpdateContent({file: new RemoveContentOverlay()});
+      await analysisFinished;
+    }
+    // Done.
+    await shutdown();
+    return times;
+  }
+
+  /**
+   * Compute updated content of the [file] as described by [desc], add overlay
+   * for the [file], and return the updated content.
+   */
+  Future<String> _applyFileChange(String file, FileChange desc) async {
+    String originalContent = _getFileContent(file);
+    int offset = _indexOfEnd(file, originalContent, desc.afterStr);
+    offset -= desc.afterStrBack;
+    String updatedContent = originalContent.substring(0, offset) +
+        desc.insertStr +
+        originalContent.substring(offset);
+    await sendAnalysisUpdateContent(
+        {file: new AddContentOverlay(updatedContent)});
+    return updatedContent;
+  }
+
+  Future<Duration> _measureCompletionTime(String file, int offset) async {
+    Stopwatch stopwatch = new Stopwatch();
+    stopwatch.start();
+    Completer<Duration> completer = new Completer<Duration>();
+    var completionSubscription = onCompletionResults.listen((_) {
+      completer.complete(stopwatch.elapsed);
+    });
+    try {
+      await sendCompletionGetSuggestions(file, offset);
+      return await completer.future;
+    } finally {
+      completionSubscription.cancel();
+    }
+  }
+
+  Future<Duration> _measureRefactoringTime(
+      String file,
+      int offset,
+      RefactoringKind refactoringKind,
+      RefactoringOptions refactoringOptions) async {
+    Stopwatch stopwatch = new Stopwatch();
+    stopwatch.start();
+    await sendEditGetRefactoring(refactoringKind, file, offset, 0, false,
+        options: refactoringOptions);
+    return stopwatch.elapsed;
+  }
+
+  /**
+   *  1. Start Analysis Server.
+   *  2. Set the analysis [roots].
+   *  3. Wait for analysis to complete.
+   *  4. Record the time to finish analysis.
+   *  5. Shutdown.
+   *  6. Go to (1).
+   */
+  static Future<List<int>> start_waitInitialAnalysis_shutdown(
+      {List<String> roots, int numOfRepeats}) async {
+    expect(roots, isNotNull, reason: 'roots');
+    expect(numOfRepeats, isNotNull, reason: 'numOfRepeats');
+    // Repeat.
+    List<int> times = <int>[];
+    for (int i = 0; i < numOfRepeats; i++) {
+      BenchmarkScenario instance = new BenchmarkScenario();
+      // Initialize Analysis Server.
+      await instance.setUp();
+      await instance.subscribeToStatusNotifications();
+      // Set roots and analyze.
+      Stopwatch stopwatch = new Stopwatch()..start();
+      await instance.sendAnalysisSetAnalysisRoots(roots, []);
+      await instance.analysisFinished;
+      times.add(stopwatch.elapsed.inMilliseconds);
+      // Stop the server.
+      await instance.shutdown();
+    }
+    return times;
+  }
+
+  static String _getFileContent(String path) {
+    File file = new File(path);
+    expect(file.existsSync(), isTrue, reason: 'File $path does not exist.');
+    return file.readAsStringSync();
+  }
+
+  /**
+   * Return the index of [what] in [where] in the [file], fail if not found.
+   */
+  static int _indexOf(String file, String where, String what) {
+    int index = where.indexOf(what);
+    expect(index, isNot(-1), reason: 'Cannot find |$what| in $file.');
+    return index;
+  }
+
+  /**
+   * Return the end index of [what] in [where] in the [file], fail if not found.
+   */
+  static int _indexOfEnd(String file, String where, String what) {
+    return _indexOf(file, where, what) + what.length;
+  }
+}
+
+class FileChange {
+  final String afterStr;
+  final int afterStrBack;
+  final String insertStr;
+
+  FileChange({this.afterStr, this.afterStrBack: 0, this.insertStr}) {
+    expect(afterStr, isNotNull, reason: 'afterStr');
+    expect(insertStr, isNotNull, reason: 'insertStr');
+  }
+}
diff --git a/pkg/analysis_server/benchmark/perf/memory_tests.dart b/pkg/analysis_server/benchmark/perf/memory_tests.dart
new file mode 100644
index 0000000..8522430
--- /dev/null
+++ b/pkg/analysis_server/benchmark/perf/memory_tests.dart
@@ -0,0 +1,125 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:io';
+
+import 'package:analysis_server/plugin/protocol/protocol.dart';
+import 'package:unittest/unittest.dart';
+
+import '../../test/integration/integration_tests.dart';
+
+void printMemoryResults(String id, String description, List<int> sizes) {
+  String now = new DateTime.now().toUtc().toIso8601String();
+  print('$now ========== $id');
+  print('memory: $sizes');
+  print(description.trim());
+  print('--------------------');
+  print('');
+  print('');
+}
+
+/**
+ * Base class for analysis server memory usage tests.
+ */
+class AnalysisServerMemoryUsageTest
+    extends AbstractAnalysisServerIntegrationTest {
+  static const int vmServicePort = 12345;
+
+  int getMemoryUsage() {
+    ProcessResult result = _run('curl', <String>[
+      'localhost:$vmServicePort/_getAllocationProfile\?isolateId=isolates/root\&gc=full'
+    ]);
+    Map json = JSON.decode(result.stdout);
+    Map heaps = json['result']['heaps'];
+    int newSpace = heaps['new']['used'];
+    int oldSpace = heaps['old']['used'];
+    return newSpace + oldSpace;
+  }
+
+  /**
+   * Send the server an 'analysis.setAnalysisRoots' command directing it to
+   * analyze [sourceDirectory].
+   */
+  Future setAnalysisRoot() =>
+      sendAnalysisSetAnalysisRoots([sourceDirectory.path], []);
+
+  /**
+   * The server is automatically started before every test.
+   */
+  @override
+  Future setUp() {
+    onAnalysisErrors.listen((AnalysisErrorsParams params) {
+      currentAnalysisErrors[params.file] = params.errors;
+    });
+    onServerError.listen((ServerErrorParams params) {
+      // A server error should never happen during an integration test.
+      fail('${params.message}\n${params.stackTrace}');
+    });
+    Completer serverConnected = new Completer();
+    onServerConnected.listen((_) {
+      expect(serverConnected.isCompleted, isFalse);
+      serverConnected.complete();
+    });
+    return startServer(servicesPort: vmServicePort).then((_) {
+      server.listenToOutput(dispatchNotification);
+      server.exitCode.then((_) {
+        skipShutdown = true;
+      });
+      return serverConnected.future;
+    });
+  }
+
+  /**
+   * After every test, the server is stopped.
+   */
+  Future shutdown() async => await shutdownIfNeeded();
+
+  /**
+   * Enable [ServerService.STATUS] notifications so that [analysisFinished]
+   * can be used.
+   */
+  Future subscribeToStatusNotifications() async {
+    await sendServerSetSubscriptions([ServerService.STATUS]);
+  }
+
+  /**
+   * Synchronously run the given [executable] with the given [arguments]. Return
+   * the result of running the process.
+   */
+  ProcessResult _run(String executable, List<String> arguments) {
+    return Process.runSync(executable, arguments,
+        stderrEncoding: UTF8, stdoutEncoding: UTF8);
+  }
+
+  /**
+   *  1. Start Analysis Server.
+   *  2. Set the analysis [roots].
+   *  3. Wait for analysis to complete.
+   *  4. Record the memory usage.
+   *  5. Shutdown.
+   *  6. Go to (1).
+   */
+  static Future<List<int>> start_waitInitialAnalysis_shutdown(
+      {List<String> roots, int numOfRepeats}) async {
+    expect(roots, isNotNull, reason: 'roots');
+    expect(numOfRepeats, isNotNull, reason: 'numOfRepeats');
+    // Repeat.
+    List<int> sizes = <int>[];
+    for (int i = 0; i < numOfRepeats; i++) {
+      AnalysisServerMemoryUsageTest test = new AnalysisServerMemoryUsageTest();
+      // Initialize Analysis Server.
+      await test.setUp();
+      await test.subscribeToStatusNotifications();
+      // Set roots and analyze.
+      await test.sendAnalysisSetAnalysisRoots(roots, []);
+      await test.analysisFinished;
+      sizes.add(test.getMemoryUsage());
+      // Stop the server.
+      await test.shutdown();
+    }
+    return sizes;
+  }
+}
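
A minimal sketch of a driver for the memory benchmark above, using only the helpers this file defines; the analysis root and benchmark id are hypothetical, and getMemoryUsage assumes the VM service is reachable on port 12345 via curl, as coded above:

import 'memory_tests.dart';

main() async {
  // Run three cold starts and record heap usage after initial analysis.
  List<int> sizes =
      await AnalysisServerMemoryUsageTest.start_waitInitialAnalysis_shutdown(
          roots: ['/path/to/project'], numOfRepeats: 3);
  printMemoryResults(
      'analysis-server-initial-analysis',
      'Heap usage (new + old space) after analysis of the roots completes.',
      sizes);
}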
diff --git a/pkg/analysis_server/benchmark/perf/performance_tests.dart b/pkg/analysis_server/benchmark/perf/performance_tests.dart
index b784c0c..6d301c9 100644
--- a/pkg/analysis_server/benchmark/perf/performance_tests.dart
+++ b/pkg/analysis_server/benchmark/perf/performance_tests.dart
@@ -61,13 +61,11 @@
   Future shutdown() async => await shutdownIfNeeded();
 
   /**
-   * Enable [SERVER_STATUS] notifications so that [analysisFinished]
+   * Enable [ServerService.STATUS] notifications so that [analysisFinished]
    * can be used.
    */
-  Future subscribeToStatusNotifications() {
-    List<Future> futures = <Future>[];
-    futures.add(sendServerSetSubscriptions([ServerService.STATUS]));
-    return Future.wait(futures);
+  Future subscribeToStatusNotifications() async {
+    await sendServerSetSubscriptions([ServerService.STATUS]);
   }
 }
 
diff --git a/pkg/analysis_server/doc/api.html b/pkg/analysis_server/doc/api.html
index 0c6b2dd..343d50b 100644
--- a/pkg/analysis_server/doc/api.html
+++ b/pkg/analysis_server/doc/api.html
@@ -61,7 +61,7 @@
 </style></head>
   <body>
     <h1>Analysis Server API Specification</h1>
-    <h1 style="color:#999999">Version 1.15.0</h1>
+    <h1 style="color:#999999">Version 1.16.0</h1>
     <p>
       This document contains a specification of the API provided by the
       analysis server.  The API in this document is currently under
@@ -305,6 +305,7 @@
   "event": "server.connected"
   "params": {
     "<b>version</b>": String
+    "<b>pid</b>": int
   }
 }</pre></div>
         <p>
@@ -321,7 +322,10 @@
       <h4>Parameters</h4><dl><dt class="field"><b><i>version ( String )</i></b></dt><dd>
             
             <p>The version number of the analysis server.</p>
-          </dd></dl></dd><dt class="notification"><a name="notification_server.error">server.error</a> (<a href="#notification_server.error">#</a>)</dt><dd><div class="box"><pre>notification: {
+          </dd><dt class="field"><b><i>pid ( int )</i></b></dt><dd>
+           
+           <p>The process id of the analysis server process.</p>
+         </dd></dl></dd><dt class="notification"><a name="notification_server.error">server.error</a> (<a href="#notification_server.error">#</a>)</dt><dd><div class="box"><pre>notification: {
   "event": "server.error"
   "params": {
     "<b>isFatal</b>": bool
diff --git a/pkg/analysis_server/lib/plugin/protocol/generated_protocol.dart b/pkg/analysis_server/lib/plugin/protocol/generated_protocol.dart
index c547af5..4b8fe8b 100644
--- a/pkg/analysis_server/lib/plugin/protocol/generated_protocol.dart
+++ b/pkg/analysis_server/lib/plugin/protocol/generated_protocol.dart
@@ -266,6 +266,7 @@
  *
  * {
  *   "version": String
+ *   "pid": int
  * }
  *
  * Clients may not extend, implement or mix-in this class.
@@ -273,6 +274,8 @@
 class ServerConnectedParams implements HasToJson {
   String _version;
 
+  int _pid;
+
   /**
    * The version number of the analysis server.
    */
@@ -286,8 +289,22 @@
     this._version = value;
   }
 
-  ServerConnectedParams(String version) {
+  /**
+   * The process id of the analysis server process.
+   */
+  int get pid => _pid;
+
+  /**
+   * The process id of the analysis server process.
+   */
+  void set pid(int value) {
+    assert(value != null);
+    this._pid = value;
+  }
+
+  ServerConnectedParams(String version, int pid) {
     this.version = version;
+    this.pid = pid;
   }
 
   factory ServerConnectedParams.fromJson(JsonDecoder jsonDecoder, String jsonPath, Object json) {
@@ -301,7 +318,13 @@
       } else {
         throw jsonDecoder.missingKey(jsonPath, "version");
       }
-      return new ServerConnectedParams(version);
+      int pid;
+      if (json.containsKey("pid")) {
+        pid = jsonDecoder.decodeInt(jsonPath + ".pid", json["pid"]);
+      } else {
+        throw jsonDecoder.missingKey(jsonPath, "pid");
+      }
+      return new ServerConnectedParams(version, pid);
     } else {
       throw jsonDecoder.mismatch(jsonPath, "server.connected params", json);
     }
@@ -315,6 +338,7 @@
   Map<String, dynamic> toJson() {
     Map<String, dynamic> result = {};
     result["version"] = version;
+    result["pid"] = pid;
     return result;
   }
 
@@ -328,7 +352,8 @@
   @override
   bool operator==(other) {
     if (other is ServerConnectedParams) {
-      return version == other.version;
+      return version == other.version &&
+          pid == other.pid;
     }
     return false;
   }
@@ -337,6 +362,7 @@
   int get hashCode {
     int hash = 0;
     hash = JenkinsSmiHash.combine(hash, version.hashCode);
+    hash = JenkinsSmiHash.combine(hash, pid.hashCode);
     return JenkinsSmiHash.finish(hash);
   }
 }
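
A minimal sketch of the extended server.connected payload generated above; the version string is hypothetical, and io.pid mirrors how analysis_server.dart (later in this diff) fills the new field. The import assumes generated_protocol.dart is exposed through the public protocol library, as other code in this diff does:

import 'dart:io' as io;

import 'package:analysis_server/plugin/protocol/protocol.dart';

void main() {
  ServerConnectedParams params = new ServerConnectedParams('1.16.0', io.pid);
  // toJson() now carries both fields, e.g. {"version": "1.16.0", "pid": 4242}.
  print(params.toJson());
  // The server itself sends this as a notification:
  //   new ServerConnectedParams(VERSION, io.pid).toNotification()
}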
diff --git a/pkg/analysis_server/lib/src/analysis_manager.dart b/pkg/analysis_server/lib/src/analysis_manager.dart
index 126dbb8..28c7488 100644
--- a/pkg/analysis_server/lib/src/analysis_manager.dart
+++ b/pkg/analysis_server/lib/src/analysis_manager.dart
@@ -117,6 +117,7 @@
     } catch (error) {
       onError(error);
     }
+    return null;
   }
 
   /**
diff --git a/pkg/analysis_server/lib/src/analysis_server.dart b/pkg/analysis_server/lib/src/analysis_server.dart
index ed02817..7e695a2 100644
--- a/pkg/analysis_server/lib/src/analysis_server.dart
+++ b/pkg/analysis_server/lib/src/analysis_server.dart
@@ -7,6 +7,7 @@
 import 'dart:async';
 import 'dart:collection';
 import 'dart:core' hide Resource;
+import 'dart:io' as io;
 import 'dart:math' show max;
 
 import 'package:analysis_server/plugin/protocol/protocol.dart'
@@ -27,7 +28,6 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/source/embedder.dart';
 import 'package:analyzer/source/pub_package_map_provider.dart';
@@ -43,6 +43,7 @@
 import 'package:analyzer/src/util/glob.dart';
 import 'package:analyzer/task/dart.dart';
 import 'package:plugin/plugin.dart';
+import 'package:yaml/yaml.dart';
 
 typedef void OptionUpdater(AnalysisOptionsImpl options);
 
@@ -151,11 +152,6 @@
   List<RequestHandler> handlers;
 
   /**
-   * The function used to create a new SDK using the default SDK.
-   */
-  final SdkCreator defaultSdkCreator;
-
-  /**
    * The object used to manage the SDK's known to this server.
    */
   DartSdkManager sdkManager;
@@ -296,6 +292,12 @@
       new StreamController<ContextsChangedEvent>.broadcast();
 
   /**
+   * The file resolver provider used to override the way file URI's are
+   * resolved in some contexts.
+   */
+  ResolverProvider fileResolverProvider;
+
+  /**
    * Initialize a newly created server to receive requests from and send
    * responses to the given [channel].
    *
@@ -311,9 +313,9 @@
       Index _index,
       this.serverPlugin,
       this.options,
-      this.defaultSdkCreator,
+      this.sdkManager,
       this.instrumentationService,
-      {EmbeddedResolverProvider embeddedResolverProvider: null,
+      {ResolverProvider fileResolverProvider: null,
       ResolverProvider packageResolverProvider: null,
       bool useSingleContextManager: false,
       this.rethrowExceptions: true})
@@ -325,9 +327,10 @@
         options.enableIncrementalResolutionApi;
     defaultContextOptions.incrementalValidation =
         options.enableIncrementalResolutionValidation;
+    defaultContextOptions.finerGrainedInvalidation =
+        options.finerGrainedInvalidation;
     defaultContextOptions.generateImplicitErrors = false;
     operationQueue = new ServerOperationQueue();
-    sdkManager = new DartSdkManager(defaultSdkCreator);
     if (useSingleContextManager) {
       contextManager = new SingleContextManager(resourceProvider, sdkManager,
           packageResolverProvider, analyzedFilesGlobs, defaultContextOptions);
@@ -336,12 +339,12 @@
           resourceProvider,
           sdkManager,
           packageResolverProvider,
-          embeddedResolverProvider,
           packageMapProvider,
           analyzedFilesGlobs,
           instrumentationService,
           defaultContextOptions);
     }
+    this.fileResolverProvider = fileResolverProvider;
     ServerContextManagerCallbacks contextManagerCallbacks =
         new ServerContextManagerCallbacks(this, resourceProvider);
     contextManager.callbacks = contextManagerCallbacks;
@@ -360,7 +363,7 @@
     });
     _setupIndexInvalidation();
     Notification notification =
-        new ServerConnectedParams(VERSION).toNotification();
+        new ServerConnectedParams(VERSION, io.pid).toNotification();
     channel.sendNotification(notification);
     channel.listen(handleRequest, onDone: done, onError: error);
     handlers = serverPlugin.createDomains(this);
@@ -1506,6 +1509,7 @@
 class AnalysisServerOptions {
   bool enableIncrementalResolutionApi = false;
   bool enableIncrementalResolutionValidation = false;
+  bool finerGrainedInvalidation = false;
   bool noErrorNotification = false;
   bool noIndex = false;
   bool useAnalysisHighlight2 = false;
@@ -1559,13 +1563,14 @@
         AnalysisEngine.instance.createAnalysisContext();
     context.contentCache = analysisServer.overlayState;
     analysisServer.folderMap[folder] = context;
-    _locateEmbedderYamls(context, disposition);
+    context.fileResolverProvider = analysisServer.fileResolverProvider;
     context.sourceFactory =
         _createSourceFactory(context, options, disposition, folder);
     context.analysisOptions = options;
     analysisServer._onContextsChangedController
         .add(new ContextsChangedEvent(added: [context]));
     analysisServer.schedulePerformAnalysisOperation(context);
+
     return context;
   }
 
@@ -1626,48 +1631,42 @@
     List<UriResolver> packageUriResolvers =
         disposition.createPackageUriResolvers(resourceProvider);
 
-    EmbedderUriResolver embedderUriResolver;
-
-    // First check for a resolver provider.
-    ContextManager contextManager = analysisServer.contextManager;
-    if (contextManager is ContextManagerImpl) {
-      EmbeddedResolverProvider resolverProvider =
-          contextManager.embeddedUriResolverProvider;
-      if (resolverProvider != null) {
-        embedderUriResolver = resolverProvider(folder);
-      }
-    }
-
     // If no embedded URI resolver was provided, defer to a locator-backed one.
-    embedderUriResolver ??= new EmbedderUriResolver(
-        context.embedderYamlLocator.embedderYamls);
-    if (embedderUriResolver.length == 0) {
-      // The embedder uri resolver has no mappings. Use the default Dart SDK
-      // uri resolver.
+    EmbedderYamlLocator locator =
+        disposition.getEmbedderLocator(resourceProvider);
+    Map<Folder, YamlMap> embedderYamls = locator.embedderYamls;
+    EmbedderSdk embedderSdk = new EmbedderSdk(embedderYamls);
+    if (embedderSdk.libraryMap.size() == 0) {
+      // There was no embedder file, or the file was empty, so use the default
+      // SDK.
       resolvers.add(new DartUriResolver(
           analysisServer.sdkManager.getSdkForOptions(options)));
     } else {
-      // The embedder uri resolver has mappings, use it instead of the default
-      // Dart SDK uri resolver.
-      resolvers.add(embedderUriResolver);
+      // The embedder file defines an alternate SDK, so use it.
+      List<String> paths = <String>[];
+      for (Folder folder in embedderYamls.keys) {
+        paths.add(folder
+            .getChildAssumingFile(EmbedderYamlLocator.EMBEDDER_FILE_NAME)
+            .path);
+      }
+      DartSdk dartSdk = analysisServer.sdkManager
+          .getSdk(new SdkDescription(paths, options), () {
+        embedderSdk.analysisOptions = options;
+        // TODO(brianwilkerson) Enable summary use after we have decided where
+        // summary files for embedder files will live.
+        embedderSdk.useSummary = false;
+        return embedderSdk;
+      });
+      resolvers.add(new DartUriResolver(dartSdk));
     }
 
     resolvers.addAll(packageUriResolvers);
-    resolvers.add(new ResourceUriResolver(resourceProvider));
-    return new SourceFactory(resolvers, disposition.packages);
-  }
-
-  /// If [disposition] has a package map, attempt to locate `_embedder.yaml`
-  /// files.
-  void _locateEmbedderYamls(
-      InternalAnalysisContext context, FolderDisposition disposition) {
-    Map<String, List<Folder>> packageMap;
-    if (disposition is PackageMapDisposition) {
-      packageMap = disposition.packageMap;
-    } else if (disposition is PackagesFileDisposition) {
-      packageMap = disposition.buildPackageMap(resourceProvider);
+    if (context.fileResolverProvider == null) {
+      resolvers.add(new ResourceUriResolver(resourceProvider));
+    } else {
+      resolvers.add(context.fileResolverProvider(folder));
     }
-    context.embedderYamlLocator.refresh(packageMap);
+    return new SourceFactory(resolvers, disposition.packages);
   }
 }
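
The source-factory change above replaces the per-context EmbedderUriResolver with SDKs obtained from the shared DartSdkManager, keyed by an SdkDescription, so contexts with the same embedder files and analysis options share one SDK instance. A minimal sketch of that lookup pattern, using only calls that appear in this diff; the helper name and its parameters are illustrative:

import 'package:analyzer/source/embedder.dart';
import 'package:analyzer/src/generated/engine.dart';
import 'package:analyzer/src/generated/sdk.dart';

DartSdk chooseSdk(DartSdkManager sdkManager, AnalysisOptions options,
    List<String> embedderYamlPaths, EmbedderSdk embedderSdk) {
  if (embedderYamlPaths.isEmpty) {
    // No _embedder.yaml mappings: use the default SDK for these options.
    return sdkManager.getSdkForOptions(options);
  }
  // The callback runs only when no SDK is cached for this description.
  return sdkManager.getSdk(new SdkDescription(embedderYamlPaths, options), () {
    embedderSdk.analysisOptions = options;
    embedderSdk.useSummary = false;
    return embedderSdk;
  });
}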
 
diff --git a/pkg/analysis_server/lib/src/context_manager.dart b/pkg/analysis_server/lib/src/context_manager.dart
index 44c3595..585da46 100644
--- a/pkg/analysis_server/lib/src/context_manager.dart
+++ b/pkg/analysis_server/lib/src/context_manager.dart
@@ -12,7 +12,6 @@
 import 'package:analysis_server/src/analysis_server.dart';
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/options.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/source/analysis_options_provider.dart';
@@ -77,6 +76,11 @@
   String packageDescriptionPath;
 
   /**
+   * The folder disposition for this context.
+   */
+  final FolderDisposition disposition;
+
+  /**
    * Paths to files which determine the folder disposition and package map.
    *
    * TODO(paulberry): if any of these files are outside of [folder], they won't
@@ -97,7 +101,7 @@
   Map<String, Source> sources = new HashMap<String, Source>();
 
   ContextInfo(ContextManagerImpl contextManager, this.parent, Folder folder,
-      File packagespecFile, this.packageRoot)
+      File packagespecFile, this.packageRoot, this.disposition)
       : folder = folder,
         pathFilter = new PathFilter(
             folder.path, null, contextManager.resourceProvider.pathContext) {
@@ -111,7 +115,9 @@
    */
   ContextInfo._root()
       : folder = null,
-        pathFilter = null;
+        pathFilter = null,
+        packageRoot = null,
+        disposition = null;
 
   /**
    * Iterate through all [children] and their children, recursively.
@@ -408,13 +414,6 @@
   pathos.Context pathContext;
 
   /**
-   * A function that will return a [UriResolver] that can be used to resolve
-   * URI's for embedded libraries within a given folder, or `null` if we should
-   * fall back to the standard URI resolver.
-   */
-  final EmbeddedResolverProvider embeddedUriResolverProvider;
-
-  /**
    * The list of excluded paths (folders and files) most recently passed to
    * [setRoots].
    */
@@ -496,7 +495,6 @@
       this.resourceProvider,
       this.sdkManager,
       this.packageResolverProvider,
-      this.embeddedUriResolverProvider,
       this._packageMapProvider,
       this.analyzedFilesGlobs,
       this._instrumentationService,
@@ -592,13 +590,13 @@
       info.context.analysisOptions = new AnalysisOptionsImpl();
 
       // Apply inherited options.
-      options = _toStringMap(_getEmbeddedOptions(info.context));
+      options = _toStringMap(_getEmbeddedOptions(info));
       if (options != null) {
         configureContextOptions(info.context, options);
       }
     } else {
       // Check for embedded options.
-      Map embeddedOptions = _getEmbeddedOptions(info.context);
+      Map embeddedOptions = _getEmbeddedOptions(info);
       if (embeddedOptions != null) {
         options = _toStringMap(new Merger().merge(embeddedOptions, options));
       }
@@ -910,22 +908,19 @@
           // If there is no embedded URI resolver, a new source factory needs to
           // be recreated.
           if (sourceFactory is SourceFactoryImpl) {
-            if (!sourceFactory.resolvers
-                .any((UriResolver r) => r is EmbedderUriResolver)) {
-              // Get all but the dart: Uri resolver.
-              List<UriResolver> resolvers = sourceFactory.resolvers
-                  .where((r) => r is! DartUriResolver)
-                  .toList();
-              // Add an embedded URI resolver in its place.
-              resolvers.add(new EmbedderUriResolver(embedderYamls));
+            // Get all but the dart: Uri resolver.
+            List<UriResolver> resolvers = sourceFactory.resolvers
+                .where((r) => r is! DartUriResolver)
+                .toList();
+            // Add an embedded URI resolver in its place.
+            resolvers.add(new DartUriResolver(new EmbedderSdk(embedderYamls)));
 
-              // Set a new source factory.
-              SourceFactoryImpl newFactory = sourceFactory.clone();
-              newFactory.resolvers.clear();
-              newFactory.resolvers.addAll(resolvers);
-              info.context.sourceFactory = newFactory;
-              return;
-            }
+            // Set a new source factory.
+            SourceFactoryImpl newFactory = sourceFactory.clone();
+            newFactory.resolvers.clear();
+            newFactory.resolvers.addAll(resolvers);
+            info.context.sourceFactory = newFactory;
+            return;
           }
         }
 
@@ -1051,17 +1046,11 @@
    */
   ContextInfo _createContext(
       ContextInfo parent, Folder folder, File packagespecFile) {
-    ContextInfo info = new ContextInfo(this, parent, folder, packagespecFile,
-        normalizedPackageRoots[folder.path]);
-
-    FolderDisposition disposition;
     List<String> dependencies = <String>[];
-
-    // Next resort to a package uri resolver.
-    if (disposition == null) {
-      disposition =
-          _computeFolderDisposition(folder, dependencies.add, packagespecFile);
-    }
+    FolderDisposition disposition =
+        _computeFolderDisposition(folder, dependencies.add, packagespecFile);
+    ContextInfo info = new ContextInfo(this, parent, folder, packagespecFile,
+        normalizedPackageRoots[folder.path], disposition);
 
     Map<String, Object> optionMap = readOptions(info.folder);
     AnalysisOptions options =
@@ -1166,24 +1155,31 @@
     List<UriResolver> packageUriResolvers =
         disposition.createPackageUriResolvers(resourceProvider);
 
-    EmbedderUriResolver embedderUriResolver;
-
-    // First check for a resolver provider.
-    if (embeddedUriResolverProvider != null) {
-      embedderUriResolver = embeddedUriResolverProvider(folder);
-    }
-
-    // If no embedded URI resolver was provided, defer to a locator-backed one.
-    embedderUriResolver ??=
-        new EmbedderUriResolver(context.embedderYamlLocator.embedderYamls);
-    if (embedderUriResolver.length == 0) {
-      // The embedder uri resolver has no mappings. Use the default Dart SDK
-      // uri resolver.
+    EmbedderYamlLocator locator =
+        disposition.getEmbedderLocator(resourceProvider);
+    Map<Folder, YamlMap> embedderYamls = locator.embedderYamls;
+    EmbedderSdk embedderSdk = new EmbedderSdk(embedderYamls);
+    if (embedderSdk.libraryMap.size() == 0) {
+      // There was no embedder file, or the file was empty, so use the default
+      // SDK.
       resolvers.add(new DartUriResolver(sdkManager.getSdkForOptions(options)));
     } else {
-      // The embedder uri resolver has mappings, use it instead of the default
-      // Dart SDK uri resolver.
-      resolvers.add(embedderUriResolver);
+      // The embedder file defines an alternate SDK, so use it.
+      List<String> paths = <String>[];
+      for (Folder folder in embedderYamls.keys) {
+        paths.add(folder
+            .getChildAssumingFile(EmbedderYamlLocator.EMBEDDER_FILE_NAME)
+            .path);
+      }
+      DartSdk dartSdk =
+          sdkManager.getSdk(new SdkDescription(paths, options), () {
+        embedderSdk.analysisOptions = options;
+        // TODO(brianwilkerson) Enable summary use after we have decided where
+        // summary files for embedder files will live.
+        embedderSdk.useSummary = false;
+        return embedderSdk;
+      });
+      resolvers.add(new DartUriResolver(dartSdk));
     }
 
     resolvers.addAll(packageUriResolvers);
@@ -1263,29 +1259,26 @@
   /// skipped.
   ///
   /// Returns null if there are no embedded/configured options.
-  Map _getEmbeddedOptions(AnalysisContext context) {
-    Map embeddedOptions;
+  Map _getEmbeddedOptions(ContextInfo info) {
+    Map embeddedOptions = null;
+    EmbedderYamlLocator locator =
+        info.disposition.getEmbedderLocator(resourceProvider);
+    Iterable<YamlMap> maps = locator.embedderYamls.values;
+    if (maps.length == 1) {
+      embeddedOptions = maps.first;
+    }
 
-    if (context is InternalAnalysisContext) {
-      EmbedderYamlLocator locator = context.embedderYamlLocator;
-      Iterable<YamlMap> maps = locator.embedderYamls.values;
-      if (maps.length == 1) {
-        embeddedOptions = maps.first;
-      }
-
-      AnalysisConfiguration configuration = getConfiguration(context);
-      if (configuration != null) {
-        Map configMap = configuration.options;
-        if (configMap != null) {
-          if (embeddedOptions != null) {
-            embeddedOptions = new Merger().merge(embeddedOptions, configMap);
-          } else {
-            embeddedOptions = configMap;
-          }
+    AnalysisConfiguration configuration = getConfiguration(info.context);
+    if (configuration != null) {
+      Map configMap = configuration.options;
+      if (configMap != null) {
+        if (embeddedOptions != null) {
+          embeddedOptions = new Merger().merge(embeddedOptions, configMap);
+        } else {
+          embeddedOptions = configMap;
         }
       }
     }
-
     return embeddedOptions;
   }
 
@@ -1717,6 +1710,10 @@
   Iterable<UriResolver> createPackageUriResolvers(
           ResourceProvider resourceProvider) =>
       <UriResolver>[resolver];
+
+  @override
+  EmbedderYamlLocator getEmbedderLocator(ResourceProvider resourceProvider) =>
+      new EmbedderYamlLocator(null);
 }
 
 /**
@@ -1757,6 +1754,13 @@
    */
   Iterable<UriResolver> createPackageUriResolvers(
       ResourceProvider resourceProvider);
+
+  /**
+   * Return the locator used to locate the _embedder.yaml file used to configure
+   * the SDK. The [resourceProvider] is used to access the file system in cases
+   * where that is necessary.
+   */
+  EmbedderYamlLocator getEmbedderLocator(ResourceProvider resourceProvider);
 }
 
 /**
@@ -1776,6 +1780,10 @@
   Iterable<UriResolver> createPackageUriResolvers(
           ResourceProvider resourceProvider) =>
       const <UriResolver>[];
+
+  @override
+  EmbedderYamlLocator getEmbedderLocator(ResourceProvider resourceProvider) =>
+      new EmbedderYamlLocator(null);
 }
 
 /**
@@ -1785,6 +1793,8 @@
 class PackageMapDisposition extends FolderDisposition {
   final Map<String, List<Folder>> packageMap;
 
+  EmbedderYamlLocator _embedderLocator;
+
   @override
   final String packageRoot;
 
@@ -1800,6 +1810,14 @@
         new SdkExtUriResolver(packageMap),
         new PackageMapUriResolver(resourceProvider, packageMap)
       ];
+
+  @override
+  EmbedderYamlLocator getEmbedderLocator(ResourceProvider resourceProvider) {
+    if (_embedderLocator == null) {
+      _embedderLocator = new EmbedderYamlLocator(packageMap);
+    }
+    return _embedderLocator;
+  }
 }
 
 /**
@@ -1810,22 +1828,27 @@
   @override
   final Packages packages;
 
+  Map<String, List<Folder>> packageMap;
+
+  EmbedderYamlLocator _embedderLocator;
+
   PackagesFileDisposition(this.packages);
 
   @override
   String get packageRoot => null;
 
   Map<String, List<Folder>> buildPackageMap(ResourceProvider resourceProvider) {
-    Map<String, List<Folder>> packageMap = <String, List<Folder>>{};
-    if (packages == null) {
-      return packageMap;
-    }
-    packages.asMap().forEach((String name, Uri uri) {
-      if (uri.scheme == 'file' || uri.scheme == '' /* unspecified */) {
-        var path = resourceProvider.pathContext.fromUri(uri);
-        packageMap[name] = <Folder>[resourceProvider.getFolder(path)];
+    if (packageMap == null) {
+      packageMap = <String, List<Folder>>{};
+      if (packages != null) {
+        packages.asMap().forEach((String name, Uri uri) {
+          if (uri.scheme == 'file' || uri.scheme == '' /* unspecified */) {
+            var path = resourceProvider.pathContext.fromUri(uri);
+            packageMap[name] = <Folder>[resourceProvider.getFolder(path)];
+          }
+        });
       }
-    });
+    }
     return packageMap;
   }
 
@@ -1840,4 +1863,13 @@
       return const <UriResolver>[];
     }
   }
+
+  @override
+  EmbedderYamlLocator getEmbedderLocator(ResourceProvider resourceProvider) {
+    if (_embedderLocator == null) {
+      _embedderLocator =
+          new EmbedderYamlLocator(buildPackageMap(resourceProvider));
+    }
+    return _embedderLocator;
+  }
 }
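
With the new FolderDisposition.getEmbedderLocator API above, callers obtain the _embedder.yaml map from the disposition rather than from the analysis context, and each disposition caches its locator so the package map is scanned at most once. A minimal sketch of that call path; the free function is illustrative only:

import 'package:analysis_server/src/context_manager.dart';
import 'package:analyzer/file_system/file_system.dart';
import 'package:analyzer/source/embedder.dart';
import 'package:yaml/yaml.dart';

Map<Folder, YamlMap> embedderYamlsFor(
    FolderDisposition disposition, ResourceProvider resourceProvider) {
  EmbedderYamlLocator locator =
      disposition.getEmbedderLocator(resourceProvider);
  // An empty map means no _embedder.yaml was found, so the default SDK is used.
  return locator.embedderYamls;
}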
diff --git a/pkg/analysis_server/lib/src/protocol_server.dart b/pkg/analysis_server/lib/src/protocol_server.dart
index 5c86115..138c20f 100644
--- a/pkg/analysis_server/lib/src/protocol_server.dart
+++ b/pkg/analysis_server/lib/src/protocol_server.dart
@@ -43,14 +43,7 @@
             .add(newAnalysisError_fromEngine(lineInfo, error, severity));
       }
     } else {
-      AnalysisError error2 = newAnalysisError_fromEngine(lineInfo, error);
-      bool isStrongMode = context.analysisOptions.strongMode;
-      if (isStrongMode &&
-          error is engine.StaticWarningCode &&
-          (error as engine.StaticWarningCode).isStrongModeError) {
-        error2.severity = AnalysisErrorSeverity.ERROR;
-      }
-      serverErrors.add(error2);
+      serverErrors.add(newAnalysisError_fromEngine(lineInfo, error));
     }
   }
   return serverErrors;
diff --git a/pkg/analysis_server/lib/src/server/driver.dart b/pkg/analysis_server/lib/src/server/driver.dart
index ae8c50f..b6c878f 100644
--- a/pkg/analysis_server/lib/src/server/driver.dart
+++ b/pkg/analysis_server/lib/src/server/driver.dart
@@ -19,7 +19,6 @@
 import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:analyzer/instrumentation/file_instrumentation.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/incremental_logger.dart';
@@ -245,6 +244,11 @@
       "incremental-resolution-validation";
 
   /**
+   * The name of the option used to enable finer grained invalidation.
+   */
+  static const String FINER_GRAINED_INVALIDATION = 'finer-grained-invalidation';
+
+  /**
    * The name of the option used to cause instrumentation to also be written to
    * a local file.
    */
@@ -298,10 +302,10 @@
   InstrumentationServer instrumentationServer;
 
   /**
-   * The embedded library URI resolver provider used to override the way
-   * embedded library URI's are resolved in some contexts.
+   * The file resolver provider used to override the way file URI's are
+   * resolved in some contexts.
    */
-  EmbeddedResolverProvider embeddedUriResolverProvider;
+  ResolverProvider fileResolverProvider;
 
   /**
    * The package resolver provider used to override the way package URI's are
@@ -375,6 +379,8 @@
         results[ENABLE_INCREMENTAL_RESOLUTION_API];
     analysisServerOptions.enableIncrementalResolutionValidation =
         results[INCREMENTAL_RESOLUTION_VALIDATION];
+    analysisServerOptions.finerGrainedInvalidation =
+        results[FINER_GRAINED_INVALIDATION];
     analysisServerOptions.noErrorNotification = results[NO_ERROR_NOTIFICATION];
     analysisServerOptions.noIndex = results[NO_INDEX];
     analysisServerOptions.useAnalysisHighlight2 =
@@ -407,10 +413,12 @@
       // Use DirectoryBasedDartSdk.defaultSdkDirectory, which will make a guess.
       defaultSdkDirectory = DirectoryBasedDartSdk.defaultSdkDirectory;
     }
+    bool useSummaries = analysisServerOptions.fileReadMode == 'as-is';
     SdkCreator defaultSdkCreator = (AnalysisOptions options) {
       DirectoryBasedDartSdk sdk =
           new DirectoryBasedDartSdk(defaultSdkDirectory);
       sdk.analysisOptions = options;
+      sdk.useSummary = useSummaries;
       return sdk;
     };
     // TODO(brianwilkerson) It would be nice to avoid creating an SDK that
@@ -440,11 +448,12 @@
     //
     socketServer = new SocketServer(
         analysisServerOptions,
-        defaultSdkCreator,
+        new DartSdkManager(defaultSdkDirectory.getAbsolutePath(), useSummaries,
+            defaultSdkCreator),
         defaultSdk,
         service,
         serverPlugin,
-        embeddedUriResolverProvider,
+        fileResolverProvider,
         packageResolverProvider,
         useSingleContextManager);
     httpServer = new HttpAnalysisServer(socketServer);
@@ -523,6 +532,10 @@
         help: "enable validation of incremental resolution results (slow)",
         defaultsTo: false,
         negatable: false);
+    parser.addFlag(FINER_GRAINED_INVALIDATION,
+        help: "enable finer grained invalidation",
+        defaultsTo: false,
+        negatable: false);
     parser.addOption(INSTRUMENTATION_LOG_FILE,
         help:
             "the path of the file to which instrumentation data will be written");
diff --git a/pkg/analysis_server/lib/src/services/completion/dart/keyword_contributor.dart b/pkg/analysis_server/lib/src/services/completion/dart/keyword_contributor.dart
index 6b328b2..5a8d8cb 100644
--- a/pkg/analysis_server/lib/src/services/completion/dart/keyword_contributor.dart
+++ b/pkg/analysis_server/lib/src/services/completion/dart/keyword_contributor.dart
@@ -77,6 +77,16 @@
   }
 
   @override
+  visitAsExpression(AsExpression node) {
+    if (identical(entity, node.asOperator) &&
+        node.expression is ParenthesizedExpression) {
+      _addSuggestion2(ASYNC, relevance: DART_RELEVANCE_HIGH);
+      _addSuggestion2(ASYNC_STAR, relevance: DART_RELEVANCE_HIGH);
+      _addSuggestion2(SYNC_STAR, relevance: DART_RELEVANCE_HIGH);
+    }
+  }
+
+  @override
   visitBlock(Block node) {
     if (entity is ExpressionStatement) {
       Expression expression = (entity as ExpressionStatement).expression;
diff --git a/pkg/analysis_server/lib/src/services/completion/dart/optype.dart b/pkg/analysis_server/lib/src/services/completion/dart/optype.dart
index 3a5608c..917542c 100644
--- a/pkg/analysis_server/lib/src/services/completion/dart/optype.dart
+++ b/pkg/analysis_server/lib/src/services/completion/dart/optype.dart
@@ -211,8 +211,9 @@
       optype.includeTypeNameSuggestions = true;
       optype.typeNameSuggestionsFilter = (DartType dartType, int relevance) {
         DartType staticType = node.expression.staticType;
-        if (staticType.isDynamic ||
-            (dartType.isSubtypeOf(staticType) && dartType != staticType)) {
+        if (staticType != null &&
+            (staticType.isDynamic ||
+                (dartType.isSubtypeOf(staticType) && dartType != staticType))) {
           return relevance;
         } else {
           return null;
@@ -555,7 +556,7 @@
         if (node.parent is VariableDeclaration) {
           VariableDeclaration varDeclaration =
               node.parent as VariableDeclaration;
-          localTypeAssertion = varDeclaration.element.type;
+          localTypeAssertion = varDeclaration.element?.type;
         } else if (node.parent is AssignmentExpression) {
           AssignmentExpression assignmentExpression =
               node.parent as AssignmentExpression;
@@ -592,8 +593,9 @@
       optype.includeTypeNameSuggestions = true;
       optype.typeNameSuggestionsFilter = (DartType dartType, int relevance) {
         DartType staticType = node.expression.staticType;
-        if (staticType.isDynamic ||
-            (dartType.isSubtypeOf(staticType) && dartType != staticType)) {
+        if (staticType != null &&
+            (staticType.isDynamic ||
+                (dartType.isSubtypeOf(staticType) && dartType != staticType))) {
           return relevance;
         } else {
           return null;
@@ -770,6 +772,10 @@
     if (identical(entity, node.expression)) {
       optype.includeReturnValueSuggestions = true;
       optype.includeTypeNameSuggestions = true;
+    } else if (node.statements.contains(entity)) {
+      optype.includeReturnValueSuggestions = true;
+      optype.includeTypeNameSuggestions = true;
+      optype.includeVoidReturnSuggestions = true;
     }
   }
 
@@ -875,14 +881,6 @@
     }
   }
 
-  bool _isEntityPrevToken(TokenType expectedType) {
-    Object entity = this.entity;
-    if (entity is SimpleIdentifier && entity.token.isSynthetic) {
-      return entity.token.previous.type == expectedType;
-    }
-    return false;
-  }
-
   bool _isEntityPrevTokenSynthetic() {
     Object entity = this.entity;
     if (entity is AstNode && entity.beginToken.previous?.isSynthetic ?? false) {
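
An illustrative completion scenario for the statements-list branch added above; the enclosing visit method is not shown in this hunk, but the expression/statements pair suggests a switch case, where suggestions (including void-returning ones) are now offered at an offset inside the case body:

void handle(int code) {
  switch (code) {
    case 0:
      // A completion request here, inside the case's statement list, now
      // includes return values, type names, and void-returning members.
      break;
    default:
      print('unhandled: $code');
  }
}

void main() => handle(0);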
diff --git a/pkg/analysis_server/lib/src/services/correction/assist_internal.dart b/pkg/analysis_server/lib/src/services/correction/assist_internal.dart
index 244fb3d..f58469d 100644
--- a/pkg/analysis_server/lib/src/services/correction/assist_internal.dart
+++ b/pkg/analysis_server/lib/src/services/correction/assist_internal.dart
@@ -338,7 +338,7 @@
     }
     // add edit
     Token keyword = declarationList.keyword;
-    if (keyword.keyword == Keyword.VAR) {
+    if (keyword?.keyword == Keyword.VAR) {
       SourceRange range = rangeToken(keyword);
       _addReplaceEdit(range, typeSource);
     } else {
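
The null-aware access above matters because a VariableDeclarationList that already has an explicit type carries no var/final/const token, so its keyword is null; the old unguarded keyword.keyword dereference could not handle that shape. A small standalone illustration of the two shapes (not SDK code):

void main() {
  var inferred = 42; // declarationList.keyword is the `var` token.
  int typed = 42;    // declarationList.keyword is null for typed declarations.
  print(inferred + typed);
}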
diff --git a/pkg/analysis_server/lib/src/socket_server.dart b/pkg/analysis_server/lib/src/socket_server.dart
index 1da32a1..bce9dc8 100644
--- a/pkg/analysis_server/lib/src/socket_server.dart
+++ b/pkg/analysis_server/lib/src/socket_server.dart
@@ -11,7 +11,6 @@
 import 'package:analysis_server/src/services/index/index.dart';
 import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/source/pub_package_map_provider.dart';
 import 'package:analyzer/src/generated/sdk.dart';
@@ -30,12 +29,12 @@
   /**
    * The function used to create a new SDK using the default SDK.
    */
-  final SdkCreator defaultSdkCreator;
+  final DartSdkManager sdkManager;
 
   final DirectoryBasedDartSdk defaultSdk;
   final InstrumentationService instrumentationService;
   final ServerPlugin serverPlugin;
-  final EmbeddedResolverProvider embeddedResolverProvider;
+  final ResolverProvider fileResolverProvider;
   final ResolverProvider packageResolverProvider;
   final bool useSingleContextManager;
 
@@ -52,11 +51,11 @@
 
   SocketServer(
       this.analysisServerOptions,
-      this.defaultSdkCreator,
+      this.sdkManager,
       this.defaultSdk,
       this.instrumentationService,
       this.serverPlugin,
-      this.embeddedResolverProvider,
+      this.fileResolverProvider,
       this.packageResolverProvider,
       this.useSingleContextManager);
 
@@ -98,9 +97,9 @@
         index,
         serverPlugin,
         analysisServerOptions,
-        defaultSdkCreator,
+        sdkManager,
         instrumentationService,
-        embeddedResolverProvider: embeddedResolverProvider,
+        fileResolverProvider: fileResolverProvider,
         packageResolverProvider: packageResolverProvider,
         useSingleContextManager: useSingleContextManager,
         rethrowExceptions: false);
diff --git a/pkg/analysis_server/lib/src/status/ast_writer.dart b/pkg/analysis_server/lib/src/status/ast_writer.dart
index 7303f69..424132e 100644
--- a/pkg/analysis_server/lib/src/status/ast_writer.dart
+++ b/pkg/analysis_server/lib/src/status/ast_writer.dart
@@ -9,6 +9,7 @@
 import 'package:analysis_server/src/status/tree_writer.dart';
 import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/ast/visitor.dart';
+import 'package:analyzer/src/dart/ast/ast.dart';
 
 /**
  * A visitor that will produce an HTML representation of an AST structure.
@@ -41,34 +42,75 @@
     Map<String, Object> properties = new HashMap<String, Object>();
 
     properties['name'] = _getName(node);
-    if (node is BinaryExpression) {
+    if (node is ArgumentListImpl) {
+      properties['static parameter types'] = node.correspondingStaticParameters;
+      properties['propagated parameter types'] =
+          node.correspondingPropagatedParameters;
+    } else if (node is Annotation) {
+      properties['element'] = node.element;
+      properties['element annotation'] = node.elementAnnotation;
+    } else if (node is BinaryExpression) {
       properties['static element'] = node.staticElement;
       properties['static type'] = node.staticType;
       properties['propagated element'] = node.propagatedElement;
       properties['propagated type'] = node.propagatedType;
+    } else if (node is ClassDeclaration) {
+      properties['element'] = node.element;
+      properties['abstract keyword'] = node.abstractKeyword;
+    } else if (node is ClassTypeAlias) {
+      properties['element'] = node.element;
+      properties['abstract keyword'] = node.abstractKeyword;
     } else if (node is CompilationUnit) {
       properties['element'] = node.element;
+    } else if (node is ConstructorName) {
+      properties['static element'] = node.staticElement;
+    } else if (node is DeclaredIdentifier) {
+      properties['element'] = node.element;
+      properties['keyword'] = node.keyword;
     } else if (node is ExportDirective) {
       properties['element'] = node.element;
       properties['source'] = node.source;
+    } else if (node is FieldDeclaration) {
+      properties['static keyword'] = node.staticKeyword;
+    } else if (node is FormalParameter) {
+      properties['element'] = node.element;
+      properties['kind'] = node.kind;
     } else if (node is FunctionDeclaration) {
+      properties['element'] = node.element;
       properties['external keyword'] = node.externalKeyword;
       properties['property keyword'] = node.propertyKeyword;
     } else if (node is FunctionExpressionInvocation) {
       properties['static element'] = node.staticElement;
+      properties['static invoke type'] = node.staticInvokeType;
       properties['static type'] = node.staticType;
       properties['propagated element'] = node.propagatedElement;
+      properties['propagated invoke type'] = node.propagatedInvokeType;
       properties['propagated type'] = node.propagatedType;
     } else if (node is ImportDirective) {
       properties['element'] = node.element;
       properties['source'] = node.source;
+    } else if (node is IndexExpression) {
+      properties['static element'] = node.staticElement;
+      properties['static type'] = node.staticType;
+      properties['propagated element'] = node.propagatedElement;
+      properties['propagated type'] = node.propagatedType;
+    } else if (node is InstanceCreationExpression) {
+      properties['static element'] = node.staticElement;
+      properties['static type'] = node.staticType;
+      properties['propagated type'] = node.propagatedType;
     } else if (node is LibraryDirective) {
       properties['element'] = node.element;
     } else if (node is MethodDeclaration) {
+      properties['element'] = node.element;
       properties['external keyword'] = node.externalKeyword;
       properties['modifier keyword'] = node.modifierKeyword;
       properties['operator keyword'] = node.operatorKeyword;
       properties['property keyword'] = node.propertyKeyword;
+    } else if (node is MethodInvocation) {
+      properties['static invoke type'] = node.staticInvokeType;
+      properties['static type'] = node.staticType;
+      properties['propagated invoke type'] = node.propagatedInvokeType;
+      properties['propagated type'] = node.propagatedType;
     } else if (node is PartDirective) {
       properties['element'] = node.element;
       properties['source'] = node.source;
@@ -84,6 +126,8 @@
       properties['static type'] = node.staticType;
       properties['propagated element'] = node.propagatedElement;
       properties['propagated type'] = node.propagatedType;
+    } else if (node is RedirectingConstructorInvocation) {
+      properties['static element'] = node.staticElement;
     } else if (node is SimpleIdentifier) {
       properties['static element'] = node.staticElement;
       properties['static type'] = node.staticType;
@@ -91,9 +135,25 @@
       properties['propagated type'] = node.propagatedType;
     } else if (node is SimpleStringLiteral) {
       properties['value'] = node.value;
+    } else if (node is SuperConstructorInvocation) {
+      properties['static element'] = node.staticElement;
+    } else if (node is TypeName) {
+      properties['type'] = node.type;
+    } else if (node is VariableDeclarationList) {
+      properties['keyword'] = node.keyword;
+    } else if (node is Declaration) {
+      properties['element'] = node.element;
     } else if (node is Expression) {
       properties['static type'] = node.staticType;
       properties['propagated type'] = node.propagatedType;
+    } else if (node is FunctionBody) {
+      properties['isAsynchronous'] = node.isAsynchronous;
+      properties['isGenerator'] = node.isGenerator;
+    } else if (node is Identifier) {
+      properties['static element'] = node.staticElement;
+      properties['static type'] = node.staticType;
+      properties['propagated element'] = node.propagatedElement;
+      properties['propagated type'] = node.propagatedType;
     }
 
     return properties;
@@ -169,10 +229,11 @@
     buffer.write(node.offset);
     buffer.write('..');
     buffer.write(node.offset + node.length - 1);
-    buffer.write(']</span>');
+    buffer.write(']');
     if (node.isSynthetic) {
       buffer.write(' (synthetic)');
     }
+    buffer.write('</span>');
     buffer.write('<br>');
   }
 }
diff --git a/pkg/analysis_server/lib/src/status/element_writer.dart b/pkg/analysis_server/lib/src/status/element_writer.dart
index c3e4bc2..6f5649b 100644
--- a/pkg/analysis_server/lib/src/status/element_writer.dart
+++ b/pkg/analysis_server/lib/src/status/element_writer.dart
@@ -42,8 +42,6 @@
   Map<String, Object> _computeProperties(Element element) {
     Map<String, Object> properties = new HashMap<String, Object>();
 
-    properties['isDeprecated'] = element.isDeprecated;
-    properties['isOverride'] = element.isOverride;
     properties['metadata'] = element.metadata;
     properties['nameOffset'] = element.nameOffset;
     if (element is ClassElement) {
@@ -53,16 +51,18 @@
       properties['interfaces'] = element.interfaces;
       properties['isAbstract'] = element.isAbstract;
       properties['isEnum'] = element.isEnum;
+      properties['isMixinApplication'] = element.isMixinApplication;
       properties['isOrInheritsProxy'] = element.isOrInheritsProxy;
       properties['isProxy'] = element.isProxy;
-      properties['isTypedef'] = element.isMixinApplication;
       properties['isValidMixin'] = element.isValidMixin;
       properties['mixins'] = element.mixins;
       properties['supertype'] = element.supertype;
-      properties['type'] = element.type;
+    }
+    if (element is ClassMemberElement) {
+      properties['isStatic'] = element.isStatic;
     }
     if (element is CompilationUnitElement) {
-      properties['isEnumConstant'] = element.hasLoadLibraryFunction;
+      properties['hasLoadLibraryFunction'] = element.hasLoadLibraryFunction;
       properties['source'] = element.source;
     }
     if (element is ConstFieldElementImpl) {
@@ -81,7 +81,10 @@
       properties['redirectedConstructor'] = element.redirectedConstructor;
     }
     if (element is ExecutableElement) {
+      properties['hasImplicitReturnType'] = element.hasImplicitReturnType;
+      properties['isAbstract'] = element.isAbstract;
       properties['isAsynchronous'] = element.isAsynchronous;
+      properties['isExternal'] = element.isExternal;
       properties['isGenerator'] = element.isGenerator;
       properties['isOperator'] = element.isOperator;
       properties['isStatic'] = element.isStatic;
@@ -99,12 +102,16 @@
     if (element is FieldFormalParameterElement) {
       properties['field'] = element.field;
     }
-    if (element is FunctionTypeAliasElement) {
+    if (element is FunctionElement) {
+      properties['isEntryPoint'] = element.isEntryPoint;
+    }
+    if (element is FunctionTypedElement) {
       properties['returnType'] = element.returnType;
       properties['type'] = element.type;
     }
     if (element is ImportElement) {
       properties['combinators'] = element.combinators;
+      properties['isDeferred'] = element.isDeferred;
       properties['library'] = element.library;
     }
     if (element is LibraryElement) {
@@ -113,20 +120,19 @@
       properties['hasExtUri'] = element.hasExtUri;
       properties['hasLoadLibraryFunction'] = element.hasLoadLibraryFunction;
       properties['isBrowserApplication'] = element.isBrowserApplication;
+      properties['isDartAsync'] = element.isDartAsync;
+      properties['isDartCore'] = element.isDartCore;
+      properties['isInSdk'] = element.isInSdk;
     }
     if (element is LocalElement) {
       properties['visibleRange'] = element.visibleRange;
     }
-    if (element is MethodElement) {
-      properties['isAbstract'] = element.isAbstract;
-    }
     if (element is ParameterElement) {
       properties['defaultValueCode'] = element.defaultValueCode;
       properties['isInitializingFormal'] = element.isInitializingFormal;
       properties['parameterKind'] = element.parameterKind;
     }
     if (element is PropertyAccessorElement) {
-      properties['isAbstract'] = element.isAbstract;
       properties['isGetter'] = element.isGetter;
       properties['isSetter'] = element.isSetter;
     }
@@ -134,15 +140,24 @@
       properties['isStatic'] = element.isStatic;
       properties['propagatedType'] = element.propagatedType;
     }
+    if (element is TypeDefiningElement) {
+      properties['type'] = element.type;
+    }
     if (element is TypeParameterElement) {
       properties['bound'] = element.bound;
     }
+    if (element is TypeParameterizedElement) {
+      properties['typeParameters'] = element.typeParameters;
+    }
     if (element is UriReferencedElement) {
       properties['uri'] = element.uri;
     }
     if (element is VariableElement) {
+      properties['constantValue'] = element.constantValue;
+      properties['hasImplicitType'] = element.hasImplicitType;
       properties['isConst'] = element.isConst;
       properties['isFinal'] = element.isFinal;
+      properties['isStatic'] = element.isStatic;
       properties['type'] = element.type;
     }
 
diff --git a/pkg/analysis_server/lib/src/status/get_handler.dart b/pkg/analysis_server/lib/src/status/get_handler.dart
index fccb189..0e9a2ed 100644
--- a/pkg/analysis_server/lib/src/status/get_handler.dart
+++ b/pkg/analysis_server/lib/src/status/get_handler.dart
@@ -27,7 +27,9 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/visitor.dart';
 import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/source/embedder.dart';
 import 'package:analyzer/source/error_processor.dart';
+import 'package:analyzer/source/sdk_ext.dart';
 import 'package:analyzer/src/context/cache.dart';
 import 'package:analyzer/src/context/context.dart' show AnalysisContextImpl;
 import 'package:analyzer/src/context/source.dart';
@@ -35,6 +37,8 @@
 import 'package:analyzer/src/generated/error.dart';
 import 'package:analyzer/src/generated/java_engine.dart';
 import 'package:analyzer/src/generated/resolver.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/sdk_io.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/utilities_collection.dart';
 import 'package:analyzer/src/generated/utilities_general.dart';
@@ -396,6 +400,20 @@
   }
 
   /**
+   * Produce an encoded version of the given [descriptor] that can be used to
+   * find the descriptor later.
+   */
+  String _encodeSdkDescriptor(SdkDescription descriptor) {
+    StringBuffer buffer = new StringBuffer();
+    buffer.write(descriptor.options.encodeCrossContextOptions());
+    for (String path in descriptor.paths) {
+      buffer.write('+');
+      buffer.write(path);
+    }
+    return buffer.toString();
+  }
+
+  /**
    * Return the folder being managed by the given [analysisServer] that matches
    * the given [contextFilter], or `null` if there is none.
    */
@@ -558,10 +576,30 @@
   }
 
   /**
+   * Return the context for the SDK whose encoded descriptor matches the
+   * given [contextFilter]. The [analysisServer] is used to access the
+   * SDKs.
+   */
+  AnalysisContext _getSdkContext(
+      AnalysisServer analysisServer, String contextFilter) {
+    DartSdkManager manager = analysisServer.sdkManager;
+    List<SdkDescription> descriptors = manager.sdkDescriptors;
+    for (SdkDescription descriptor in descriptors) {
+      if (contextFilter == _encodeSdkDescriptor(descriptor)) {
+        return manager.getSdk(descriptor, () => null)?.context;
+      }
+    }
+    return null;
+  }
+
+  /**
    * Return `true` if the given analysis [context] has at least one entry with
    * an exception.
    */
   bool _hasException(InternalAnalysisContext context) {
+    if (context == null) {
+      return false;
+    }
     MapIterator<AnalysisTarget, CacheEntry> iterator =
         context.analysisCache.iterator();
     while (iterator.moveNext()) {
@@ -898,9 +936,17 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+      return _returnFailure(request,
+          'Cannot view cache entries from an SDK context: $contextFilter');
+    } else {
+      context = analysisServer.folderMap[folder];
     }
     String sourceUri = request.uri.queryParameters[SOURCE_QUERY_PARAM];
     if (sourceUri == null) {
@@ -942,7 +988,6 @@
     });
     allContexts.sort((Folder firstFolder, Folder secondFolder) =>
         firstFolder.path.compareTo(secondFolder.path));
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
 
     _writeResponse(request, (StringBuffer buffer) {
       _writePage(buffer, 'Analysis Server - Cache Entry',
@@ -996,7 +1041,7 @@
         }
         for (CacheEntry entry in entries) {
           Map<String, String> linkParameters = <String, String>{
-            CONTEXT_QUERY_PARAM: folder.path,
+            CONTEXT_QUERY_PARAM: contextFilter,
             SOURCE_QUERY_PARAM: sourceUri
           };
           List<ResultDescriptor> results = _getExpectedResults(entry);
@@ -1213,17 +1258,21 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+    } else {
+      context = analysisServer.folderMap[folder];
     }
 
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
-
     _writeResponse(request, (StringBuffer buffer) {
       _writePage(buffer, 'Analysis Server - Context Diagnostics',
           ['Context: $contextFilter'], (StringBuffer buffer) {
-        _writeContextDiagnostics(buffer, context);
+        _writeContextDiagnostics(buffer, context, contextFilter);
       });
     });
   }
@@ -1242,9 +1291,15 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+    } else {
+      context = analysisServer.folderMap[folder];
     }
 
     List<String> priorityNames = <String>[];
@@ -1252,7 +1307,6 @@
     List<String> implicitNames = <String>[];
     Map<String, String> links = new HashMap<String, String>();
     List<CaughtException> exceptions = <CaughtException>[];
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
     context.prioritySources.forEach((Source source) {
       priorityNames.add(source.fullName);
     });
@@ -1271,7 +1325,7 @@
           String link = makeLink(
               CACHE_ENTRY_PATH,
               {
-                CONTEXT_QUERY_PARAM: folder.path,
+                CONTEXT_QUERY_PARAM: contextFilter,
                 SOURCE_QUERY_PARAM: target.uri.toString()
               },
               sourceName,
@@ -1313,7 +1367,8 @@
         buffer.write('</table></p>');
       }
     }
-    void writeOptions(StringBuffer buffer, AnalysisOptionsImpl options) {
+    void writeOptions(StringBuffer buffer, AnalysisOptionsImpl options,
+        {void writeAdditionalOptions(StringBuffer buffer)}) {
       if (options == null) {
         buffer.write('<p>No option information available.</p>');
         return;
@@ -1328,6 +1383,8 @@
       _writeOption(
           buffer, 'Enable strict call checks', options.enableStrictCallChecks);
       _writeOption(buffer, 'Enable super mixins', options.enableSuperMixins);
+      _writeOption(
+          buffer, 'Enable trailing commas', options.enableTrailingCommas);
       _writeOption(buffer, 'Generate dart2js hints', options.dart2jsHint);
       _writeOption(buffer, 'Generate errors in implicit files',
           options.generateImplicitErrors);
@@ -1339,8 +1396,10 @@
           options.incrementalApi);
       _writeOption(buffer, 'Preserve comments', options.preserveComments);
       _writeOption(buffer, 'Strong mode', options.strongMode);
-      _writeOption(buffer, 'Strong mode hints', options.strongModeHints,
-          last: true);
+      _writeOption(buffer, 'Strong mode hints', options.strongModeHints);
+      if (writeAdditionalOptions != null) {
+        writeAdditionalOptions(buffer);
+      }
       buffer.write('</p>');
     }
 
@@ -1357,8 +1416,13 @@
           },
           (StringBuffer buffer) {
             buffer.write('<p><b>SDK Context Options</b></p>');
-            writeOptions(buffer,
-                context?.sourceFactory?.dartSdk?.context?.analysisOptions);
+            DartSdk sdk = context?.sourceFactory?.dartSdk;
+            writeOptions(buffer, sdk?.context?.analysisOptions,
+                writeAdditionalOptions: (StringBuffer buffer) {
+              if (sdk is DirectoryBasedDartSdk) {
+                _writeOption(buffer, 'Use summaries', sdk.useSummary);
+              }
+            });
           },
           (StringBuffer buffer) {
             List<Linter> lints =
@@ -1388,6 +1452,25 @@
           for (UriResolver resolver in sourceFactory.resolvers) {
             buffer.write('<p>');
             buffer.write(resolver.runtimeType);
+            if (resolver is DartUriResolver) {
+              DartSdk sdk = resolver.dartSdk;
+              buffer.write(' (sdk = ');
+              buffer.write(sdk.runtimeType);
+              if (sdk is DirectoryBasedDartSdk) {
+                buffer.write(' (path = ');
+                buffer.write(sdk.directory.getAbsolutePath());
+                buffer.write(')');
+              } else if (sdk is EmbedderSdk) {
+                buffer.write(' (map = ');
+                _writeMapOfStringToString(buffer, sdk.urlMappings);
+                buffer.write(')');
+              }
+              buffer.write(')');
+            } else if (resolver is SdkExtUriResolver) {
+              buffer.write(' (map = ');
+              _writeMapOfStringToString(buffer, resolver.urlMappings);
+              buffer.write(')');
+            }
             buffer.write('</p>');
           }
         }
@@ -1447,13 +1530,17 @@
       return _returnFailure(
           request, 'Query parameter $CONTEXT_QUERY_PARAM required');
     }
+    InternalAnalysisContext context = null;
     Folder folder = _findFolder(analysisServer, contextFilter);
     if (folder == null) {
-      return _returnFailure(request, 'Invalid context: $contextFilter');
+      context = _getSdkContext(analysisServer, contextFilter);
+      if (context == null) {
+        return _returnFailure(request, 'Invalid context: $contextFilter');
+      }
+    } else {
+      context = analysisServer.folderMap[folder];
     }
 
-    InternalAnalysisContext context = analysisServer.folderMap[folder];
-
     _writeResponse(request, (StringBuffer buffer) {
       _writePage(buffer, 'Analysis Server - Context Validation Diagnostics',
           ['Context: $contextFilter'], (StringBuffer buffer) {
@@ -1686,7 +1773,40 @@
           buffer.write(' <small>[no .packages file]</small>');
         }
       });
-      // TODO(brianwilkerson) Add items for the SDK contexts (currently only one).
+      buffer.write('</p>');
+      buffer.write('<p><b>SDK Contexts</b></p>');
+      buffer.write('<p>');
+      first = true;
+      DartSdkManager manager = analysisServer.sdkManager;
+      List<SdkDescription> descriptors = manager.sdkDescriptors;
+      if (descriptors.isEmpty) {
+        buffer.write('none');
+      } else {
+        Map<String, SdkDescription> sdkMap = <String, SdkDescription>{};
+        for (SdkDescription descriptor in descriptors) {
+          sdkMap[descriptor.toString()] = descriptor;
+        }
+        List<String> descriptorNames = sdkMap.keys.toList();
+        descriptorNames.sort();
+        for (String name in descriptorNames) {
+          if (first) {
+            first = false;
+          } else {
+            buffer.write('<br>');
+          }
+          SdkDescription descriptor = sdkMap[name];
+          String contextId = _encodeSdkDescriptor(descriptor);
+          buffer.write(makeLink(
+              CONTEXT_PATH,
+              {CONTEXT_QUERY_PARAM: contextId},
+              name,
+              _hasException(manager.getSdk(descriptor, () => null)?.context)));
+          buffer.write(' <small><b>[');
+          buffer.write(makeLink(CONTEXT_DIAGNOSTICS_PATH,
+              {CONTEXT_QUERY_PARAM: contextId}, 'diagnostics'));
+          buffer.write(']</b></small>');
+        }
+      }
       buffer.write('</p>');
 
       int freq = AnalysisServer.performOperationDelayFrequency;
@@ -1826,14 +1946,14 @@
    * Write diagnostic information about the given [context] to the given
    * [buffer].
    */
-  void _writeContextDiagnostics(
-      StringBuffer buffer, InternalAnalysisContext context) {
+  void _writeContextDiagnostics(StringBuffer buffer,
+      InternalAnalysisContext context, String contextFilter) {
     AnalysisDriver driver = (context as AnalysisContextImpl).driver;
     List<WorkItem> workItems = driver.currentWorkOrder?.workItems;
 
     buffer.write('<p>');
     buffer.write(makeLink(CONTEXT_VALIDATION_DIAGNOSTICS_PATH,
-        {CONTEXT_QUERY_PARAM: context.name}, 'Run validation'));
+        {CONTEXT_QUERY_PARAM: contextFilter}, 'Run validation'));
     buffer.write('</p>');
 
     buffer.write('<h3>Most Recently Performed Tasks</h3>');
@@ -2075,6 +2195,27 @@
   }
 
   /**
+   * Write to the given [buffer] a representation of the given [map] of strings
+   * to strings.
+   */
+  void _writeMapOfStringToString(StringBuffer buffer, Map<String, String> map) {
+    List<String> keys = map.keys.toList();
+    keys.sort();
+    int length = keys.length;
+    buffer.write('{');
+    for (int i = 0; i < length; i++) {
+      String key = keys[i];
+      if (i > 0) {
+        buffer.write(', ');
+      }
+      buffer.write(key);
+      buffer.write(' = ');
+      buffer.write(map[key]);
+    }
+    buffer.write('}');
+  }
+
+  /**
    * Write a representation of an analysis option with the given [name] and
    * [value] to the given [buffer]. The option should be separated from other
    * options unless the [last] flag is true, indicating that this is the last
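
The `writeOptions` change above threads an optional `writeAdditionalOptions`
closure through the shared options writer so callers can append extra rows
(here, the SDK's "Use summaries" flag) without forking the writer. A
self-contained sketch of that pattern, with illustrative names rather than the
server's actual helpers:

```
// Base writer emits the standard entries, then hands the buffer to an
// optional closure for caller-specific extras.
void writeOptions(StringBuffer buffer, Map<String, Object> options,
    {void writeAdditionalOptions(StringBuffer buffer)}) {
  options.forEach((String name, Object value) {
    buffer.writeln('$name = $value');
  });
  if (writeAdditionalOptions != null) {
    writeAdditionalOptions(buffer);
  }
}

void main() {
  StringBuffer buffer = new StringBuffer();
  writeOptions(buffer, {'Strong mode': true, 'Strong mode hints': false},
      writeAdditionalOptions: (StringBuffer buffer) {
    buffer.writeln('Use summaries = true');
  });
  print(buffer);
}
```
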
diff --git a/pkg/analysis_server/lib/starter.dart b/pkg/analysis_server/lib/starter.dart
index bb2f0f6..be8aba7 100644
--- a/pkg/analysis_server/lib/starter.dart
+++ b/pkg/analysis_server/lib/starter.dart
@@ -6,7 +6,6 @@
 
 import 'package:analysis_server/src/server/driver.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:plugin/plugin.dart';
 
@@ -23,19 +22,18 @@
   factory ServerStarter() = Driver;
 
   /**
-   * Set the embedded resolver provider used to override the way embedded
-   * library URI's are resolved in some contexts. The provider should return
-   * `null` if the embedded library URI resolution scheme should be used
-   * instead.
-   */
-  void set embeddedUriResolverProvider(EmbeddedResolverProvider provider);
-
-  /**
    * Set the instrumentation [server] that is to be used by the analysis server.
    */
   void set instrumentationServer(InstrumentationServer server);
 
   /**
+   * Set the file resolver provider used to override the way file URI's
+   * are resolved in some contexts. The provider should return `null` if the
+   * default file resolution scheme should be used instead.
+   */
+  void set fileResolverProvider(ResolverProvider provider);
+
+  /**
    * Set the package resolver provider used to override the way package URI's
    * are resolved in some contexts. The provider should return `null` if the
    * default package resolution scheme should be used instead.
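
The starter now exposes a `fileResolverProvider` alongside the existing
`packageResolverProvider`; each is a `ResolverProvider`, a function from a
`Folder` to a `UriResolver` that returns `null` to keep the default behavior.
A rough wiring sketch, assuming the usual `start(arguments)` entry point on
`ServerStarter`:

```
import 'package:analysis_server/starter.dart';
import 'package:analyzer/file_system/file_system.dart';

void main(List<String> args) {
  ServerStarter starter = new ServerStarter();
  // Returning null falls back to the default file resolution scheme.
  starter.fileResolverProvider = (Folder folder) => null;
  // Likewise for package: URI resolution.
  starter.packageResolverProvider = (Folder folder) => null;
  starter.start(args);
}
```
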
diff --git a/pkg/analysis_server/test/analysis_abstract.dart b/pkg/analysis_server/test/analysis_abstract.dart
index cf8aed1..063329d 100644
--- a/pkg/analysis_server/test/analysis_abstract.dart
+++ b/pkg/analysis_server/test/analysis_abstract.dart
@@ -18,6 +18,7 @@
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
 import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:linter/src/plugin/linter_plugin.dart';
 import 'package:plugin/manager.dart';
 import 'package:plugin/plugin.dart';
@@ -129,7 +130,7 @@
         index,
         serverPlugin,
         new AnalysisServerOptions(),
-        (_) => new MockSdk(),
+        new DartSdkManager('', false, (_) => new MockSdk()),
         InstrumentationService.NULL_SERVICE);
   }
 
diff --git a/pkg/analysis_server/test/analysis_server_test.dart b/pkg/analysis_server/test/analysis_server_test.dart
index c238d59..eaea9a6 100644
--- a/pkg/analysis_server/test/analysis_server_test.dart
+++ b/pkg/analysis_server/test/analysis_server_test.dart
@@ -19,6 +19,7 @@
 import 'package:analyzer/source/package_map_resolver.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/java_engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:plugin/manager.dart';
 import 'package:plugin/plugin.dart';
@@ -70,7 +71,7 @@
 ''');
     Source barSource = bar.createSource();
     server.setAnalysisRoots('0', ['/foo', '/bar'], [], {});
-    return pumpEventQueue(40).then((_) {
+    return server.onAnalysisComplete.then((_) {
       expect(server.statusAnalyzing, isFalse);
       // Make sure getAnalysisContext returns the proper context for each.
       AnalysisContext fooContext =
@@ -110,7 +111,7 @@
     AnalysisContext barContext = server.getAnalysisContextForSource(barSource);
     expect(barContext, isNotNull);
     expect(fooContext, isNot(same(barContext)));
-    return pumpEventQueue(40).then((_) {
+    return server.onAnalysisComplete.then((_) {
       expect(server.statusAnalyzing, isFalse);
       // Make sure getAnalysisContext returned the proper context for each.
       expect(fooContext.getKindOf(fooSource), SourceKind.LIBRARY);
@@ -144,7 +145,7 @@
         null,
         plugin,
         new AnalysisServerOptions(),
-        (_) => new MockSdk(),
+        new DartSdkManager('', false, (_) => new MockSdk()),
         InstrumentationService.NULL_SERVICE,
         rethrowExceptions: true);
     processRequiredPlugins();
@@ -236,7 +237,7 @@
     File bar = resourceProvider.newFile('/bar/bar.dart', 'library lib;');
     Source barSource = bar.createSource();
     server.setAnalysisRoots('0', ['/foo', '/bar'], [], {});
-    return pumpEventQueue(500).then((_) {
+    return server.onAnalysisComplete.then((_) {
       expect(server.statusAnalyzing, isFalse);
       // Make sure getAnalysisContext returns the proper context for each.
       AnalysisContext fooContext =
@@ -338,12 +339,12 @@
       subscriptions[service] = <String>[bar.path].toSet();
     }
     server.setAnalysisSubscriptions(subscriptions);
-    await pumpEventQueue(1000);
+    await server.onAnalysisComplete;
     expect(server.statusAnalyzing, isFalse);
     channel.notificationsReceived.clear();
     server.updateContent(
         '0', {bar.path: new AddContentOverlay('library bar; void f() {}')});
-    await pumpEventQueue(1000);
+    await server.onAnalysisComplete;
     expect(server.statusAnalyzing, isFalse);
     expect(channel.notificationsReceived, isNotEmpty);
     Set<String> notificationTypesReceived = new Set<String>();
diff --git a/pkg/analysis_server/test/context_manager_test.dart b/pkg/analysis_server/test/context_manager_test.dart
index db85fcd..2f481cc 100644
--- a/pkg/analysis_server/test/context_manager_test.dart
+++ b/pkg/analysis_server/test/context_manager_test.dart
@@ -9,6 +9,7 @@
 import 'package:analysis_server/src/context_manager.dart';
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/file_system/memory_file_system.dart';
+import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
 import 'package:analyzer/source/embedder.dart';
 import 'package:analyzer/source/error_processor.dart';
@@ -1738,8 +1739,6 @@
 
   UriResolver packageResolver = null;
 
-  UriResolver embeddedUriResolver = null;
-
   String projPath = '/my/proj';
 
   AnalysisError missing_required_param = new AnalysisError(
@@ -1815,23 +1814,18 @@
     manager.processPlugins(plugins);
   }
 
-  EmbedderUriResolver provideEmbeddedUriResolver(Folder folder) =>
-      embeddedUriResolver;
-
   UriResolver providePackageResolver(Folder folder) => packageResolver;
 
   void setUp() {
     processRequiredPlugins();
     resourceProvider = new MemoryResourceProvider();
     packageMapProvider = new MockPackageMapProvider();
-    DartSdkManager sdkManager = new DartSdkManager((_) {
-      return new MockSdk();
-    });
+    DartSdkManager sdkManager =
+        new DartSdkManager('', false, (_) => new MockSdk());
     manager = new ContextManagerImpl(
         resourceProvider,
         sdkManager,
         providePackageResolver,
-        provideEmbeddedUriResolver,
         packageMapProvider,
         analysisFilesGlobs,
         InstrumentationService.NULL_SERVICE,
@@ -2653,21 +2647,19 @@
     currentContextFilePaths[path] = <String, int>{};
     currentContextSources[path] = new HashSet<Source>();
     currentContext = AnalysisEngine.instance.createAnalysisContext();
-    _locateEmbedderYamls(currentContext, disposition);
     List<UriResolver> resolvers = [];
     if (currentContext is InternalAnalysisContext) {
       EmbedderYamlLocator embedderYamlLocator =
-          (currentContext as InternalAnalysisContext).embedderYamlLocator;
-      EmbedderUriResolver embedderUriResolver =
-          new EmbedderUriResolver(embedderYamlLocator.embedderYamls);
-      if (embedderUriResolver.length > 0) {
+          disposition.getEmbedderLocator(resourceProvider);
+      EmbedderSdk sdk = new EmbedderSdk(embedderYamlLocator.embedderYamls);
+      if (sdk.libraryMap.size() > 0) {
         // We have some embedder dart: uri mappings, add the resolver
         // to the list.
-        resolvers.add(embedderUriResolver);
+        resolvers.add(new DartUriResolver(sdk));
       }
     }
     resolvers.addAll(disposition.createPackageUriResolvers(resourceProvider));
-    resolvers.add(new FileUriResolver());
+    resolvers.add(new ResourceUriResolver(PhysicalResourceProvider.INSTANCE));
     currentContext.analysisOptions = options;
     currentContext.sourceFactory =
         new SourceFactory(resolvers, disposition.packages);
@@ -2740,19 +2732,6 @@
   void updateContextPackageUriResolver(AnalysisContext context) {
     // Nothing to do.
   }
-
-  /// If [disposition] has a package map, attempt to locate `_embedder.yaml`
-  /// files.
-  void _locateEmbedderYamls(
-      InternalAnalysisContext context, FolderDisposition disposition) {
-    Map<String, List<Folder>> packageMap;
-    if (disposition is PackageMapDisposition) {
-      packageMap = disposition.packageMap;
-    } else if (disposition is PackagesFileDisposition) {
-      packageMap = disposition.buildPackageMap(resourceProvider);
-    }
-    context.embedderYamlLocator.refresh(packageMap);
-  }
 }
 
 /**
diff --git a/pkg/analysis_server/test/domain_analysis_test.dart b/pkg/analysis_server/test/domain_analysis_test.dart
index ad5319e..45ca634 100644
--- a/pkg/analysis_server/test/domain_analysis_test.dart
+++ b/pkg/analysis_server/test/domain_analysis_test.dart
@@ -13,6 +13,7 @@
 import 'package:analysis_server/src/plugin/server_plugin.dart';
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:path/path.dart';
 import 'package:plugin/manager.dart';
 import 'package:test_reflective_loader/test_reflective_loader.dart';
@@ -47,7 +48,7 @@
         null,
         serverPlugin,
         new AnalysisServerOptions(),
-        (_) => new MockSdk(),
+        new DartSdkManager('', false, (_) => new MockSdk()),
         InstrumentationService.NULL_SERVICE);
     handler = new AnalysisDomainHandler(server);
   });
@@ -474,7 +475,7 @@
         null,
         serverPlugin,
         new AnalysisServerOptions(),
-        (_) => new MockSdk(),
+        new DartSdkManager('', false, (_) => new MockSdk()),
         InstrumentationService.NULL_SERVICE);
     handler = new AnalysisDomainHandler(server);
     // listen for notifications
diff --git a/pkg/analysis_server/test/domain_diagnostic_test.dart b/pkg/analysis_server/test/domain_diagnostic_test.dart
index d2288ae..3a6f3cb 100644
--- a/pkg/analysis_server/test/domain_diagnostic_test.dart
+++ b/pkg/analysis_server/test/domain_diagnostic_test.dart
@@ -11,6 +11,7 @@
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
 import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:plugin/manager.dart';
 import 'package:plugin/plugin.dart';
 import 'package:unittest/unittest.dart';
@@ -53,7 +54,7 @@
         null,
         serverPlugin,
         new AnalysisServerOptions(),
-        (_) => new MockSdk(),
+        new DartSdkManager('', false, (_) => new MockSdk()),
         InstrumentationService.NULL_SERVICE);
     handler = new DiagnosticDomainHandler(server);
   });
diff --git a/pkg/analysis_server/test/domain_execution_test.dart b/pkg/analysis_server/test/domain_execution_test.dart
index 7824a1b..e3de661 100644
--- a/pkg/analysis_server/test/domain_execution_test.dart
+++ b/pkg/analysis_server/test/domain_execution_test.dart
@@ -16,6 +16,7 @@
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
 import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/source_io.dart';
 import 'package:plugin/manager.dart';
@@ -48,7 +49,7 @@
           null,
           serverPlugin,
           new AnalysisServerOptions(),
-          (_) => new MockSdk(),
+          new DartSdkManager('', false, (_) => new MockSdk()),
           InstrumentationService.NULL_SERVICE);
       handler = new ExecutionDomainHandler(server);
     });
diff --git a/pkg/analysis_server/test/domain_server_test.dart b/pkg/analysis_server/test/domain_server_test.dart
index 5c3d75e..5ffd328 100644
--- a/pkg/analysis_server/test/domain_server_test.dart
+++ b/pkg/analysis_server/test/domain_server_test.dart
@@ -11,6 +11,7 @@
 import 'package:analysis_server/src/plugin/server_plugin.dart';
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:plugin/manager.dart';
 import 'package:unittest/unittest.dart';
 
@@ -37,7 +38,7 @@
         null,
         serverPlugin,
         new AnalysisServerOptions(),
-        (_) => new MockSdk(),
+        new DartSdkManager('', false, (_) => new MockSdk()),
         InstrumentationService.NULL_SERVICE);
     handler = new ServerDomainHandler(server);
   });
diff --git a/pkg/analysis_server/test/integration/analysis/highlights_test2.dart b/pkg/analysis_server/test/integration/analysis/highlights_test2.dart
index 54dd3ef..3ac66ce 100644
--- a/pkg/analysis_server/test/integration/analysis/highlights_test2.dart
+++ b/pkg/analysis_server/test/integration/analysis/highlights_test2.dart
@@ -20,8 +20,9 @@
 
 @reflectiveTest
 class AnalysisHighlightsTest extends AbstractAnalysisServerIntegrationTest {
-  Future startServer() {
-    return server.start(useAnalysisHighlight2: true);
+  Future startServer({int servicesPort}) {
+    return server.start(
+        servicesPort: servicesPort, useAnalysisHighlight2: true);
   }
 
   test_highlights() {
diff --git a/pkg/analysis_server/test/integration/integration_test_methods.dart b/pkg/analysis_server/test/integration/integration_test_methods.dart
index 2508d56..1fbdd7e 100644
--- a/pkg/analysis_server/test/integration/integration_test_methods.dart
+++ b/pkg/analysis_server/test/integration/integration_test_methods.dart
@@ -94,6 +94,10 @@
    * version ( String )
    *
    *   The version number of the analysis server.
+   *
+   * pid ( int )
+   *
+   *   The process id of the analysis server process.
    */
   Stream<ServerConnectedParams> onServerConnected;
 
diff --git a/pkg/analysis_server/test/integration/integration_tests.dart b/pkg/analysis_server/test/integration/integration_tests.dart
index 58f4aaa..9e764a0 100644
--- a/pkg/analysis_server/test/integration/integration_tests.dart
+++ b/pkg/analysis_server/test/integration/integration_tests.dart
@@ -205,7 +205,8 @@
   /**
    * Start [server].
    */
-  Future startServer() => server.start();
+  Future startServer({int servicesPort}) =>
+      server.start(servicesPort: servicesPort);
 
   /**
    * After every test, the server is stopped and [sourceDirectory] is deleted.
@@ -509,6 +510,9 @@
         .listen((String line) {
       lastCommunicationTime = currentElapseTime;
       String trimmedLine = line.trim();
+      if (trimmedLine.startsWith('Observatory listening on ')) {
+        return;
+      }
       _recordStdio('RECV: $trimmedLine');
       var message;
       try {
@@ -596,6 +600,7 @@
       {bool debugServer: false,
       int diagnosticPort,
       bool profileServer: false,
+      int servicesPort,
       bool useAnalysisHighlight2: false}) {
     if (_process != null) {
       throw new Exception('Process already started');
@@ -610,8 +615,14 @@
       arguments.add('--debug');
     }
     if (profileServer) {
-      arguments.add('--observe');
+      if (servicesPort == null) {
+        arguments.add('--observe');
+      } else {
+        arguments.add('--observe=$servicesPort');
+      }
       arguments.add('--pause-isolates-on-exit');
+    } else if (servicesPort != null) {
+      arguments.add('--enable-vm-service=$servicesPort');
     }
     if (Platform.packageRoot != null) {
       arguments.add('--package-root=${Platform.packageRoot}');
diff --git a/pkg/analysis_server/test/integration/protocol_matchers.dart b/pkg/analysis_server/test/integration/protocol_matchers.dart
index 487d6a7..1443aa0 100644
--- a/pkg/analysis_server/test/integration/protocol_matchers.dart
+++ b/pkg/analysis_server/test/integration/protocol_matchers.dart
@@ -65,11 +65,13 @@
  *
  * {
  *   "version": String
+ *   "pid": int
  * }
  */
 final Matcher isServerConnectedParams = new LazyMatcher(() => new MatchesJsonObject(
   "server.connected params", {
-    "version": isString
+    "version": isString,
+    "pid": isInt
   }));
 
 /**
diff --git a/pkg/analysis_server/test/mocks.dart b/pkg/analysis_server/test/mocks.dart
index 04d3d0e..3d719e3 100644
--- a/pkg/analysis_server/test/mocks.dart
+++ b/pkg/analysis_server/test/mocks.dart
@@ -249,9 +249,6 @@
 
   Future<Response> waitForResponse(Request request) {
     String id = request.id;
-    pumpEventQueue().then((_) {
-      responseController.addError(new NoResponseException(request));
-    });
     return new Future<Response>(() =>
         responseController.stream.firstWhere((response) => response.id == id));
   }
diff --git a/pkg/analysis_server/test/services/completion/dart/completion_manager_test.dart b/pkg/analysis_server/test/services/completion/dart/completion_manager_test.dart
index 3505da6..b678bd1 100644
--- a/pkg/analysis_server/test/services/completion/dart/completion_manager_test.dart
+++ b/pkg/analysis_server/test/services/completion/dart/completion_manager_test.dart
@@ -13,7 +13,6 @@
 import 'package:analysis_server/src/services/completion/dart/imported_reference_contributor.dart';
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/src/task/dart.dart';
-import 'package:analyzer/task/dart.dart';
 import 'package:test_reflective_loader/test_reflective_loader.dart';
 import 'package:unittest/unittest.dart';
 
@@ -53,8 +52,7 @@
     addTestSource('part of libB; main() {^}');
 
     // Associate part with library
-    context.computeResult(
-        new LibrarySpecificUnit(libSource, testSource), LIBRARY_CYCLE_UNITS);
+    context.computeResult(libSource, LIBRARY_CYCLE_UNITS);
 
     // Build the request
     CompletionRequestImpl baseRequest = new CompletionRequestImpl(
diff --git a/pkg/analysis_server/test/services/completion/dart/keyword_contributor_test.dart b/pkg/analysis_server/test/services/completion/dart/keyword_contributor_test.dart
index e51ed5f..2081aab 100644
--- a/pkg/analysis_server/test/services/completion/dart/keyword_contributor_test.dart
+++ b/pkg/analysis_server/test/services/completion/dart/keyword_contributor_test.dart
@@ -371,6 +371,22 @@
         pseudoKeywords: ['async', 'async*', 'sync*']);
   }
 
+  test_anonymous_function_async6() async {
+    addTestSource('main() {foo("bar", () as^{}}');
+    await computeSuggestions();
+    assertSuggestKeywords([],
+        pseudoKeywords: ['async', 'async*', 'sync*'],
+        relevance: DART_RELEVANCE_HIGH);
+  }
+
+  test_anonymous_function_async7() async {
+    addTestSource('main() {foo("bar", () as^ => null');
+    await computeSuggestions();
+    assertSuggestKeywords([],
+        pseudoKeywords: ['async', 'async*', 'sync*'],
+        relevance: DART_RELEVANCE_HIGH);
+  }
+
   test_argument() async {
     addTestSource('main() {foo(^);}');
     await computeSuggestions();
@@ -1179,6 +1195,12 @@
     assertSuggestKeywords([Keyword.IS], relevance: DART_RELEVANCE_HIGH);
   }
 
+  test_is_expression_partial() async {
+    addTestSource('main() {if (x i^)}');
+    await computeSuggestions();
+    assertSuggestKeywords([Keyword.IS], relevance: DART_RELEVANCE_HIGH);
+  }
+
   test_library() async {
     addTestSource('library foo;^');
     await computeSuggestions();
@@ -1340,6 +1362,18 @@
     assertSuggestKeywords(EXPRESSION_START_INSTANCE);
   }
 
+  test_method_invocation() async {
+    addTestSource('class A { foo() {bar.^}}');
+    await computeSuggestions();
+    assertNoSuggestions();
+  }
+
+  test_method_invocation2() async {
+    addTestSource('class A { foo() {bar.as^}}');
+    await computeSuggestions();
+    assertNoSuggestions();
+  }
+
   test_method_param() async {
     addTestSource('class A { foo(^) {});}');
     await computeSuggestions();
diff --git a/pkg/analysis_server/test/services/completion/dart/optype_test.dart b/pkg/analysis_server/test/services/completion/dart/optype_test.dart
index 9a6982b6..2fe206b 100644
--- a/pkg/analysis_server/test/services/completion/dart/optype_test.dart
+++ b/pkg/analysis_server/test/services/completion/dart/optype_test.dart
@@ -142,6 +142,16 @@
     assertOpType(typeNames: true);
   }
 
+  test_AsIdentifier() {
+    addTestSource('class A {var asdf; foo() {as^}');
+    assertOpType(returnValue: true, typeNames: true, voidReturn: true);
+  }
+
+  test_AsIdentifier2() {
+    addTestSource('class A {var asdf; foo() {A as^}');
+    assertOpType();
+  }
+
   test_Assert() {
     addTestSource('main() {assert(^)}');
     assertOpType(returnValue: true, typeNames: true);
@@ -1359,6 +1369,11 @@
     assertOpType(returnValue: true, typeNames: true, voidReturn: true);
   }
 
+  test_SwitchStatement_body_end2() {
+    addTestSource('main() {switch(k) {case 1:as^}}');
+    assertOpType(returnValue: true, typeNames: true, voidReturn: true);
+  }
+
   test_SwitchStatement_expression1() {
     // SimpleIdentifier  SwitchStatement  Block
     addTestSource('main() {switch(^k) {case 1:{}}}');
@@ -1557,5 +1572,8 @@
   @override
   Source get source => this;
 
+  @override
+  Uri get uri => new Uri.file(fullName);
+
   noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
 }
diff --git a/pkg/analysis_server/test/services/search/search_engine_test.dart b/pkg/analysis_server/test/services/search/search_engine_test.dart
index b67ce1a..4bf728d 100644
--- a/pkg/analysis_server/test/services/search/search_engine_test.dart
+++ b/pkg/analysis_server/test/services/search/search_engine_test.dart
@@ -287,7 +287,7 @@
   }
 }
 ''');
-    FieldElement element = findElement('field');
+    FieldElement element = findElement('field', ElementKind.FIELD);
     Element main = findElement('main');
     Element fieldParameter = findElement('field', ElementKind.PARAMETER);
     var expected = [
diff --git a/pkg/analysis_server/test/single_context_manager_test.dart b/pkg/analysis_server/test/single_context_manager_test.dart
index 25ab3bd..d85214a 100644
--- a/pkg/analysis_server/test/single_context_manager_test.dart
+++ b/pkg/analysis_server/test/single_context_manager_test.dart
@@ -65,9 +65,8 @@
     packageResolver = new TestUriResolver();
 
     _processRequiredPlugins();
-    DartSdkManager sdkManager = new DartSdkManager((_) {
-      return new MockSdk();
-    });
+    DartSdkManager sdkManager =
+        new DartSdkManager('', false, (_) => new MockSdk());
     manager = new SingleContextManager(resourceProvider, sdkManager,
         (_) => packageResolver, analysisFilesGlobs, new AnalysisOptionsImpl());
     callbacks = new TestContextManagerCallbacks(resourceProvider);
diff --git a/pkg/analysis_server/test/socket_server_test.dart b/pkg/analysis_server/test/socket_server_test.dart
index 4ff5ea7..a3279d2 100644
--- a/pkg/analysis_server/test/socket_server_test.dart
+++ b/pkg/analysis_server/test/socket_server_test.dart
@@ -116,7 +116,7 @@
         new DirectoryBasedDartSdk(DirectoryBasedDartSdk.defaultSdkDirectory);
     return new SocketServer(
         new AnalysisServerOptions(),
-        sdkCreator,
+        new DartSdkManager('', false, sdkCreator),
         sdkCreator(null),
         InstrumentationService.NULL_SERVICE,
         serverPlugin,
diff --git a/pkg/analysis_server/tool/spec/spec_input.html b/pkg/analysis_server/tool/spec/spec_input.html
index 644a1a9..7aa782a 100644
--- a/pkg/analysis_server/tool/spec/spec_input.html
+++ b/pkg/analysis_server/tool/spec/spec_input.html
@@ -6,7 +6,7 @@
   </head>
   <body>
     <h1>Analysis Server API Specification</h1>
-    <h1 style="color:#999999">Version <version>1.15.0</version></h1>
+    <h1 style="color:#999999">Version <version>1.16.0</version></h1>
     <p>
       This document contains a specification of the API provided by the
       analysis server.  The API in this document is currently under
@@ -239,6 +239,10 @@
             <ref>String</ref>
             <p>The version number of the analysis server.</p>
           </field>
+         <field name="pid">
+           <ref>int</ref>
+           <p>The process id of the analysis server process.</p>
+         </field>
         </params>
       </notification>
       <notification event="error">
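
For reference, a `server.connected` notification carrying the new field would
be shaped as follows once decoded (values here are illustrative, not taken
from the patch):

```
// Decoded form of the notification; 'pid' is the analysis server's process id.
final Map<String, Object> serverConnected = {
  'event': 'server.connected',
  'params': {'version': '1.16.0', 'pid': 12345},
};
```
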
diff --git a/pkg/analyzer/CHANGELOG.md b/pkg/analyzer/CHANGELOG.md
index f02706f..07f8a30 100644
--- a/pkg/analyzer/CHANGELOG.md
+++ b/pkg/analyzer/CHANGELOG.md
@@ -1,6 +1,10 @@
-## 0.27.4-alpha.7.1
+## 0.27.4-alpha.9
 * Restore EmbedderUriResolver API.
 
+## 0.27.4-alpha.8
+* Ignore-processing performance improvements.
+* EmbedderUriResolver API updates.
+
 ## 0.27.4
 
 * Added support for 'analysis_options.yaml' files as an alternative to '.analysis_options' files.
diff --git a/pkg/analyzer/benchmark/errors_in_all_libraries.dart b/pkg/analyzer/benchmark/errors_in_all_libraries.dart
index 0d68c2f..6f02534 100644
--- a/pkg/analyzer/benchmark/errors_in_all_libraries.dart
+++ b/pkg/analyzer/benchmark/errors_in_all_libraries.dart
@@ -15,6 +15,8 @@
 import 'package:analyzer/src/generated/sdk_io.dart' show DirectoryBasedDartSdk;
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/source_io.dart';
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:path/path.dart' as p;
 
 void main(List<String> args) {
@@ -36,7 +38,7 @@
     var context = AnalysisEngine.instance.createAnalysisContext();
     context.sourceFactory = new SourceFactory([
       new DartUriResolver(DirectoryBasedDartSdk.defaultSdk),
-      new FileUriResolver(),
+      new ResourceUriResolver(PhysicalResourceProvider.INSTANCE),
       new PackageUriResolver([new JavaFile(packageRoot)])
     ]);
 
diff --git a/pkg/analyzer/doc/tasks.html b/pkg/analyzer/doc/tasks.html
index b8b2e568..eb19b8b 100644
--- a/pkg/analyzer/doc/tasks.html
+++ b/pkg/analyzer/doc/tasks.html
@@ -30,6 +30,7 @@
   BuildLibraryElementTask -> BUILD_LIBRARY_ERRORS
   BuildLibraryElementTask -> IS_LAUNCHABLE
   BuildLibraryElementTask -> LIBRARY_ELEMENT1
+  BuildLibraryElementTask -> REFERENCED_NAMES
   BuildPublicNamespaceTask -> LIBRARY_ELEMENT3
   BuildSourceExportClosureTask -> EXPORT_SOURCE_CLOSURE
   BuildTypeProviderTask -> TYPE_PROVIDER
@@ -104,6 +105,8 @@
   GenerateLintsTask -> LINTS
   HINTS -> LibraryUnitErrorsTask
   HINTS [shape=box]
+  IGNORE_INFO -> DartErrorsTask
+  IGNORE_INFO [shape=box]
   IMPORTED_LIBRARIES -> BuildDirectiveElementsTask
   IMPORTED_LIBRARIES -> ReadyLibraryElement2Task
   IMPORTED_LIBRARIES -> ReadyLibraryElement5Task
@@ -194,7 +197,6 @@
   MODIFICATION_TIME -> VerifyUnitTask
   MODIFICATION_TIME [shape=box]
   PARSED_UNIT -> BuildCompilationUnitElementTask
-  PARSED_UNIT -> DartErrorsTask
   PARSED_UNIT [shape=box]
   PARSE_ERRORS -> dartErrorsForSource
   PARSE_ERRORS [shape=box]
@@ -304,7 +306,6 @@
   ResolveInstanceFieldsInUnitTask -> CREATED_RESOLVED_UNIT10
   ResolveInstanceFieldsInUnitTask -> RESOLVED_UNIT10
   ResolveLibraryReferencesTask -> LIBRARY_ELEMENT9
-  ResolveLibraryReferencesTask -> REFERENCED_NAMES
   ResolveLibraryTask -> LIBRARY_ELEMENT
   ResolveLibraryTypeNamesTask -> LIBRARY_ELEMENT6
   ResolveTopLevelLibraryTypeBoundsTask -> LIBRARY_ELEMENT5
@@ -327,6 +328,7 @@
   SOURCE_KIND [shape=box]
   STRONG_MODE_ERRORS -> LibraryUnitErrorsTask
   STRONG_MODE_ERRORS [shape=box]
+  ScanDartTask -> IGNORE_INFO
   ScanDartTask -> LINE_INFO
   ScanDartTask -> SCAN_ERRORS
   ScanDartTask -> TOKEN_STREAM
diff --git a/pkg/analyzer/example/resolver_driver.dart b/pkg/analyzer/example/resolver_driver.dart
index 3ba20f7..885a5225 100755
--- a/pkg/analyzer/example/resolver_driver.dart
+++ b/pkg/analyzer/example/resolver_driver.dart
@@ -14,6 +14,8 @@
 import 'package:analyzer/src/generated/sdk_io.dart' show DirectoryBasedDartSdk;
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/source_io.dart';
+import 'package:analyzer/file_system/file_system.dart' hide File;
+import 'package:analyzer/file_system/physical_file_system.dart';
 
 void main(List<String> args) {
   print('working dir ${new File('.').resolveSymbolicLinksSync()}');
@@ -31,7 +33,10 @@
   JavaSystemIO.setProperty("com.google.dart.sdk", args[0]);
   DartSdk sdk = DirectoryBasedDartSdk.defaultSdk;
 
-  var resolvers = [new DartUriResolver(sdk), new FileUriResolver()];
+  var resolvers = [
+    new DartUriResolver(sdk),
+    new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)
+  ];
 
   if (packageRoot != null) {
     var packageDirectory = new JavaFile(packageRoot);
diff --git a/pkg/analyzer/lib/analyzer.dart b/pkg/analyzer/lib/analyzer.dart
index c4642e8..59fea17 100644
--- a/pkg/analyzer/lib/analyzer.dart
+++ b/pkg/analyzer/lib/analyzer.dart
@@ -14,6 +14,8 @@
 import 'package:analyzer/src/generated/parser.dart';
 import 'package:analyzer/src/generated/source_io.dart';
 import 'package:analyzer/src/string_source.dart';
+import 'package:analyzer/file_system/file_system.dart' hide File;
+import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:path/path.dart' as pathos;
 
 export 'package:analyzer/dart/ast/ast.dart';
@@ -50,7 +52,8 @@
 CompilationUnit parseDartFile(String path,
     {bool suppressErrors: false, bool parseFunctionBodies: true}) {
   String contents = new File(path).readAsStringSync();
-  var sourceFactory = new SourceFactory([new FileUriResolver()]);
+  var sourceFactory = new SourceFactory(
+      [new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)]);
 
   var absolutePath = pathos.absolute(path);
   var source = sourceFactory.forUri(pathos.toUri(absolutePath).toString());
diff --git a/pkg/analyzer/lib/dart/element/element.dart b/pkg/analyzer/lib/dart/element/element.dart
index 13fbe48..b829bee 100644
--- a/pkg/analyzer/lib/dart/element/element.dart
+++ b/pkg/analyzer/lib/dart/element/element.dart
@@ -624,6 +624,11 @@
   bool get isDeprecated;
 
   /**
+   * Return `true` if this element has an annotation of the form '@factory'.
+   */
+  bool get isFactory;
+
+  /**
    * Return `true` if this element has an annotation of the form '@JS(..)'.
    */
   bool get isJS;
@@ -819,6 +824,11 @@
   bool get isDeprecated;
 
   /**
+   * Return `true` if this annotation marks the associated member as a factory.
+   */
+  bool get isFactory;
+
+  /**
    * Return `true` if this annotation marks the associated element with the `JS`
    * annotation.
    */
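
The new `isFactory` getters report whether an element (or one of its
annotations) marks a member as a factory. A small sketch of the kind of
declaration they are meant to detect, assuming the `factory` constant exported
by `package:meta`:

```
import 'package:meta/meta.dart';

class Widget {}

abstract class WidgetBuilder {
  // ElementAnnotation.isFactory would be true for the annotation below, and
  // the method element's isFactory getter would report it as well.
  @factory
  Widget createWidget();
}
```
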
diff --git a/pkg/analyzer/lib/file_system/file_system.dart b/pkg/analyzer/lib/file_system/file_system.dart
index b841a36..7a12c6f1c 100644
--- a/pkg/analyzer/lib/file_system/file_system.dart
+++ b/pkg/analyzer/lib/file_system/file_system.dart
@@ -106,6 +106,14 @@
   Resource getChild(String relPath);
 
   /**
+   * Return a [File] representing a child [Resource] with the given
+   * [relPath].  This call does not check whether a file with the given name
+   * exists on the filesystem--client must call the [File]'s `exists` getter
+   * to determine whether the file actually exists.
+   */
+  File getChildAssumingFile(String relPath);
+
+  /**
    * Return a [Folder] representing a child [Resource] with the given
    * [relPath].  This call does not check whether a folder with the given name
    * exists on the filesystem--client must call the [Folder]'s `exists` getter
@@ -147,6 +155,13 @@
   String get shortName;
 
   /**
+   * Synchronously deletes this resource and its children.
+   *
+   * Throws an exception if the resource cannot be deleted.
+   */
+  void delete();
+
+  /**
    * Return `true` if absolute [path] references this resource or a resource in
    * this folder.
    */
@@ -204,7 +219,7 @@
   /**
    * The name of the `file` scheme.
    */
-  static String _FILE_SCHEME = "file";
+  static final String FILE_SCHEME = "file";
 
   final ResourceProvider _provider;
 
@@ -212,7 +227,7 @@
 
   @override
   Source resolveAbsolute(Uri uri, [Uri actualUri]) {
-    if (!_isFileUri(uri)) {
+    if (!isFileUri(uri)) {
       return null;
     }
     Resource resource =
@@ -227,8 +242,10 @@
   Uri restoreAbsolute(Source source) =>
       _provider.pathContext.toUri(source.fullName);
 
+  ResourceProvider get provider => _provider;
+
   /**
    * Return `true` if the given [uri] is a `file` URI.
    */
-  static bool _isFileUri(Uri uri) => uri.scheme == _FILE_SCHEME;
+  static bool isFileUri(Uri uri) => uri.scheme == FILE_SCHEME;
 }
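
Taken together, `getChildAssumingFile` and `delete` let clients address and
remove files without a prior existence check. A runnable sketch against the
in-memory provider, so nothing touches the real filesystem:

```
import 'package:analyzer/file_system/file_system.dart';
import 'package:analyzer/file_system/memory_file_system.dart';

void main() {
  MemoryResourceProvider provider = new MemoryResourceProvider();
  Folder lib = provider.newFolder('/project/lib');
  provider.newFile('/project/lib/main.dart', 'void main() {}');

  // No filesystem check happens here; consult `exists` before using the file.
  File mainFile = lib.getChildAssumingFile('main.dart');
  File missing = lib.getChildAssumingFile('missing.dart');
  print(mainFile.exists); // true
  print(missing.exists); // false

  // Synchronously deletes the resource; throws if it cannot be deleted.
  mainFile.delete();
  print(mainFile.exists); // false
}
```
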
diff --git a/pkg/analyzer/lib/file_system/memory_file_system.dart b/pkg/analyzer/lib/file_system/memory_file_system.dart
index 3ec640e..3391433 100644
--- a/pkg/analyzer/lib/file_system/memory_file_system.dart
+++ b/pkg/analyzer/lib/file_system/memory_file_system.dart
@@ -281,6 +281,11 @@
   }
 
   @override
+  void delete() {
+    throw new FileSystemException(path, 'File could not be deleted');
+  }
+
+  @override
   bool isOrContains(String path) {
     return path == this.path;
   }
@@ -342,6 +347,11 @@
   }
 
   @override
+  void delete() {
+    _provider.deleteFile(path);
+  }
+
+  @override
   bool isOrContains(String path) {
     return path == this.path;
   }
@@ -441,7 +451,7 @@
       return UriKind.PACKAGE_URI;
     } else if (scheme == DartUriResolver.DART_SCHEME) {
       return UriKind.DART_URI;
-    } else if (scheme == FileUriResolver.FILE_SCHEME) {
+    } else if (scheme == ResourceUriResolver.FILE_SCHEME) {
       return UriKind.FILE_URI;
     }
     return UriKind.FILE_URI;
@@ -449,7 +459,12 @@
 
   @override
   bool operator ==(other) {
-    return other is _MemoryFileSource && other.id == id;
+    if (other is _MemoryFileSource) {
+      return id == other.id;
+    } else if (other is Source) {
+      return uri == other.uri;
+    }
+    return false;
   }
 
   @override
@@ -483,6 +498,11 @@
   }
 
   @override
+  void delete() {
+    _provider.deleteFolder(path);
+  }
+
+  @override
   Resource getChild(String relPath) {
     String childPath = canonicalizePath(relPath);
     _MemoryResource resource = _provider._pathToResource[childPath];
@@ -493,6 +513,16 @@
   }
 
   @override
+  _MemoryFile getChildAssumingFile(String relPath) {
+    String childPath = canonicalizePath(relPath);
+    _MemoryResource resource = _provider._pathToResource[childPath];
+    if (resource is _MemoryFile) {
+      return resource;
+    }
+    return new _MemoryFile(_provider, childPath);
+  }
+
+  @override
   _MemoryFolder getChildAssumingFolder(String relPath) {
     String childPath = canonicalizePath(relPath);
     _MemoryResource resource = _provider._pathToResource[childPath];
diff --git a/pkg/analyzer/lib/file_system/physical_file_system.dart b/pkg/analyzer/lib/file_system/physical_file_system.dart
index e259f75..9cd62b6 100644
--- a/pkg/analyzer/lib/file_system/physical_file_system.dart
+++ b/pkg/analyzer/lib/file_system/physical_file_system.dart
@@ -179,6 +179,13 @@
   }
 
   @override
+  _PhysicalFile getChildAssumingFile(String relPath) {
+    String canonicalPath = canonicalizePath(relPath);
+    io.File file = new io.File(canonicalPath);
+    return new _PhysicalFile(file);
+  }
+
+  @override
   _PhysicalFolder getChildAssumingFolder(String relPath) {
     String canonicalPath = canonicalizePath(relPath);
     io.Directory directory = new io.Directory(canonicalPath);
@@ -256,5 +263,14 @@
   }
 
   @override
+  void delete() {
+    try {
+      _entry.deleteSync(recursive: true);
+    } on io.FileSystemException catch (exception) {
+      throw new FileSystemException(exception.path, exception.message);
+    }
+  }
+
+  @override
   String toString() => path;
 }
diff --git a/pkg/analyzer/lib/plugin/embedded_resolver_provider.dart b/pkg/analyzer/lib/plugin/embedded_resolver_provider.dart
index b248e34..217ab7f 100644
--- a/pkg/analyzer/lib/plugin/embedded_resolver_provider.dart
+++ b/pkg/analyzer/lib/plugin/embedded_resolver_provider.dart
@@ -2,6 +2,7 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+@deprecated
 library analyzer.plugin.embedded_resolver_provider;
 
 import 'package:analyzer/file_system/file_system.dart';
@@ -12,4 +13,5 @@
  * URI's for embedded libraries within a given folder, or `null` if we should
  * fall back to the standard URI resolver.
  */
+@deprecated
 typedef EmbedderUriResolver EmbeddedResolverProvider(Folder folder);
diff --git a/pkg/analyzer/lib/source/embedder.dart b/pkg/analyzer/lib/source/embedder.dart
index 1b43e00..56abc6a 100644
--- a/pkg/analyzer/lib/source/embedder.dart
+++ b/pkg/analyzer/lib/source/embedder.dart
@@ -10,14 +10,13 @@
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/source/package_map_provider.dart'
     show PackageMapProvider;
-import 'package:analyzer/src/context/context.dart';
-import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/java_core.dart';
-import 'package:analyzer/src/generated/java_engine.dart';
 import 'package:analyzer/src/generated/java_io.dart' show JavaFile;
 import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/sdk_io.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/source_io.dart' show FileBasedSource;
+import 'package:analyzer/src/summary/idl.dart';
 import 'package:yaml/yaml.dart';
 
 const String _DART_COLON_PREFIX = 'dart:';
@@ -27,104 +26,28 @@
 bool definesEmbeddedLibs(Map map) => map[_EMBEDDED_LIB_MAP_KEY] != null;
 
 /// An SDK backed by URI mappings derived from an `_embedder.yaml` file.
-class EmbedderSdk implements DartSdk {
-  /// The resolver associated with this SDK.
-  EmbedderUriResolver _resolver;
-
-  /// The [AnalysisContext] used for this SDK's sources.
-  InternalAnalysisContext _analysisContext;
-
-  final LibraryMap _librariesMap = new LibraryMap();
-
+class EmbedderSdk extends AbstractDartSdk {
   final Map<String, String> _urlMappings = new HashMap<String, String>();
 
-  /// Analysis options for this SDK.
-  AnalysisOptions analysisOptions;
-
   EmbedderSdk([Map<Folder, YamlMap> embedderYamls]) {
     embedderYamls?.forEach(_processEmbedderYaml);
-    _resolver = new EmbedderUriResolver._forSdk(this);
   }
 
-  @override
-  AnalysisContext get context {
-    if (_analysisContext == null) {
-      _analysisContext = new SdkAnalysisContext(analysisOptions);
-      SourceFactory factory = new SourceFactory([_resolver]);
-      _analysisContext.sourceFactory = factory;
-
-      ChangeSet changeSet = new ChangeSet();
-      for (String uri in uris) {
-        changeSet.addedSource(factory.forUri(uri));
-      }
-      _analysisContext.applyChanges(changeSet);
-    }
-    return _analysisContext;
-  }
-
-  @override
-  List<SdkLibrary> get sdkLibraries => _librariesMap.sdkLibraries;
-
   // TODO(danrubel) Determine SDK version
   @override
   String get sdkVersion => '0';
 
-  @override
-  List<String> get uris => _librariesMap.uris;
-
   /// The url mappings for this SDK.
   Map<String, String> get urlMappings => _urlMappings;
 
   @override
-  Source fromFileUri(Uri uri) {
-    JavaFile file = new JavaFile.fromUri(uri);
-    String filePath = file.getAbsolutePath();
-
-    String path;
-    for (SdkLibrary library in _librariesMap.sdkLibraries) {
-      String libraryPath = library.path.replaceAll('/', JavaFile.separator);
-      if (filePath == libraryPath) {
-        path = library.shortName;
-        break;
-      }
-    }
-    if (path == null) {
-      for (SdkLibrary library in _librariesMap.sdkLibraries) {
-        String libraryPath = library.path.replaceAll('/', JavaFile.separator);
-        int index = libraryPath.lastIndexOf(JavaFile.separator);
-        if (index == -1) {
-          continue;
-        }
-        String prefix = libraryPath.substring(0, index + 1);
-        if (!filePath.startsWith(prefix)) {
-          continue;
-        }
-        var relPath = filePath
-            .substring(prefix.length)
-            .replaceAll(JavaFile.separator, '/');
-        path = '${library.shortName}/$relPath';
-        break;
-      }
-    }
-
-    if (path != null) {
-      try {
-        return new FileBasedSource(file, parseUriWithException(path));
-      } on URISyntaxException catch (exception, stackTrace) {
-        AnalysisEngine.instance.logger.logInformation(
-            "Failed to create URI: $path",
-            new CaughtException(exception, stackTrace));
-        return null;
-      }
-    }
-    return null;
-  }
+  String getRelativePathFromFile(JavaFile file) => file.getAbsolutePath();
 
   @override
-  SdkLibrary getSdkLibrary(String dartUri) => _librariesMap.getLibrary(dartUri);
+  PackageBundle getSummarySdkBundle(bool strongMode) => null;
 
   @override
-  Source mapDartUri(String dartUri) {
+  FileBasedSource internalMapDartUri(String dartUri) {
     String libraryName;
     String relativePath;
     int index = dartUri.indexOf('/');
@@ -173,7 +96,7 @@
     _urlMappings[name] = libPath;
     SdkLibraryImpl library = new SdkLibraryImpl(name);
     library.path = libPath;
-    _librariesMap.setLibrary(name, library);
+    libraryMap.setLibrary(name, library);
   }
 
   /// Given the 'embedderYamls' from [EmbedderYamlLocator] check each one for the
@@ -205,14 +128,27 @@
 ///
 /// If a key doesn't begin with `dart:` it is ignored.
 ///
+/// This class is deprecated; use DartUriResolver directly. In particular, if
+/// there used to be an instance creation of the form:
+///
+/// ```
+/// new EmbedderUriResolver(embedderMap)
+/// ```
+///
+/// This should be replaced by
+///
+/// ```
+/// new DartUriResolver(new EmbedderSdk(embedderMap))
+/// ```
+@deprecated
 class EmbedderUriResolver implements DartUriResolver {
   EmbedderSdk _embedderSdk;
   DartUriResolver _dartUriResolver;
 
   /// Construct a [EmbedderUriResolver] from a package map
   /// (see [PackageMapProvider]).
-  EmbedderUriResolver(Map<Folder, YamlMap> embedderMap) :
-      this._forSdk(new EmbedderSdk(embedderMap));
+  EmbedderUriResolver(Map<Folder, YamlMap> embedderMap)
+      : this._forSdk(new EmbedderSdk(embedderMap));
 
   /// (Provisional API.)
   EmbedderUriResolver._forSdk(this._embedderSdk) {
@@ -251,7 +187,7 @@
 
   EmbedderYamlLocator(Map<String, List<Folder>> packageMap) {
     if (packageMap != null) {
-      refresh(packageMap);
+      _processPackageMap(packageMap);
     }
   }
 
@@ -263,10 +199,9 @@
   void refresh(Map<String, List<Folder>> packageMap) {
     // Clear existing.
     embedderYamls.clear();
-    if (packageMap == null) {
-      return;
+    if (packageMap != null) {
+      _processPackageMap(packageMap);
     }
-    packageMap.forEach(_processPackage);
   }
 
   /// Given the yaml for an embedder ([embedderYaml]) and a folder
@@ -295,6 +230,10 @@
     }
   }
 
+  void _processPackageMap(Map<String, List<Folder>> packageMap) {
+    packageMap.forEach(_processPackage);
+  }
+
   /// Read the contents of [libDir]/[EMBEDDER_FILE_NAME] as a string.
   /// Returns null if the file doesn't exist.
   String _readEmbedderYaml(Folder libDir) {
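
Following the deprecation note above, code that previously constructed an
`EmbedderUriResolver` can wrap an `EmbedderSdk` in the standard
`DartUriResolver` instead. A sketch of the replacement wiring, given a package
map from which the `_embedder.yaml` files are located:

```
import 'package:analyzer/file_system/file_system.dart';
import 'package:analyzer/source/embedder.dart';
import 'package:analyzer/src/generated/source.dart';

SourceFactory embedderSourceFactory(Map<String, List<Folder>> packageMap) {
  EmbedderYamlLocator locator = new EmbedderYamlLocator(packageMap);
  // Before: new EmbedderUriResolver(locator.embedderYamls)
  EmbedderSdk sdk = new EmbedderSdk(locator.embedderYamls);
  return new SourceFactory(<UriResolver>[new DartUriResolver(sdk)]);
}
```
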
diff --git a/pkg/analyzer/lib/source/error_processor.dart b/pkg/analyzer/lib/source/error_processor.dart
index 1fd9577..81778bc 100644
--- a/pkg/analyzer/lib/source/error_processor.dart
+++ b/pkg/analyzer/lib/source/error_processor.dart
@@ -121,6 +121,14 @@
   ErrorSeverity get severity => ErrorSeverity.ERROR;
 
   /// Check if this processor applies to the given [error].
-  bool appliesTo(AnalysisError error) =>
-      error.errorCode.type == ErrorType.STATIC_TYPE_WARNING;
+  bool appliesTo(AnalysisError error) {
+    ErrorCode errorCode = error.errorCode;
+    if (errorCode is StaticTypeWarningCode) {
+      return true;
+    }
+    if (errorCode is StaticWarningCode) {
+      return errorCode.isStrongModeError;
+    }
+    return false;
+  }
 }
diff --git a/pkg/analyzer/lib/source/sdk_ext.dart b/pkg/analyzer/lib/source/sdk_ext.dart
index f4bcdb9..3e833df 100644
--- a/pkg/analyzer/lib/source/sdk_ext.dart
+++ b/pkg/analyzer/lib/source/sdk_ext.dart
@@ -43,6 +43,13 @@
   /// Number of sdk extensions.
   int get length => _urlMappings.length;
 
+  /// Return a table mapping the names of extensions to the paths where those
+  /// extensions can be found.
+  Map<String, String> get urlMappings =>
+      new Map<String, String>.from(_urlMappings);
+
   /// Return the path mapping for [libName] or null if there is none.
   String operator [](String libName) => _urlMappings[libName];
 
diff --git a/pkg/analyzer/lib/src/context/builder.dart b/pkg/analyzer/lib/src/context/builder.dart
new file mode 100644
index 0000000..6abd7bd
--- /dev/null
+++ b/pkg/analyzer/lib/src/context/builder.dart
@@ -0,0 +1,265 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.src.context.context_builder;
+
+import 'dart:collection';
+import 'dart:core' hide Resource;
+import 'dart:io' as io;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/plugin/resolver_provider.dart';
+import 'package:analyzer/source/analysis_options_provider.dart';
+import 'package:analyzer/source/embedder.dart';
+import 'package:analyzer/source/package_map_resolver.dart';
+import 'package:analyzer/source/sdk_ext.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/java_io.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/sdk_io.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/generated/source_io.dart';
+import 'package:analyzer/src/task/options.dart';
+import 'package:package_config/discovery.dart';
+import 'package:package_config/packages.dart';
+import 'package:package_config/packages_file.dart';
+import 'package:package_config/src/packages_impl.dart';
+import 'package:path/path.dart' as path;
+import 'package:yaml/yaml.dart';
+
+/**
+ * A utility class used to build an analysis context for a given directory.
+ *
+ * The construction of analysis contexts is as follows:
+ *
+ * 1. Determine how package: URI's are to be resolved. This follows the lookup
+ *    algorithm defined by the [package specification][1].
+ *
+ * 2. Using the results of step 1, look in each package for an embedder file
+ *    (_embedder.yaml). If one exists then it defines the SDK. If multiple such
+ *    files exist then use the first one found. Otherwise, use the default SDK.
+ *
+ * 3. Look in each package for an SDK extension file (_sdkext). For each such
+ *    file, add the specified files to the SDK.
+ *
+ * 4. Look for an analysis options file (`analysis_options.yaml` or
+ *    `.analysis_options`) and process the options in the file.
+ *
+ * 5. Create a new context. Initialize its source factory based on steps 1, 2
+ *    and 3. Initialize its analysis options from step 4.
+ *
+ * [1]: https://github.com/dart-lang/dart_enhancement_proposals/blob/master/Accepted/0005%20-%20Package%20Specification/DEP-pkgspec.md.
+ */
+class ContextBuilder {
+  /**
+   * The [ResourceProvider] by which paths are converted into [Resource]s.
+   */
+  final ResourceProvider resourceProvider;
+
+  /**
+   * The manager used to manage the DartSdk's that have been created so that
+   * they can be shared across contexts.
+   */
+  final DartSdkManager sdkManager;
+
+  /**
+   * The cache containing the contents of overlaid files.
+   */
+  final ContentCache contentCache;
+
+  /**
+   * The resolver provider used to create a package: URI resolver, or `null` if
+   * the normal (Package Specification DEP) lookup mechanism is to be used.
+   */
+  ResolverProvider packageResolverProvider;
+
+  /**
+   * The file path of the .packages file that should be used in place of any
+   * file found using the normal (Package Specification DEP) lookup mechanism.
+   */
+  String defaultPackageFilePath;
+
+  /**
+   * The file path of the packages directory that should be used in place of any
+   * file found using the normal (Package Specification DEP) lookup mechanism.
+   */
+  String defaultPackagesDirectoryPath;
+
+  /**
+   * The file path of the analysis options file that should be used in place of
+   * any file in the root directory.
+   */
+  String defaultAnalysisOptionsFilePath;
+
+  /**
+   * The default analysis options that should be used unless some or all of them
+   * are overridden in the analysis options file.
+   */
+  AnalysisOptions defaultOptions;
+
+  /**
+   * Initialize a newly created builder to be ready to build a context rooted in
+   * the directory with the given [rootDirectoryPath].
+   */
+  ContextBuilder(this.resourceProvider, this.sdkManager, this.contentCache);
+
+  AnalysisContext buildContext(String rootDirectoryPath) {
+    // TODO(brianwilkerson) Split getAnalysisOptions so we can capture the
+    // option map and use it to run the options processors.
+    AnalysisOptions options = getAnalysisOptions(rootDirectoryPath);
+    InternalAnalysisContext context =
+        AnalysisEngine.instance.createAnalysisContext();
+    context.contentCache = contentCache;
+    context.sourceFactory = createSourceFactory(rootDirectoryPath, options);
+    context.analysisOptions = options;
+    //_processAnalysisOptions(context, optionMap);
+    return context;
+  }
+
+//  void _processAnalysisOptions(
+//      AnalysisContext context, Map<String, YamlNode> optionMap) {
+//    List<OptionsProcessor> optionsProcessors =
+//        AnalysisEngine.instance.optionsPlugin.optionsProcessors;
+//    try {
+//      optionsProcessors.forEach(
+//          (OptionsProcessor p) => p.optionsProcessed(context, optionMap));
+//
+//      // Fill in lint rule defaults in case lints are enabled and rules are
+//      // not specified in an options file.
+//      if (context.analysisOptions.lint && !containsLintRuleEntry(optionMap)) {
+//        setLints(context, linterPlugin.contributedRules);
+//      }
+//
+//      // Ask engine to further process options.
+//      if (optionMap != null) {
+//        configureContextOptions(context, optionMap);
+//      }
+//    } on Exception catch (e) {
+//      optionsProcessors.forEach((OptionsProcessor p) => p.onError(e));
+//    }
+//  }
+
+  Map<String, List<Folder>> convertPackagesToMap(Packages packages) {
+    if (packages == null || packages == Packages.noPackages) {
+      return null;
+    }
+    Map<String, List<Folder>> folderMap = new HashMap<String, List<Folder>>();
+    packages.asMap().forEach((String packageName, Uri uri) {
+      folderMap[packageName] = [resourceProvider.getFolder(path.fromUri(uri))];
+    });
+    return folderMap;
+  }
+
+  Packages createPackageMap(String rootDirectoryPath) {
+    if (defaultPackageFilePath != null) {
+      // TODO(brianwilkerson) Figure out why we're going through Uri rather than
+      // just creating the file from the path.
+      Uri fileUri = new Uri.file(defaultPackageFilePath);
+      io.File configFile = new io.File.fromUri(fileUri).absolute;
+      List<int> bytes = configFile.readAsBytesSync();
+      Map<String, Uri> map = parse(bytes, configFile.uri);
+      return new MapPackages(map);
+    } else if (defaultPackagesDirectoryPath != null) {
+      return getPackagesDirectory(
+          new Uri.directory(defaultPackagesDirectoryPath));
+    }
+    return findPackagesFromFile(new Uri.directory(rootDirectoryPath));
+  }
+
+  SourceFactory createSourceFactory(
+      String rootDirectoryPath, AnalysisOptions options) {
+    if (packageResolverProvider != null) {
+      Folder folder = resourceProvider.getResource('.');
+      UriResolver resolver = packageResolverProvider(folder);
+      if (resolver != null) {
+        // TODO(brianwilkerson) This doesn't support either embedder files or
+        // sdk extensions because we don't have a way to get the package map
+        // from the resolver.
+        List<UriResolver> resolvers = <UriResolver>[
+          new DartUriResolver(findSdk(null, options)),
+          resolver,
+          new ResourceUriResolver(resourceProvider)
+        ];
+        return new SourceFactory(resolvers);
+      }
+    }
+    Map<String, List<Folder>> packageMap =
+        convertPackagesToMap(createPackageMap(rootDirectoryPath));
+    List<UriResolver> resolvers = <UriResolver>[];
+    resolvers.add(new DartUriResolver(findSdk(packageMap, options)));
+    if (packageMap != null) {
+      resolvers.add(new SdkExtUriResolver(packageMap));
+      resolvers.add(new PackageMapUriResolver(resourceProvider, packageMap));
+    }
+    resolvers.add(new ResourceUriResolver(resourceProvider));
+    return new SourceFactory(resolvers);
+  }
+
+  /**
+   * Use the given [packageMap] and [options] to locate the SDK.
+   */
+  DartSdk findSdk(
+      Map<String, List<Folder>> packageMap, AnalysisOptions options) {
+    if (packageMap != null) {
+      EmbedderYamlLocator locator = new EmbedderYamlLocator(packageMap);
+      Map<Folder, YamlMap> embedderYamls = locator.embedderYamls;
+      EmbedderSdk embedderSdk = new EmbedderSdk(embedderYamls);
+      if (embedderSdk.sdkLibraries.length > 0) {
+        List<String> paths = <String>[];
+        for (Folder folder in embedderYamls.keys) {
+          paths.add(folder
+              .getChildAssumingFile(EmbedderYamlLocator.EMBEDDER_FILE_NAME)
+              .path);
+        }
+        SdkDescription description = new SdkDescription(paths, options);
+        DartSdk dartSdk = sdkManager.getSdk(description, () {
+          embedderSdk.analysisOptions = options;
+          embedderSdk.useSummary = sdkManager.canUseSummaries;
+          return embedderSdk;
+        });
+        return dartSdk;
+      }
+    }
+    String sdkPath = sdkManager.defaultSdkDirectory;
+    SdkDescription description = new SdkDescription(<String>[sdkPath], options);
+    return sdkManager.getSdk(description, () {
+      DirectoryBasedDartSdk sdk =
+          new DirectoryBasedDartSdk(new JavaFile(sdkPath));
+      sdk.analysisOptions = options;
+      sdk.useSummary = sdkManager.canUseSummaries;
+      return sdk;
+    });
+  }
+
+  AnalysisOptions getAnalysisOptions(String rootDirectoryPath) {
+    AnalysisOptionsImpl options = new AnalysisOptionsImpl.from(defaultOptions);
+    File optionsFile = getOptionsFile(rootDirectoryPath);
+    if (optionsFile != null) {
+      Map<String, YamlNode> fileOptions =
+          new AnalysisOptionsProvider().getOptionsFromFile(optionsFile);
+      applyToAnalysisOptions(options, fileOptions);
+    }
+    return options;
+  }
+
+  File getOptionsFile(String rootDirectoryPath) {
+    if (defaultAnalysisOptionsFilePath != null) {
+      return resourceProvider.getFile(defaultAnalysisOptionsFilePath);
+    }
+    Folder root = resourceProvider.getFolder(rootDirectoryPath);
+    for (Folder folder = root; folder != null; folder = folder.parent) {
+      File file =
+          folder.getChildAssumingFile(AnalysisEngine.ANALYSIS_OPTIONS_FILE);
+      if (file.exists) {
+        return file;
+      }
+      file = folder
+          .getChildAssumingFile(AnalysisEngine.ANALYSIS_OPTIONS_YAML_FILE);
+      if (file.exists) {
+        return file;
+      }
+    }
+    return null;
+  }
+}
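
The hunk above defines the whole ContextBuilder surface used below. A minimal usage sketch, not part of the patch: the import paths and the way the three constructor arguments are obtained are assumptions, and the `.packages` path is purely illustrative.

    // Sketch only; import locations are assumed, and in practice the three
    // dependencies come from the analysis server or command-line analyzer setup.
    import 'package:analyzer/file_system/file_system.dart';
    import 'package:analyzer/src/context/builder.dart'; // assumed location of ContextBuilder
    import 'package:analyzer/src/generated/engine.dart';
    import 'package:analyzer/src/generated/sdk.dart';
    import 'package:analyzer/src/generated/source.dart' show ContentCache;

    AnalysisContext buildContextFor(
        String rootPath,
        ResourceProvider resourceProvider,
        DartSdkManager sdkManager,
        ContentCache contentCache) {
      ContextBuilder builder =
          new ContextBuilder(resourceProvider, sdkManager, contentCache)
            ..defaultOptions = new AnalysisOptionsImpl()
            // Hypothetical override; when left null, the normal .packages
            // lookup from step 1 of the class documentation is used instead.
            ..defaultPackageFilePath = '/work/project/.packages';
      return builder.buildContext(rootPath);
    }

Per the class documentation, buildContext then resolves the package map, locates the SDK (honoring any _embedder.yaml and _sdkext files), and applies the analysis options file it finds.
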
diff --git a/pkg/analyzer/lib/src/context/cache.dart b/pkg/analyzer/lib/src/context/cache.dart
index ae79615..1f0f42c 100644
--- a/pkg/analyzer/lib/src/context/cache.dart
+++ b/pkg/analyzer/lib/src/context/cache.dart
@@ -17,6 +17,12 @@
 import 'package:analyzer/task/model.dart';
 
 /**
+ * Return `true` if the [result] of the [target] should be flushed.
+ */
+typedef bool FlushResultFilter<V>(
+    AnalysisTarget target, ResultDescriptor<V> result);
+
+/**
  * Return `true` if the given [target] is a priority one.
  */
 typedef bool IsPriorityAnalysisTarget(AnalysisTarget target);
@@ -103,6 +109,15 @@
   }
 
   /**
+   * Flush results that satisfy the given [filter].
+   */
+  void flush(FlushResultFilter filter) {
+    for (CachePartition partition in _partitions) {
+      partition.flush(filter);
+    }
+  }
+
+  /**
    * Return the entry associated with the given [target].
    */
   CacheEntry get(AnalysisTarget target) {
@@ -392,6 +407,17 @@
   }
 
   /**
+   * Flush results that satisfy the given [filter].
+   */
+  void flush(FlushResultFilter filter) {
+    _resultMap.forEach((ResultDescriptor result, ResultData data) {
+      if (filter(target, result)) {
+        data.flush();
+      }
+    });
+  }
+
+  /**
    * Return the result data associated with the [descriptor], creating one if it
    * isn't there.
    */
@@ -599,6 +625,11 @@
         return;
       }
     }
+//    if (deltaResult != null && deltaResult != DeltaResult.KEEP_CONTINUE) {
+//      String indent = '  ' * level;
+//      String deltaResultName = deltaResult.toString().split('.').last;
+//      print('[$id]$indent$deltaResultName $descriptor for $target');
+//    }
     if (deltaResult == DeltaResult.INVALIDATE_NO_DELTA) {
       delta = null;
     }
@@ -998,6 +1029,15 @@
   }
 
   /**
+   * Flush results that satisfy the given [filter].
+   */
+  void flush(FlushResultFilter filter) {
+    for (CacheEntry entry in entryMap.values) {
+      entry.flush(filter);
+    }
+  }
+
+  /**
    * Return the entry associated with the given [target].
    */
   CacheEntry get(AnalysisTarget target) => entryMap[target];
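
The new FlushResultFilter hook threads through the cache classes above so callers can drop selected results without discarding whole entries. A sketch, assuming the first flush method shown belongs to AnalysisCache and using RESOLVED_UNIT from the analyzer task model as the result being shed:

    import 'package:analyzer/src/context/cache.dart';
    import 'package:analyzer/src/task/dart.dart' show RESOLVED_UNIT;
    import 'package:analyzer/task/model.dart';

    /// Flush fully resolved units, which are comparatively cheap to recompute,
    /// while keeping every other cached result.
    void flushResolvedUnits(AnalysisCache cache) {
      cache.flush((AnalysisTarget target, ResultDescriptor result) {
        return result == RESOLVED_UNIT;
      });
    }
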
diff --git a/pkg/analyzer/lib/src/context/context.dart b/pkg/analyzer/lib/src/context/context.dart
index 23cae4a..27098db 100644
--- a/pkg/analyzer/lib/src/context/context.dart
+++ b/pkg/analyzer/lib/src/context/context.dart
@@ -11,6 +11,7 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
+import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/plugin/task.dart';
 import 'package:analyzer/source/embedder.dart';
 import 'package:analyzer/src/cancelable_future.dart';
@@ -80,7 +81,10 @@
    */
   AnalysisOptionsImpl _options = new AnalysisOptionsImpl();
 
-  /// The embedder yaml locator for this context.
+  /**
+   * The embedder yaml locator for this context.
+   */
+  @deprecated
   EmbedderYamlLocator _embedderYamlLocator = new EmbedderYamlLocator(null);
 
   /**
@@ -225,6 +229,9 @@
    */
   CompilationUnit incrementalResolutionValidation_lastUnit;
 
+  @override
+  ResolverProvider fileResolverProvider;
+
   /**
    * Initialize a newly created analysis context.
    */
@@ -271,11 +278,18 @@
         ((options is AnalysisOptionsImpl)
             ? this._options.strongModeHints != options.strongModeHints
             : false) ||
+        ((options is AnalysisOptionsImpl)
+            ? this._options.implicitCasts != options.implicitCasts
+            : false) ||
+        ((options is AnalysisOptionsImpl)
+            ? this._options.implicitDynamic != options.implicitDynamic
+            : false) ||
         this._options.enableStrictCallChecks !=
             options.enableStrictCallChecks ||
         this._options.enableGenericMethods != options.enableGenericMethods ||
         this._options.enableAsync != options.enableAsync ||
-        this._options.enableSuperMixins != options.enableSuperMixins;
+        this._options.enableSuperMixins != options.enableSuperMixins ||
+        this._options.enableTrailingCommas != options.enableTrailingCommas;
     int cacheSize = options.cacheSize;
     if (this._options.cacheSize != cacheSize) {
       this._options.cacheSize = cacheSize;
@@ -291,6 +305,7 @@
     this._options.enableAsync = options.enableAsync;
     this._options.enableSuperMixins = options.enableSuperMixins;
     this._options.enableTiming = options.enableTiming;
+    this._options.enableTrailingCommas = options.enableTrailingCommas;
     this._options.hint = options.hint;
     this._options.incremental = options.incremental;
     this._options.incrementalApi = options.incrementalApi;
@@ -298,8 +313,12 @@
     this._options.lint = options.lint;
     this._options.preserveComments = options.preserveComments;
     this._options.strongMode = options.strongMode;
+    this._options.trackCacheDependencies = options.trackCacheDependencies;
+    this._options.finerGrainedInvalidation = options.finerGrainedInvalidation;
     if (options is AnalysisOptionsImpl) {
       this._options.strongModeHints = options.strongModeHints;
+      this._options.implicitCasts = options.implicitCasts;
+      this._options.implicitDynamic = options.implicitDynamic;
     }
     if (needsRecompute) {
       for (WorkManager workManager in workManagers) {
@@ -336,6 +355,7 @@
   @override
   DeclaredVariables get declaredVariables => _declaredVariables;
 
+  @deprecated
   @override
   EmbedderYamlLocator get embedderYamlLocator => _embedderYamlLocator;
 
@@ -466,13 +486,19 @@
 
   @override
   TypeProvider get typeProvider {
+    // The `AnalysisContextTarget.request` results go into the SDK partition,
+    // and the TYPE_PROVIDER result is computed and put into the SDK partition
+    // only by the first non-SDK analysis context. So, in order to reuse it
+    // in other analysis contexts, we need to ask for it from the cache.
+    _typeProvider ??= getResult(AnalysisContextTarget.request, TYPE_PROVIDER);
+    if (_typeProvider != null) {
+      return _typeProvider;
+    }
+
     // Make sure a task didn't accidentally try to call back into the context
     // to retrieve the type provider.
     assert(!driver.isTaskRunning);
 
-    if (_typeProvider != null) {
-      return _typeProvider;
-    }
     Source coreSource = sourceFactory.forUri(DartSdk.DART_CORE);
     if (coreSource == null) {
       throw new AnalysisException("Could not create a source for dart:core");
@@ -481,6 +507,7 @@
     if (coreElement == null) {
       throw new AnalysisException("Could not create an element for dart:core");
     }
+
     LibraryElement asyncElement;
     if (analysisOptions.enableAsync) {
       Source asyncSource = sourceFactory.forUri(DartSdk.DART_ASYNC);
@@ -493,8 +520,10 @@
             "Could not create an element for dart:async");
       }
     } else {
-      asyncElement = createMockAsyncLib(coreElement);
+      Source asyncSource = sourceFactory.forUri(DartSdk.DART_ASYNC);
+      asyncElement = createMockAsyncLib(coreElement, asyncSource);
     }
+
     _typeProvider = new TypeProviderImpl(coreElement, asyncElement);
     return _typeProvider;
   }
@@ -672,6 +701,7 @@
     CacheState state = entry.getState(descriptor);
     if (state == CacheState.FLUSHED || state == CacheState.INVALID) {
       driver.computeResult(target, descriptor);
+      entry = getCacheEntry(target);
     }
     state = entry.getState(descriptor);
     if (state == CacheState.ERROR) {
@@ -719,15 +749,14 @@
    * to stand in for a real one if one does not exist, facilitating the
    * creation of a type provider without dart:async.
    */
-  LibraryElement createMockAsyncLib(LibraryElement coreLibrary) {
+  LibraryElement createMockAsyncLib(
+      LibraryElement coreLibrary, Source asyncSource) {
     InterfaceType objType = coreLibrary.getType('Object').type;
 
     ClassElement _classElement(String typeName, [List<String> parameterNames]) {
       ClassElementImpl element =
           new ClassElementImpl.forNode(AstFactory.identifier3(typeName));
       element.supertype = objType;
-      InterfaceTypeImpl type = new InterfaceTypeImpl(element);
-      element.type = type;
       if (parameterNames != null) {
         int count = parameterNames.length;
         if (count > 0) {
@@ -744,7 +773,6 @@
             typeParameter.type = typeArguments[i];
           }
           element.typeParameters = typeParameters;
-          type.typeArguments = typeArguments;
         }
       }
       return element;
@@ -757,6 +785,8 @@
     asyncUnit.types = <ClassElement>[futureType.element, streamType.element];
     LibraryElementImpl mockLib = new LibraryElementImpl.forNode(
         this, AstFactory.libraryIdentifier2(["dart.async"]));
+    asyncUnit.librarySource = asyncSource;
+    asyncUnit.source = asyncSource;
     mockLib.definingCompilationUnit = asyncUnit;
     mockLib.publicNamespace =
         new NamespaceBuilder().createPublicNamespaceForLibrary(mockLib);
@@ -1045,16 +1075,26 @@
     if (entry == null) {
       return false;
     }
+    // If there were no "originalContents" in the content cache,
+    // use the contents of the file instead.
+    if (originalContents == null) {
+      try {
+        TimestampedData<String> fileContents = source.contents;
+        if (fileContents.modificationTime == entry.modificationTime) {
+          originalContents = fileContents.data;
+        }
+      } catch (e) {
+        // Ignore: if the file cannot be read, leave originalContents as null.
+      }
+    }
     bool changed = newContents != originalContents;
     if (newContents != null) {
       if (changed) {
+        entry.modificationTime = _contentCache.getModificationStamp(source);
         if (!analysisOptions.incremental ||
             !_tryPoorMansIncrementalResolution(source, newContents)) {
           // Don't compare with old contents because the cache has already been
           // updated, and we know at this point that it changed.
           _sourceChanged(source, compareWithOld: false);
         }
-        entry.modificationTime = _contentCache.getModificationStamp(source);
         entry.setValue(CONTENT, newContents, TargetedResult.EMPTY_LIST);
       } else {
         entry.modificationTime = _contentCache.getModificationStamp(source);
@@ -1848,8 +1888,7 @@
     }
     // We need to invalidate the cache.
     {
-      Object delta = null;
-      if (AnalysisEngine.instance.limitInvalidationInTaskModel &&
+      if (analysisOptions.finerGrainedInvalidation &&
           AnalysisEngine.isDartFileName(source.fullName)) {
         // TODO(scheglov) Incorrect implementation in general.
         entry.setState(TOKEN_STREAM, CacheState.FLUSHED);
@@ -1857,8 +1896,13 @@
         List<Source> librarySources = getLibrariesContaining(source);
         if (librarySources.length == 1) {
           Source librarySource = librarySources[0];
-          CompilationUnit oldUnit =
-              getResolvedCompilationUnit2(source, librarySource);
+          // Try to find an old unit that has the element model.
+          CacheEntry unitEntry =
+              getCacheEntry(new LibrarySpecificUnit(librarySource, source));
+          CompilationUnit oldUnit = RESOLVED_UNIT_RESULTS
+              .map(unitEntry.getValue)
+              .firstWhere((unit) => unit != null, orElse: () => null);
+          // If we have the old unit, we can try to update it.
           if (oldUnit != null) {
             CompilationUnit newUnit = parseCompilationUnit(source);
             IncrementalCompilationUnitElementBuilder builder =
@@ -1868,12 +1912,17 @@
             if (!unitDelta.hasDirectiveChange) {
               DartDelta dartDelta = new DartDelta(source);
               dartDelta.hasDirectiveChange = unitDelta.hasDirectiveChange;
-              unitDelta.addedDeclarations.forEach(dartDelta.elementAdded);
-              unitDelta.removedDeclarations.forEach(dartDelta.elementRemoved);
-//              print(
-//                  'dartDelta: add=${dartDelta.addedNames} remove=${dartDelta.removedNames}');
-              delta = dartDelta;
-              entry.setState(CONTENT, CacheState.INVALID, delta: delta);
+              unitDelta.addedDeclarations.forEach(dartDelta.elementChanged);
+              unitDelta.removedDeclarations.forEach(dartDelta.elementChanged);
+              unitDelta.classDeltas.values.forEach(dartDelta.classChanged);
+              // Add other names in the library that are changed transitively.
+              {
+                ReferencedNames referencedNames = new ReferencedNames(source);
+                new ReferencedNamesBuilder(referencedNames).build(oldUnit);
+                dartDelta.addChangedElements(referencedNames);
+              }
+              // Invalidate using the prepared DartDelta.
+              entry.setState(CONTENT, CacheState.INVALID, delta: dartDelta);
               return;
             }
           }
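
With the context.dart hunks above, implicitCasts, implicitDynamic and enableTrailingCommas now participate in the recompute check and are copied when options are assigned. A sketch of flipping the strong-mode flags on an existing, already configured context:

    import 'package:analyzer/src/generated/engine.dart';

    void disableImplicitStrongModeLoosening(AnalysisContext context) {
      AnalysisOptionsImpl options =
          new AnalysisOptionsImpl.from(context.analysisOptions)
            ..implicitCasts = false
            ..implicitDynamic = false;
      // Assigning changed options marks previously computed results for
      // recomputation, per the comparison added above.
      context.analysisOptions = options;
    }
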
diff --git a/pkg/analyzer/lib/src/context/context_factory.dart b/pkg/analyzer/lib/src/context/context_factory.dart
new file mode 100644
index 0000000..b96100d
--- /dev/null
+++ b/pkg/analyzer/lib/src/context/context_factory.dart
@@ -0,0 +1,136 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.src.context_factory;
+
+import 'dart:convert';
+import 'dart:core' hide Resource;
+import 'dart:io' as io;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:yaml/yaml.dart';
+
+/// (Placeholder)
+abstract class ContextFactory {
+  /// Create an analysis context for the given [source] directory or file, with
+  /// the given [defaultOptions].
+  AnalysisContext createContext(
+      io.FileSystemEntity source, AnalysisOptions defaultOptions);
+}
+
+/// Processes package maps, extracting SDK embedders and extenders, creating a
+/// consolidated [libraryMap].
+class PackageMapProcessor {
+  static const String _EMBEDDED_LIB_MAP_KEY = 'embedded_libs';
+  static const String _EMBEDDER_FILE_NAME = '_embedder.yaml';
+  static const String _SDK_EXT_NAME = '_sdkext';
+
+  /// Map of processed embedder libraries.
+  final LibraryMap embeddedLibraries = new LibraryMap();
+
+  /// Map of processed SDK extension libraries.
+  final LibraryMap extendedLibraries = new LibraryMap();
+
+  /// Combined map of processed libraries.
+  LibraryMap get libraryMap {
+    LibraryMap libraryMap = new LibraryMap();
+
+    // Add extenders first; embedders take precedence and may overwrite them.
+    for (String uri in extendedLibraries.uris) {
+      libraryMap.setLibrary(uri, extendedLibraries.getLibrary(uri));
+    }
+    for (String uri in embeddedLibraries.uris) {
+      libraryMap.setLibrary(uri, embeddedLibraries.getLibrary(uri));
+    }
+    return libraryMap;
+  }
+
+  /// Create a processor for the given [packageMap].
+  PackageMapProcessor(Map<String, List<Folder>> packageMap) {
+    packageMap?.forEach(_processPackage);
+  }
+
+  /// Whether the package map contains an SDK embedder.
+  bool get hasEmbedder => embeddedLibraries.size() > 0;
+
+  /// Whether the package map contains an SDK extension.
+  bool get hasSdkExtension => extendedLibraries.size() > 0;
+
+  void _processEmbedderYaml(String embedderYaml, Folder libDir) {
+    try {
+      YamlNode map = loadYaml(embedderYaml);
+      if (map is YamlMap) {
+        YamlNode embeddedLibs = map[_EMBEDDED_LIB_MAP_KEY];
+        if (embeddedLibs is YamlMap) {
+          embeddedLibs.forEach(
+              (k, v) => _processMapping(embeddedLibraries, k, v, libDir));
+        }
+      }
+    } catch (_) {
+      // Ignored.
+    }
+  }
+
+  void _processMapping(
+      LibraryMap libraryMap, String name, String file, Folder libDir) {
+    if (!_hasDartPrefix(name)) {
+      // SDK libraries must begin with 'dart:'.
+      return;
+    }
+    if (libraryMap.getLibrary(name) != null) {
+      // Libraries can't be redefined.
+      return;
+    }
+    String libPath = libDir.canonicalizePath(file);
+    SdkLibraryImpl library = new SdkLibraryImpl(name)..path = libPath;
+    libraryMap.setLibrary(name, library);
+  }
+
+  void _processPackage(String name, List<Folder> libDirs) {
+    for (Folder libDir in libDirs) {
+      String embedderYaml = _readEmbedderYaml(libDir);
+      if (embedderYaml != null) {
+        _processEmbedderYaml(embedderYaml, libDir);
+      }
+      String sdkExt = _readDotSdkExt(libDir);
+      if (sdkExt != null) {
+        _processSdkExt(sdkExt, libDir);
+      }
+    }
+  }
+
+  void _processSdkExt(String sdkExtJSON, Folder libDir) {
+    try {
+      var sdkExt = JSON.decode(sdkExtJSON);
+      if (sdkExt is Map) {
+        sdkExt.forEach(
+            (k, v) => _processMapping(extendedLibraries, k, v, libDir));
+      }
+    } catch (_) {
+      // Ignored.
+    }
+  }
+
+  static bool _hasDartPrefix(String uri) =>
+      uri.startsWith(DartSdk.DART_LIBRARY_PREFIX);
+
+  static String _readDotSdkExt(Folder libDir) =>
+      _safeRead(libDir.getChild(_SDK_EXT_NAME));
+
+  static String _readEmbedderYaml(Folder libDir) =>
+      _safeRead(libDir.getChild(_EMBEDDER_FILE_NAME));
+
+  static String _safeRead(Resource file) {
+    try {
+      if (file is File) {
+        return file.readAsStringSync();
+      }
+    } on FileSystemException {
+      // File can't be read.
+    }
+    return null;
+  }
+}
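
A sketch of driving the new PackageMapProcessor. The resource provider, the 'mojo' package name and its lib path are illustrative assumptions; only the PackageMapProcessor and LibraryMap members used here appear in the file above.

    import 'package:analyzer/file_system/file_system.dart';
    import 'package:analyzer/file_system/physical_file_system.dart';
    import 'package:analyzer/src/context/context_factory.dart';
    import 'package:analyzer/src/generated/sdk.dart';

    void printSdkAdditions() {
      ResourceProvider provider = PhysicalResourceProvider.INSTANCE;
      // Hypothetical single-package map; real maps come from .packages handling.
      Map<String, List<Folder>> packageMap = <String, List<Folder>>{
        'mojo': [provider.getFolder('/path/to/mojo/lib')],
      };
      PackageMapProcessor processor = new PackageMapProcessor(packageMap);
      if (processor.hasEmbedder || processor.hasSdkExtension) {
        // Embedded libraries win over _sdkext entries for the same dart: URI.
        LibraryMap libraries = processor.libraryMap;
        for (String uri in libraries.uris) {
          print('$uri -> ${libraries.getLibrary(uri).path}');
        }
      }
    }
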
diff --git a/pkg/analyzer/lib/src/dart/ast/ast.dart b/pkg/analyzer/lib/src/dart/ast/ast.dart
index 5e2b646..97defa6 100644
--- a/pkg/analyzer/lib/src/dart/ast/ast.dart
+++ b/pkg/analyzer/lib/src/dart/ast/ast.dart
@@ -392,6 +392,9 @@
     ..addAll(_arguments)
     ..add(rightParenthesis);
 
+  List<ParameterElement> get correspondingPropagatedParameters =>
+      _correspondingPropagatedParameters;
+
   @override
   void set correspondingPropagatedParameters(
       List<ParameterElement> parameters) {
@@ -402,6 +405,9 @@
     _correspondingPropagatedParameters = parameters;
   }
 
+  List<ParameterElement> get correspondingStaticParameters =>
+      _correspondingStaticParameters;
+
   @override
   void set correspondingStaticParameters(List<ParameterElement> parameters) {
     if (parameters.length != _arguments.length) {
@@ -9037,6 +9043,10 @@
         return false;
       }
     }
+    if (parent is ConstructorFieldInitializer &&
+        identical(parent.fieldName, target)) {
+      return false;
+    }
     if (parent is ForEachStatement) {
       if (identical(parent.identifier, target)) {
         return false;
diff --git a/pkg/analyzer/lib/src/dart/ast/token.dart b/pkg/analyzer/lib/src/dart/ast/token.dart
index 9828113..baae1b8 100644
--- a/pkg/analyzer/lib/src/dart/ast/token.dart
+++ b/pkg/analyzer/lib/src/dart/ast/token.dart
@@ -569,5 +569,6 @@
   }
 
   @override
-  Token copy() => new TokenWithComment(type, offset, precedingComments);
+  Token copy() =>
+      new TokenWithComment(type, offset, copyComments(precedingComments));
 }
diff --git a/pkg/analyzer/lib/src/dart/ast/utilities.dart b/pkg/analyzer/lib/src/dart/ast/utilities.dart
index e3e379f..53f9109 100644
--- a/pkg/analyzer/lib/src/dart/ast/utilities.dart
+++ b/pkg/analyzer/lib/src/dart/ast/utilities.dart
@@ -994,6 +994,37 @@
   AstNode _other;
 
   /**
+   * Notify that [first] and [second] have different lengths.
+   * This implementation returns `false`. Subclasses can override and throw.
+   */
+  bool failDifferentLength(List first, List second) {
+    return false;
+  }
+
+  /**
+   * Check whether [second] is also `null` when [first] is `null`.
+   * Subclasses can override and throw.
+   */
+  bool failIfNotNull(Object first, Object second) {
+    return second == null;
+  }
+
+  /**
+   * Notify that [first] is not `null` while [second] is `null`.
+   * This implementation returns `false`. Subclasses can override and throw.
+   */
+  bool failIsNull(Object first, Object second) {
+    return false;
+  }
+
+  /**
+   * Notify that [first] and [second] have different types.
+   * This implementation returns `false`. Subclasses can override and throw.
+   */
+  bool failRuntimeType(Object first, Object second) {
+    return false;
+  }
+
+  /**
    * Return `true` if the [first] node and the [second] node have the same
    * structure.
    *
@@ -1002,11 +1033,11 @@
    */
   bool isEqualNodes(AstNode first, AstNode second) {
     if (first == null) {
-      return second == null;
+      return failIfNotNull(first, second);
     } else if (second == null) {
-      return false;
+      return failIsNull(first, second);
     } else if (first.runtimeType != second.runtimeType) {
-      return false;
+      return failRuntimeType(first, second);
     }
     _other = second;
     return first.accept(this);
@@ -1021,17 +1052,24 @@
    */
   bool isEqualTokens(Token first, Token second) {
     if (first == null) {
-      return second == null;
+      return failIfNotNull(first, second);
     } else if (second == null) {
-      return false;
+      return failIsNull(first, second);
     } else if (identical(first, second)) {
       return true;
     }
-    return first.offset == second.offset &&
-        first.length == second.length &&
-        first.lexeme == second.lexeme;
+    return isEqualTokensNotNull(first, second);
   }
 
+  /**
+   * Return `true` if the [first] token and the [second] token have the same
+   * structure. Both [first] and [second] must not be `null`.
+   */
+  bool isEqualTokensNotNull(Token first, Token second) =>
+      first.offset == second.offset &&
+      first.length == second.length &&
+      first.lexeme == second.lexeme;
+
   @override
   bool visitAdjacentStrings(AdjacentStrings node) {
     AdjacentStrings other = _other as AdjacentStrings;
@@ -2016,13 +2054,13 @@
    */
   bool _isEqualNodeLists(NodeList first, NodeList second) {
     if (first == null) {
-      return second == null;
+      return failIfNotNull(first, second);
     } else if (second == null) {
-      return false;
+      return failIsNull(first, second);
     }
     int size = first.length;
     if (second.length != size) {
-      return false;
+      return failDifferentLength(first, second);
     }
     for (int i = 0; i < size; i++) {
       if (!isEqualNodes(first[i], second[i])) {
@@ -2039,7 +2077,7 @@
   bool _isEqualTokenLists(List<Token> first, List<Token> second) {
     int length = first.length;
     if (second.length != length) {
-      return false;
+      return failDifferentLength(first, second);
     }
     for (int i = 0; i < length; i++) {
       if (!isEqualTokens(first[i], second[i])) {
@@ -2501,6 +2539,9 @@
       node.element;
 
   @override
+  Element visitExportDirective(ExportDirective node) => node.element;
+
+  @override
   Element visitFunctionDeclaration(FunctionDeclaration node) => node.element;
 
   @override
@@ -2532,6 +2573,8 @@
         if (element is LibraryElement) {
           return element.definingCompilationUnit;
         }
+      } else if (grandParent is LibraryDirective) {
+        return grandParent.element;
       }
     }
     return node.bestElement;
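
The fail* hooks added above let a subclass report the first structural difference instead of silently returning `false`. A sketch, assuming the enclosing class is AstComparator in this file (its declaration is not visible in these hunks) and using an illustrative exception:

    import 'package:analyzer/src/dart/ast/utilities.dart';

    /// Throws on the first difference so the mismatch is easy to locate,
    /// e.g. new ThrowingAstComparator().isEqualNodes(expected, actual).
    class ThrowingAstComparator extends AstComparator {
      @override
      bool failDifferentLength(List first, List second) {
        throw new StateError(
            'List lengths differ: ${first.length} vs ${second.length}');
      }

      @override
      bool failIsNull(Object first, Object second) {
        throw new StateError('No counterpart found for $first');
      }

      @override
      bool failRuntimeType(Object first, Object second) {
        throw new StateError(
            '${first.runtimeType} differs from ${second.runtimeType}');
      }
    }
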
diff --git a/pkg/analyzer/lib/src/dart/constant/evaluation.dart b/pkg/analyzer/lib/src/dart/constant/evaluation.dart
index 5c5a74d..0c8288a 100644
--- a/pkg/analyzer/lib/src/dart/constant/evaluation.dart
+++ b/pkg/analyzer/lib/src/dart/constant/evaluation.dart
@@ -347,7 +347,14 @@
         // This could happen in the event of invalid code.  The error will be
         // reported at constant evaluation time.
       }
-      if (constNode.arguments != null) {
+      if (constNode == null) {
+        // We cannot determine what element the annotation is on, nor the offset
+        // of the annotation, so there's not a lot of information in this
+        // message, but it's better than getting an exception.
+        // https://github.com/dart-lang/sdk/issues/26811
+        AnalysisEngine.instance.logger.logInformation(
+            'No annotationAst for $constant in ${constant.compilationUnit}');
+      } else if (constNode.arguments != null) {
         constNode.arguments.accept(referenceFinder);
       }
     } else if (constant is VariableElement) {
@@ -1123,56 +1130,52 @@
       }
     }
     // evaluate operator
-    while (true) {
-      if (operatorType == TokenType.AMPERSAND) {
-        return _dartObjectComputer.bitAnd(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.AMPERSAND_AMPERSAND) {
-        return _dartObjectComputer.logicalAnd(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.BANG_EQ) {
-        return _dartObjectComputer.notEqual(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.BAR) {
-        return _dartObjectComputer.bitOr(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.BAR_BAR) {
-        return _dartObjectComputer.logicalOr(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.CARET) {
-        return _dartObjectComputer.bitXor(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.EQ_EQ) {
-        return _dartObjectComputer.equalEqual(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.GT) {
-        return _dartObjectComputer.greaterThan(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.GT_EQ) {
-        return _dartObjectComputer.greaterThanOrEqual(
-            node, leftResult, rightResult);
-      } else if (operatorType == TokenType.GT_GT) {
-        return _dartObjectComputer.shiftRight(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.LT) {
-        return _dartObjectComputer.lessThan(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.LT_EQ) {
-        return _dartObjectComputer.lessThanOrEqual(
-            node, leftResult, rightResult);
-      } else if (operatorType == TokenType.LT_LT) {
-        return _dartObjectComputer.shiftLeft(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.MINUS) {
-        return _dartObjectComputer.minus(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.PERCENT) {
-        return _dartObjectComputer.remainder(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.PLUS) {
-        return _dartObjectComputer.add(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.STAR) {
-        return _dartObjectComputer.times(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.SLASH) {
-        return _dartObjectComputer.divide(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.TILDE_SLASH) {
-        return _dartObjectComputer.integerDivide(node, leftResult, rightResult);
-      } else if (operatorType == TokenType.QUESTION_QUESTION) {
-        return _dartObjectComputer.questionQuestion(
-            node, leftResult, rightResult);
-      } else {
-        // TODO(brianwilkerson) Figure out which error to report.
-        _error(node, null);
-        return null;
-      }
-      break;
+    if (operatorType == TokenType.AMPERSAND) {
+      return _dartObjectComputer.bitAnd(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.AMPERSAND_AMPERSAND) {
+      return _dartObjectComputer.logicalAnd(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.BANG_EQ) {
+      return _dartObjectComputer.notEqual(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.BAR) {
+      return _dartObjectComputer.bitOr(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.BAR_BAR) {
+      return _dartObjectComputer.logicalOr(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.CARET) {
+      return _dartObjectComputer.bitXor(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.EQ_EQ) {
+      return _dartObjectComputer.equalEqual(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.GT) {
+      return _dartObjectComputer.greaterThan(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.GT_EQ) {
+      return _dartObjectComputer.greaterThanOrEqual(
+          node, leftResult, rightResult);
+    } else if (operatorType == TokenType.GT_GT) {
+      return _dartObjectComputer.shiftRight(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.LT) {
+      return _dartObjectComputer.lessThan(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.LT_EQ) {
+      return _dartObjectComputer.lessThanOrEqual(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.LT_LT) {
+      return _dartObjectComputer.shiftLeft(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.MINUS) {
+      return _dartObjectComputer.minus(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.PERCENT) {
+      return _dartObjectComputer.remainder(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.PLUS) {
+      return _dartObjectComputer.add(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.STAR) {
+      return _dartObjectComputer.times(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.SLASH) {
+      return _dartObjectComputer.divide(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.TILDE_SLASH) {
+      return _dartObjectComputer.integerDivide(node, leftResult, rightResult);
+    } else if (operatorType == TokenType.QUESTION_QUESTION) {
+      return _dartObjectComputer.questionQuestion(
+          node, leftResult, rightResult);
+    } else {
+      // TODO(brianwilkerson) Figure out which error to report.
+      _error(node, null);
+      return null;
     }
   }
 
@@ -1399,19 +1402,16 @@
       _error(node, CompileTimeErrorCode.CONST_EVAL_THROWS_EXCEPTION);
       return null;
     }
-    while (true) {
-      if (node.operator.type == TokenType.BANG) {
-        return _dartObjectComputer.logicalNot(node, operand);
-      } else if (node.operator.type == TokenType.TILDE) {
-        return _dartObjectComputer.bitNot(node, operand);
-      } else if (node.operator.type == TokenType.MINUS) {
-        return _dartObjectComputer.negated(node, operand);
-      } else {
-        // TODO(brianwilkerson) Figure out which error to report.
-        _error(node, null);
-        return null;
-      }
-      break;
+    if (node.operator.type == TokenType.BANG) {
+      return _dartObjectComputer.logicalNot(node, operand);
+    } else if (node.operator.type == TokenType.TILDE) {
+      return _dartObjectComputer.bitNot(node, operand);
+    } else if (node.operator.type == TokenType.MINUS) {
+      return _dartObjectComputer.negated(node, operand);
+    } else {
+      // TODO(brianwilkerson) Figure out which error to report.
+      _error(node, null);
+      return null;
     }
   }
 
diff --git a/pkg/analyzer/lib/src/dart/element/builder.dart b/pkg/analyzer/lib/src/dart/element/builder.dart
index fb59f7b..3b09bef 100644
--- a/pkg/analyzer/lib/src/dart/element/builder.dart
+++ b/pkg/analyzer/lib/src/dart/element/builder.dart
@@ -350,6 +350,13 @@
     _currentHolder = initialHolder;
   }
 
+  /**
+   * Prepare for incremental resolution of a function body.
+   */
+  void initForFunctionBodyIncrementalResolution() {
+    _inFunction = true;
+  }
+
   @override
   Object visitAnnotation(Annotation node) {
     // Although it isn't valid to do so because closures are not constant
@@ -421,12 +428,7 @@
     ClassElementImpl element = new ClassElementImpl.forNode(className);
     _setCodeRange(element, node);
     element.metadata = _createElementAnnotations(node.metadata);
-    List<TypeParameterElement> typeParameters = holder.typeParameters;
-    List<DartType> typeArguments = _createTypeParameterTypes(typeParameters);
-    InterfaceTypeImpl interfaceType = new InterfaceTypeImpl(element);
-    interfaceType.typeArguments = typeArguments;
-    element.type = interfaceType;
-    element.typeParameters = typeParameters;
+    element.typeParameters = holder.typeParameters;
     setElementDocumentationComment(element, node);
     element.abstract = node.isAbstract;
     element.accessors = holder.accessors;
@@ -450,19 +452,6 @@
     return null;
   }
 
-  /**
-   * Implementation of this method should be synchronized with
-   * [visitClassDeclaration].
-   */
-  void visitClassDeclarationIncrementally(ClassDeclaration node) {
-    //
-    // Process field declarations before constructors and methods so that field
-    // formal parameters can be correctly resolved to their fields.
-    //
-    ClassElement classElement = node.element;
-    _buildFieldMap(classElement.fields);
-  }
-
   @override
   Object visitClassTypeAlias(ClassTypeAlias node) {
     ElementHolder holder = new ElementHolder();
@@ -473,12 +462,7 @@
     element.metadata = _createElementAnnotations(node.metadata);
     element.abstract = node.abstractKeyword != null;
     element.mixinApplication = true;
-    List<TypeParameterElement> typeParameters = holder.typeParameters;
-    element.typeParameters = typeParameters;
-    List<DartType> typeArguments = _createTypeParameterTypes(typeParameters);
-    InterfaceTypeImpl interfaceType = new InterfaceTypeImpl(element);
-    interfaceType.typeArguments = typeArguments;
-    element.type = interfaceType;
+    element.typeParameters = holder.typeParameters;
     setElementDocumentationComment(element, node);
     _currentHolder.addType(element);
     className.staticElement = element;
@@ -617,18 +601,11 @@
   @override
   Object visitEnumDeclaration(EnumDeclaration node) {
     SimpleIdentifier enumName = node.name;
-    ClassElementImpl enumElement = new ClassElementImpl.forNode(enumName);
+    EnumElementImpl enumElement = new EnumElementImpl.forNode(enumName);
     _setCodeRange(enumElement, node);
     enumElement.metadata = _createElementAnnotations(node.metadata);
-    enumElement.enum2 = true;
     setElementDocumentationComment(enumElement, node);
-    InterfaceTypeImpl enumType = new InterfaceTypeImpl(enumElement);
-    enumElement.type = enumType;
-    // The equivalent code for enums in the spec shows a single constructor,
-    // but that constructor is not callable (since it is a compile-time error
-    // to subclass, mix-in, implement, or explicitly instantiate an enum).  So
-    // we represent this as having no constructors.
-    enumElement.constructors = ConstructorElement.EMPTY_LIST;
+    InterfaceTypeImpl enumType = enumElement.type;
     //
     // Build the elements for the constants. These are minimal elements; the
     // rest of the constant elements (and elements for other fields) must be
@@ -645,7 +622,7 @@
       constantField.type = enumType;
       setElementDocumentationComment(constantField, constant);
       fields.add(constantField);
-      _createGetter(constantField);
+      new PropertyAccessorElementImpl_ImplicitGetter(constantField);
       constantName.staticElement = constantField;
     }
     enumElement.fields = fields;
@@ -1247,25 +1224,13 @@
       holder.validate();
     }
     if (element is PropertyInducingElementImpl) {
-      PropertyAccessorElementImpl getter =
-          new PropertyAccessorElementImpl.forVariable(element);
-      getter.getter = true;
-      if (element.hasImplicitType) {
-        getter.hasImplicitReturnType = true;
-      }
+      PropertyAccessorElementImpl_ImplicitGetter getter =
+          new PropertyAccessorElementImpl_ImplicitGetter(element);
       _currentHolder.addAccessor(getter);
-      element.getter = getter;
       if (!isConst && !isFinal) {
-        PropertyAccessorElementImpl setter =
-            new PropertyAccessorElementImpl.forVariable(element);
-        setter.setter = true;
-        ParameterElementImpl parameter =
-            new ParameterElementImpl("_${element.name}", element.nameOffset);
-        parameter.synthetic = true;
-        parameter.parameterKind = ParameterKind.REQUIRED;
-        setter.parameters = <ParameterElement>[parameter];
+        PropertyAccessorElementImpl_ImplicitSetter setter =
+            new PropertyAccessorElementImpl_ImplicitSetter(element);
         _currentHolder.addAccessor(setter);
-        element.setter = setter;
       }
     }
     return null;
@@ -1318,9 +1283,7 @@
     ConstructorElementImpl constructor =
         new ConstructorElementImpl.forNode(null);
     constructor.synthetic = true;
-    constructor.returnType = definingClass.type;
     constructor.enclosingElement = definingClass;
-    constructor.type = new FunctionTypeImpl(constructor);
     return <ConstructorElement>[constructor];
   }
 
@@ -1342,18 +1305,6 @@
   }
 
   /**
-   * Create a getter that corresponds to the given [field].
-   */
-  void _createGetter(FieldElementImpl field) {
-    PropertyAccessorElementImpl getter =
-        new PropertyAccessorElementImpl.forVariable(field);
-    getter.getter = true;
-    getter.returnType = field.type;
-    getter.type = new FunctionTypeImpl(getter);
-    field.getter = getter;
-  }
-
-  /**
    * Create the types associated with the given type parameters, setting the type of each type
    * parameter, and return an array of types corresponding to the given parameters.
    *
diff --git a/pkg/analyzer/lib/src/dart/element/element.dart b/pkg/analyzer/lib/src/dart/element/element.dart
index 5e72951..4619738 100644
--- a/pkg/analyzer/lib/src/dart/element/element.dart
+++ b/pkg/analyzer/lib/src/dart/element/element.dart
@@ -14,6 +14,7 @@
 import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/dart/element/visitor.dart';
 import 'package:analyzer/src/dart/ast/utilities.dart';
+import 'package:analyzer/src/dart/constant/value.dart';
 import 'package:analyzer/src/dart/element/handle.dart';
 import 'package:analyzer/src/dart/element/type.dart';
 import 'package:analyzer/src/generated/constant.dart' show EvaluationResultImpl;
@@ -25,6 +26,7 @@
 import 'package:analyzer/src/generated/resolver.dart';
 import 'package:analyzer/src/generated/sdk.dart' show DartSdk;
 import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/generated/testing/ast_factory.dart';
 import 'package:analyzer/src/generated/utilities_collection.dart';
 import 'package:analyzer/src/generated/utilities_dart.dart';
 import 'package:analyzer/src/generated/utilities_general.dart';
@@ -32,140 +34,43 @@
 import 'package:analyzer/src/task/dart.dart';
 
 /**
- * For AST nodes that could be in both the getter and setter contexts
- * ([IndexExpression]s and [SimpleIdentifier]s), the additional resolved
- * elements are stored in the AST node, in an [AuxiliaryElements]. Because
- * resolved elements are either statically resolved or resolved using propagated
- * type information, this class is a wrapper for a pair of [ExecutableElement]s,
- * not just a single [ExecutableElement].
- */
-class AuxiliaryElements {
-  /**
-   * The element based on propagated type information, or `null` if the AST
-   * structure has not been resolved or if the node could not be resolved.
-   */
-  final ExecutableElement propagatedElement;
-
-  /**
-   * The element based on static type information, or `null` if the AST
-   * structure has not been resolved or if the node could not be resolved.
-   */
-  final ExecutableElement staticElement;
-
-  /**
-   * Initialize a newly created pair to have both the [staticElement] and the
-   * [propagatedElement].
-   */
-  AuxiliaryElements(this.staticElement, this.propagatedElement);
-}
-
-/**
  * A concrete implementation of a [ClassElement].
  */
-class ClassElementImpl extends ElementImpl
-    with TypeParameterizedElementMixin
+abstract class AbstractClassElementImpl extends ElementImpl
     implements ClassElement {
   /**
-   * The unlinked representation of the class in the summary.
-   */
-  final UnlinkedClass _unlinkedClass;
-
-  /**
    * A list containing all of the accessors (getters and setters) contained in
    * this class.
    */
-  List<PropertyAccessorElement> _accessors = PropertyAccessorElement.EMPTY_LIST;
-
-  /**
-   * For classes which are not mixin applications, a list containing all of the
-   * constructors contained in this class, or `null` if the list of
-   * constructors has not yet been built.
-   *
-   * For classes which are mixin applications, the list of constructors is
-   * computed on the fly by the [constructors] getter, and this field is
-   * `null`.
-   */
-  List<ConstructorElement> _constructors;
+  List<PropertyAccessorElement> _accessors;
 
   /**
    * A list containing all of the fields contained in this class.
    */
-  List<FieldElement> _fields = FieldElement.EMPTY_LIST;
-
-  /**
-   * A list containing all of the mixins that are applied to the class being
-   * extended in order to derive the superclass of this class.
-   */
-  @override
-  List<InterfaceType> mixins = InterfaceType.EMPTY_LIST;
-
-  /**
-   * A list containing all of the interfaces that are implemented by this class.
-   */
-  @override
-  List<InterfaceType> interfaces = InterfaceType.EMPTY_LIST;
-
-  /**
-   * A list containing all of the methods contained in this class.
-   */
-  List<MethodElement> _methods = MethodElement.EMPTY_LIST;
-
-  /**
-   * The superclass of the class, or `null` if the class does not have an
-   * explicit superclass.
-   */
-  @override
-  InterfaceType supertype;
-
-  /**
-   * The type defined by the class.
-   */
-  @override
-  InterfaceType type;
-
-  /**
-   * A list containing all of the type parameters defined for this class.
-   */
-  List<TypeParameterElement> _typeParameters = TypeParameterElement.EMPTY_LIST;
-
-  /**
-   * A flag indicating whether the types associated with the instance members of
-   * this class have been inferred.
-   */
-  bool hasBeenInferred = false;
+  List<FieldElement> _fields;
 
   /**
    * Initialize a newly created class element to have the given [name] at the
    * given [offset] in the file that contains the declaration of this element.
    */
-  ClassElementImpl(String name, int offset)
-      : _unlinkedClass = null,
-        super(name, offset);
+  AbstractClassElementImpl(String name, int offset) : super(name, offset);
 
   /**
    * Initialize a newly created class element to have the given [name].
    */
-  ClassElementImpl.forNode(Identifier name)
-      : _unlinkedClass = null,
-        super.forNode(name);
+  AbstractClassElementImpl.forNode(Identifier name) : super.forNode(name);
 
   /**
    * Initialize using the given serialized information.
    */
-  ClassElementImpl.forSerialized(
-      this._unlinkedClass, CompilationUnitElementImpl enclosingUnit)
+  AbstractClassElementImpl.forSerialized(
+      CompilationUnitElementImpl enclosingUnit)
       : super.forSerialized(enclosingUnit);
 
-  /**
-   * Set whether this class is abstract.
-   */
-  void set abstract(bool isAbstract) {
-    assert(_unlinkedClass == null);
-    setModifier(Modifier.ABSTRACT, isAbstract);
-  }
-
   @override
-  List<PropertyAccessorElement> get accessors => _accessors;
+  List<PropertyAccessorElement> get accessors {
+    return _accessors ?? const <PropertyAccessorElement>[];
+  }
 
   /**
    * Set the accessors contained in this class to the given [accessors].
@@ -178,129 +83,10 @@
   }
 
   @override
-  List<InterfaceType> get allSupertypes {
-    List<InterfaceType> list = new List<InterfaceType>();
-    _collectAllSupertypes(list);
-    return list;
-  }
-
-  @override
-  int get codeLength {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass.codeRange?.length;
-    }
-    return super.codeLength;
-  }
-
-  @override
-  int get codeOffset {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass.codeRange?.offset;
-    }
-    return super.codeOffset;
-  }
-
-  @override
-  List<ConstructorElement> get constructors {
-    if (!isMixinApplication) {
-      assert(_constructors != null);
-      return _constructors ?? ConstructorElement.EMPTY_LIST;
-    }
-    return _computeMixinAppConstructors();
-  }
-
-  /**
-   * Set the constructors contained in this class to the given [constructors].
-   *
-   * Should only be used for class elements that are not mixin applications.
-   */
-  void set constructors(List<ConstructorElement> constructors) {
-    assert(!isMixinApplication);
-    for (ConstructorElement constructor in constructors) {
-      (constructor as ConstructorElementImpl).enclosingElement = this;
-    }
-    this._constructors = constructors;
-  }
-
-  @override
   String get displayName => name;
 
   @override
-  SourceRange get docRange {
-    if (_unlinkedClass != null) {
-      UnlinkedDocumentationComment comment =
-          _unlinkedClass.documentationComment;
-      return comment != null
-          ? new SourceRange(comment.offset, comment.length)
-          : null;
-    }
-    return super.docRange;
-  }
-
-  @override
-  String get documentationComment {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass?.documentationComment?.text;
-    }
-    return super.documentationComment;
-  }
-
-  /**
-   * Return `true` if [CompileTimeErrorCode.MIXIN_HAS_NO_CONSTRUCTORS] should
-   * be reported for this class.
-   */
-  bool get doesMixinLackConstructors {
-    if (!isMixinApplication && mixins.isEmpty) {
-      // This class is not a mixin application and it doesn't have a "with"
-      // clause, so CompileTimeErrorCode.MIXIN_HAS_NO_CONSTRUCTORS is
-      // inapplicable.
-      return false;
-    }
-    if (supertype == null) {
-      // Should never happen, since Object is the only class that has no
-      // supertype, and it should have been caught by the test above.
-      assert(false);
-      return false;
-    }
-    // Find the nearest class in the supertype chain that is not a mixin
-    // application.
-    ClassElement nearestNonMixinClass = supertype.element;
-    if (nearestNonMixinClass.isMixinApplication) {
-      // Use a list to keep track of the classes we've seen, so that we won't
-      // go into an infinite loop in the event of a non-trivial loop in the
-      // class hierarchy.
-      List<ClassElement> classesSeen = <ClassElement>[this];
-      while (nearestNonMixinClass.isMixinApplication) {
-        if (classesSeen.contains(nearestNonMixinClass)) {
-          // Loop in the class hierarchy (which is reported elsewhere).  Don't
-          // confuse the user with further errors.
-          return false;
-        }
-        classesSeen.add(nearestNonMixinClass);
-        if (nearestNonMixinClass.supertype == null) {
-          // Should never happen, since Object is the only class that has no
-          // supertype, and it is not a mixin application.
-          assert(false);
-          return false;
-        }
-        nearestNonMixinClass = nearestNonMixinClass.supertype.element;
-      }
-    }
-    return !nearestNonMixinClass.constructors.any(isSuperConstructorAccessible);
-  }
-
-  @override
-  TypeParameterizedElementMixin get enclosingTypeParameterContext => null;
-
-  /**
-   * Set whether this class is defined by an enum declaration.
-   */
-  void set enum2(bool isEnum) {
-    setModifier(Modifier.ENUM, isEnum);
-  }
-
-  @override
-  List<FieldElement> get fields => _fields;
+  List<FieldElement> get fields => _fields ?? const <FieldElement>[];
 
   /**
    * Set the fields contained in this class to the given [fields].
@@ -313,249 +99,15 @@
   }
 
   @override
-  bool get hasNonFinalField {
-    List<ClassElement> classesToVisit = new List<ClassElement>();
-    HashSet<ClassElement> visitedClasses = new HashSet<ClassElement>();
-    classesToVisit.add(this);
-    while (!classesToVisit.isEmpty) {
-      ClassElement currentElement = classesToVisit.removeAt(0);
-      if (visitedClasses.add(currentElement)) {
-        // check fields
-        for (FieldElement field in currentElement.fields) {
-          if (!field.isFinal &&
-              !field.isConst &&
-              !field.isStatic &&
-              !field.isSynthetic) {
-            return true;
-          }
-        }
-        // check mixins
-        for (InterfaceType mixinType in currentElement.mixins) {
-          ClassElement mixinElement = mixinType.element;
-          classesToVisit.add(mixinElement);
-        }
-        // check super
-        InterfaceType supertype = currentElement.supertype;
-        if (supertype != null) {
-          ClassElement superElement = supertype.element;
-          if (superElement != null) {
-            classesToVisit.add(superElement);
-          }
-        }
-      }
-    }
-    // not found
-    return false;
-  }
-
-  @override
-  bool get hasReferenceToSuper => hasModifier(Modifier.REFERENCES_SUPER);
-
-  /**
-   * Set whether this class references 'super'.
-   */
-  void set hasReferenceToSuper(bool isReferencedSuper) {
-    setModifier(Modifier.REFERENCES_SUPER, isReferencedSuper);
-  }
-
-  @override
-  bool get hasStaticMember {
-    for (MethodElement method in _methods) {
-      if (method.isStatic) {
-        return true;
-      }
-    }
-    for (PropertyAccessorElement accessor in _accessors) {
-      if (accessor.isStatic) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  @override
-  bool get isAbstract {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass.isAbstract;
-    }
-    return hasModifier(Modifier.ABSTRACT);
-  }
-
-  @override
-  bool get isEnum => hasModifier(Modifier.ENUM);
-
-  @override
-  bool get isMixinApplication {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass.isMixinApplication;
-    }
-    return hasModifier(Modifier.MIXIN_APPLICATION);
-  }
-
-  @override
-  bool get isOrInheritsProxy =>
-      _safeIsOrInheritsProxy(this, new HashSet<ClassElement>());
-
-  @override
-  bool get isProxy {
-    for (ElementAnnotation annotation in metadata) {
-      if (annotation.isProxy) {
-        return true;
-      }
-    }
-    return false;
-  }
-
-  @override
-  bool get isValidMixin {
-    if (!context.analysisOptions.enableSuperMixins) {
-      if (hasReferenceToSuper) {
-        return false;
-      }
-      if (!supertype.isObject) {
-        return false;
-      }
-    }
-    for (ConstructorElement constructor in constructors) {
-      if (!constructor.isSynthetic && !constructor.isFactory) {
-        return false;
-      }
-    }
-    return true;
-  }
+  bool get isEnum;
 
   @override
   ElementKind get kind => ElementKind.CLASS;
 
   @override
-  List<ElementAnnotation> get metadata {
-    if (_unlinkedClass != null) {
-      return _metadata ??=
-          _buildAnnotations(enclosingUnit, _unlinkedClass.annotations);
-    }
-    return super.metadata;
-  }
-
-  @override
-  List<MethodElement> get methods => _methods;
-
-  /**
-   * Set the methods contained in this class to the given [methods].
-   */
-  void set methods(List<MethodElement> methods) {
-    for (MethodElement method in methods) {
-      (method as MethodElementImpl).enclosingElement = this;
-    }
-    this._methods = methods;
-  }
-
-  /**
-   * Set whether this class is a mixin application.
-   */
-  void set mixinApplication(bool isMixinApplication) {
-    assert(_unlinkedClass == null);
-    setModifier(Modifier.MIXIN_APPLICATION, isMixinApplication);
-  }
-
-  @override
-  String get name {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass.name;
-    }
-    return super.name;
-  }
-
-  @override
-  int get nameOffset {
-    if (_unlinkedClass != null) {
-      return _unlinkedClass.nameOffset;
-    }
-    return super.nameOffset;
-  }
-
-  @override
-  TypeParameterizedElementMixin get typeParameterContext => this;
-
-  @override
-  List<TypeParameterElement> get typeParameters {
-    if (_unlinkedClass != null) {
-      return super.typeParameters;
-    }
-    return _typeParameters;
-  }
-
-  /**
-   * Set the type parameters defined for this class to the given
-   * [typeParameters].
-   */
-  void set typeParameters(List<TypeParameterElement> typeParameters) {
-    assert(_unlinkedClass == null);
-    for (TypeParameterElement typeParameter in typeParameters) {
-      (typeParameter as TypeParameterElementImpl).enclosingElement = this;
-    }
-    this._typeParameters = typeParameters;
-  }
-
-  @override
-  List<UnlinkedTypeParam> get unlinkedTypeParams =>
-      _unlinkedClass.typeParameters;
-
-  @override
-  ConstructorElement get unnamedConstructor {
-    for (ConstructorElement element in constructors) {
-      String name = element.displayName;
-      if (name == null || name.isEmpty) {
-        return element;
-      }
-    }
-    return null;
-  }
-
-  @override
   accept(ElementVisitor visitor) => visitor.visitClassElement(this);
 
   @override
-  void appendTo(StringBuffer buffer) {
-    if (isAbstract) {
-      buffer.write('abstract ');
-    }
-    if (isEnum) {
-      buffer.write('enum ');
-    } else {
-      buffer.write('class ');
-    }
-    String name = displayName;
-    if (name == null) {
-      buffer.write("{unnamed class}");
-    } else {
-      buffer.write(name);
-    }
-    int variableCount = _typeParameters.length;
-    if (variableCount > 0) {
-      buffer.write("<");
-      for (int i = 0; i < variableCount; i++) {
-        if (i > 0) {
-          buffer.write(", ");
-        }
-        (_typeParameters[i] as TypeParameterElementImpl).appendTo(buffer);
-      }
-      buffer.write(">");
-    }
-    if (supertype != null && !supertype.isObject) {
-      buffer.write(' extends ');
-      buffer.write(supertype.displayName);
-    }
-    if (mixins.isNotEmpty) {
-      buffer.write(' with ');
-      buffer.write(mixins.map((t) => t.displayName).join(', '));
-    }
-    if (interfaces.isNotEmpty) {
-      buffer.write(' implements ');
-      buffer.write(interfaces.map((t) => t.displayName).join(', '));
-    }
-  }
-
-  @override
   NamedCompilationUnitMember computeNode() {
     if (isEnum) {
       return getNodeMatching((node) => node is EnumDeclaration);
@@ -572,42 +124,24 @@
     // thrown a CCE if any of the elements in the arrays were not of the
     // expected types.
     //
-    for (PropertyAccessorElement accessor in _accessors) {
+    for (PropertyAccessorElement accessor in accessors) {
       PropertyAccessorElementImpl accessorImpl = accessor;
       if (accessorImpl.identifier == identifier) {
         return accessorImpl;
       }
     }
-    for (ConstructorElement constructor in _constructors) {
-      ConstructorElementImpl constructorImpl = constructor;
-      if (constructorImpl.identifier == identifier) {
-        return constructorImpl;
-      }
-    }
-    for (FieldElement field in _fields) {
+    for (FieldElement field in fields) {
       FieldElementImpl fieldImpl = field;
       if (fieldImpl.identifier == identifier) {
         return fieldImpl;
       }
     }
-    for (MethodElement method in _methods) {
-      MethodElementImpl methodImpl = method;
-      if (methodImpl.identifier == identifier) {
-        return methodImpl;
-      }
-    }
-    for (TypeParameterElement typeParameter in _typeParameters) {
-      TypeParameterElementImpl typeParameterImpl = typeParameter;
-      if (typeParameterImpl.identifier == identifier) {
-        return typeParameterImpl;
-      }
-    }
     return null;
   }
 
   @override
   FieldElement getField(String name) {
-    for (FieldElement fieldElement in _fields) {
+    for (FieldElement fieldElement in fields) {
       if (name == fieldElement.name) {
         return fieldElement;
       }
@@ -617,9 +151,9 @@
 
   @override
   PropertyAccessorElement getGetter(String getterName) {
-    int length = _accessors.length;
+    int length = accessors.length;
     for (int i = 0; i < length; i++) {
-      PropertyAccessorElement accessor = _accessors[i];
+      PropertyAccessorElement accessor = accessors[i];
       if (accessor.isGetter && accessor.name == getterName) {
         return accessor;
       }
@@ -628,29 +162,6 @@
   }
 
   @override
-  MethodElement getMethod(String methodName) {
-    int length = _methods.length;
-    for (int i = 0; i < length; i++) {
-      MethodElement method = _methods[i];
-      if (method.name == methodName) {
-        return method;
-      }
-    }
-    return null;
-  }
-
-  @override
-  ConstructorElement getNamedConstructor(String name) {
-    for (ConstructorElement element in constructors) {
-      String elementName = element.name;
-      if (elementName != null && elementName == name) {
-        return element;
-      }
-    }
-    return null;
-  }
-
-  @override
   PropertyAccessorElement getSetter(String setterName) {
     // TODO (jwren) revisit- should we append '=' here or require clients to
     // include it?
@@ -658,7 +169,7 @@
     if (!StringUtilities.endsWithChar(setterName, 0x3D)) {
       setterName += '=';
     }
-    for (PropertyAccessorElement accessor in _accessors) {
+    for (PropertyAccessorElement accessor in accessors) {
       if (accessor.isSetter && accessor.name == setterName) {
         return accessor;
       }
@@ -667,23 +178,6 @@
   }
 
   @override
-  bool isSuperConstructorAccessible(ConstructorElement constructor) {
-    // If this class has no mixins, then all superclass constructors are
-    // accessible.
-    if (mixins.isEmpty) {
-      return true;
-    }
-    // Otherwise only constructors that lack optional parameters are
-    // accessible (see dartbug.com/19576).
-    for (ParameterElement parameter in constructor.parameters) {
-      if (parameter.parameterKind != ParameterKind.REQUIRED) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  @override
   MethodElement lookUpConcreteMethod(
           String methodName, LibraryElement library) =>
       _internalLookUpConcreteMethod(
@@ -729,132 +223,8 @@
   @override
   void visitChildren(ElementVisitor visitor) {
     super.visitChildren(visitor);
-    safelyVisitChildren(_accessors, visitor);
-    safelyVisitChildren(_constructors, visitor);
-    safelyVisitChildren(_fields, visitor);
-    safelyVisitChildren(_methods, visitor);
-    safelyVisitChildren(_typeParameters, visitor);
-  }
-
-  void _collectAllSupertypes(List<InterfaceType> supertypes) {
-    List<InterfaceType> typesToVisit = new List<InterfaceType>();
-    List<ClassElement> visitedClasses = new List<ClassElement>();
-    typesToVisit.add(this.type);
-    while (!typesToVisit.isEmpty) {
-      InterfaceType currentType = typesToVisit.removeAt(0);
-      ClassElement currentElement = currentType.element;
-      if (!visitedClasses.contains(currentElement)) {
-        visitedClasses.add(currentElement);
-        if (!identical(currentType, this.type)) {
-          supertypes.add(currentType);
-        }
-        InterfaceType supertype = currentType.superclass;
-        if (supertype != null) {
-          typesToVisit.add(supertype);
-        }
-        for (InterfaceType type in currentElement.interfaces) {
-          typesToVisit.add(type);
-        }
-        for (InterfaceType type in currentElement.mixins) {
-          ClassElement element = type.element;
-          if (!visitedClasses.contains(element)) {
-            supertypes.add(type);
-          }
-        }
-      }
-    }
-  }
-
-  /**
-   * Compute a list of constructors for this class, which is a mixin
-   * application.  If specified, [visitedClasses] is a list of the other mixin
-   * application classes which have been visited on the way to reaching this
-   * one (this is used to detect circularities).
-   */
-  List<ConstructorElement> _computeMixinAppConstructors(
-      [List<ClassElementImpl> visitedClasses = null]) {
-    // First get the list of constructors of the superclass which need to be
-    // forwarded to this class.
-    Iterable<ConstructorElement> constructorsToForward;
-    if (supertype == null) {
-      // Shouldn't ever happen, since the only class with no supertype is
-      // Object, and it isn't a mixin application.  But for safety's sake just
-      // assume an empty list.
-      assert(false);
-      constructorsToForward = <ConstructorElement>[];
-    } else if (!supertype.element.isMixinApplication) {
-      List<ConstructorElement> superclassConstructors =
-          supertype.element.constructors;
-      // Filter out any constructors with optional parameters (see
-      // dartbug.com/15101).
-      constructorsToForward =
-          superclassConstructors.where(isSuperConstructorAccessible);
-    } else {
-      if (visitedClasses == null) {
-        visitedClasses = <ClassElementImpl>[this];
-      } else {
-        if (visitedClasses.contains(this)) {
-          // Loop in the class hierarchy.  Don't try to forward any
-          // constructors.
-          return <ConstructorElement>[];
-        }
-        visitedClasses.add(this);
-      }
-      try {
-        constructorsToForward = getImpl(supertype.element)
-            ._computeMixinAppConstructors(visitedClasses);
-      } finally {
-        visitedClasses.removeLast();
-      }
-    }
-
-    // Figure out the type parameter substitution we need to perform in order
-    // to produce constructors for this class.  We want to be robust in the
-    // face of errors, so drop any extra type arguments and fill in any missing
-    // ones with `dynamic`.
-    List<DartType> parameterTypes =
-        TypeParameterTypeImpl.getTypes(supertype.typeParameters);
-    List<DartType> argumentTypes = new List<DartType>.filled(
-        parameterTypes.length, DynamicTypeImpl.instance);
-    for (int i = 0; i < supertype.typeArguments.length; i++) {
-      if (i >= argumentTypes.length) {
-        break;
-      }
-      argumentTypes[i] = supertype.typeArguments[i];
-    }
-
-    // Now create an implicit constructor for every constructor found above,
-    // substituting type parameters as appropriate.
-    return constructorsToForward
-        .map((ConstructorElement superclassConstructor) {
-      ConstructorElementImpl implicitConstructor =
-          new ConstructorElementImpl(superclassConstructor.name, -1);
-      implicitConstructor.synthetic = true;
-      implicitConstructor.redirectedConstructor = superclassConstructor;
-      implicitConstructor.returnType = type;
-      List<ParameterElement> superParameters = superclassConstructor.parameters;
-      int count = superParameters.length;
-      if (count > 0) {
-        List<ParameterElement> implicitParameters =
-            new List<ParameterElement>(count);
-        for (int i = 0; i < count; i++) {
-          ParameterElement superParameter = superParameters[i];
-          ParameterElementImpl implicitParameter =
-              new ParameterElementImpl(superParameter.name, -1);
-          implicitParameter.const3 = superParameter.isConst;
-          implicitParameter.final2 = superParameter.isFinal;
-          implicitParameter.parameterKind = superParameter.parameterKind;
-          implicitParameter.synthetic = true;
-          implicitParameter.type =
-              superParameter.type.substitute2(argumentTypes, parameterTypes);
-          implicitParameters[i] = implicitParameter;
-        }
-        implicitConstructor.parameters = implicitParameters;
-      }
-      implicitConstructor.enclosingElement = this;
-      implicitConstructor.type = new FunctionTypeImpl(implicitConstructor);
-      return implicitConstructor;
-    }).toList();
+    safelyVisitChildren(accessors, visitor);
+    safelyVisitChildren(fields, visitor);
   }
 
   PropertyAccessorElement _internalLookUpConcreteGetter(
@@ -1004,6 +374,875 @@
     return null;
   }
 
+  /**
+   * Return the [AbstractClassElementImpl] of the given [classElement].  May
+   * throw an exception if the [AbstractClassElementImpl] cannot be provided,
+   * which should not happen in practice.
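+   *
+   * If [classElement] is a [ClassElementHandle], it is unwrapped recursively
+   * so that callers always receive the underlying implementation object.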
+   */
+  static AbstractClassElementImpl getImpl(ClassElement classElement) {
+    if (classElement is ClassElementHandle) {
+      return getImpl(classElement.actualElement);
+    }
+    return classElement as AbstractClassElementImpl;
+  }
+}
+
+/**
+ * For AST nodes that could be in both the getter and setter contexts
+ * ([IndexExpression]s and [SimpleIdentifier]s), the additional resolved
+ * elements are stored in the AST node, in an [AuxiliaryElements]. Because
+ * resolved elements are either statically resolved or resolved using propagated
+ * type information, this class is a wrapper for a pair of [ExecutableElement]s,
+ * not just a single [ExecutableElement].
+ */
+class AuxiliaryElements {
+  /**
+   * The element based on propagated type information, or `null` if the AST
+   * structure has not been resolved or if the node could not be resolved.
+   */
+  final ExecutableElement propagatedElement;
+
+  /**
+   * The element based on static type information, or `null` if the AST
+   * structure has not been resolved or if the node could not be resolved.
+   */
+  final ExecutableElement staticElement;
+
+  /**
+   * Initialize a newly created pair to have both the [staticElement] and the
+   * [propagatedElement].
+   */
+  AuxiliaryElements(this.staticElement, this.propagatedElement);
+}
+
+/**
+ * An [AbstractClassElementImpl] which is a class.
+ */
+class ClassElementImpl extends AbstractClassElementImpl
+    with TypeParameterizedElementMixin {
+  /**
+   * The unlinked representation of the class in the summary.
+   */
+  final UnlinkedClass _unlinkedClass;
+
+  /**
+   * A list containing all of the type parameters defined for this class.
+   */
+  List<TypeParameterElement> _typeParameters = TypeParameterElement.EMPTY_LIST;
+
+  /**
+   * The superclass of the class, or `null` for [Object].
+   */
+  InterfaceType _supertype;
+
+  /**
+   * The type defined by the class.
+   */
+  InterfaceType _type;
+
+  /**
+   * A list containing all of the mixins that are applied to the class being
+   * extended in order to derive the superclass of this class.
+   */
+  List<InterfaceType> _mixins;
+
+  /**
+   * A list containing all of the interfaces that are implemented by this class.
+   */
+  List<InterfaceType> _interfaces;
+
+  /**
+   * For classes which are not mixin applications, a list containing all of the
+   * constructors contained in this class, or `null` if the list of
+   * constructors has not yet been built.
+   *
+   * For classes which are mixin applications, the list of constructors is
+   * computed on the fly by the [constructors] getter, and this field is
+   * `null`.
+   */
+  List<ConstructorElement> _constructors;
+
+  /**
+   * A list containing all of the methods contained in this class.
+   */
+  List<MethodElement> _methods;
+
+  /**
+   * A flag indicating whether the types associated with the instance members of
+   * this class have been inferred.
+   */
+  bool _hasBeenInferred = false;
+
+  /**
+   * Initialize a newly created class element to have the given [name] at the
+   * given [offset] in the file that contains the declaration of this element.
+   */
+  ClassElementImpl(String name, int offset)
+      : _unlinkedClass = null,
+        super(name, offset);
+
+  /**
+   * Initialize a newly created class element to have the given [name].
+   */
+  ClassElementImpl.forNode(Identifier name)
+      : _unlinkedClass = null,
+        super.forNode(name);
+
+  /**
+   * Initialize using the given serialized information.
+   */
+  ClassElementImpl.forSerialized(
+      this._unlinkedClass, CompilationUnitElementImpl enclosingUnit)
+      : super.forSerialized(enclosingUnit);
+
+  /**
+   * Set whether this class is abstract.
+   */
+  void set abstract(bool isAbstract) {
+    assert(_unlinkedClass == null);
+    setModifier(Modifier.ABSTRACT, isAbstract);
+  }
+
+  @override
+  List<PropertyAccessorElement> get accessors {
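+    // When backed by an unlinked summary, fields and property accessors are
+    // resynthesized together the first time either list is requested.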
+    if (_unlinkedClass != null && _accessors == null) {
+      _resynthesizeFieldsAndPropertyAccessors();
+    }
+    return _accessors ?? const <PropertyAccessorElement>[];
+  }
+
+  @override
+  void set accessors(List<PropertyAccessorElement> accessors) {
+    assert(_unlinkedClass == null);
+    super.accessors = accessors;
+  }
+
+  @override
+  List<InterfaceType> get allSupertypes {
+    List<InterfaceType> list = new List<InterfaceType>();
+    _collectAllSupertypes(list);
+    return list;
+  }
+
+  @override
+  int get codeLength {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass.codeRange?.length;
+    }
+    return super.codeLength;
+  }
+
+  @override
+  int get codeOffset {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass.codeRange?.offset;
+    }
+    return super.codeOffset;
+  }
+
+  @override
+  List<ConstructorElement> get constructors {
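+    // Constructors of a mixin application are never stored; they are
+    // synthesized on demand from the superclass constructors.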
+    if (isMixinApplication) {
+      return _computeMixinAppConstructors();
+    }
+    if (_unlinkedClass != null && _constructors == null) {
+      _constructors = _unlinkedClass.executables
+          .where((e) => e.kind == UnlinkedExecutableKind.constructor)
+          .map((e) => new ConstructorElementImpl.forSerialized(e, this))
+          .toList(growable: false);
+      // Ensure at least implicit default constructor.
+      if (_constructors.isEmpty) {
+        ConstructorElementImpl constructor = new ConstructorElementImpl('', -1);
+        constructor.synthetic = true;
+        constructor.enclosingElement = this;
+        _constructors = <ConstructorElement>[constructor];
+      }
+    }
+    assert(_constructors != null);
+    return _constructors ?? const <ConstructorElement>[];
+  }
+
+  /**
+   * Set the constructors contained in this class to the given [constructors].
+   *
+   * Should only be used for class elements that are not mixin applications.
+   */
+  void set constructors(List<ConstructorElement> constructors) {
+    assert(_unlinkedClass == null);
+    assert(!isMixinApplication);
+    for (ConstructorElement constructor in constructors) {
+      (constructor as ConstructorElementImpl).enclosingElement = this;
+    }
+    this._constructors = constructors;
+  }
+
+  @override
+  SourceRange get docRange {
+    if (_unlinkedClass != null) {
+      UnlinkedDocumentationComment comment =
+          _unlinkedClass.documentationComment;
+      return comment != null
+          ? new SourceRange(comment.offset, comment.length)
+          : null;
+    }
+    return super.docRange;
+  }
+
+  @override
+  String get documentationComment {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass?.documentationComment?.text;
+    }
+    return super.documentationComment;
+  }
+
+  /**
+   * Return `true` if [CompileTimeErrorCode.MIXIN_HAS_NO_CONSTRUCTORS] should
+   * be reported for this class.
+   */
+  bool get doesMixinLackConstructors {
+    if (!isMixinApplication && mixins.isEmpty) {
+      // This class is not a mixin application and it doesn't have a "with"
+      // clause, so CompileTimeErrorCode.MIXIN_HAS_NO_CONSTRUCTORS is
+      // inapplicable.
+      return false;
+    }
+    if (supertype == null) {
+      // Should never happen, since Object is the only class that has no
+      // supertype, and it should have been caught by the test above.
+      assert(false);
+      return false;
+    }
+    // Find the nearest class in the supertype chain that is not a mixin
+    // application.
+    ClassElement nearestNonMixinClass = supertype.element;
+    if (nearestNonMixinClass.isMixinApplication) {
+      // Use a list to keep track of the classes we've seen, so that we won't
+      // go into an infinite loop in the event of a non-trivial loop in the
+      // class hierarchy.
+      List<ClassElement> classesSeen = <ClassElement>[this];
+      while (nearestNonMixinClass.isMixinApplication) {
+        if (classesSeen.contains(nearestNonMixinClass)) {
+          // Loop in the class hierarchy (which is reported elsewhere).  Don't
+          // confuse the user with further errors.
+          return false;
+        }
+        classesSeen.add(nearestNonMixinClass);
+        if (nearestNonMixinClass.supertype == null) {
+          // Should never happen, since Object is the only class that has no
+          // supertype, and it is not a mixin application.
+          assert(false);
+          return false;
+        }
+        nearestNonMixinClass = nearestNonMixinClass.supertype.element;
+      }
+    }
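+    // The mixin application lacks constructors if the nearest
+    // non-mixin-application superclass has no constructor that can be
+    // forwarded, i.e. none whose parameters are all required.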
+    return !nearestNonMixinClass.constructors.any(isSuperConstructorAccessible);
+  }
+
+  @override
+  TypeParameterizedElementMixin get enclosingTypeParameterContext => null;
+
+  @override
+  List<FieldElement> get fields {
+    if (_unlinkedClass != null && _fields == null) {
+      _resynthesizeFieldsAndPropertyAccessors();
+    }
+    return _fields ?? const <FieldElement>[];
+  }
+
+  @override
+  void set fields(List<FieldElement> fields) {
+    assert(_unlinkedClass == null);
+    super.fields = fields;
+  }
+
+  bool get hasBeenInferred {
+    if (_unlinkedClass != null) {
+      return context.analysisOptions.strongMode;
+    }
+    return _hasBeenInferred;
+  }
+
+  void set hasBeenInferred(bool hasBeenInferred) {
+    assert(_unlinkedClass == null);
+    _hasBeenInferred = hasBeenInferred;
+  }
+
+  @override
+  bool get hasNonFinalField {
+    List<ClassElement> classesToVisit = new List<ClassElement>();
+    HashSet<ClassElement> visitedClasses = new HashSet<ClassElement>();
+    classesToVisit.add(this);
+    while (!classesToVisit.isEmpty) {
+      ClassElement currentElement = classesToVisit.removeAt(0);
+      if (visitedClasses.add(currentElement)) {
+        // check fields
+        for (FieldElement field in currentElement.fields) {
+          if (!field.isFinal &&
+              !field.isConst &&
+              !field.isStatic &&
+              !field.isSynthetic) {
+            return true;
+          }
+        }
+        // check mixins
+        for (InterfaceType mixinType in currentElement.mixins) {
+          ClassElement mixinElement = mixinType.element;
+          classesToVisit.add(mixinElement);
+        }
+        // check super
+        InterfaceType supertype = currentElement.supertype;
+        if (supertype != null) {
+          ClassElement superElement = supertype.element;
+          if (superElement != null) {
+            classesToVisit.add(superElement);
+          }
+        }
+      }
+    }
+    // not found
+    return false;
+  }
+
+  @override
+  bool get hasReferenceToSuper => hasModifier(Modifier.REFERENCES_SUPER);
+
+  /**
+   * Set whether this class references 'super'.
+   */
+  void set hasReferenceToSuper(bool isReferencedSuper) {
+    setModifier(Modifier.REFERENCES_SUPER, isReferencedSuper);
+  }
+
+  @override
+  bool get hasStaticMember {
+    for (MethodElement method in methods) {
+      if (method.isStatic) {
+        return true;
+      }
+    }
+    for (PropertyAccessorElement accessor in accessors) {
+      if (accessor.isStatic) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @override
+  List<InterfaceType> get interfaces {
+    if (_unlinkedClass != null && _interfaces == null) {
+      ResynthesizerContext context = enclosingUnit.resynthesizerContext;
+      _interfaces = _unlinkedClass.interfaces
+          .map((EntityRef t) => context.resolveTypeRef(t, this))
+          .toList(growable: false);
+    }
+    return _interfaces ?? const <InterfaceType>[];
+  }
+
+  void set interfaces(List<InterfaceType> interfaces) {
+    assert(_unlinkedClass == null);
+    _interfaces = interfaces;
+  }
+
+  @override
+  bool get isAbstract {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass.isAbstract;
+    }
+    return hasModifier(Modifier.ABSTRACT);
+  }
+
+  @override
+  bool get isEnum => false;
+
+  @override
+  bool get isMixinApplication {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass.isMixinApplication;
+    }
+    return hasModifier(Modifier.MIXIN_APPLICATION);
+  }
+
+  @override
+  bool get isOrInheritsProxy =>
+      _safeIsOrInheritsProxy(this, new HashSet<ClassElement>());
+
+  @override
+  bool get isProxy {
+    for (ElementAnnotation annotation in metadata) {
+      if (annotation.isProxy) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @override
+  bool get isValidMixin {
+    if (!context.analysisOptions.enableSuperMixins) {
+      if (hasReferenceToSuper) {
+        return false;
+      }
+      if (!supertype.isObject) {
+        return false;
+      }
+    }
+    for (ConstructorElement constructor in constructors) {
+      if (!constructor.isSynthetic && !constructor.isFactory) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @override
+  List<ElementAnnotation> get metadata {
+    if (_unlinkedClass != null) {
+      return _metadata ??=
+          _buildAnnotations(enclosingUnit, _unlinkedClass.annotations);
+    }
+    return super.metadata;
+  }
+
+  @override
+  List<MethodElement> get methods {
+    if (_unlinkedClass != null) {
+      _methods ??= _unlinkedClass.executables
+          .where((e) => e.kind == UnlinkedExecutableKind.functionOrMethod)
+          .map((e) => new MethodElementImpl.forSerialized(e, this))
+          .toList(growable: false);
+    }
+    return _methods ?? const <MethodElement>[];
+  }
+
+  /**
+   * Set the methods contained in this class to the given [methods].
+   */
+  void set methods(List<MethodElement> methods) {
+    assert(_unlinkedClass == null);
+    for (MethodElement method in methods) {
+      (method as MethodElementImpl).enclosingElement = this;
+    }
+    _methods = methods;
+  }
+
+  /**
+   * Set whether this class is a mixin application.
+   */
+  void set mixinApplication(bool isMixinApplication) {
+    assert(_unlinkedClass == null);
+    setModifier(Modifier.MIXIN_APPLICATION, isMixinApplication);
+  }
+
+  @override
+  List<InterfaceType> get mixins {
+    if (_unlinkedClass != null && _mixins == null) {
+      ResynthesizerContext context = enclosingUnit.resynthesizerContext;
+      _mixins = _unlinkedClass.mixins
+          .map((EntityRef t) => context.resolveTypeRef(t, this))
+          .toList(growable: false);
+    }
+    return _mixins ?? const <InterfaceType>[];
+  }
+
+  void set mixins(List<InterfaceType> mixins) {
+    assert(_unlinkedClass == null);
+    _mixins = mixins;
+  }
+
+  @override
+  String get name {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass.name;
+    }
+    return super.name;
+  }
+
+  @override
+  int get nameOffset {
+    if (_unlinkedClass != null) {
+      return _unlinkedClass.nameOffset;
+    }
+    return super.nameOffset;
+  }
+
+  @override
+  InterfaceType get supertype {
+    if (_unlinkedClass != null && _supertype == null) {
+      if (_unlinkedClass.supertype != null) {
+        _supertype = enclosingUnit.resynthesizerContext
+            .resolveTypeRef(_unlinkedClass.supertype, this);
+      } else if (_unlinkedClass.hasNoSupertype) {
+        return null;
+      } else {
+        _supertype = context.typeProvider.objectType;
+      }
+    }
+    return _supertype;
+  }
+
+  void set supertype(InterfaceType supertype) {
+    assert(_unlinkedClass == null);
+    _supertype = supertype;
+  }
+
+  @override
+  InterfaceType get type {
+    if (_type == null) {
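+      // The type of the class declaration itself: the generic class
+      // instantiated with its own type parameters as type arguments.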
+      InterfaceTypeImpl type = new InterfaceTypeImpl(this);
+      type.typeArguments = typeParameterTypes;
+      _type = type;
+    }
+    return _type;
+  }
+
+  @override
+  TypeParameterizedElementMixin get typeParameterContext => this;
+
+  @override
+  List<TypeParameterElement> get typeParameters {
+    if (_unlinkedClass != null) {
+      return super.typeParameters;
+    }
+    return _typeParameters;
+  }
+
+  /**
+   * Set the type parameters defined for this class to the given
+   * [typeParameters].
+   */
+  void set typeParameters(List<TypeParameterElement> typeParameters) {
+    assert(_unlinkedClass == null);
+    for (TypeParameterElement typeParameter in typeParameters) {
+      (typeParameter as TypeParameterElementImpl).enclosingElement = this;
+    }
+    this._typeParameters = typeParameters;
+  }
+
+  @override
+  List<UnlinkedTypeParam> get unlinkedTypeParams =>
+      _unlinkedClass.typeParameters;
+
+  @override
+  ConstructorElement get unnamedConstructor {
+    for (ConstructorElement element in constructors) {
+      String name = element.displayName;
+      if (name == null || name.isEmpty) {
+        return element;
+      }
+    }
+    return null;
+  }
+
+  @override
+  void appendTo(StringBuffer buffer) {
+    if (isAbstract) {
+      buffer.write('abstract ');
+    }
+    buffer.write('class ');
+    String name = displayName;
+    if (name == null) {
+      buffer.write("{unnamed class}");
+    } else {
+      buffer.write(name);
+    }
+    int variableCount = typeParameters.length;
+    if (variableCount > 0) {
+      buffer.write("<");
+      for (int i = 0; i < variableCount; i++) {
+        if (i > 0) {
+          buffer.write(", ");
+        }
+        (typeParameters[i] as TypeParameterElementImpl).appendTo(buffer);
+      }
+      buffer.write(">");
+    }
+    if (supertype != null && !supertype.isObject) {
+      buffer.write(' extends ');
+      buffer.write(supertype.displayName);
+    }
+    if (mixins.isNotEmpty) {
+      buffer.write(' with ');
+      buffer.write(mixins.map((t) => t.displayName).join(', '));
+    }
+    if (interfaces.isNotEmpty) {
+      buffer.write(' implements ');
+      buffer.write(interfaces.map((t) => t.displayName).join(', '));
+    }
+  }
+
+  @override
+  ElementImpl getChild(String identifier) {
+    ElementImpl child = super.getChild(identifier);
+    if (child != null) {
+      return child;
+    }
+    //
+    // The casts in this method are safe because the set methods would have
+    // thrown a CCE if any of the elements in the arrays were not of the
+    // expected types.
+    //
+    for (ConstructorElement constructor in _constructors) {
+      ConstructorElementImpl constructorImpl = constructor;
+      if (constructorImpl.identifier == identifier) {
+        return constructorImpl;
+      }
+    }
+    for (MethodElement method in methods) {
+      MethodElementImpl methodImpl = method;
+      if (methodImpl.identifier == identifier) {
+        return methodImpl;
+      }
+    }
+    for (TypeParameterElement typeParameter in typeParameters) {
+      TypeParameterElementImpl typeParameterImpl = typeParameter;
+      if (typeParameterImpl.identifier == identifier) {
+        return typeParameterImpl;
+      }
+    }
+    return null;
+  }
+
+  @override
+  MethodElement getMethod(String methodName) {
+    int length = methods.length;
+    for (int i = 0; i < length; i++) {
+      MethodElement method = methods[i];
+      if (method.name == methodName) {
+        return method;
+      }
+    }
+    return null;
+  }
+
+  @override
+  ConstructorElement getNamedConstructor(String name) {
+    for (ConstructorElement element in constructors) {
+      String elementName = element.name;
+      if (elementName != null && elementName == name) {
+        return element;
+      }
+    }
+    return null;
+  }
+
+  @override
+  bool isSuperConstructorAccessible(ConstructorElement constructor) {
+    // If this class has no mixins, then all superclass constructors are
+    // accessible.
+    if (mixins.isEmpty) {
+      return true;
+    }
+    // Otherwise only constructors that lack optional parameters are
+    // accessible (see dartbug.com/19576).
+    for (ParameterElement parameter in constructor.parameters) {
+      if (parameter.parameterKind != ParameterKind.REQUIRED) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  @override
+  void visitChildren(ElementVisitor visitor) {
+    super.visitChildren(visitor);
+    safelyVisitChildren(_constructors, visitor);
+    safelyVisitChildren(methods, visitor);
+    safelyVisitChildren(_typeParameters, visitor);
+  }
+
+  void _collectAllSupertypes(List<InterfaceType> supertypes) {
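+    // Breadth-first walk over superclasses, interfaces and mixins of this
+    // class, collecting every supertype encountered.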
+    List<InterfaceType> typesToVisit = new List<InterfaceType>();
+    List<ClassElement> visitedClasses = new List<ClassElement>();
+    typesToVisit.add(this.type);
+    while (!typesToVisit.isEmpty) {
+      InterfaceType currentType = typesToVisit.removeAt(0);
+      ClassElement currentElement = currentType.element;
+      if (!visitedClasses.contains(currentElement)) {
+        visitedClasses.add(currentElement);
+        if (!identical(currentType, this.type)) {
+          supertypes.add(currentType);
+        }
+        InterfaceType supertype = currentType.superclass;
+        if (supertype != null) {
+          typesToVisit.add(supertype);
+        }
+        for (InterfaceType type in currentElement.interfaces) {
+          typesToVisit.add(type);
+        }
+        for (InterfaceType type in currentElement.mixins) {
+          ClassElement element = type.element;
+          if (!visitedClasses.contains(element)) {
+            supertypes.add(type);
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Compute the list of constructors for this class, which must be a mixin
+   * application.  If specified, [visitedClasses] is a list of the other mixin
+   * application classes that have been visited on the way to reaching this
+   * one; it is used to detect cycles in the class hierarchy.
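+   *
+   * For example, given
+   *
+   *     class B { B.named(int x); }
+   *     class M {}
+   *     class C = B with M;
+   *
+   * a synthetic constructor `C.named(int x)` is created that redirects to
+   * `B.named`; only superclass constructors whose parameters are all required
+   * are forwarded.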
+   */
+  List<ConstructorElement> _computeMixinAppConstructors(
+      [List<ClassElementImpl> visitedClasses = null]) {
+    // First get the list of constructors of the superclass which need to be
+    // forwarded to this class.
+    Iterable<ConstructorElement> constructorsToForward;
+    if (supertype == null) {
+      // Shouldn't ever happen, since the only class with no supertype is
+      // Object, and it isn't a mixin application.  But for safety's sake just
+      // assume an empty list.
+      assert(false);
+      constructorsToForward = <ConstructorElement>[];
+    } else if (!supertype.element.isMixinApplication) {
+      List<ConstructorElement> superclassConstructors =
+          supertype.element.constructors;
+      // Filter out any constructors with optional parameters (see
+      // dartbug.com/15101).
+      constructorsToForward =
+          superclassConstructors.where(isSuperConstructorAccessible);
+    } else {
+      if (visitedClasses == null) {
+        visitedClasses = <ClassElementImpl>[this];
+      } else {
+        if (visitedClasses.contains(this)) {
+          // Loop in the class hierarchy.  Don't try to forward any
+          // constructors.
+          return <ConstructorElement>[];
+        }
+        visitedClasses.add(this);
+      }
+      try {
+        ClassElementImpl superElement = AbstractClassElementImpl
+            .getImpl(supertype.element) as ClassElementImpl;
+        constructorsToForward =
+            superElement._computeMixinAppConstructors(visitedClasses);
+      } finally {
+        visitedClasses.removeLast();
+      }
+    }
+
+    // Figure out the type parameter substitution we need to perform in order
+    // to produce constructors for this class.  We want to be robust in the
+    // face of errors, so drop any extra type arguments and fill in any missing
+    // ones with `dynamic`.
+    List<DartType> parameterTypes =
+        TypeParameterTypeImpl.getTypes(supertype.typeParameters);
+    List<DartType> argumentTypes = new List<DartType>.filled(
+        parameterTypes.length, DynamicTypeImpl.instance);
+    for (int i = 0; i < supertype.typeArguments.length; i++) {
+      if (i >= argumentTypes.length) {
+        break;
+      }
+      argumentTypes[i] = supertype.typeArguments[i];
+    }
+
+    // Now create an implicit constructor for every constructor found above,
+    // substituting type parameters as appropriate.
+    return constructorsToForward
+        .map((ConstructorElement superclassConstructor) {
+      ConstructorElementImpl implicitConstructor =
+          new ConstructorElementImpl(superclassConstructor.name, -1);
+      implicitConstructor.synthetic = true;
+      implicitConstructor.redirectedConstructor = superclassConstructor;
+      List<ParameterElement> superParameters = superclassConstructor.parameters;
+      int count = superParameters.length;
+      if (count > 0) {
+        List<ParameterElement> implicitParameters =
+            new List<ParameterElement>(count);
+        for (int i = 0; i < count; i++) {
+          ParameterElement superParameter = superParameters[i];
+          ParameterElementImpl implicitParameter =
+              new ParameterElementImpl(superParameter.name, -1);
+          implicitParameter.const3 = superParameter.isConst;
+          implicitParameter.final2 = superParameter.isFinal;
+          implicitParameter.parameterKind = superParameter.parameterKind;
+          implicitParameter.synthetic = true;
+          implicitParameter.type =
+              superParameter.type.substitute2(argumentTypes, parameterTypes);
+          implicitParameters[i] = implicitParameter;
+        }
+        implicitConstructor.parameters = implicitParameters;
+      }
+      implicitConstructor.enclosingElement = this;
+      return implicitConstructor;
+    }).toList(growable: false);
+  }
+
+  /**
+   * Resynthesize the explicit fields and property accessors from the summary,
+   * then fill [_fields] and [_accessors] with both the explicit and the
+   * implicit elements.
+   */
+  void _resynthesizeFieldsAndPropertyAccessors() {
+    assert(_fields == null);
+    assert(_accessors == null);
+    // Build explicit fields and implicit property accessors.
+    var explicitFields = <FieldElement>[];
+    var implicitAccessors = <PropertyAccessorElement>[];
+    for (UnlinkedVariable v in _unlinkedClass.fields) {
+      FieldElementImpl field =
+          new FieldElementImpl.forSerializedFactory(v, this);
+      explicitFields.add(field);
+      implicitAccessors.add(
+          new PropertyAccessorElementImpl_ImplicitGetter(field)
+            ..enclosingElement = this);
+      if (!field.isConst && !field.isFinal) {
+        implicitAccessors.add(
+            new PropertyAccessorElementImpl_ImplicitSetter(field)
+              ..enclosingElement = this);
+      }
+    }
+    // Build explicit property accessors and implicit fields.
+    var explicitAccessors = <PropertyAccessorElement>[];
+    var implicitFields = <String, FieldElementImpl>{};
+    for (UnlinkedExecutable e in _unlinkedClass.executables) {
+      if (e.kind == UnlinkedExecutableKind.getter ||
+          e.kind == UnlinkedExecutableKind.setter) {
+        PropertyAccessorElementImpl accessor =
+            new PropertyAccessorElementImpl.forSerialized(e, this);
+        explicitAccessors.add(accessor);
+        // Prepare the field type.
+        DartType fieldType;
+        if (e.kind == UnlinkedExecutableKind.getter) {
+          fieldType = accessor.returnType;
+        } else {
+          fieldType = accessor.parameters[0].type;
+        }
+        // Create or update the implicit field.
+        String fieldName = accessor.displayName;
+        FieldElementImpl field = implicitFields[fieldName];
+        if (field == null) {
+          field = new FieldElementImpl(fieldName, -1);
+          implicitFields[fieldName] = field;
+          field.enclosingElement = this;
+          field.synthetic = true;
+          field.final2 = e.kind == UnlinkedExecutableKind.getter;
+          field.type = fieldType;
+        } else {
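+          // An implicit field was already created for another accessor with
+          // the same name, so the field cannot be final.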
+          field.final2 = false;
+        }
+        accessor.variable = field;
+        if (e.kind == UnlinkedExecutableKind.getter) {
+          field.getter = accessor;
+        } else {
+          field.setter = accessor;
+        }
+      }
+    }
+    // Combine explicit and implicit fields and property accessors.
+    _fields = <FieldElement>[]
+      ..addAll(explicitFields)
+      ..addAll(implicitFields.values);
+    _accessors = <PropertyAccessorElement>[]
+      ..addAll(explicitAccessors)
+      ..addAll(implicitAccessors);
+  }
+
   bool _safeIsOrInheritsProxy(
       ClassElement classElt, HashSet<ClassElement> visitedClassElts) {
     if (visitedClassElts.contains(classElt)) {
@@ -1030,19 +1269,6 @@
     }
     return false;
   }
-
-  /**
-   * Return the [ClassElementImpl] of the given [classElement].  May throw an
-   * exception if the [ClassElementImpl] cannot be provided (should not happen
-   * though).
-   */
-  static ClassElementImpl getImpl(ClassElement classElement) {
-    if (classElement is ClassElementHandle) {
-      classElement.ensureActualElementComplete();
-      return getImpl(classElement.actualElement);
-    }
-    return classElement as ClassElementImpl;
-  }
 }
 
 /**
@@ -1097,7 +1323,7 @@
   /**
    * A list containing all of the enums contained in this compilation unit.
    */
-  List<ClassElement> _enums = ClassElement.EMPTY_LIST;
+  List<ClassElement> _enums;
 
   /**
    * A list containing all of the top-level functions contained in this
@@ -1109,13 +1335,12 @@
    * A list containing all of the function type aliases contained in this
    * compilation unit.
    */
-  List<FunctionTypeAliasElement> _typeAliases =
-      FunctionTypeAliasElement.EMPTY_LIST;
+  List<FunctionTypeAliasElement> _typeAliases;
 
   /**
    * A list containing all of the types contained in this compilation unit.
    */
-  List<ClassElement> _types = ClassElement.EMPTY_LIST;
+  List<ClassElement> _types;
 
   /**
    * A list containing all of the variables contained in this compilation unit.
@@ -1225,14 +1450,22 @@
   }
 
   @override
-  List<ClassElement> get enums => _enums;
+  List<ClassElement> get enums {
+    if (_unlinkedUnit != null) {
+      _enums ??= _unlinkedUnit.enums
+          .map((e) => new EnumElementImpl.forSerialized(e, this))
+          .toList(growable: false);
+    }
+    return _enums ?? const <ClassElement>[];
+  }
 
   /**
    * Set the enums contained in this compilation unit to the given [enums].
    */
   void set enums(List<ClassElement> enums) {
+    assert(_unlinkedUnit == null);
     for (ClassElement enumDeclaration in enums) {
-      (enumDeclaration as ClassElementImpl).enclosingElement = this;
+      (enumDeclaration as EnumElementImpl).enclosingElement = this;
     }
     this._enums = enums;
   }
@@ -1240,9 +1473,12 @@
   @override
   List<FunctionElement> get functions {
     if (_unlinkedUnit != null) {
-      _functions ??= resynthesizerContext.buildTopLevelFunctions();
+      _functions ??= _unlinkedUnit.executables
+          .where((e) => e.kind == UnlinkedExecutableKind.functionOrMethod)
+          .map((e) => new FunctionElementImpl.forSerialized(e, this))
+          .toList(growable: false);
     }
-    return _functions ?? FunctionElement.EMPTY_LIST;
+    return _functions ?? const <FunctionElement>[];
   }
 
   /**
@@ -1257,7 +1493,14 @@
   }
 
   @override
-  List<FunctionTypeAliasElement> get functionTypeAliases => _typeAliases;
+  List<FunctionTypeAliasElement> get functionTypeAliases {
+    if (_unlinkedUnit != null) {
+      _typeAliases ??= _unlinkedUnit.typedefs
+          .map((t) => new FunctionTypeAliasElementImpl.forSerialized(t, this))
+          .toList(growable: false);
+    }
+    return _typeAliases ?? const <FunctionTypeAliasElement>[];
+  }
 
   @override
   int get hashCode => source.hashCode;
@@ -1334,6 +1577,7 @@
    * given [typeAliases].
    */
   void set typeAliases(List<FunctionTypeAliasElement> typeAliases) {
+    assert(_unlinkedUnit == null);
     for (FunctionTypeAliasElement typeAlias in typeAliases) {
       (typeAlias as FunctionTypeAliasElementImpl).enclosingElement = this;
     }
@@ -1344,12 +1588,20 @@
   TypeParameterizedElementMixin get typeParameterContext => null;
 
   @override
-  List<ClassElement> get types => _types;
+  List<ClassElement> get types {
+    if (_unlinkedUnit != null) {
+      _types ??= _unlinkedUnit.classes
+          .map((c) => new ClassElementImpl.forSerialized(c, this))
+          .toList(growable: false);
+    }
+    return _types ?? const <ClassElement>[];
+  }
 
   /**
    * Set the types contained in this compilation unit to the given [types].
    */
   void set types(List<ClassElement> types) {
+    assert(_unlinkedUnit == null);
     for (ClassElement type in types) {
       // Another implementation of ClassElement is _DeferredClassElement,
       // which is used to resynthesize classes lazily. We cannot cast it
@@ -1425,20 +1677,20 @@
         return functionImpl;
       }
     }
-    for (FunctionTypeAliasElement typeAlias in _typeAliases) {
+    for (FunctionTypeAliasElement typeAlias in functionTypeAliases) {
       FunctionTypeAliasElementImpl typeAliasImpl = typeAlias;
       if (typeAliasImpl.identifier == identifier) {
         return typeAliasImpl;
       }
     }
-    for (ClassElement type in _types) {
+    for (ClassElement type in types) {
       ClassElementImpl typeImpl = type;
       if (typeImpl.name == identifier) {
         return typeImpl;
       }
     }
     for (ClassElement type in _enums) {
-      ClassElementImpl typeImpl = type;
+      EnumElementImpl typeImpl = type;
       if (typeImpl.identifier == identifier) {
         return typeImpl;
       }
@@ -1466,7 +1718,7 @@
 
   @override
   ClassElement getType(String className) {
-    for (ClassElement type in _types) {
+    for (ClassElement type in types) {
       if (type.name == className) {
         return type;
       }
@@ -1507,8 +1759,8 @@
     safelyVisitChildren(accessors, visitor);
     safelyVisitChildren(_enums, visitor);
     safelyVisitChildren(functions, visitor);
-    safelyVisitChildren(_typeAliases, visitor);
-    safelyVisitChildren(_types, visitor);
+    safelyVisitChildren(functionTypeAliases, visitor);
+    safelyVisitChildren(types, visitor);
     safelyVisitChildren(topLevelVariables, visitor);
   }
 }
@@ -1544,6 +1796,149 @@
 }
 
 /**
+ * A field element representing an enum constant.
+ */
+class ConstFieldElementImpl_EnumValue extends ConstFieldElementImpl_ofEnum {
+  final UnlinkedEnumValue _unlinkedEnumValue;
+  final int _index;
+
+  ConstFieldElementImpl_EnumValue(
+      EnumElementImpl enumElement, this._unlinkedEnumValue, this._index)
+      : super(enumElement);
+
+  @override
+  SourceRange get docRange {
+    if (_unlinkedEnumValue != null) {
+      UnlinkedDocumentationComment comment =
+          _unlinkedEnumValue.documentationComment;
+      return comment != null
+          ? new SourceRange(comment.offset, comment.length)
+          : null;
+    }
+    return super.docRange;
+  }
+
+  @override
+  String get documentationComment {
+    if (_unlinkedEnumValue != null) {
+      return _unlinkedEnumValue?.documentationComment?.text;
+    }
+    return super.documentationComment;
+  }
+
+  @override
+  EvaluationResultImpl get evaluationResult {
+    if (_evaluationResult == null) {
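+      // The constant value of an enum constant is an object whose state maps
+      // the constant's name to its index within the enum declaration.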
+      Map<String, DartObjectImpl> fieldMap = <String, DartObjectImpl>{
+        name: new DartObjectImpl(
+            context.typeProvider.intType, new IntState(_index))
+      };
+      DartObjectImpl value =
+          new DartObjectImpl(type, new GenericState(fieldMap));
+      _evaluationResult = new EvaluationResultImpl(value);
+    }
+    return _evaluationResult;
+  }
+
+  @override
+  String get name {
+    if (_unlinkedEnumValue != null) {
+      return _unlinkedEnumValue.name;
+    }
+    return super.name;
+  }
+
+  @override
+  int get nameOffset {
+    if (_unlinkedEnumValue != null) {
+      return _unlinkedEnumValue.nameOffset;
+    }
+    return super.nameOffset;
+  }
+
+  @override
+  InterfaceType get type => _enum.type;
+}
+
+/**
+ * The synthetic `values` field of an enum.
+ */
+class ConstFieldElementImpl_EnumValues extends ConstFieldElementImpl_ofEnum {
+  ConstFieldElementImpl_EnumValues(EnumElementImpl enumElement)
+      : super(enumElement) {
+    synthetic = true;
+  }
+
+  @override
+  EvaluationResultImpl get evaluationResult {
+    if (_evaluationResult == null) {
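+      // `values` evaluates to a constant list containing the value of every
+      // enum constant, in declaration order.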
+      List<DartObjectImpl> constantValues = <DartObjectImpl>[];
+      for (FieldElement field in _enum.fields) {
+        if (field is ConstFieldElementImpl_EnumValue) {
+          constantValues.add(field.evaluationResult.value);
+        }
+      }
+      _evaluationResult = new EvaluationResultImpl(
+          new DartObjectImpl(type, new ListState(constantValues)));
+    }
+    return _evaluationResult;
+  }
+
+  @override
+  String get name => 'values';
+
+  @override
+  InterfaceType get type {
+    if (_type == null) {
+      InterfaceType listType = context.typeProvider.listType;
+      return _type = listType.instantiate(<DartType>[_enum.type]);
+    }
+    return _type;
+  }
+}
+
+/**
+ * An abstract constant field of an enum.
+ */
+abstract class ConstFieldElementImpl_ofEnum extends ConstFieldElementImpl {
+  final EnumElementImpl _enum;
+
+  ConstFieldElementImpl_ofEnum(this._enum) : super(null, -1) {
+    enclosingElement = _enum;
+  }
+
+  @override
+  void set const3(bool isConst) {
+    assert(false);
+  }
+
+  @override
+  void set evaluationResult(_) {
+    assert(false);
+  }
+
+  @override
+  void set final2(bool isFinal) {
+    assert(false);
+  }
+
+  @override
+  bool get isConst => true;
+
+  @override
+  bool get isStatic => true;
+
+  @override
+  void set static(bool isStatic) {
+    assert(false);
+  }
+
+  void set type(DartType type) {
+    assert(false);
+  }
+}
+
+/**
  * A [LocalVariableElement] for a local 'const' variable that has an
  * initializer.
  */
@@ -1576,30 +1971,30 @@
   /**
    * The constructor to which this constructor is redirecting.
    */
-  ConstructorElement redirectedConstructor;
+  ConstructorElement _redirectedConstructor;
 
   /**
    * The initializers for this constructor (used for evaluating constant
    * instance creation expressions).
    */
-  List<ConstructorInitializer> constantInitializers;
+  List<ConstructorInitializer> _constantInitializers;
 
   /**
    * The offset of the `.` before this constructor name or `null` if not named.
    */
-  int periodOffset;
+  int _periodOffset;
 
   /**
    * Return the offset of the character immediately following the last character
    * of this constructor's name, or `null` if not named.
    */
-  int nameEnd;
+  int _nameEnd;
 
   /**
    * True if this constructor has been found by constant evaluation to be free
    * of redirect cycles, and is thus safe to evaluate.
    */
-  bool isCycleFree = false;
+  bool _isCycleFree = false;
 
   /**
    * Initialize a newly created constructor element to have the given [name] and
@@ -1627,8 +2022,24 @@
     setModifier(Modifier.CONST, isConst);
   }
 
+  List<ConstructorInitializer> get constantInitializers {
+    if (serializedExecutable != null && _constantInitializers == null) {
+      _constantInitializers ??= serializedExecutable.constantInitializers
+          .map((i) => _buildConstructorInitializer(i))
+          .toList(growable: false);
+    }
+    return _constantInitializers;
+  }
+
+  void set constantInitializers(
+      List<ConstructorInitializer> constantInitializers) {
+    assert(serializedExecutable == null);
+    _constantInitializers = constantInitializers;
+  }
+
   @override
-  ClassElement get enclosingElement => super.enclosingElement as ClassElement;
+  ClassElementImpl get enclosingElement =>
+      super.enclosingElement as ClassElementImpl;
 
   @override
   TypeParameterizedElementMixin get enclosingTypeParameterContext =>
@@ -1650,6 +2061,21 @@
     return hasModifier(Modifier.CONST);
   }
 
+  bool get isCycleFree {
+    if (serializedExecutable != null) {
+      return serializedExecutable.isConst &&
+          !enclosingUnit.resynthesizerContext
+              .isInConstCycle(serializedExecutable.constCycleSlot);
+    }
+    return _isCycleFree;
+  }
+
+  void set isCycleFree(bool isCycleFree) {
+    // This property is updated in ConstantEvaluationEngine even for
+    // resynthesized constructors, so we don't have the usual assert here.
+    _isCycleFree = isCycleFree;
+  }
+
   @override
   bool get isDefaultConstructor {
     // unnamed
@@ -1682,8 +2108,77 @@
   ElementKind get kind => ElementKind.CONSTRUCTOR;
 
   @override
+  int get nameEnd {
+    if (serializedExecutable != null) {
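+      // For an unnamed constructor the name in source is the class name, so
+      // its end is computed from the length of the class name.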
+      if (serializedExecutable.name.isNotEmpty) {
+        return serializedExecutable.nameEnd;
+      } else {
+        return serializedExecutable.nameOffset + enclosingElement.name.length;
+      }
+    }
+    return _nameEnd;
+  }
+
+  void set nameEnd(int nameEnd) {
+    assert(serializedExecutable == null);
+    _nameEnd = nameEnd;
+  }
+
+  @override
+  int get periodOffset {
+    if (serializedExecutable != null) {
+      if (serializedExecutable.name.isNotEmpty) {
+        return serializedExecutable.periodOffset;
+      }
+    }
+    return _periodOffset;
+  }
+
+  void set periodOffset(int periodOffset) {
+    assert(serializedExecutable == null);
+    _periodOffset = periodOffset;
+  }
+
+  @override
+  ConstructorElement get redirectedConstructor {
+    if (serializedExecutable != null && _redirectedConstructor == null) {
+      if (serializedExecutable.isRedirectedConstructor) {
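+        // A redirecting factory may target a constructor of another class and
+        // requires resolving a serialized reference; a redirecting generative
+        // constructor always targets a constructor of this class.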
+        if (serializedExecutable.isFactory) {
+          _redirectedConstructor = enclosingUnit.resynthesizerContext
+              .resolveConstructorRef(
+                  enclosingElement, serializedExecutable.redirectedConstructor);
+        } else {
+          _redirectedConstructor = enclosingElement.getNamedConstructor(
+              serializedExecutable.redirectedConstructorName);
+        }
+      } else {
+        return null;
+      }
+    }
+    return _redirectedConstructor;
+  }
+
+  void set redirectedConstructor(ConstructorElement redirectedConstructor) {
+    assert(serializedExecutable == null);
+    _redirectedConstructor = redirectedConstructor;
+  }
+
+  @override
   DartType get returnType => enclosingElement.type;
 
+  void set returnType(DartType returnType) {
+    assert(false);
+  }
+
+  @override
+  FunctionType get type {
+    return _type ??= new FunctionTypeImpl(this);
+  }
+
+  void set type(FunctionType type) {
+    assert(false);
+  }
+
   @override
   accept(ElementVisitor visitor) => visitor.visitConstructorElement(this);
 
@@ -1714,6 +2209,63 @@
   @override
   ConstructorDeclaration computeNode() =>
       getNodeMatching((node) => node is ConstructorDeclaration);
+
+  /**
+   * Resynthesize the AST for the given serialized constructor initializer.
+   */
+  ConstructorInitializer _buildConstructorInitializer(
+      UnlinkedConstructorInitializer serialized) {
+    UnlinkedConstructorInitializerKind kind = serialized.kind;
+    String name = serialized.name;
+    List<Expression> arguments = <Expression>[];
+    {
+      int numArguments = serialized.arguments.length;
+      int numNames = serialized.argumentNames.length;
+      for (int i = 0; i < numArguments; i++) {
+        Expression expression = enclosingUnit.resynthesizerContext
+            .buildExpression(this, serialized.arguments[i]);
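+        // The trailing `numNames` arguments are named; map argument `i` to
+        // its name, if it has one.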
+        int nameIndex = numNames + i - numArguments;
+        if (nameIndex >= 0) {
+          expression = AstFactory.namedExpression2(
+              serialized.argumentNames[nameIndex], expression);
+        }
+        arguments.add(expression);
+      }
+    }
+    switch (kind) {
+      case UnlinkedConstructorInitializerKind.field:
+        ConstructorFieldInitializer initializer =
+            AstFactory.constructorFieldInitializer(
+                false,
+                name,
+                enclosingUnit.resynthesizerContext
+                    .buildExpression(this, serialized.expression));
+        initializer.fieldName.staticElement = enclosingElement.getField(name);
+        return initializer;
+      case UnlinkedConstructorInitializerKind.superInvocation:
+        SuperConstructorInvocation initializer =
+            AstFactory.superConstructorInvocation2(
+                name.isNotEmpty ? name : null, arguments);
+        ClassElement superElement = enclosingElement.supertype.element;
+        ConstructorElement element = name.isEmpty
+            ? superElement.unnamedConstructor
+            : superElement.getNamedConstructor(name);
+        initializer.staticElement = element;
+        initializer.constructorName?.staticElement = element;
+        return initializer;
+      case UnlinkedConstructorInitializerKind.thisInvocation:
+        RedirectingConstructorInvocation initializer =
+            AstFactory.redirectingConstructorInvocation2(
+                name.isNotEmpty ? name : null, arguments);
+        ConstructorElement element = name.isEmpty
+            ? enclosingElement.unnamedConstructor
+            : enclosingElement.getNamedConstructor(name);
+        initializer.staticElement = element;
+        initializer.constructorName?.staticElement = element;
+        return initializer;
+    }
+    return null;
+  }
 }
 
 /**
@@ -1755,7 +2307,8 @@
  *
  * This class is not intended to be part of the public API for analyzer.
  */
-abstract class ConstVariableElement implements ConstantEvaluationTarget {
+abstract class ConstVariableElement
+    implements ElementImpl, ConstantEvaluationTarget {
   /**
    * If this element represents a constant variable, and it has an initializer,
    * a copy of the initializer for the constant.  Otherwise `null`.
@@ -1765,10 +2318,34 @@
    * in which case there might be some constant variables that lack
    * initializers.
    */
-  Expression constantInitializer;
+  Expression _constantInitializer;
 
-  @override
-  EvaluationResultImpl evaluationResult;
+  EvaluationResultImpl _evaluationResult;
+
+  Expression get constantInitializer {
+    if (_constantInitializer == null && _unlinkedConst != null) {
+      _constantInitializer = enclosingUnit.resynthesizerContext
+          .buildExpression(this, _unlinkedConst);
+    }
+    return _constantInitializer;
+  }
+
+  void set constantInitializer(Expression constantInitializer) {
+    assert(_unlinkedConst == null);
+    _constantInitializer = constantInitializer;
+  }
+
+  EvaluationResultImpl get evaluationResult => _evaluationResult;
+
+  void set evaluationResult(EvaluationResultImpl evaluationResult) {
+    _evaluationResult = evaluationResult;
+  }
+
+  /**
+   * If this element is resynthesized from the summary, return the unlinked
+   * initializer, otherwise return `null`.
+   */
+  UnlinkedConst get _unlinkedConst;
 
   /**
    * Return a representation of the value of this variable, forcing the value
@@ -1808,25 +2385,6 @@
   DefaultFieldFormalParameterElementImpl.forSerialized(
       UnlinkedParam unlinkedParam, ElementImpl enclosingElement)
       : super.forSerialized(unlinkedParam, enclosingElement);
-
-  @override
-  Expression get constantInitializer {
-    if (_unlinkedParam != null) {
-      UnlinkedConst defaultValue = _unlinkedParam.initializer?.bodyExpr;
-      if (defaultValue == null) {
-        return null;
-      }
-      return super.constantInitializer ??= enclosingUnit.resynthesizerContext
-          .buildExpression(this, defaultValue);
-    }
-    return super.constantInitializer;
-  }
-
-  @override
-  void set constantInitializer(Expression initializer) {
-    assert(_unlinkedParam == null);
-    super.constantInitializer = initializer;
-  }
 }
 
 /**
@@ -1854,25 +2412,6 @@
       : super.forSerialized(unlinkedParam, enclosingElement);
 
   @override
-  Expression get constantInitializer {
-    if (_unlinkedParam != null) {
-      UnlinkedConst defaultValue = _unlinkedParam.initializer?.bodyExpr;
-      if (defaultValue == null) {
-        return null;
-      }
-      return super.constantInitializer ??= enclosingUnit.resynthesizerContext
-          .buildExpression(this, defaultValue);
-    }
-    return super.constantInitializer;
-  }
-
-  @override
-  void set constantInitializer(Expression initializer) {
-    assert(_unlinkedParam == null);
-    super.constantInitializer = initializer;
-  }
-
-  @override
   DefaultFormalParameter computeNode() =>
       getNodeMatching((node) => node is DefaultFormalParameter);
 }
@@ -1923,6 +2462,12 @@
   static String _DEPRECATED_VARIABLE_NAME = "deprecated";
 
   /**
+   * The name of the top-level variable used to mark a method as being a
+   * factory.
+   */
+  static String _FACTORY_VARIABLE_NAME = "factory";
+
+  /**
    * The name of the class used to JS annotate an element.
    */
   static String _JS_CLASS_NAME = "JS";
@@ -2021,6 +2566,12 @@
   }
 
   @override
+  bool get isFactory =>
+      element is PropertyAccessorElement &&
+      element.name == _FACTORY_VARIABLE_NAME &&
+      element.library?.name == _META_LIB_NAME;
+
+  @override
   bool get isJS =>
       element is ConstructorElement &&
       element.enclosingElement.name == _JS_CLASS_NAME &&
@@ -2264,6 +2815,16 @@
   }
 
   @override
+  bool get isFactory {
+    for (ElementAnnotation annotation in metadata) {
+      if (annotation.isFactory) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  @override
   bool get isJS {
     for (ElementAnnotation annotation in metadata) {
       if (annotation.isJS) {
@@ -2328,6 +2889,9 @@
       getAncestor((element) => element is LibraryElement);
 
   @override
+  Source get librarySource => library?.source;
+
+  @override
   ElementLocation get location {
     if (_cachedLocation == null) {
       if (library == null) {
@@ -2768,6 +3332,252 @@
 }
 
 /**
+ * An [AbstractClassElementImpl] which is an enum.
+ */
+class EnumElementImpl extends AbstractClassElementImpl {
+  /**
+   * The unlinked representation of the enum in the summary.
+   */
+  final UnlinkedEnum _unlinkedEnum;
+
+  /**
+   * The type defined by the enum.
+   */
+  InterfaceType _type;
+
+  /**
+   * Initialize a newly created class element to have the given [name] at the
+   * given [offset] in the file that contains the declaration of this element.
+   */
+  EnumElementImpl(String name, int offset)
+      : _unlinkedEnum = null,
+        super(name, offset);
+
+  /**
+   * Initialize a newly created class element to have the given [name].
+   */
+  EnumElementImpl.forNode(Identifier name)
+      : _unlinkedEnum = null,
+        super.forNode(name);
+
+  /**
+   * Initialize using the given serialized information.
+   */
+  EnumElementImpl.forSerialized(
+      this._unlinkedEnum, CompilationUnitElementImpl enclosingUnit)
+      : super.forSerialized(enclosingUnit);
+
+  /**
+   * Set whether this class is abstract.
+   */
+  void set abstract(bool isAbstract) {
+    assert(_unlinkedEnum == null);
+  }
+
+  @override
+  List<PropertyAccessorElement> get accessors {
+    if (_unlinkedEnum != null && _accessors == null) {
+      _resynthesizeFieldsAndPropertyAccessors();
+    }
+    return _accessors ?? const <PropertyAccessorElement>[];
+  }
+
+  @override
+  void set accessors(List<PropertyAccessorElement> accessors) {
+    assert(_unlinkedEnum == null);
+    super.accessors = accessors;
+  }
+
+  @override
+  List<InterfaceType> get allSupertypes => <InterfaceType>[supertype];
+
+  @override
+  int get codeLength {
+    if (_unlinkedEnum != null) {
+      return _unlinkedEnum.codeRange?.length;
+    }
+    return super.codeLength;
+  }
+
+  @override
+  int get codeOffset {
+    if (_unlinkedEnum != null) {
+      return _unlinkedEnum.codeRange?.offset;
+    }
+    return super.codeOffset;
+  }
+
+  @override
+  List<ConstructorElement> get constructors {
+    // The equivalent code for enums in the spec shows a single constructor,
+    // but that constructor is not callable (since it is a compile-time error
+    // to subclass, mix-in, implement, or explicitly instantiate an enum).
+    // So we represent this as having no constructors.
+    return const <ConstructorElement>[];
+  }
+
+  @override
+  SourceRange get docRange {
+    if (_unlinkedEnum != null) {
+      UnlinkedDocumentationComment comment = _unlinkedEnum.documentationComment;
+      return comment != null
+          ? new SourceRange(comment.offset, comment.length)
+          : null;
+    }
+    return super.docRange;
+  }
+
+  @override
+  String get documentationComment {
+    if (_unlinkedEnum != null) {
+      return _unlinkedEnum?.documentationComment?.text;
+    }
+    return super.documentationComment;
+  }
+
+  @override
+  List<FieldElement> get fields {
+    if (_unlinkedEnum != null && _fields == null) {
+      _resynthesizeFieldsAndPropertyAccessors();
+    }
+    return _fields ?? const <FieldElement>[];
+  }
+
+  @override
+  void set fields(List<FieldElement> fields) {
+    assert(_unlinkedEnum == null);
+    super.fields = fields;
+  }
+
+  @override
+  bool get hasNonFinalField => false;
+
+  @override
+  bool get hasReferenceToSuper => false;
+
+  @override
+  bool get hasStaticMember => true;
+
+  @override
+  List<InterfaceType> get interfaces => const <InterfaceType>[];
+
+  @override
+  bool get isAbstract => false;
+
+  @override
+  bool get isEnum => true;
+
+  @override
+  bool get isMixinApplication => false;
+
+  @override
+  bool get isOrInheritsProxy => false;
+
+  @override
+  bool get isProxy => false;
+
+  @override
+  bool get isValidMixin => false;
+
+  @override
+  List<ElementAnnotation> get metadata {
+    if (_unlinkedEnum != null) {
+      return _metadata ??=
+          _buildAnnotations(enclosingUnit, _unlinkedEnum.annotations);
+    }
+    return super.metadata;
+  }
+
+  @override
+  List<MethodElement> get methods => const <MethodElement>[];
+
+  @override
+  List<InterfaceType> get mixins => const <InterfaceType>[];
+
+  @override
+  String get name {
+    if (_unlinkedEnum != null) {
+      return _unlinkedEnum.name;
+    }
+    return super.name;
+  }
+
+  @override
+  int get nameOffset {
+    if (_unlinkedEnum != null) {
+      return _unlinkedEnum.nameOffset;
+    }
+    return super.nameOffset;
+  }
+
+  @override
+  InterfaceType get supertype => context.typeProvider.objectType;
+
+  @override
+  InterfaceType get type {
+    if (_type == null) {
+      InterfaceTypeImpl type = new InterfaceTypeImpl(this);
+      type.typeArguments = const <DartType>[];
+      _type = type;
+    }
+    return _type;
+  }
+
+  @override
+  List<TypeParameterElement> get typeParameters =>
+      const <TypeParameterElement>[];
+
+  @override
+  ConstructorElement get unnamedConstructor => null;
+
+  @override
+  void appendTo(StringBuffer buffer) {
+    buffer.write('enum ');
+    String name = displayName;
+    if (name == null) {
+      buffer.write("{unnamed enum}");
+    } else {
+      buffer.write(name);
+    }
+  }
+
+  @override
+  MethodElement getMethod(String name) => null;
+
+  @override
+  ConstructorElement getNamedConstructor(String name) => null;
+
+  @override
+  bool isSuperConstructorAccessible(ConstructorElement constructor) => false;
+
+  void _resynthesizeFieldsAndPropertyAccessors() {
+    List<FieldElementImpl> fields = <FieldElementImpl>[];
+    // Build the 'index' field.
+    fields.add(new FieldElementImpl('index', -1)
+      ..enclosingElement = this
+      ..synthetic = true
+      ..final2 = true
+      ..type = context.typeProvider.intType);
+    // Build the 'values' field.
+    fields.add(new ConstFieldElementImpl_EnumValues(this));
+    // Build fields for all enum constants.
+    for (int i = 0; i < _unlinkedEnum.values.length; i++) {
+      UnlinkedEnumValue unlinkedValue = _unlinkedEnum.values[i];
+      ConstFieldElementImpl_EnumValue field =
+          new ConstFieldElementImpl_EnumValue(this, unlinkedValue, i);
+      fields.add(field);
+    }
+    // Set the fields and create an implicit getter for each field.
+    _fields = fields;
+    _accessors = fields
+        .map((FieldElementImpl field) =>
+            new PropertyAccessorElementImpl_ImplicitGetter(field)
+              ..enclosingElement = this)
+        .toList(growable: false);
+  }
+}
+
+/**
  * A base class for concrete implementations of an [ExecutableElement].
  */
 abstract class ExecutableElementImpl extends ElementImpl
@@ -2782,29 +3592,29 @@
    * A list containing all of the functions defined within this executable
    * element.
    */
-  List<FunctionElement> _functions = FunctionElement.EMPTY_LIST;
+  List<FunctionElement> _functions;
 
   /**
    * A list containing all of the labels defined within this executable element.
    */
-  List<LabelElement> _labels = LabelElement.EMPTY_LIST;
+  List<LabelElement> _labels;
 
   /**
    * A list containing all of the local variables defined within this executable
    * element.
    */
-  List<LocalVariableElement> _localVariables = LocalVariableElement.EMPTY_LIST;
+  List<LocalVariableElement> _localVariables;
 
   /**
    * A list containing all of the parameters defined by this executable element.
    */
-  List<ParameterElement> _parameters = ParameterElement.EMPTY_LIST;
+  List<ParameterElement> _parameters;
 
   /**
    * A list containing all of the type parameters defined for this executable
    * element.
    */
-  List<TypeParameterElement> _typeParameters = TypeParameterElement.EMPTY_LIST;
+  List<TypeParameterElement> _typeParameters;
 
   /**
    * The return type defined by this executable element.
@@ -2814,7 +3624,7 @@
   /**
    * The type of function defined by this executable element.
    */
-  FunctionType type;
+  FunctionType _type;
 
   /**
    * Initialize a newly created executable element to have the given [name] and
@@ -2899,13 +3709,20 @@
   }
 
   @override
-  List<FunctionElement> get functions => _functions;
+  List<FunctionElement> get functions {
+    if (serializedExecutable != null) {
+      _functions ??= FunctionElementImpl.resynthesizeList(
+          this, serializedExecutable.localFunctions);
+    }
+    return _functions ?? const <FunctionElement>[];
+  }
 
   /**
    * Set the functions defined within this executable element to the given
    * [functions].
    */
   void set functions(List<FunctionElement> functions) {
+    assert(serializedExecutable == null);
     for (FunctionElement function in functions) {
       (function as FunctionElementImpl).enclosingElement = this;
     }
@@ -2976,13 +3793,20 @@
   bool get isSynchronous => !isAsynchronous;
 
   @override
-  List<LabelElement> get labels => _labels;
+  List<LabelElement> get labels {
+    if (serializedExecutable != null) {
+      _labels ??= LabelElementImpl.resynthesizeList(
+          this, serializedExecutable.localLabels);
+    }
+    return _labels ?? const <LabelElement>[];
+  }
 
   /**
    * Set the labels defined within this executable element to the given
    * [labels].
    */
   void set labels(List<LabelElement> labels) {
+    assert(serializedExecutable == null);
     for (LabelElement label in labels) {
       (label as LabelElementImpl).enclosingElement = this;
     }
@@ -2990,13 +3814,32 @@
   }
 
   @override
-  List<LocalVariableElement> get localVariables => _localVariables;
+  List<LocalVariableElement> get localVariables {
+    if (serializedExecutable != null && _localVariables == null) {
+      List<UnlinkedVariable> unlinkedVariables =
+          serializedExecutable.localVariables;
+      int length = unlinkedVariables.length;
+      if (length != 0) {
+        List<LocalVariableElementImpl> localVariables =
+            new List<LocalVariableElementImpl>(length);
+        for (int i = 0; i < length; i++) {
+          localVariables[i] = new LocalVariableElementImpl.forSerializedFactory(
+              unlinkedVariables[i], this);
+        }
+        _localVariables = localVariables;
+      } else {
+        _localVariables = const <LocalVariableElement>[];
+      }
+    }
+    return _localVariables ?? const <LocalVariableElement>[];
+  }
 
   /**
    * Set the local variables defined within this executable element to the given
    * [variables].
    */
   void set localVariables(List<LocalVariableElement> variables) {
+    assert(serializedExecutable == null);
     for (LocalVariableElement variable in variables) {
       (variable as LocalVariableElementImpl).enclosingElement = this;
     }
@@ -3029,13 +3872,20 @@
   }
 
   @override
-  List<ParameterElement> get parameters => _parameters;
+  List<ParameterElement> get parameters {
+    if (serializedExecutable != null) {
+      _parameters ??= ParameterElementImpl.resynthesizeList(
+          serializedExecutable.parameters, this);
+    }
+    return _parameters ?? const <ParameterElement>[];
+  }
 
   /**
    * Set the parameters defined by this executable element to the given
    * [parameters].
    */
   void set parameters(List<ParameterElement> parameters) {
+    assert(serializedExecutable == null);
     for (ParameterElement parameter in parameters) {
       (parameter as ParameterElementImpl).enclosingElement = this;
     }
@@ -3063,16 +3913,36 @@
   }
 
   @override
+  FunctionType get type {
+    if (serializedExecutable != null) {
+      _type ??= new FunctionTypeImpl.elementWithNameAndArgs(
+          this, null, allEnclosingTypeParameterTypes, false);
+    }
+    return _type;
+  }
+
+  void set type(FunctionType type) {
+    assert(serializedExecutable == null);
+    _type = type;
+  }
+
+  @override
   TypeParameterizedElementMixin get typeParameterContext => this;
 
   @override
-  List<TypeParameterElement> get typeParameters => _typeParameters;
+  List<TypeParameterElement> get typeParameters {
+    if (serializedExecutable != null) {
+      return super.typeParameters;
+    }
+    return _typeParameters ?? const <TypeParameterElement>[];
+  }
 
   /**
    * Set the type parameters defined by this executable element to the given
    * [typeParameters].
    */
   void set typeParameters(List<TypeParameterElement> typeParameters) {
+    assert(serializedExecutable == null);
     for (TypeParameterElement parameter in typeParameters) {
       (parameter as TypeParameterElementImpl).enclosingElement = this;
     }
@@ -3086,26 +3956,26 @@
   @override
   void appendTo(StringBuffer buffer) {
     if (this.kind != ElementKind.GETTER) {
-      int typeParameterCount = _typeParameters.length;
+      int typeParameterCount = typeParameters.length;
       if (typeParameterCount > 0) {
         buffer.write('<');
         for (int i = 0; i < typeParameterCount; i++) {
           if (i > 0) {
             buffer.write(", ");
           }
-          (_typeParameters[i] as TypeParameterElementImpl).appendTo(buffer);
+          (typeParameters[i] as TypeParameterElementImpl).appendTo(buffer);
         }
         buffer.write('>');
       }
       buffer.write("(");
       String closing = null;
       ParameterKind kind = ParameterKind.REQUIRED;
-      int parameterCount = _parameters.length;
+      int parameterCount = parameters.length;
       for (int i = 0; i < parameterCount; i++) {
         if (i > 0) {
           buffer.write(", ");
         }
-        ParameterElement parameter = _parameters[i];
+        ParameterElement parameter = parameters[i];
         ParameterKind parameterKind = parameter.parameterKind;
         if (parameterKind != kind) {
           if (closing != null) {
@@ -3155,7 +4025,7 @@
         return variableImpl;
       }
     }
-    for (ParameterElement parameter in _parameters) {
+    for (ParameterElement parameter in parameters) {
       ParameterElementImpl parameterImpl = parameter;
       if (parameterImpl.identifier == identifier) {
         return parameterImpl;
@@ -3170,7 +4040,7 @@
     safelyVisitChildren(_functions, visitor);
     safelyVisitChildren(_labels, visitor);
     safelyVisitChildren(_localVariables, visitor);
-    safelyVisitChildren(_parameters, visitor);
+    safelyVisitChildren(parameters, visitor);
   }
 }
 
@@ -3348,6 +4218,22 @@
       UnlinkedVariable unlinkedVariable, ElementImpl enclosingElement)
       : super.forSerialized(unlinkedVariable, enclosingElement);
 
+  /**
+   * Initialize using the given serialized information.
+   */
+  factory FieldElementImpl.forSerializedFactory(
+      UnlinkedVariable unlinkedVariable, ClassElementImpl enclosingClass) {
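+    // Fields with an initializer get the const implementation when they are
+    // const, or when they are final instance fields (whose initializers may
+    // still be needed, e.g. for evaluating const constructors).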
+    if (unlinkedVariable.initializer?.bodyExpr != null &&
+        (unlinkedVariable.isConst ||
+            unlinkedVariable.isFinal && !unlinkedVariable.isStatic)) {
+      return new ConstFieldElementImpl.forSerialized(
+          unlinkedVariable, enclosingClass);
+    } else {
+      return new FieldElementImpl.forSerialized(
+          unlinkedVariable, enclosingClass);
+    }
+  }
+
   @override
   ClassElement get enclosingElement => super.enclosingElement as ClassElement;
 
@@ -3356,12 +4242,21 @@
       enclosingElement != null ? enclosingElement.isEnum : false;
 
   @override
+  bool get isStatic {
+    if (_unlinkedVariable != null) {
+      return _unlinkedVariable.isStatic;
+    }
+    return hasModifier(Modifier.STATIC);
+  }
+
+  @override
   ElementKind get kind => ElementKind.FIELD;
 
   /**
    * Set whether this field is static.
    */
   void set static(bool isStatic) {
+    assert(_unlinkedVariable == null);
     setModifier(Modifier.STATIC, isStatic);
   }
 
@@ -3540,6 +4435,13 @@
 
   @override
   SourceRange get visibleRange {
+    if (serializedExecutable != null) {
+      if (serializedExecutable.visibleLength == 0) {
+        return null;
+      }
+      return new SourceRange(serializedExecutable.visibleOffset,
+          serializedExecutable.visibleLength);
+    }
     if (_visibleRangeLength < 0) {
       return null;
     }
@@ -3567,6 +4469,7 @@
    * [offset] with the given [length].
    */
   void setVisibleRange(int offset, int length) {
+    assert(serializedExecutable == null);
     _visibleRangeOffset = offset;
     _visibleRangeLength = length;
   }
@@ -3589,6 +4492,25 @@
   void shareTypeParameters(List<TypeParameterElement> typeParameters) {
     this._typeParameters = typeParameters;
   }
+
+  /**
+   * Create and return [FunctionElement]s for the given [unlinkedFunctions].
+   */
+  static List<FunctionElement> resynthesizeList(
+      ExecutableElementImpl executableElement,
+      List<UnlinkedExecutable> unlinkedFunctions) {
+    int length = unlinkedFunctions.length;
+    if (length != 0) {
+      List<FunctionElement> elements = new List<FunctionElement>(length);
+      for (int i = 0; i < length; i++) {
+        elements[i] = new FunctionElementImpl.forSerialized(
+            unlinkedFunctions[i], executableElement);
+      }
+      return elements;
+    } else {
+      return const <FunctionElement>[];
+    }
+  }
 }
 
 /**
@@ -3627,12 +4549,47 @@
   @override
   final TypeParameterizedElementMixin enclosingTypeParameterContext;
 
+  final EntityRef _entityRef;
+
   FunctionElementImpl_forLUB(
-      this.enclosingUnit, this.enclosingTypeParameterContext)
+      this.enclosingUnit, this.enclosingTypeParameterContext, this._entityRef)
       : super('', -1);
 
   @override
   bool get isSynthetic => true;
+
+  @override
+  List<ParameterElement> get parameters {
+    return _parameters ??= ParameterElementImpl
+        .resynthesizeList(_entityRef.syntheticParams, this, synthetic: true);
+  }
+
+  @override
+  void set parameters(List<ParameterElement> parameters) {
+    assert(false);
+  }
+
+  @override
+  DartType get returnType {
+    return _returnType ??= enclosingUnit.resynthesizerContext
+        .resolveTypeRef(_entityRef.syntheticReturnType, typeParameterContext);
+  }
+
+  @override
+  void set returnType(DartType returnType) {
+    assert(false);
+  }
+
+  @override
+  FunctionType get type {
+    return _type ??=
+        new FunctionTypeImpl.elementWithNameAndArgs(this, null, null, false);
+  }
+
+  @override
+  void set type(FunctionType type) {
+    assert(false);
+  }
 }
 
 /**
@@ -3649,7 +4606,7 @@
   /**
    * A list containing all of the parameters defined by this type alias.
    */
-  List<ParameterElement> _parameters = ParameterElement.EMPTY_LIST;
+  List<ParameterElement> _parameters;
 
   /**
    * The return type defined by this type alias.
@@ -3659,7 +4616,7 @@
   /**
    * The type of function defined by this type alias.
    */
-  FunctionType type;
+  FunctionType _type;
 
   /**
    * A list containing all of the type parameters defined for this type.
@@ -3770,12 +4727,19 @@
   }
 
   @override
-  List<ParameterElement> get parameters => _parameters;
+  List<ParameterElement> get parameters {
+    if (_unlinkedTypedef != null) {
+      _parameters ??= ParameterElementImpl.resynthesizeList(
+          _unlinkedTypedef.parameters, this);
+    }
+    return _parameters ?? const <ParameterElement>[];
+  }
 
   /**
    * Set the parameters defined by this type alias to the given [parameters].
    */
   void set parameters(List<ParameterElement> parameters) {
+    assert(_unlinkedTypedef == null);
     if (parameters != null) {
       for (ParameterElement parameter in parameters) {
         (parameter as ParameterElementImpl).enclosingElement = this;
@@ -3799,6 +4763,19 @@
   }
 
   @override
+  FunctionType get type {
+    if (_unlinkedTypedef != null && _type == null) {
+      _type = new FunctionTypeImpl.forTypedef(this);
+    }
+    return _type;
+  }
+
+  void set type(FunctionType type) {
+    assert(_unlinkedTypedef == null);
+    _type = type;
+  }
+
+  @override
   TypeParameterizedElementMixin get typeParameterContext => this;
 
   @override
@@ -3867,7 +4844,7 @@
 
   @override
   ElementImpl getChild(String identifier) {
-    for (ParameterElement parameter in _parameters) {
+    for (ParameterElement parameter in parameters) {
       ParameterElementImpl parameterImpl = parameter;
       if (parameterImpl.identifier == identifier) {
         return parameterImpl;
@@ -3885,7 +4862,7 @@
   @override
   void visitChildren(ElementVisitor visitor) {
     super.visitChildren(visitor);
-    safelyVisitChildren(_parameters, visitor);
+    safelyVisitChildren(parameters, visitor);
     safelyVisitChildren(_typeParameters, visitor);
   }
 }
@@ -4202,6 +5179,11 @@
  */
 class LabelElementImpl extends ElementImpl implements LabelElement {
   /**
+   * The unlinked representation of the label in the summary.
+   */
+  final UnlinkedLabel _unlinkedLabel;
+
+  /**
    * A flag indicating whether this label is associated with a `switch`
    * statement.
    */
@@ -4223,7 +5205,8 @@
    */
   LabelElementImpl(String name, int nameOffset, this._onSwitchStatement,
       this._onSwitchMember)
-      : super(name, nameOffset);
+      : _unlinkedLabel = null,
+        super(name, nameOffset);
 
   /**
    * Initialize a newly created label element to have the given [name].
@@ -4233,7 +5216,21 @@
    */
   LabelElementImpl.forNode(
       Identifier name, this._onSwitchStatement, this._onSwitchMember)
-      : super.forNode(name);
+      : _unlinkedLabel = null,
+        super.forNode(name);
+
+  /**
+   * Initialize using the given serialized information.
+   */
+  LabelElementImpl.forSerialized(
+      UnlinkedLabel unlinkedLabel, ExecutableElementImpl enclosingExecutable)
+      : _unlinkedLabel = unlinkedLabel,
+        _onSwitchStatement = unlinkedLabel.isOnSwitchStatement,
+        _onSwitchMember = unlinkedLabel.isOnSwitchMember,
+        super.forSerialized(enclosingExecutable);
+
+  @override
+  String get displayName => name;
 
   @override
   ExecutableElement get enclosingElement =>
@@ -4254,7 +5251,42 @@
   ElementKind get kind => ElementKind.LABEL;
 
   @override
+  String get name {
+    if (_unlinkedLabel != null) {
+      return _unlinkedLabel.name;
+    }
+    return super.name;
+  }
+
+  @override
+  int get nameOffset {
+    if (_unlinkedLabel != null) {
+      return _unlinkedLabel.nameOffset;
+    }
+    return super.nameOffset;
+  }
+
+  @override
   accept(ElementVisitor visitor) => visitor.visitLabelElement(this);
+
+  /**
+   * Create and return [LabelElement]s for the given [unlinkedLabels].
+   */
+  static List<LabelElement> resynthesizeList(
+      ExecutableElementImpl enclosingExecutable,
+      List<UnlinkedLabel> unlinkedLabels) {
+    int length = unlinkedLabels.length;
+    if (length != 0) {
+      List<LabelElement> elements = new List<LabelElement>(length);
+      for (int i = 0; i < length; i++) {
+        elements[i] = new LabelElementImpl.forSerialized(
+            unlinkedLabels[i], enclosingExecutable);
+      }
+      return elements;
+    } else {
+      return const <LabelElement>[];
+    }
+  }
 }
 
 /**
@@ -4444,7 +5476,7 @@
         libraries.add(library);
       }
     }
-    return new List.from(libraries);
+    return libraries.toList(growable: false);
   }
 
   @override
@@ -4534,7 +5566,7 @@
         libraries.add(library);
       }
     }
-    return new List.from(libraries);
+    return libraries.toList(growable: false);
   }
 
   @override
@@ -4747,7 +5779,7 @@
           prefixes.add(prefix);
         }
       }
-      _prefixes = prefixes.toList();
+      _prefixes = prefixes.toList(growable: false);
     }
     return _prefixes;
   }
@@ -4782,9 +5814,9 @@
 
   @override
   List<LibraryElement> get visibleLibraries {
-    Set<LibraryElement> visibleLibraries = new Set();
+    HashSet<LibraryElement> visibleLibraries = new HashSet<LibraryElement>();
     _addVisibleLibraries(visibleLibraries, false);
-    return new List.from(visibleLibraries);
+    return visibleLibraries.toList(growable: false);
   }
 
   @override
@@ -5109,6 +6141,22 @@
       ExecutableElementImpl enclosingExecutable)
       : super.forSerialized(unlinkedVariable, enclosingExecutable);
 
+  /**
+   * Initialize using the given serialized information.
+   */
+  factory LocalVariableElementImpl.forSerializedFactory(
+      UnlinkedVariable unlinkedVariable,
+      ExecutableElementImpl enclosingExecutable) {
+    if (unlinkedVariable.isConst &&
+        unlinkedVariable.initializer?.bodyExpr != null) {
+      return new ConstLocalVariableElementImpl.forSerialized(
+          unlinkedVariable, enclosingExecutable);
+    } else {
+      return new LocalVariableElementImpl.forSerialized(
+          unlinkedVariable, enclosingExecutable);
+    }
+  }
+
   @override
   String get identifier {
     int enclosingOffset =
@@ -5128,6 +6176,13 @@
 
   @override
   SourceRange get visibleRange {
+    if (_unlinkedVariable != null) {
+      if (_unlinkedVariable.visibleLength == 0) {
+        return null;
+      }
+      return new SourceRange(
+          _unlinkedVariable.visibleOffset, _unlinkedVariable.visibleLength);
+    }
     if (_visibleRangeLength < 0) {
       return null;
     }
@@ -5153,6 +6208,7 @@
    * [offset] with the given [length].
    */
   void setVisibleRange(int offset, int length) {
+    assert(_unlinkedVariable == null);
     _visibleRangeOffset = offset;
     _visibleRangeLength = length;
   }
@@ -5189,6 +6245,14 @@
   }
 
   @override
+  List<TypeParameterType> get allEnclosingTypeParameterTypes {
+    if (isStatic) {
+      return const <TypeParameterType>[];
+    }
+    return super.allEnclosingTypeParameterTypes;
+  }
+
+  @override
   String get displayName {
     String displayName = super.displayName;
     if ("unary-" == displayName) {
@@ -5443,6 +6507,9 @@
   bool get isDeprecated => false;
 
   @override
+  bool get isFactory => false;
+
+  @override
   bool get isJS => false;
 
   @override
@@ -5476,6 +6543,9 @@
   LibraryElement get library => null;
 
   @override
+  Source get librarySource => null;
+
+  @override
   ElementLocation get location => null;
 
   @override
@@ -5540,7 +6610,12 @@
       if (i > 0) {
         buffer.write(", ");
       }
-      (conflictingElements[i] as ElementImpl).appendTo(buffer);
+      Element element = conflictingElements[i];
+      if (element is ElementImpl) {
+        element.appendTo(buffer);
+      } else {
+        buffer.write(element);
+      }
     }
     buffer.write("]");
     return buffer.toString();
@@ -5595,7 +6670,7 @@
     HashSet<Element> elements = new HashSet<Element>();
     _add(elements, firstElement);
     _add(elements, secondElement);
-    return new List.from(elements);
+    return elements.toList(growable: false);
   }
 }
 
@@ -5746,6 +6821,29 @@
   }
 
   @override
+  FunctionElement get initializer {
+    if (_unlinkedVariable != null && _initializer == null) {
+      UnlinkedExecutable unlinkedInitializer = _unlinkedVariable.initializer;
+      if (unlinkedInitializer != null) {
+        _initializer = new FunctionElementImpl.forSerialized(
+            unlinkedInitializer, this)..synthetic = true;
+      } else {
+        return null;
+      }
+    }
+    return super.initializer;
+  }
+
+  /**
+   * Set the function representing this variable's initializer to the given
+   * [function].
+   */
+  void set initializer(FunctionElement function) {
+    assert(_unlinkedVariable == null);
+    super.initializer = function;
+  }
+
+  @override
   bool get isConst {
     if (_unlinkedVariable != null) {
       return _unlinkedVariable.isConst;
@@ -5785,6 +6883,27 @@
     }
     return super.nameOffset;
   }
+
+  @override
+  DartType get type {
+    if (_unlinkedVariable != null && _type == null) {
+      _type = enclosingUnit.resynthesizerContext.resolveLinkedType(
+              _unlinkedVariable.inferredTypeSlot, typeParameterContext) ??
+          enclosingUnit.resynthesizerContext
+              .resolveTypeRef(_unlinkedVariable.type, typeParameterContext);
+    }
+    return super.type;
+  }
+
+  void set type(DartType type) {
+    assert(_unlinkedVariable == null);
+    _type = type;
+  }
+
+  /**
+   * Subclasses need this getter; see [ConstVariableElement._unlinkedConst].
+   */
+  UnlinkedConst get _unlinkedConst => _unlinkedVariable?.initializer?.bodyExpr;
 }
 
 /**
@@ -5856,6 +6975,34 @@
       : super.forSerialized(enclosingElement);
 
   /**
+   * Initialize using the given serialized information.
+   */
+  factory ParameterElementImpl.forSerializedFactory(
+      UnlinkedParam unlinkedParameter, ElementImpl enclosingElement,
+      {bool synthetic: false}) {
+    ParameterElementImpl element;
+    if (unlinkedParameter.isInitializingFormal) {
+      if (unlinkedParameter.kind == UnlinkedParamKind.required) {
+        element = new FieldFormalParameterElementImpl.forSerialized(
+            unlinkedParameter, enclosingElement);
+      } else {
+        element = new DefaultFieldFormalParameterElementImpl.forSerialized(
+            unlinkedParameter, enclosingElement);
+      }
+    } else {
+      if (unlinkedParameter.kind == UnlinkedParamKind.required) {
+        element = new ParameterElementImpl.forSerialized(
+            unlinkedParameter, enclosingElement);
+      } else {
+        element = new DefaultParameterElementImpl.forSerialized(
+            unlinkedParameter, enclosingElement);
+      }
+    }
+    element.synthetic = synthetic;
+    return element;
+  }
+
+  /**
    * Creates a synthetic parameter with [name], [type] and [kind].
    */
   factory ParameterElementImpl.synthetic(
@@ -5929,6 +7076,29 @@
   }
 
   @override
+  FunctionElement get initializer {
+    if (_unlinkedParam != null && _initializer == null) {
+      UnlinkedExecutable unlinkedInitializer = _unlinkedParam.initializer;
+      if (unlinkedInitializer != null) {
+        _initializer = new FunctionElementImpl.forSerialized(
+            unlinkedInitializer, this)..synthetic = true;
+      } else {
+        return null;
+      }
+    }
+    return super.initializer;
+  }
+
+  /**
+   * Set the function representing this variable's initializer to the given
+   * [function].
+   */
+  void set initializer(FunctionElement function) {
+    assert(_unlinkedParam == null);
+    super.initializer = function;
+  }
+
+  @override
   bool get isConst {
     if (_unlinkedParam != null) {
       return false;
@@ -6008,7 +7178,10 @@
   }
 
   @override
-  List<ParameterElement> get parameters => _parameters;
+  List<ParameterElement> get parameters {
+    _resynthesizeTypeAndParameters();
+    return _parameters;
+  }
 
   /**
    * Set the parameters defined by this executable element to the given
@@ -6023,12 +7196,7 @@
 
   @override
   DartType get type {
-    if (_unlinkedParam != null && _type == null) {
-      _type = enclosingUnit.resynthesizerContext.resolveLinkedType(
-              _unlinkedParam.inferredTypeSlot, typeParameterContext) ??
-          enclosingUnit.resynthesizerContext
-              .resolveTypeRef(_unlinkedParam.type, typeParameterContext);
-    }
+    _resynthesizeTypeAndParameters();
     return super.type;
   }
 
@@ -6061,6 +7229,11 @@
     return new SourceRange(_visibleRangeOffset, _visibleRangeLength);
   }
 
+  /**
+   * Subclasses need this getter; see [ConstVariableElement._unlinkedConst].
+   */
+  UnlinkedConst get _unlinkedConst => _unlinkedParam?.initializer?.bodyExpr;
+
   @override
   accept(ElementVisitor visitor) => visitor.visitParameterElement(this);
 
@@ -6111,7 +7284,90 @@
   @override
   void visitChildren(ElementVisitor visitor) {
     super.visitChildren(visitor);
-    safelyVisitChildren(_parameters, visitor);
+    safelyVisitChildren(parameters, visitor);
+  }
+
+  /**
+   * If this element is resynthesized, and its type and parameters have not
+   * been built yet, build them and remember them in the corresponding fields.
+   */
+  void _resynthesizeTypeAndParameters() {
+    if (_unlinkedParam != null && _type == null) {
+      if (_unlinkedParam.isFunctionTyped) {
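+        // A function-typed parameter, e.g. `f` in `void g(int f(String s))`:
+        // build a synthetic function element and use its function type as the
+        // type of this parameter.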
+        CompilationUnitElementImpl enclosingUnit = this.enclosingUnit;
+        FunctionElementImpl parameterTypeElement =
+            new FunctionElementImpl_forFunctionTypedParameter(
+                enclosingUnit, this);
+        if (!isSynthetic) {
+          parameterTypeElement.enclosingElement = this;
+        }
+        List<ParameterElement> subParameters = ParameterElementImpl
+            .resynthesizeList(_unlinkedParam.parameters, this,
+                synthetic: isSynthetic);
+        if (isSynthetic) {
+          parameterTypeElement.parameters = subParameters;
+        } else {
+          _parameters = subParameters;
+          parameterTypeElement.shareParameters(subParameters);
+        }
+        parameterTypeElement.returnType = enclosingUnit.resynthesizerContext
+            .resolveTypeRef(_unlinkedParam.type, typeParameterContext);
+        FunctionTypeImpl parameterType =
+            new FunctionTypeImpl.elementWithNameAndArgs(parameterTypeElement,
+                null, typeParameterContext.allTypeParameterTypes, false);
+        parameterTypeElement.type = parameterType;
+        _type = parameterType;
+      } else {
+        _type = enclosingUnit.resynthesizerContext.resolveLinkedType(
+                _unlinkedParam.inferredTypeSlot, typeParameterContext) ??
+            enclosingUnit.resynthesizerContext
+                .resolveTypeRef(_unlinkedParam.type, typeParameterContext);
+      }
+    }
+  }
+
+  /**
+   * Create and return [ParameterElement]s for the given [unlinkedParameters].
+   */
+  static List<ParameterElement> resynthesizeList(
+      List<UnlinkedParam> unlinkedParameters, ElementImpl enclosingElement,
+      {bool synthetic: false}) {
+    int length = unlinkedParameters.length;
+    if (length != 0) {
+      List<ParameterElement> parameters = new List<ParameterElement>(length);
+      for (int i = 0; i < length; i++) {
+        parameters[i] = new ParameterElementImpl.forSerializedFactory(
+            unlinkedParameters[i], enclosingElement,
+            synthetic: synthetic);
+      }
+      return parameters;
+    } else {
+      return const <ParameterElement>[];
+    }
+  }
+}
+
+/**
+ * The parameter of an implicit setter.
+ */
+class ParameterElementImpl_ofImplicitSetter extends ParameterElementImpl {
+  final PropertyAccessorElementImpl_ImplicitSetter setter;
+
+  ParameterElementImpl_ofImplicitSetter(
+      PropertyAccessorElementImpl_ImplicitSetter setter)
+      : setter = setter,
+        super('_${setter.variable.name}', setter.variable.nameOffset) {
+    enclosingElement = setter;
+    synthetic = true;
+    parameterKind = ParameterKind.REQUIRED;
+  }
+
+  @override
+  DartType get type => setter.variable.type;
+
+  @override
+  void set type(FunctionType type) {
+    assert(false); // Should never be called.
   }
 }
 
@@ -6263,6 +7519,14 @@
   }
 
   @override
+  List<TypeParameterType> get allEnclosingTypeParameterTypes {
+    if (isStatic) {
+      return const <TypeParameterType>[];
+    }
+    return super.allEnclosingTypeParameterTypes;
+  }
+
+  @override
   PropertyAccessorElement get correspondingGetter {
     if (isGetter || variable == null) {
       return null;
@@ -6302,9 +7566,6 @@
   }
 
   @override
-  int get hashCode => JenkinsSmiHash.hash2(super.hashCode, isGetter ? 1 : 2);
-
-  @override
   String get identifier {
     String name = displayName;
     String suffix = isGetter ? "?" : "=";
@@ -6372,11 +7633,6 @@
   }
 
   @override
-  bool operator ==(Object object) =>
-      super == object &&
-      isGetter == (object as PropertyAccessorElement).isGetter;
-
-  @override
   accept(ElementVisitor visitor) => visitor.visitPropertyAccessorElement(this);
 
   @override
@@ -6401,6 +7657,89 @@
 }
 
 /**
+ * Implicit getter for a [PropertyInducingElementImpl].
+ */
+class PropertyAccessorElementImpl_ImplicitGetter
+    extends PropertyAccessorElementImpl {
+  /**
+   * Create the implicit getter and bind it to the [property].
+   */
+  PropertyAccessorElementImpl_ImplicitGetter(
+      PropertyInducingElementImpl property)
+      : super.forVariable(property) {
+    property.getter = this;
+    enclosingElement = property.enclosingElement;
+  }
+
+  @override
+  bool get hasImplicitReturnType => variable.hasImplicitType;
+
+  @override
+  bool get isGetter => true;
+
+  @override
+  DartType get returnType => variable.type;
+
+  @override
+  void set returnType(DartType returnType) {
+    assert(false); // Should never be called.
+  }
+
+  @override
+  DartType get type {
+    return _type ??= new FunctionTypeImpl(this);
+  }
+
+  @override
+  void set type(FunctionType type) {
+    assert(false); // Should never be called.
+  }
+}
+
+/**
+ * Implicit setter for a [PropertyInducingElementImpl].
+ */
+class PropertyAccessorElementImpl_ImplicitSetter
+    extends PropertyAccessorElementImpl {
+  /**
+   * Create the implicit setter and bind it to the [property].
+   */
+  PropertyAccessorElementImpl_ImplicitSetter(
+      PropertyInducingElementImpl property)
+      : super.forVariable(property) {
+    property.setter = this;
+  }
+
+  @override
+  bool get isSetter => true;
+
+  @override
+  List<ParameterElement> get parameters {
+    return _parameters ??= <ParameterElement>[
+      new ParameterElementImpl_ofImplicitSetter(this)
+    ];
+  }
+
+  @override
+  DartType get returnType => VoidTypeImpl.instance;
+
+  @override
+  void set returnType(DartType returnType) {
+    assert(false); // Should never be called.
+  }
+
+  @override
+  DartType get type {
+    return _type ??= new FunctionTypeImpl(this);
+  }
+
+  @override
+  void set type(FunctionType type) {
+    assert(false); // Should never be called.
+  }
+}
+
+/**
  * A concrete implementation of a [PropertyInducingElement].
  */
 abstract class PropertyInducingElementImpl
@@ -6421,7 +7760,7 @@
    * The propagated type of this variable, or `null` if type propagation has not
    * been performed.
    */
-  DartType propagatedType;
+  DartType _propagatedType;
 
   /**
    * Initialize a newly created synthetic element to have the given [name] and
@@ -6440,6 +7779,20 @@
   PropertyInducingElementImpl.forSerialized(
       UnlinkedVariable unlinkedVariable, ElementImpl enclosingElement)
       : super.forSerialized(unlinkedVariable, enclosingElement);
+
+  @override
+  DartType get propagatedType {
+    if (_unlinkedVariable != null && _propagatedType == null) {
+      _propagatedType = enclosingUnit.resynthesizerContext.resolveLinkedType(
+          _unlinkedVariable.propagatedTypeSlot, typeParameterContext);
+    }
+    return _propagatedType;
+  }
+
+  void set propagatedType(DartType propagatedType) {
+    assert(_unlinkedVariable == null);
+    _propagatedType = propagatedType;
+  }
 }
 
 /**
@@ -6462,16 +7815,23 @@
   UnitExplicitTopLevelAccessors buildTopLevelAccessors();
 
   /**
-   * Build top-level functions.
-   */
-  List<FunctionElementImpl> buildTopLevelFunctions();
-
-  /**
    * Build explicit top-level variables.
    */
   UnitExplicitTopLevelVariables buildTopLevelVariables();
 
   /**
+   * Return `true` if the given const constructor [slot] is a part of a cycle.
+   */
+  bool isInConstCycle(int slot);
+
+  /**
+   * Resolve an [EntityRef] into a constructor.  If the reference is
+   * unresolved, return `null`.
+   */
+  ConstructorElement resolveConstructorRef(
+      TypeParameterizedElementMixin typeParameterContext, EntityRef entry);
+
+  /**
    * Build the appropriate [DartType] object corresponding to a slot id in the
    * [LinkedUnit.types] table.
    */
@@ -6626,11 +7986,6 @@
   final UnlinkedTypeParam _unlinkedTypeParam;
 
   /**
-   * The [TypeParameterizedElement] enclosing this one.
-   */
-  final TypeParameterizedElementMixin _enclosingTypeParameterizedElement;
-
-  /**
    * The number of type parameters whose scope overlaps this one, and which are
    * declared earlier in the file.
    *
@@ -6656,7 +8011,6 @@
   TypeParameterElementImpl(String name, int offset)
       : _unlinkedTypeParam = null,
         nestingLevel = null,
-        _enclosingTypeParameterizedElement = null,
         super(name, offset);
 
   /**
@@ -6665,17 +8019,13 @@
   TypeParameterElementImpl.forNode(Identifier name)
       : _unlinkedTypeParam = null,
         nestingLevel = null,
-        _enclosingTypeParameterizedElement = null,
         super.forNode(name);
 
   /**
    * Initialize using the given serialized information.
    */
-  TypeParameterElementImpl.forSerialized(
-      this._unlinkedTypeParam,
-      ElementImpl enclosingElement,
-      this._enclosingTypeParameterizedElement,
-      this.nestingLevel)
+  TypeParameterElementImpl.forSerialized(this._unlinkedTypeParam,
+      TypeParameterizedElementMixin enclosingElement, this.nestingLevel)
       : super.forSerialized(enclosingElement);
 
   /**
@@ -6685,7 +8035,6 @@
   TypeParameterElementImpl.synthetic(String name)
       : _unlinkedTypeParam = null,
         nestingLevel = null,
-        _enclosingTypeParameterizedElement = null,
         super(name, -1) {
     synthetic = true;
   }
@@ -6727,14 +8076,6 @@
   String get displayName => name;
 
   @override
-  Element get enclosingElement {
-    if (_unlinkedTypeParam != null) {
-      return _enclosingTypeParameterizedElement;
-    }
-    return super.enclosingElement;
-  }
-
-  @override
   ElementKind get kind => ElementKind.TYPE_PARAMETER;
 
   @override
@@ -6791,9 +8132,34 @@
  */
 abstract class TypeParameterizedElementMixin
     implements TypeParameterizedElement, ElementImpl {
-  List<TypeParameterType> _typeParameterTypes;
-  List<TypeParameterElement> _typeParameterElements;
   int _nestingLevel;
+  List<TypeParameterElement> _typeParameterElements;
+  List<TypeParameterType> _typeParameterTypes;
+  List<TypeParameterType> _allTypeParameterTypes;
+
+  /**
+   * Return all type parameter types of the element that encloses this element.
+   * The result is never `null`, but it might be empty for top-level elements
+   * and static class members.
+   */
+  List<TypeParameterType> get allEnclosingTypeParameterTypes {
+    return enclosingTypeParameterContext?.allTypeParameterTypes ??
+        const <TypeParameterType>[];
+  }
+
+  /**
+   * Return all type parameter types of this element.
+   */
+  List<TypeParameterType> get allTypeParameterTypes {
+    if (_allTypeParameterTypes == null) {
+      _allTypeParameterTypes = <TypeParameterType>[];
+      // The most logical order would be (enclosing, this).
+      // But we have to use this order to stay consistent with the (itself
+      // inconsistent) element builder for generic functions.
+      _allTypeParameterTypes.addAll(typeParameterTypes);
+      _allTypeParameterTypes.addAll(allEnclosingTypeParameterTypes);
+    }
+    return _allTypeParameterTypes;
+  }
 
   /**
    * Get the type parameter context enclosing this one, if any.
@@ -6821,7 +8187,7 @@
           new List<TypeParameterElement>(numTypeParameters);
       for (int i = 0; i < numTypeParameters; i++) {
         _typeParameterElements[i] = new TypeParameterElementImpl.forSerialized(
-            unlinkedTypeParams[i], this, this, enclosingNestingLevel + i);
+            unlinkedTypeParams[i], this, enclosingNestingLevel + i);
       }
     }
     return _typeParameterElements;
@@ -6832,11 +8198,9 @@
    * element's type parameters.
    */
   List<TypeParameterType> get typeParameterTypes {
-    if (_typeParameterTypes == null) {
-      _typeParameterTypes =
-          typeParameters.map((TypeParameterElement e) => e.type).toList();
-    }
-    return _typeParameterTypes;
+    return _typeParameterTypes ??= typeParameters
+        .map((TypeParameterElement e) => e.type)
+        .toList(growable: false);
   }
 
   /**
diff --git a/pkg/analyzer/lib/src/dart/element/handle.dart b/pkg/analyzer/lib/src/dart/element/handle.dart
index 0e71499..6823b99 100644
--- a/pkg/analyzer/lib/src/dart/element/handle.dart
+++ b/pkg/analyzer/lib/src/dart/element/handle.dart
@@ -98,67 +98,29 @@
   List<TypeParameterElement> get typeParameters => actualElement.typeParameters;
 
   @override
-  ConstructorElement get unnamedConstructor {
-    ensureConstructorsReady();
-    return actualElement.unnamedConstructor;
-  }
+  ConstructorElement get unnamedConstructor => actualElement.unnamedConstructor;
 
   @override
   NamedCompilationUnitMember computeNode() => super.computeNode();
 
-  /**
-   * Ensure that [ClassElement.accessors] and [ClassElement.fields] are ready
-   * in [actualElement].
-   */
-  void ensureAccessorsReady() {}
-
-  /**
-   * The method is called by [ClassElementImpl.getImpl] before returning
-   * the [actualElement] as [ClassElementImpl]. At this moment we must ensure
-   * that [ClassElementImpl] is fully complete, we cannot continue filling it
-   * lazily.
-   */
-  void ensureActualElementComplete() {}
-
-  /**
-   * Ensure that [ClassElement.constructors] are ready in [actualElement].
-   */
-  void ensureConstructorsReady() {}
-
-  /**
-   * Ensure that [ClassElement.methods] are ready in [actualElement].
-   */
-  void ensureMethodsReady() {}
+  @override
+  FieldElement getField(String fieldName) => actualElement.getField(fieldName);
 
   @override
-  FieldElement getField(String fieldName) {
-    ensureAccessorsReady();
-    return actualElement.getField(fieldName);
-  }
+  PropertyAccessorElement getGetter(String getterName) =>
+      actualElement.getGetter(getterName);
 
   @override
-  PropertyAccessorElement getGetter(String getterName) {
-    ensureAccessorsReady();
-    return actualElement.getGetter(getterName);
-  }
+  MethodElement getMethod(String methodName) =>
+      actualElement.getMethod(methodName);
 
   @override
-  MethodElement getMethod(String methodName) {
-    ensureMethodsReady();
-    return actualElement.getMethod(methodName);
-  }
+  ConstructorElement getNamedConstructor(String name) =>
+      actualElement.getNamedConstructor(name);
 
   @override
-  ConstructorElement getNamedConstructor(String name) {
-    ensureConstructorsReady();
-    return actualElement.getNamedConstructor(name);
-  }
-
-  @override
-  PropertyAccessorElement getSetter(String setterName) {
-    ensureAccessorsReady();
-    return actualElement.getSetter(setterName);
-  }
+  PropertyAccessorElement getSetter(String setterName) =>
+      actualElement.getSetter(setterName);
 
   @override
   bool isSuperConstructorAccessible(ConstructorElement constructor) =>
@@ -166,10 +128,8 @@
 
   @override
   MethodElement lookUpConcreteMethod(
-      String methodName, LibraryElement library) {
-    ensureMethodsReady();
-    return actualElement.lookUpConcreteMethod(methodName, library);
-  }
+          String methodName, LibraryElement library) =>
+      actualElement.lookUpConcreteMethod(methodName, library);
 
   @override
   PropertyAccessorElement lookUpGetter(
@@ -178,44 +138,33 @@
 
   @override
   PropertyAccessorElement lookUpInheritedConcreteGetter(
-      String methodName, LibraryElement library) {
-    ensureAccessorsReady();
-    return actualElement.lookUpInheritedConcreteGetter(methodName, library);
-  }
+          String methodName, LibraryElement library) =>
+      actualElement.lookUpInheritedConcreteGetter(methodName, library);
 
   @override
   MethodElement lookUpInheritedConcreteMethod(
-      String methodName, LibraryElement library) {
-    ensureMethodsReady();
-    return actualElement.lookUpInheritedConcreteMethod(methodName, library);
-  }
+          String methodName, LibraryElement library) =>
+      actualElement.lookUpInheritedConcreteMethod(methodName, library);
 
   @override
   PropertyAccessorElement lookUpInheritedConcreteSetter(
-      String methodName, LibraryElement library) {
-    ensureAccessorsReady();
-    return actualElement.lookUpInheritedConcreteSetter(methodName, library);
-  }
+          String methodName, LibraryElement library) =>
+      actualElement.lookUpInheritedConcreteSetter(methodName, library);
 
   @override
   MethodElement lookUpInheritedMethod(
       String methodName, LibraryElement library) {
-    ensureMethodsReady();
     return actualElement.lookUpInheritedMethod(methodName, library);
   }
 
   @override
-  MethodElement lookUpMethod(String methodName, LibraryElement library) {
-    ensureMethodsReady();
-    return actualElement.lookUpMethod(methodName, library);
-  }
+  MethodElement lookUpMethod(String methodName, LibraryElement library) =>
+      actualElement.lookUpMethod(methodName, library);
 
   @override
   PropertyAccessorElement lookUpSetter(
-      String setterName, LibraryElement library) {
-    ensureAccessorsReady();
-    return actualElement.lookUpSetter(setterName, library);
-  }
+          String setterName, LibraryElement library) =>
+      actualElement.lookUpSetter(setterName, library);
 }
 
 /**
@@ -408,6 +357,9 @@
   bool get isDeprecated => actualElement.isDeprecated;
 
   @override
+  bool get isFactory => actualElement.isFactory;
+
+  @override
   bool get isJS => actualElement.isJS;
 
   @override
@@ -433,6 +385,9 @@
       getAncestor((element) => element is LibraryElement);
 
   @override
+  Source get librarySource => actualElement.librarySource;
+
+  @override
   ElementLocation get location => _location;
 
   @override
@@ -480,6 +435,9 @@
       actualElement.isAccessibleIn(library);
 
   @override
+  String toString() => actualElement.toString();
+
+  @override
   void visitChildren(ElementVisitor visitor) {
     actualElement.visitChildren(visitor);
   }
@@ -1022,10 +980,10 @@
       actualElement.correspondingSetter;
 
   @override
-  bool get isGetter => actualElement.isGetter;
+  bool get isGetter => !isSetter;
 
   @override
-  bool get isSetter => actualElement.isSetter;
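+  // A setter's name, and therefore the last component of its location, ends
+  // with '=', so this can be answered without resolving [actualElement].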
+  bool get isSetter => location.components.last.endsWith('=');
 
   @override
   ElementKind get kind {
diff --git a/pkg/analyzer/lib/src/dart/element/member.dart b/pkg/analyzer/lib/src/dart/element/member.dart
index 025c7c5..76ec36c 100644
--- a/pkg/analyzer/lib/src/dart/element/member.dart
+++ b/pkg/analyzer/lib/src/dart/element/member.dart
@@ -461,6 +461,9 @@
   bool get isDeprecated => _baseElement.isDeprecated;
 
   @override
+  bool get isFactory => _baseElement.isFactory;
+
+  @override
   bool get isJS => _baseElement.isJS;
 
   @override
@@ -488,6 +491,9 @@
   LibraryElement get library => _baseElement.library;
 
   @override
+  Source get librarySource => _baseElement.librarySource;
+
+  @override
   ElementLocation get location => _baseElement.location;
 
   @override
@@ -515,7 +521,8 @@
   AstNode computeNode() => _baseElement.computeNode();
 
   @override
-  Element/*=E*/ getAncestor/*<E extends Element >*/(Predicate<Element> predicate) =>
+  Element/*=E*/ getAncestor/*<E extends Element >*/(
+          Predicate<Element> predicate) =>
       baseElement.getAncestor(predicate);
 
   @override
@@ -700,7 +707,8 @@
   FormalParameter computeNode() => baseElement.computeNode();
 
   @override
-  Element/*=E*/ getAncestor/*<E extends Element>*/(Predicate<Element> predicate) {
+  Element/*=E*/ getAncestor/*<E extends Element>*/(
+      Predicate<Element> predicate) {
     Element element = baseElement.getAncestor(predicate);
     ParameterizedType definingType = this.definingType;
     if (definingType is InterfaceType) {
@@ -709,7 +717,8 @@
       } else if (element is MethodElement) {
         return MethodMember.from(element, definingType) as Element/*=E*/;
       } else if (element is PropertyAccessorElement) {
-        return PropertyAccessorMember.from(element, definingType) as Element/*=E*/;
+        return PropertyAccessorMember.from(element, definingType)
+            as Element/*=E*/;
       }
     }
     return element as Element/*=E*/;
diff --git a/pkg/analyzer/lib/src/dart/element/type.dart b/pkg/analyzer/lib/src/dart/element/type.dart
index 0ee0b97..22a247a 100644
--- a/pkg/analyzer/lib/src/dart/element/type.dart
+++ b/pkg/analyzer/lib/src/dart/element/type.dart
@@ -774,7 +774,8 @@
     //
     // Now instantiate([V]), and the result should be:
     //     {U/T, V/S} T -> S.
-    List<DartType> newTypeArgs = typeArguments.toList();
+    List<DartType> newTypeArgs = <DartType>[];
+    newTypeArgs.addAll(typeArguments);
     newTypeArgs.addAll(argumentTypes);
 
     return new FunctionTypeImpl._(
@@ -926,6 +927,39 @@
   }
 
   /**
+   * Given a generic function type [g] and an instantiated function type [f],
+   * find a list of type arguments TArgs such that `g<TArgs> == f`,
+   * and return TArgs.
+   *
+   * This function must be called with type [f] that was instantiated from [g].
+   */
+  static Iterable<DartType> recoverTypeArguments(
+      FunctionType g, FunctionType f) {
+    // TODO(jmesserly): perhaps a better design here would be: instead of
+    // recording staticInvokeType on InvocationExpression, we could record the
+    // instantiated type arguments, that way we wouldn't need to recover them.
+    //
+    // For now though, this is a pretty quick operation.
+    assert(identical(g.element, f.element));
+    assert(g.typeFormals.isNotEmpty && f.typeFormals.isEmpty);
+    assert(g.typeFormals.length + g.typeArguments.length ==
+        f.typeArguments.length);
+
+    // Instantiation in Analyzer works like this:
+    // Given:
+    //     {U/T} <S> T -> S
+    // Where {U/T} represents the typeArguments (U) and typeParameters (T) list,
+    // and <S> represents the typeFormals.
+    //
+    // Now instantiate([V]), and the result should be:
+    //     {U/T, V/S} T -> S.
+    //
+    // Therefore, we can recover the typeArguments from our instantiated
+    // function.
+    return f.typeArguments.skip(g.typeArguments.length);
+  }
+
+  /**
    * Compares two function types [t] and [s] to see if their corresponding
    * parameter types match [parameterRelation] and their return types match
    * [returnRelation].
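
The doc comment above spells out why the type arguments of an instantiated function type are simply a prefix-extension of the generic type's own arguments; `recoverTypeArguments` therefore only has to skip that prefix. A minimal standalone sketch of the same arithmetic, using plain lists rather than real `FunctionType` objects (which would need an analysis context to build):

// Hypothetical stand-ins mirroring the comment:
//   g = {U/T} <S> T -> S      (one existing type argument, one type formal)
//   f = g.instantiate([V])    (two type arguments, no type formals)
void main() {
  List<String> gTypeArguments = ['U'];
  List<String> fTypeArguments = ['U', 'V'];
  // Same computation as recoverTypeArguments: drop the prefix that the
  // generic type already carried.
  Iterable<String> recovered = fTypeArguments.skip(gTypeArguments.length);
  print(recovered.toList()); // [V]
}
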
diff --git a/pkg/analyzer/lib/src/dart/resolver/scope.dart b/pkg/analyzer/lib/src/dart/resolver/scope.dart
index e90ba86..04da23d 100644
--- a/pkg/analyzer/lib/src/dart/resolver/scope.dart
+++ b/pkg/analyzer/lib/src/dart/resolver/scope.dart
@@ -994,7 +994,7 @@
 
   @override
   Element get(String name) {
-    if (name.startsWith(_prefix)) {
+    if (name.length > _length && name.startsWith(_prefix)) {
       if (name.codeUnitAt(_length) == '.'.codeUnitAt(0)) {
         return _definedNames[name.substring(_length + 1)];
       }
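
The added length guard matters when the name being looked up is exactly the prefix itself; without it, `codeUnitAt(_length)` would index one past the end of the string. A small sketch of just the patched condition, with illustrative names rather than the analyzer's own scope classes:

String lookupInPrefix(String prefix, String name) {
  int length = prefix.length;
  // The name must be strictly longer than the prefix before character
  // `length` may be inspected, exactly as in the patched `get`.
  if (name.length > length && name.startsWith(prefix)) {
    if (name.codeUnitAt(length) == '.'.codeUnitAt(0)) {
      return name.substring(length + 1);
    }
  }
  return null;
}

void main() {
  print(lookupInPrefix('math', 'math.max')); // max
  print(lookupInPrefix('math', 'math'));     // null, and no RangeError
}
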
diff --git a/pkg/analyzer/lib/src/generated/bazel.dart b/pkg/analyzer/lib/src/generated/bazel.dart
new file mode 100644
index 0000000..8198f46
--- /dev/null
+++ b/pkg/analyzer/lib/src/generated/bazel.dart
@@ -0,0 +1,69 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.src.generated.bazel;
+
+import 'dart:core' hide Resource;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/generated/source_io.dart';
+
+/**
+ * Instances of the class `BazelFileUriResolver` resolve `file` URIs by first
+ * resolving them in the expected way, and then by looking in the
+ * corresponding generated directories.
+ */
+class BazelFileUriResolver extends ResourceUriResolver {
+  /**
+   * The Bazel workspace directory.
+   */
+  final Folder _workspaceDir;
+
+  /**
+   * The build directories to search, using the path relative to the
+   * workspace, when a `file` URI cannot be resolved directly.
+   */
+  final List<Folder> _buildDirectories;
+
+  BazelFileUriResolver(
+      ResourceProvider provider, this._workspaceDir, this._buildDirectories)
+      : super(provider);
+
+  @override
+  Source resolveAbsolute(Uri uri, [Uri actualUri]) {
+    if (!ResourceUriResolver.isFileUri(uri)) {
+      return null;
+    }
+
+    File uriFile = provider.getFile(provider.pathContext.fromUri(uri));
+    if (uriFile.exists) {
+      return uriFile.createSource(actualUri ?? uri);
+    }
+
+    String relativeFromWorkspaceDir = _getPathFromWorkspaceDir(uri);
+    if (_buildDirectories.isEmpty || relativeFromWorkspaceDir.isEmpty) {
+      return null;
+    }
+
+    for (Folder buildDir in _buildDirectories) {
+      File file = buildDir.getChildAssumingFile(relativeFromWorkspaceDir);
+      if (file.exists) {
+        return file.createSource(actualUri ?? uri);
+      }
+    }
+    return null;
+  }
+
+  String _getPathFromWorkspaceDir(Uri uri) {
+    String uriPath = uri.path;
+    String workspacePath = _workspaceDir.path;
+
+    if (uriPath.startsWith(workspacePath) &&
+        workspacePath.length < uriPath.length) {
+      return uriPath.substring(workspacePath.length + 1);
+    }
+    return '';
+  }
+}
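
A sketch of how the new resolver might be wired up. Only `BazelFileUriResolver` comes from the file above; `PhysicalResourceProvider.INSTANCE` is the analyzer's standard file-system provider, and the workspace and build-directory paths are purely illustrative (discovering them is outside the scope of this file):

import 'package:analyzer/file_system/physical_file_system.dart';
import 'package:analyzer/src/generated/bazel.dart';

void main() {
  var provider = PhysicalResourceProvider.INSTANCE;
  var workspace = provider.getFolder('/workspace');      // hypothetical root
  var buildDirs = [
    provider.getFolder('/workspace/bazel-bin'),          // hypothetical
    provider.getFolder('/workspace/bazel-genfiles'),     // hypothetical
  ];
  var resolver = new BazelFileUriResolver(provider, workspace, buildDirs);
  // Falls back to the build directories when the file is not found in place.
  var source =
      resolver.resolveAbsolute(Uri.parse('file:///workspace/lib/gen.dart'));
  print(source?.fullName);
}
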
diff --git a/pkg/analyzer/lib/src/generated/element_resolver.dart b/pkg/analyzer/lib/src/generated/element_resolver.dart
index 724ddc5..dcb40d6 100644
--- a/pkg/analyzer/lib/src/generated/element_resolver.dart
+++ b/pkg/analyzer/lib/src/generated/element_resolver.dart
@@ -1136,7 +1136,7 @@
   @override
   Object visitSuperConstructorInvocation(SuperConstructorInvocation node) {
     ClassElementImpl enclosingClass =
-        ClassElementImpl.getImpl(_resolver.enclosingClass);
+        AbstractClassElementImpl.getImpl(_resolver.enclosingClass);
     if (enclosingClass == null) {
       // TODO(brianwilkerson) Report this error.
       return null;
@@ -1768,36 +1768,33 @@
    * [operator].
    */
   TokenType _operatorFromCompoundAssignment(TokenType operator) {
-    while (true) {
-      if (operator == TokenType.AMPERSAND_EQ) {
-        return TokenType.AMPERSAND;
-      } else if (operator == TokenType.BAR_EQ) {
-        return TokenType.BAR;
-      } else if (operator == TokenType.CARET_EQ) {
-        return TokenType.CARET;
-      } else if (operator == TokenType.GT_GT_EQ) {
-        return TokenType.GT_GT;
-      } else if (operator == TokenType.LT_LT_EQ) {
-        return TokenType.LT_LT;
-      } else if (operator == TokenType.MINUS_EQ) {
-        return TokenType.MINUS;
-      } else if (operator == TokenType.PERCENT_EQ) {
-        return TokenType.PERCENT;
-      } else if (operator == TokenType.PLUS_EQ) {
-        return TokenType.PLUS;
-      } else if (operator == TokenType.SLASH_EQ) {
-        return TokenType.SLASH;
-      } else if (operator == TokenType.STAR_EQ) {
-        return TokenType.STAR;
-      } else if (operator == TokenType.TILDE_SLASH_EQ) {
-        return TokenType.TILDE_SLASH;
-      } else {
-        // Internal error: Unmapped assignment operator.
-        AnalysisEngine.instance.logger.logError(
-            "Failed to map ${operator.lexeme} to it's corresponding operator");
-        return operator;
-      }
-      break;
+    if (operator == TokenType.AMPERSAND_EQ) {
+      return TokenType.AMPERSAND;
+    } else if (operator == TokenType.BAR_EQ) {
+      return TokenType.BAR;
+    } else if (operator == TokenType.CARET_EQ) {
+      return TokenType.CARET;
+    } else if (operator == TokenType.GT_GT_EQ) {
+      return TokenType.GT_GT;
+    } else if (operator == TokenType.LT_LT_EQ) {
+      return TokenType.LT_LT;
+    } else if (operator == TokenType.MINUS_EQ) {
+      return TokenType.MINUS;
+    } else if (operator == TokenType.PERCENT_EQ) {
+      return TokenType.PERCENT;
+    } else if (operator == TokenType.PLUS_EQ) {
+      return TokenType.PLUS;
+    } else if (operator == TokenType.SLASH_EQ) {
+      return TokenType.SLASH;
+    } else if (operator == TokenType.STAR_EQ) {
+      return TokenType.STAR;
+    } else if (operator == TokenType.TILDE_SLASH_EQ) {
+      return TokenType.TILDE_SLASH;
+    } else {
+      // Internal error: Unmapped assignment operator.
+      AnalysisEngine.instance.logger.logError(
+          "Failed to map ${operator.lexeme} to it's corresponding operator");
+      return operator;
     }
   }
 
diff --git a/pkg/analyzer/lib/src/generated/engine.dart b/pkg/analyzer/lib/src/generated/engine.dart
index 6d41c4fb..67a931c7 100644
--- a/pkg/analyzer/lib/src/generated/engine.dart
+++ b/pkg/analyzer/lib/src/generated/engine.dart
@@ -11,6 +11,7 @@
 import 'package:analyzer/dart/ast/visitor.dart';
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/instrumentation/instrumentation.dart';
+import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/source/embedder.dart';
 import 'package:analyzer/src/cancelable_future.dart';
 import 'package:analyzer/src/context/cache.dart';
@@ -85,6 +86,12 @@
   static const List<AnalysisContext> EMPTY_LIST = const <AnalysisContext>[];
 
   /**
+   * The file resolver provider used to override the way file URIs are
+   * resolved in some contexts.
+   */
+  ResolverProvider fileResolverProvider;
+
+  /**
    * Return the set of analysis options controlling the behavior of this
    * context. Clients should not modify the returned set of options. The options
    * should only be set by invoking the method [setAnalysisOptions].
@@ -780,12 +787,6 @@
   final PartitionManager partitionManager = new PartitionManager();
 
   /**
-   * A flag indicating whether the task model should attempt to limit
-   * invalidation after a change.
-   */
-  bool limitInvalidationInTaskModel = false;
-
-  /**
    * The task manager used to manage the tasks used to analyze code.
    */
   TaskManager _taskManager;
@@ -1094,6 +1095,20 @@
   bool get enableTiming;
 
   /**
+   * Return `true` to enable trailing commas in parameter and argument lists
+   * (sdk#26647).
+   */
+  bool get enableTrailingCommas;
+
+  /**
+   * A flag indicating whether finer-grained dependencies should be used
+   * instead of just source-level dependencies.
+   *
+   * This option is experimental and subject to change.
+   */
+  bool get finerGrainedInvalidation;
+
+  /**
    * Return `true` if errors, warnings and hints should be generated for sources
    * that are implicitly being analyzed. The default value is `true`.
    */
@@ -1144,6 +1159,14 @@
   bool get strongMode;
 
   /**
+   * Return `true` if dependencies between computed results should be tracked
+   * by the analysis cache. This option should only be set to `false` if
+   * analysis is performed in such a way that none of the inputs is ever
+   * changed during the lifetime of the context.
+   */
+  bool get trackCacheDependencies;
+
+  /**
    * Return an integer encoding of the values of the options that need to be the
    * same across all of the contexts associated with partitions that are to be
    * shared by a single analysis context.
@@ -1225,6 +1248,9 @@
   @override
   bool enableTiming = false;
 
+  @override
+  bool enableTrailingCommas = false;
+
   /**
    * A flag indicating whether errors, warnings and hints should be generated
    * for sources that are implicitly being analyzed.
@@ -1283,6 +1309,32 @@
   // TODO(leafp): replace this with something more general
   bool strongModeHints = false;
 
+  @override
+  bool trackCacheDependencies = true;
+
+  /**
+   * A flag indicating whether implicit casts are allowed in [strongMode]
+   * (they are always allowed in Dart 1.0 mode).
+   *
+   * This option is experimental and subject to change.
+   */
+  bool implicitCasts = true;
+
+  @override
+  bool finerGrainedInvalidation = false;
+
+  /**
+   * A flag indicating whether implicit dynamic type is allowed, on by default.
+   *
+   * This flag can be used without necessarily enabling [strongMode], but it is
+   * designed with strong mode's type inference in mind. Without type inference,
+   * it will raise many errors. Also it does not provide type safety without
+   * strong mode.
+   *
+   * This option is experimental and subject to change.
+   */
+  bool implicitDynamic = true;
+
   /**
    * Initialize a newly created set of analysis options to have their default
    * values.
@@ -1303,6 +1355,7 @@
     enableGenericMethods = options.enableGenericMethods;
     enableSuperMixins = options.enableSuperMixins;
     enableTiming = options.enableTiming;
+    enableTrailingCommas = options.enableTrailingCommas;
     generateImplicitErrors = options.generateImplicitErrors;
     generateSdkErrors = options.generateSdkErrors;
     hint = options.hint;
@@ -1314,7 +1367,11 @@
     strongMode = options.strongMode;
     if (options is AnalysisOptionsImpl) {
       strongModeHints = options.strongModeHints;
+      implicitCasts = options.implicitCasts;
+      implicitDynamic = options.implicitDynamic;
     }
+    trackCacheDependencies = options.trackCacheDependencies;
+    finerGrainedInvalidation = options.finerGrainedInvalidation;
   }
 
   bool get analyzeFunctionBodies {
@@ -1379,6 +1436,48 @@
   }
 
   /**
+   * Produce a human-readable list of option names corresponding to the options
+   * encoded in the given [encoding], presumably from invoking the method
+   * [encodeCrossContextOptions].
+   */
+  static String decodeCrossContextOptions(int encoding) {
+    if (encoding == 0) {
+      return 'none';
+    }
+    StringBuffer buffer = new StringBuffer();
+    bool needsSeparator = false;
+    void add(String optionName) {
+      if (needsSeparator) {
+        buffer.write(', ');
+      }
+      buffer.write(optionName);
+      needsSeparator = true;
+    }
+    if (encoding & ENABLE_ASSERT_FLAG > 0) {
+      add('assert');
+    }
+    if (encoding & ENABLE_ASYNC_FLAG > 0) {
+      add('async');
+    }
+    if (encoding & ENABLE_GENERIC_METHODS_FLAG > 0) {
+      add('genericMethods');
+    }
+    if (encoding & ENABLE_STRICT_CALL_CHECKS_FLAG > 0) {
+      add('strictCallChecks');
+    }
+    if (encoding & ENABLE_STRONG_MODE_FLAG > 0) {
+      add('strongMode');
+    }
+    if (encoding & ENABLE_STRONG_MODE_HINTS_FLAG > 0) {
+      add('strongModeHints');
+    }
+    if (encoding & ENABLE_SUPER_MIXINS_FLAG > 0) {
+      add('superMixins');
+    }
+    return buffer.toString();
+  }
+
+  /**
    * Predicate used for [analyzeFunctionBodiesPredicate] when
    * [analyzeFunctionBodies] is set to `true`.
    */
@@ -1944,7 +2043,10 @@
    */
   set contentCache(ContentCache value);
 
-  /// Get the [EmbedderYamlLocator] for this context.
+  /**
+   * Get the [EmbedderYamlLocator] for this context.
+   */
+  @deprecated
   EmbedderYamlLocator get embedderYamlLocator;
 
   /**
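
A sketch of the options surface added above. The flag values are arbitrary; `AnalysisOptionsImpl`, its `from` constructor and `encodeCrossContextOptions` already exist in this library, while `implicitCasts`, `implicitDynamic`, `trackCacheDependencies`, `finerGrainedInvalidation` and `decodeCrossContextOptions` are the additions in this diff:

import 'package:analyzer/src/generated/engine.dart';

void main() {
  var options = new AnalysisOptionsImpl()
    ..strongMode = true
    ..implicitCasts = false      // surface the implicit-cast strong mode codes
    ..implicitDynamic = false    // surface the new IMPLICIT_DYNAMIC_* codes
    ..trackCacheDependencies = false
    ..finerGrainedInvalidation = true;

  // The copy constructor now carries the new flags across.
  var copy = new AnalysisOptionsImpl.from(options);
  print(copy.implicitCasts);            // false
  print(copy.finerGrainedInvalidation); // true

  // Round-trip the cross-context encoding through the new decoder.
  print(AnalysisOptionsImpl.decodeCrossContextOptions(
      copy.encodeCrossContextOptions()));
}
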
diff --git a/pkg/analyzer/lib/src/generated/error.dart b/pkg/analyzer/lib/src/generated/error.dart
index 9fb06c5..f8b369c 100644
--- a/pkg/analyzer/lib/src/generated/error.dart
+++ b/pkg/analyzer/lib/src/generated/error.dart
@@ -11,6 +11,7 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/source/error_processor.dart';
+import 'package:analyzer/src/dart/element/element.dart';
 import 'package:analyzer/src/dart/element/type.dart';
 import 'package:analyzer/src/dart/scanner/scanner.dart' show ScannerErrorCode;
 import 'package:analyzer/src/generated/generated/shared_messages.dart'
@@ -21,7 +22,6 @@
 import 'package:analyzer/src/task/model.dart';
 import 'package:analyzer/task/model.dart';
 import 'package:source_span/source_span.dart';
-import 'package:analyzer/src/dart/element/element.dart';
 
 /**
  * The descriptor used to associate error processors with analysis contexts in
@@ -123,6 +123,12 @@
   }
 
   /**
+   * Initialize a newly created analysis error with the given values.
+   */
+  AnalysisError.forValues(this.source, this.offset, this.length, this.errorCode,
+      this._message, this._correction);
+
+  /**
    * Return the template used to create the correction to be displayed for this
    * error, or `null` if there is no correction information for this error. The
    * correction should indicate how the user can fix the error.
@@ -2676,6 +2682,9 @@
     HintCode.DEPRECATED_MEMBER_USE,
     HintCode.DUPLICATE_IMPORT,
     HintCode.DIVISION_OPTIMIZATION,
+    HintCode.INVALID_FACTORY_ANNOTATION,
+    HintCode.INVALID_FACTORY_METHOD_DECL,
+    HintCode.INVALID_FACTORY_METHOD_IMPL,
     HintCode.IS_DOUBLE,
     HintCode.IS_INT,
     HintCode.IS_NOT_DOUBLE,
@@ -2845,6 +2854,34 @@
     StaticWarningCode.UNDEFINED_SUPER_SETTER,
     StaticWarningCode.VOID_RETURN_FOR_GETTER,
     StaticWarningCode.MISSING_ENUM_CONSTANT_IN_SWITCH,
+    StrongModeCode.ASSIGNMENT_CAST,
+    StrongModeCode.DOWN_CAST_COMPOSITE,
+    StrongModeCode.DOWN_CAST_IMPLICIT,
+    StrongModeCode.DYNAMIC_CAST,
+    StrongModeCode.DYNAMIC_INVOKE,
+    StrongModeCode.IMPLICIT_DYNAMIC_FIELD,
+    StrongModeCode.IMPLICIT_DYNAMIC_FUNCTION,
+    StrongModeCode.IMPLICIT_DYNAMIC_INVOKE,
+    StrongModeCode.IMPLICIT_DYNAMIC_LIST_LITERAL,
+    StrongModeCode.IMPLICIT_DYNAMIC_MAP_LITERAL,
+    StrongModeCode.IMPLICIT_DYNAMIC_METHOD,
+    StrongModeCode.IMPLICIT_DYNAMIC_PARAMETER,
+    StrongModeCode.IMPLICIT_DYNAMIC_RETURN,
+    StrongModeCode.IMPLICIT_DYNAMIC_TYPE,
+    StrongModeCode.IMPLICIT_DYNAMIC_VARIABLE,
+    StrongModeCode.INFERRED_TYPE,
+    StrongModeCode.INFERRED_TYPE_ALLOCATION,
+    StrongModeCode.INFERRED_TYPE_CLOSURE,
+    StrongModeCode.INFERRED_TYPE_LITERAL,
+    StrongModeCode.INVALID_FIELD_OVERRIDE,
+    StrongModeCode.INVALID_METHOD_OVERRIDE,
+    StrongModeCode.INVALID_METHOD_OVERRIDE_FROM_BASE,
+    StrongModeCode.INVALID_METHOD_OVERRIDE_FROM_MIXIN,
+    StrongModeCode.INVALID_PARAMETER_DECLARATION,
+    StrongModeCode.INVALID_SUPER_INVOCATION,
+    StrongModeCode.NON_GROUND_TYPE_CHECK_INFO,
+    StrongModeCode.STATIC_TYPE_ERROR,
+
     TodoCode.TODO,
 
     //
@@ -3023,6 +3060,11 @@
   ];
 
   /**
+   * The lazy initialized map from [uniqueName] to the [ErrorCode] instance.
+   */
+  static HashMap<String, ErrorCode> _uniqueNameToCodeMap;
+
+  /**
    * An empty list of error codes.
    */
   static const List<ErrorCode> EMPTY_LIST = const <ErrorCode>[];
@@ -3070,6 +3112,20 @@
 
   @override
   String toString() => uniqueName;
+
+  /**
+   * Return the [ErrorCode] with the given [uniqueName], or `null` if not
+   * found.
+   */
+  static ErrorCode byUniqueName(String uniqueName) {
+    if (_uniqueNameToCodeMap == null) {
+      _uniqueNameToCodeMap = new HashMap<String, ErrorCode>();
+      for (ErrorCode errorCode in values) {
+        _uniqueNameToCodeMap[errorCode.uniqueName] = errorCode;
+      }
+    }
+    return _uniqueNameToCodeMap[uniqueName];
+  }
 }
 
 /**
@@ -3566,6 +3622,34 @@
       "A value of type '{0}' cannot be assigned to a variable of type '{1}'");
 
   /**
+   * This hint is generated anywhere a @factory annotation is associated with
+   * anything other than a method.
+   */
+  static const HintCode INVALID_FACTORY_ANNOTATION = const HintCode(
+      'INVALID_FACTORY_ANNOTATION',
+      "Only methods can be annotated as factories.");
+
+  /**
+   * This hint is generated anywhere a @factory annotation is associated with
+   * a method that does not declare a return type.
+   */
+  static const HintCode INVALID_FACTORY_METHOD_DECL = const HintCode(
+      'INVALID_FACTORY_METHOD_DECL',
+      "Factory method '{0}' must have a return type.");
+
+  /**
+   * This hint is generated anywhere a @factory annotation is associated with
+   * a non-abstract method that can return anything other than a newly allocated
+   * object.
+   *
+   * Parameters:
+   * 0: the name of the method
+   */
+  static const HintCode INVALID_FACTORY_METHOD_IMPL = const HintCode(
+      'INVALID_FACTORY_METHOD_IMPL',
+      "Factory method '{0}' does not return a newly allocated object.");
+
+  /**
    * This hint is generated anywhere where a member annotated with `@protected`
    * is used outside an instance member of a subclass.
    *
@@ -4842,7 +4926,7 @@
    */
   static const StaticWarningCode FINAL_NOT_INITIALIZED =
       const StaticWarningCode('FINAL_NOT_INITIALIZED',
-          "The final variable '{0}' must be initialized");
+          "The final variable '{0}' must be initialized", null, false);
 
   /**
    * 7.6.1 Generative Constructors: Each final instance variable <i>f</i>
@@ -4858,7 +4942,7 @@
    */
   static const StaticWarningCode FINAL_NOT_INITIALIZED_CONSTRUCTOR_1 =
       const StaticWarningCode('FINAL_NOT_INITIALIZED_CONSTRUCTOR_1',
-          "The final variable '{0}' must be initialized");
+          "The final variable '{0}' must be initialized", null, false);
 
   /**
    * 7.6.1 Generative Constructors: Each final instance variable <i>f</i>
@@ -4874,8 +4958,11 @@
    * 1: the name of the uninitialized final variable
    */
   static const StaticWarningCode FINAL_NOT_INITIALIZED_CONSTRUCTOR_2 =
-      const StaticWarningCode('FINAL_NOT_INITIALIZED_CONSTRUCTOR_2',
-          "The final variables '{0}' and '{1}' must be initialized");
+      const StaticWarningCode(
+          'FINAL_NOT_INITIALIZED_CONSTRUCTOR_2',
+          "The final variables '{0}' and '{1}' must be initialized",
+          null,
+          false);
 
   /**
    * 7.6.1 Generative Constructors: Each final instance variable <i>f</i>
@@ -4892,8 +4979,11 @@
    * 2: the number of additional not initialized variables that aren't listed
    */
   static const StaticWarningCode FINAL_NOT_INITIALIZED_CONSTRUCTOR_3_PLUS =
-      const StaticWarningCode('FINAL_NOT_INITIALIZED_CONSTRUCTOR_3',
-          "The final variables '{0}', '{1}' and '{2}' more must be initialized");
+      const StaticWarningCode(
+          'FINAL_NOT_INITIALIZED_CONSTRUCTOR_3',
+          "The final variables '{0}', '{1}' and '{2}' more must be initialized",
+          null,
+          false);
 
   /**
    * 15.5 Function Types: It is a static warning if a concrete class implements
@@ -5214,8 +5304,11 @@
    * <i>S</i>, and <i>T</i> may not be assigned to <i>S</i>.
    */
   static const StaticWarningCode MISMATCHED_GETTER_AND_SETTER_TYPES =
-      const StaticWarningCode('MISMATCHED_GETTER_AND_SETTER_TYPES',
-          "The parameter type for setter '{0}' is '{1}' which is not assignable to its getter (of type '{2}')");
+      const StaticWarningCode(
+          'MISMATCHED_GETTER_AND_SETTER_TYPES',
+          "The parameter type for setter '{0}' is '{1}' which is not assignable to its getter (of type '{2}')",
+          null,
+          false);
 
   /**
    * 7.3 Setters: It is a static warning if a class has a setter named <i>v=</i>
@@ -5226,7 +5319,9 @@
       MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE =
       const StaticWarningCode(
           'MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE',
-          "The parameter type for setter '{0}' is '{1}' which is not assignable to its getter (of type '{2}'), from superclass '{3}'");
+          "The parameter type for setter '{0}' is '{1}' which is not assignable to its getter (of type '{2}'), from superclass '{3}'",
+          null,
+          false);
 
   /**
    * 13.12 Return: It is a static warning if a function contains both one or
@@ -5235,7 +5330,9 @@
    */
   static const StaticWarningCode MIXED_RETURN_TYPES = const StaticWarningCode(
       'MIXED_RETURN_TYPES',
-      "Methods and functions cannot use return both with and without values");
+      "Methods and functions cannot use return both with and without values",
+      null,
+      false);
 
   /**
    * 12.11.1 New: It is a static warning if <i>q</i> is a constructor of an
@@ -5447,7 +5544,7 @@
    */
   static const StaticWarningCode NON_VOID_RETURN_FOR_OPERATOR =
       const StaticWarningCode('NON_VOID_RETURN_FOR_OPERATOR',
-          "The return type of the operator []= must be 'void'");
+          "The return type of the operator []= must be 'void'", null, false);
 
   /**
    * 7.3 Setters: It is a static warning if a setter declares a return type
@@ -5455,7 +5552,7 @@
    */
   static const StaticWarningCode NON_VOID_RETURN_FOR_SETTER =
       const StaticWarningCode('NON_VOID_RETURN_FOR_SETTER',
-          "The return type of the setter must be 'void'");
+          "The return type of the setter must be 'void'", null, false);
 
   /**
    * 15.1 Static Types: A type <i>T</i> is malformed iff:
@@ -5554,7 +5651,10 @@
    * * The return type of <i>f</i> may not be assigned to void.
    */
   static const StaticWarningCode RETURN_WITHOUT_VALUE = const StaticWarningCode(
-      'RETURN_WITHOUT_VALUE', "Missing return value after 'return'");
+      'RETURN_WITHOUT_VALUE',
+      "Missing return value after 'return'",
+      null,
+      false);
 
   /**
    * 12.16.3 Static Invocation: It is a static warning if <i>C</i> does not
@@ -5737,7 +5837,7 @@
    */
   static const StaticWarningCode VOID_RETURN_FOR_GETTER =
       const StaticWarningCode('VOID_RETURN_FOR_GETTER',
-          "The return type of the getter must not be 'void'");
+          "The return type of the getter must not be 'void'", null, false);
 
   /**
    * 17.9 Switch: It is a static warning if all of the following conditions
@@ -5756,7 +5856,8 @@
       const StaticWarningCode(
           'MISSING_ENUM_CONSTANT_IN_SWITCH',
           "Missing case clause for '{0}'",
-          "Add a case clause for the missing constant or add a default clause.");
+          "Add a case clause for the missing constant or add a default clause.",
+          false);
 
   /**
    * A flag indicating whether this warning is an error when running with strong
@@ -5771,7 +5872,7 @@
    * given [correction] template.
    */
   const StaticWarningCode(String name, String message,
-      [String correction, this.isStrongModeError = false])
+      [String correction, this.isStrongModeError = true])
       : super(name, message, correction);
 
   @override
@@ -5782,6 +5883,184 @@
 }
 
 /**
+ * This class has Strong Mode specific error codes.
+ *
+ * These error codes tend to use the same message across different severity
+ * levels, so they are grouped for clarity.
+ *
+ * All of these error codes also use the "STRONG_MODE_" prefix in their name.
+ */
+class StrongModeCode extends ErrorCode {
+  static const String _implicitCastMessage =
+      'Unsound implicit cast from {0} to {1}';
+
+  static const String _typeCheckMessage =
+      'Type check failed: {0} is not of type {1}';
+
+  static const String _invalidOverrideMessage =
+      'The type of {0}.{1} ({2}) is not a '
+      'subtype of {3}.{1} ({4}).';
+
+  /**
+   * This is appended to the end of an error message about implicit dynamic.
+   *
+   * The idea is to make sure the user is aware that this error message is the
+   * result of turning on a particular option, and they are free to turn it
+   * back off.
+   */
+  static const String _implicitDynamicTip =
+      ". Either add an explicit type like 'dynamic'"
+      ", or enable implicit-dynamic in your Analyzer options.";
+
+  static const String _inferredTypeMessage = '{0} has inferred type {1}';
+
+  static const StrongModeCode DOWN_CAST_COMPOSITE = const StrongModeCode(
+      ErrorType.STATIC_WARNING, 'DOWN_CAST_COMPOSITE', _implicitCastMessage);
+
+  static const StrongModeCode DOWN_CAST_IMPLICIT = const StrongModeCode(
+      ErrorType.HINT, 'DOWN_CAST_IMPLICIT', _implicitCastMessage);
+
+  static const StrongModeCode DYNAMIC_CAST = const StrongModeCode(
+      ErrorType.HINT, 'DYNAMIC_CAST', _implicitCastMessage);
+
+  static const StrongModeCode ASSIGNMENT_CAST = const StrongModeCode(
+      ErrorType.HINT, 'ASSIGNMENT_CAST', _implicitCastMessage);
+
+  static const StrongModeCode INVALID_PARAMETER_DECLARATION =
+      const StrongModeCode(ErrorType.COMPILE_TIME_ERROR,
+          'INVALID_PARAMETER_DECLARATION', _typeCheckMessage);
+
+  static const StrongModeCode INFERRED_TYPE = const StrongModeCode(
+      ErrorType.HINT, 'INFERRED_TYPE', _inferredTypeMessage);
+
+  static const StrongModeCode INFERRED_TYPE_LITERAL = const StrongModeCode(
+      ErrorType.HINT, 'INFERRED_TYPE_LITERAL', _inferredTypeMessage);
+
+  static const StrongModeCode INFERRED_TYPE_ALLOCATION = const StrongModeCode(
+      ErrorType.HINT, 'INFERRED_TYPE_ALLOCATION', _inferredTypeMessage);
+
+  static const StrongModeCode INFERRED_TYPE_CLOSURE = const StrongModeCode(
+      ErrorType.HINT, 'INFERRED_TYPE_CLOSURE', _inferredTypeMessage);
+
+  static const StrongModeCode STATIC_TYPE_ERROR = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'STATIC_TYPE_ERROR',
+      'Type check failed: {0} ({1}) is not of type {2}');
+
+  static const StrongModeCode INVALID_SUPER_INVOCATION = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'INVALID_SUPER_INVOCATION',
+      "super call must be last in an initializer "
+      "list (see https://goo.gl/EY6hDP): {0}");
+
+  static const StrongModeCode NON_GROUND_TYPE_CHECK_INFO = const StrongModeCode(
+      ErrorType.HINT,
+      'NON_GROUND_TYPE_CHECK_INFO',
+      "Runtime check on non-ground type {0} may throw StrongModeError");
+
+  static const StrongModeCode DYNAMIC_INVOKE = const StrongModeCode(
+      ErrorType.HINT, 'DYNAMIC_INVOKE', '{0} requires a dynamic invoke');
+
+  static const StrongModeCode INVALID_METHOD_OVERRIDE = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'INVALID_METHOD_OVERRIDE',
+      'Invalid override. $_invalidOverrideMessage');
+
+  static const StrongModeCode INVALID_METHOD_OVERRIDE_FROM_BASE =
+      const StrongModeCode(
+          ErrorType.COMPILE_TIME_ERROR,
+          'INVALID_METHOD_OVERRIDE_FROM_BASE',
+          'Base class introduces an invalid override. '
+          '$_invalidOverrideMessage');
+
+  static const StrongModeCode INVALID_METHOD_OVERRIDE_FROM_MIXIN =
+      const StrongModeCode(
+          ErrorType.COMPILE_TIME_ERROR,
+          'INVALID_METHOD_OVERRIDE_FROM_MIXIN',
+          'Mixin introduces an invalid override. $_invalidOverrideMessage');
+
+  static const StrongModeCode INVALID_FIELD_OVERRIDE = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'INVALID_FIELD_OVERRIDE',
+      'Field declaration {3}.{1} cannot be '
+      'overridden in {0}.');
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_PARAMETER = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_PARAMETER',
+      "Missing parameter type for '{0}'$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_RETURN = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_RETURN',
+      "Missing return type for '{0}'$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_VARIABLE = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_VARIABLE',
+      "Missing variable type for '{0}'$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_FIELD = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_FIELD',
+      "Missing field type for '{0}'$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_TYPE = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_TYPE',
+      "Missing type arguments for generic type '{0}'"
+      "$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_LIST_LITERAL =
+      const StrongModeCode(
+          ErrorType.COMPILE_TIME_ERROR,
+          'IMPLICIT_DYNAMIC_LIST_LITERAL',
+          "Missing type argument for list literal$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_MAP_LITERAL =
+      const StrongModeCode(
+          ErrorType.COMPILE_TIME_ERROR,
+          'IMPLICIT_DYNAMIC_MAP_LITERAL',
+          'Missing type arguments for map literal$_implicitDynamicTip');
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_FUNCTION = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_FUNCTION',
+      "Missing type arguments for generic function '{0}<{1}>'"
+      "$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_METHOD = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_METHOD',
+      "Missing type arguments for generic method '{0}<{1}>'"
+      "$_implicitDynamicTip");
+
+  static const StrongModeCode IMPLICIT_DYNAMIC_INVOKE = const StrongModeCode(
+      ErrorType.COMPILE_TIME_ERROR,
+      'IMPLICIT_DYNAMIC_INVOKE',
+      "Missing type arguments for calling generic function type '{0}'"
+      "$_implicitDynamicTip");
+
+  @override
+  final ErrorType type;
+
+  /**
+   * Initialize a newly created error code to have the given [type] and [name].
+   *
+   * The message associated with the error will be created from the given
+   * [message] template. The correction associated with the error will be
+   * created from the optional [correction] template.
+   */
+  const StrongModeCode(ErrorType type, String name, String message,
+      [String correction])
+      : type = type,
+        super('STRONG_MODE_$name', message, correction);
+
+  @override
+  ErrorSeverity get errorSeverity => type.severity;
+}
+
+/**
  * The error code indicating a marker in code for work that needs to be finished
  * or revisited.
  */
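
A sketch of the new `ErrorCode.byUniqueName` lookup, round-tripping one of the `StrongModeCode` constants introduced above (the `main` wrapper is illustrative only):

import 'package:analyzer/src/generated/error.dart';

void main() {
  // byUniqueName lazily builds its map from the registered error codes on
  // first use, so an existing code can be recovered from its unique name.
  ErrorCode original = StrongModeCode.DOWN_CAST_IMPLICIT;
  ErrorCode found = ErrorCode.byUniqueName(original.uniqueName);
  print(identical(original, found)); // true
}
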
diff --git a/pkg/analyzer/lib/src/generated/error_verifier.dart b/pkg/analyzer/lib/src/generated/error_verifier.dart
index 78b7cf3..6d4177b 100644
--- a/pkg/analyzer/lib/src/generated/error_verifier.dart
+++ b/pkg/analyzer/lib/src/generated/error_verifier.dart
@@ -30,7 +30,8 @@
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/utilities_dart.dart';
 import 'package:analyzer/src/task/dart.dart';
-import 'package:analyzer/src/task/strong/info.dart' show StaticInfo;
+import 'package:analyzer/src/task/strong/checker.dart' as checker
+    show isKnownFunction;
 
 /**
  * A visitor used to traverse an AST structure looking for additional errors and
@@ -78,7 +79,7 @@
   /**
    * The options for verification.
    */
-  AnalysisOptions _options;
+  AnalysisOptionsImpl _options;
 
   /**
    * The object providing access to the types defined by the language.
@@ -211,6 +212,12 @@
   ClassElementImpl _enclosingClass;
 
   /**
+   * The enum containing the AST nodes being visited, or `null` if we are not
+   * in the scope of an enum.
+   */
+  ClassElement _enclosingEnum;
+
+  /**
    * The method or function that we are currently visiting, or `null` if we are
    * not inside a method or function.
    */
@@ -435,7 +442,7 @@
     ClassElementImpl outerClass = _enclosingClass;
     try {
       _isInNativeClass = node.nativeClause != null;
-      _enclosingClass = ClassElementImpl.getImpl(node.element);
+      _enclosingClass = AbstractClassElementImpl.getImpl(node.element);
       ExtendsClause extendsClause = node.extendsClause;
       ImplementsClause implementsClause = node.implementsClause;
       WithClause withClause = node.withClause;
@@ -484,7 +491,7 @@
    */
   void visitClassDeclarationIncrementally(ClassDeclaration node) {
     _isInNativeClass = node.nativeClause != null;
-    _enclosingClass = ClassElementImpl.getImpl(node.element);
+    _enclosingClass = AbstractClassElementImpl.getImpl(node.element);
     // initialize initialFieldElementsMap
     if (_enclosingClass != null) {
       List<FieldElement> fieldElements = _enclosingClass.fields;
@@ -504,7 +511,7 @@
         node.name, CompileTimeErrorCode.BUILT_IN_IDENTIFIER_AS_TYPEDEF_NAME);
     ClassElementImpl outerClassElement = _enclosingClass;
     try {
-      _enclosingClass = ClassElementImpl.getImpl(node.element);
+      _enclosingClass = AbstractClassElementImpl.getImpl(node.element);
       ImplementsClause implementsClause = node.implementsClause;
       // Only check for all of the inheritance logic around clauses if there
       // isn't an error code such as "Cannot extend double" already on the
@@ -622,13 +629,12 @@
 
   @override
   Object visitEnumDeclaration(EnumDeclaration node) {
-    ClassElementImpl outerClass = _enclosingClass;
+    ClassElement outerEnum = _enclosingEnum;
     try {
-      _isInNativeClass = false;
-      _enclosingClass = ClassElementImpl.getImpl(node.element);
+      _enclosingEnum = node.element;
       return super.visitEnumDeclaration(node);
     } finally {
-      _enclosingClass = outerClass;
+      _enclosingEnum = outerEnum;
     }
   }
 
@@ -670,6 +676,12 @@
   }
 
   @override
+  Object visitExtendsClause(ExtendsClause node) {
+    _checkForImplicitDynamicType(node.superclass);
+    return super.visitExtendsClause(node);
+  }
+
+  @override
   Object visitFieldDeclaration(FieldDeclaration node) {
     _isInStaticVariableDeclaration = node.isStatic;
     _isInInstanceVariableDeclaration = !_isInStaticVariableDeclaration;
@@ -741,6 +753,7 @@
       }
       _checkForTypeAnnotationDeferredClass(returnType);
       _checkForIllegalReturnType(returnType);
+      _checkForImplicitDynamicReturn(node, node.element);
       return super.visitFunctionDeclaration(node);
     } finally {
       _enclosingFunction = outerFunction;
@@ -775,6 +788,7 @@
     } else if (expressionType is FunctionType) {
       _checkTypeArguments(expressionType.element, node.typeArguments);
     }
+    _checkForImplicitDynamicInvoke(node);
     return super.visitFunctionExpressionInvocation(node);
   }
 
@@ -793,6 +807,18 @@
     _isInFunctionTypedFormalParameter = true;
     try {
       _checkForTypeAnnotationDeferredClass(node.returnType);
+
+      // TODO(jmesserly): ideally we'd use _checkForImplicitDynamicReturn, and
+      // we can get the function element via `node?.element?.type?.element` but
+      // it doesn't have hasImplicitReturnType set correctly.
+      if (!_options.implicitDynamic && node.returnType == null) {
+        DartType parameterType = node.element.type;
+        if (parameterType is FunctionType &&
+            parameterType.returnType.isDynamic) {
+          _errorReporter.reportErrorForNode(
+              StrongModeCode.IMPLICIT_DYNAMIC_RETURN, node, [node.identifier]);
+        }
+      }
       return super.visitFunctionTypedFormalParameter(node);
     } finally {
       _isInFunctionTypedFormalParameter = old;
@@ -806,6 +832,12 @@
   }
 
   @override
+  Object visitImplementsClause(ImplementsClause node) {
+    node.interfaces.forEach(_checkForImplicitDynamicType);
+    return super.visitImplementsClause(node);
+  }
+
+  @override
   Object visitImportDirective(ImportDirective node) {
     ImportElement importElement = node.element;
     if (importElement != null) {
@@ -842,6 +874,7 @@
           _checkForNewWithUndefinedConstructor(node, constructorName, typeName);
         }
       }
+      _checkForImplicitDynamicType(typeName);
       return super.visitInstanceCreationExpression(node);
     } finally {
       _isInConstInstanceCreation = wasInConstInstanceCreation;
@@ -867,7 +900,7 @@
       }
       _checkForExpectedOneListTypeArgument(node, typeArguments);
     }
-
+    _checkForImplicitDynamicTypedLiteral(node);
     _checkForListElementTypeNotAssignable(node);
     return super.visitListLiteral(node);
   }
@@ -885,7 +918,7 @@
       }
       _checkExpectedTwoMapTypeArguments(typeArguments);
     }
-
+    _checkForImplicitDynamicTypedLiteral(node);
     _checkForMapTypeNotAssignable(node);
     _checkForNonConstMapAsExpressionStatement(node);
     return super.visitMapLiteral(node);
@@ -924,6 +957,7 @@
       _checkForAllInvalidOverrideErrorCodesForMethod(node);
       _checkForTypeAnnotationDeferredClass(returnTypeName);
       _checkForIllegalReturnType(returnTypeName);
+      _checkForImplicitDynamicReturn(node, node.element);
       _checkForMustCallSuper(node);
       return super.visitMethodDeclaration(node);
     } finally {
@@ -945,6 +979,7 @@
     }
     _checkTypeArguments(
         node.methodName.staticElement, node.typeArguments, target?.staticType);
+    _checkForImplicitDynamicInvoke(node);
     return super.visitMethodInvocation(node);
   }
 
@@ -1040,6 +1075,15 @@
     _checkForConstFormalParameter(node);
     _checkForPrivateOptionalParameter(node);
     _checkForTypeAnnotationDeferredClass(node.type);
+
+    // Checks for an implicit dynamic parameter type.
+    //
+    // We can skip other parameter kinds besides simple formal, because:
+    // - DefaultFormalParameter contains a simple one, so it gets here,
+    // - FieldFormalParameter error should be reported on the field,
+    // - FunctionTypedFormalParameter is a function type, not dynamic.
+    _checkForImplicitDynamicIdentifier(node, node.identifier);
+
     return super.visitSimpleFormalParameter(node);
   }
 
@@ -1110,6 +1154,7 @@
         CompileTimeErrorCode.BUILT_IN_IDENTIFIER_AS_TYPE_PARAMETER_NAME);
     _checkForTypeParameterSupertypeOfItsBound(node);
     _checkForTypeAnnotationDeferredClass(node.bound);
+    _checkForImplicitDynamicType(node.bound);
     return super.visitTypeParameter(node);
   }
 
@@ -1119,6 +1164,7 @@
     Expression initializerNode = node.initializer;
     // do checks
     _checkForInvalidAssignment(nameNode, initializerNode);
+    _checkForImplicitDynamicIdentifier(node, nameNode);
     // visit name
     nameNode.accept(this);
     // visit initializer
@@ -1157,6 +1203,12 @@
   }
 
   @override
+  Object visitWithClause(WithClause node) {
+    node.mixinTypes.forEach(_checkForImplicitDynamicType);
+    return super.visitWithClause(node);
+  }
+
+  @override
   Object visitYieldStatement(YieldStatement node) {
     if (_inGenerator) {
       _checkForYieldOfInvalidType(node.expression, node.star != null);
@@ -3532,6 +3584,113 @@
     return foundError;
   }
 
+  void _checkForImplicitDynamicIdentifier(AstNode node, Identifier id) {
+    if (_options.implicitDynamic) {
+      return;
+    }
+    VariableElement variable = getVariableElement(id);
+    if (variable != null &&
+        variable.hasImplicitType &&
+        variable.type.isDynamic) {
+      ErrorCode errorCode;
+      if (variable is FieldElement) {
+        errorCode = StrongModeCode.IMPLICIT_DYNAMIC_FIELD;
+      } else if (variable is ParameterElement) {
+        errorCode = StrongModeCode.IMPLICIT_DYNAMIC_PARAMETER;
+      } else {
+        errorCode = StrongModeCode.IMPLICIT_DYNAMIC_VARIABLE;
+      }
+      _errorReporter.reportErrorForNode(errorCode, node, [id]);
+    }
+  }
+
+  void _checkForImplicitDynamicInvoke(InvocationExpression node) {
+    if (_options.implicitDynamic ||
+        node == null ||
+        node.typeArguments != null) {
+      return;
+    }
+    DartType invokeType = node.staticInvokeType;
+    DartType declaredType = node.function.staticType;
+    if (invokeType is FunctionType && declaredType is FunctionType) {
+      Iterable<DartType> typeArgs =
+          FunctionTypeImpl.recoverTypeArguments(declaredType, invokeType);
+      if (typeArgs.any((t) => t.isDynamic)) {
+        // Issue an error depending on what we're trying to call.
+        Expression function = node.function;
+        if (function is Identifier) {
+          Element element = function.staticElement;
+          if (element is MethodElement) {
+            _errorReporter.reportErrorForNode(
+                StrongModeCode.IMPLICIT_DYNAMIC_METHOD,
+                node.function,
+                [element.displayName, element.typeParameters.join(', ')]);
+            return;
+          }
+
+          if (element is FunctionElement) {
+            _errorReporter.reportErrorForNode(
+                StrongModeCode.IMPLICIT_DYNAMIC_FUNCTION,
+                node.function,
+                [element.displayName, element.typeParameters.join(', ')]);
+            return;
+          }
+        }
+
+        // The catch all case if neither of those matched.
+        // For example, invoking a function expression.
+        _errorReporter.reportErrorForNode(
+            StrongModeCode.IMPLICIT_DYNAMIC_INVOKE,
+            node.function,
+            [declaredType]);
+      }
+    }
+  }
+
+  void _checkForImplicitDynamicReturn(AstNode node, ExecutableElement element) {
+    if (_options.implicitDynamic) {
+      return;
+    }
+    if (element is PropertyAccessorElement && element.isSetter) {
+      return;
+    }
+    if (element != null &&
+        element.hasImplicitReturnType &&
+        element.returnType.isDynamic) {
+      _errorReporter.reportErrorForNode(
+          StrongModeCode.IMPLICIT_DYNAMIC_RETURN, node, [element.displayName]);
+    }
+  }
+
+  void _checkForImplicitDynamicType(TypeName node) {
+    if (_options.implicitDynamic ||
+        node == null ||
+        node.typeArguments != null) {
+      return;
+    }
+    DartType type = node.type;
+    if (type is ParameterizedType &&
+        type.typeArguments.isNotEmpty &&
+        type.typeArguments.any((t) => t.isDynamic)) {
+      _errorReporter.reportErrorForNode(
+          StrongModeCode.IMPLICIT_DYNAMIC_TYPE, node, [type]);
+    }
+  }
+
+  void _checkForImplicitDynamicTypedLiteral(TypedLiteral node) {
+    if (_options.implicitDynamic || node.typeArguments != null) {
+      return;
+    }
+    DartType type = node.staticType;
+    // It's an error if either the key or value was inferred as dynamic.
+    if (type is InterfaceType && type.typeArguments.any((t) => t.isDynamic)) {
+      ErrorCode errorCode = node is ListLiteral
+          ? StrongModeCode.IMPLICIT_DYNAMIC_LIST_LITERAL
+          : StrongModeCode.IMPLICIT_DYNAMIC_MAP_LITERAL;
+      _errorReporter.reportErrorForNode(errorCode, node);
+    }
+  }
+
   /**
    * Verify that if the given [identifier] is part of a constructor initializer,
    * then it does not implicitly reference 'this' expression.
@@ -5199,7 +5358,7 @@
       return;
     }
     Element element = type.element;
-    if (element is TypeParameterizedElement) {
+    if (element is ClassElement) {
       // prepare type parameters
       List<TypeParameterElement> parameterElements = element.typeParameters;
       List<DartType> parameterTypes = element.type.typeArguments;
@@ -5364,6 +5523,9 @@
     if (identical(enclosingElement, _enclosingClass)) {
       return;
     }
+    if (identical(enclosingElement, _enclosingEnum)) {
+      return;
+    }
     if (enclosingElement is! ClassElement) {
       return;
     }
@@ -5735,8 +5897,7 @@
 
   bool _expressionIsAssignableAtType(Expression expression,
       DartType actualStaticType, DartType expectedStaticType) {
-    bool concrete =
-        _options.strongMode && StaticInfo.isKnownFunction(expression);
+    bool concrete = _options.strongMode && checker.isKnownFunction(expression);
     if (concrete) {
       actualStaticType =
           _typeSystem.typeToConcreteType(_typeProvider, actualStaticType);
@@ -6214,6 +6375,13 @@
   RequiredConstantsComputer(this.source);
 
   @override
+  Object visitFunctionExpressionInvocation(FunctionExpressionInvocation node) {
+    _checkForMissingRequiredParam(
+        node.staticInvokeType, node.argumentList, node);
+    return super.visitFunctionExpressionInvocation(node);
+  }
+
+  @override
   Object visitInstanceCreationExpression(InstanceCreationExpression node) {
     DartType type = node.constructorName.type.type;
     if (type is InterfaceType) {
@@ -6230,6 +6398,25 @@
     return super.visitMethodInvocation(node);
   }
 
+  @override
+  Object visitRedirectingConstructorInvocation(
+      RedirectingConstructorInvocation node) {
+    DartType type = node.staticElement?.type;
+    if (type != null) {
+      _checkForMissingRequiredParam(type, node.argumentList, node);
+    }
+    return super.visitRedirectingConstructorInvocation(node);
+  }
+
+  @override
+  Object visitSuperConstructorInvocation(SuperConstructorInvocation node) {
+    DartType type = node.staticElement?.type;
+    if (type != null) {
+      _checkForMissingRequiredParam(type, node.argumentList, node);
+    }
+    return super.visitSuperConstructorInvocation(node);
+  }
+
   void _checkForMissingRequiredParam(
       DartType type, ArgumentList argumentList, AstNode node) {
     if (type is FunctionType) {
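
For orientation, a few hypothetical snippets of user code and the strong mode code each of the checks added above would be expected to report when `implicitDynamic` is disabled (exact diagnostics depend on the surrounding analysis configuration):

// Analyzed with analysis options whose implicitDynamic flag is false.
var untyped;                  // IMPLICIT_DYNAMIC_VARIABLE
dynamic explicit;             // not reported: dynamic is written out
void takes(x) {}              // IMPLICIT_DYNAMIC_PARAMETER
noReturnType() => null;       // IMPLICIT_DYNAMIC_RETURN
var list = [];                // IMPLICIT_DYNAMIC_LIST_LITERAL
var map = {};                 // IMPLICIT_DYNAMIC_MAP_LITERAL
var untypedList = new List(); // IMPLICIT_DYNAMIC_TYPE on the type name
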
diff --git a/pkg/analyzer/lib/src/generated/incremental_resolution_validator.dart b/pkg/analyzer/lib/src/generated/incremental_resolution_validator.dart
index 67885d1..518d6f6 100644
--- a/pkg/analyzer/lib/src/generated/incremental_resolution_validator.dart
+++ b/pkg/analyzer/lib/src/generated/incremental_resolution_validator.dart
@@ -5,8 +5,10 @@
 library analyzer.src.generated.incremental_resolution_validator;
 
 import 'package:analyzer/dart/ast/ast.dart';
+import 'package:analyzer/dart/ast/token.dart';
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
+import 'package:analyzer/src/dart/ast/utilities.dart';
 import 'package:analyzer/src/dart/element/element.dart';
 import 'package:analyzer/src/dart/element/member.dart';
 
@@ -16,9 +18,8 @@
  */
 void assertSameResolution(CompilationUnit actual, CompilationUnit expected,
     {bool validateTypes: false}) {
-  _SameResolutionValidator validator =
-      new _SameResolutionValidator(validateTypes, expected);
-  actual.accept(validator);
+  _SameResolutionValidator validator = new _SameResolutionValidator(validateTypes);
+  validator.isEqualNodes(expected, actual);
 }
 
 /**
@@ -33,808 +34,91 @@
   String toString() => "IncrementalResolutionMismatch: $message";
 }
 
-class _SameResolutionValidator implements AstVisitor {
+/**
+ * An [AstVisitor] that compares the structure of two [AstNode]s and their
+ * resolution to see whether they are equal.
+ */
+class _SameResolutionValidator extends AstComparator {
   final bool validateTypes;
 
-  /// The expected node to compare with the visited node.
-  AstNode other;
-
-  _SameResolutionValidator(this.validateTypes, this.other);
+  _SameResolutionValidator(this.validateTypes);
 
   @override
-  visitAdjacentStrings(AdjacentStrings node) {}
-
-  @override
-  visitAnnotation(Annotation node) {
-    Annotation other = this.other;
-    _visitNode(node.name, other.name);
-    _visitNode(node.constructorName, other.constructorName);
-    _visitNode(node.arguments, other.arguments);
-    _verifyElement(node.element, other.element);
-  }
-
-  @override
-  visitArgumentList(ArgumentList node) {
-    ArgumentList other = this.other;
-    _visitList(node.arguments, other.arguments);
-  }
-
-  @override
-  visitAsExpression(AsExpression node) {
-    AsExpression other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.expression, other.expression);
-    _visitNode(node.type, other.type);
-  }
-
-  @override
-  visitAssertStatement(AssertStatement node) {
-    AssertStatement other = this.other;
-    _visitNode(node.condition, other.condition);
-    _visitNode(node.message, other.message);
-  }
-
-  @override
-  visitAssignmentExpression(AssignmentExpression node) {
-    AssignmentExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitNode(node.leftHandSide, other.leftHandSide);
-    _visitNode(node.rightHandSide, other.rightHandSide);
-  }
-
-  @override
-  visitAwaitExpression(AwaitExpression node) {
-    AwaitExpression other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitBinaryExpression(BinaryExpression node) {
-    BinaryExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitNode(node.leftOperand, other.leftOperand);
-    _visitNode(node.rightOperand, other.rightOperand);
-  }
-
-  @override
-  visitBlock(Block node) {
-    Block other = this.other;
-    _visitList(node.statements, other.statements);
-  }
-
-  @override
-  visitBlockFunctionBody(BlockFunctionBody node) {
-    BlockFunctionBody other = this.other;
-    _visitNode(node.block, other.block);
-  }
-
-  @override
-  visitBooleanLiteral(BooleanLiteral node) {
-    BooleanLiteral other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitBreakStatement(BreakStatement node) {
-    BreakStatement other = this.other;
-    _visitNode(node.label, other.label);
-  }
-
-  @override
-  visitCascadeExpression(CascadeExpression node) {
-    CascadeExpression other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.target, other.target);
-    _visitList(node.cascadeSections, other.cascadeSections);
-  }
-
-  @override
-  visitCatchClause(CatchClause node) {
-    CatchClause other = this.other;
-    _visitNode(node.exceptionType, other.exceptionType);
-    _visitNode(node.exceptionParameter, other.exceptionParameter);
-    _visitNode(node.stackTraceParameter, other.stackTraceParameter);
-    _visitNode(node.body, other.body);
-  }
-
-  @override
-  visitClassDeclaration(ClassDeclaration node) {
-    ClassDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.name, other.name);
-    _visitNode(node.typeParameters, other.typeParameters);
-    _visitNode(node.extendsClause, other.extendsClause);
-    _visitNode(node.implementsClause, other.implementsClause);
-    _visitNode(node.withClause, other.withClause);
-    _visitList(node.members, other.members);
-  }
-
-  @override
-  visitClassTypeAlias(ClassTypeAlias node) {
-    ClassTypeAlias other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.name, other.name);
-    _visitNode(node.typeParameters, other.typeParameters);
-    _visitNode(node.superclass, other.superclass);
-    _visitNode(node.withClause, other.withClause);
-  }
-
-  @override
-  visitComment(Comment node) {
-    Comment other = this.other;
-    _visitList(node.references, other.references);
-  }
-
-  @override
-  visitCommentReference(CommentReference node) {
-    CommentReference other = this.other;
-    _visitNode(node.identifier, other.identifier);
-  }
-
-  @override
-  visitCompilationUnit(CompilationUnit node) {
-    CompilationUnit other = this.other;
-    _verifyElement(node.element, other.element);
-    _visitList(node.directives, other.directives);
-    _visitList(node.declarations, other.declarations);
-  }
-
-  @override
-  visitConditionalExpression(ConditionalExpression node) {
-    ConditionalExpression other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.condition, other.condition);
-    _visitNode(node.thenExpression, other.thenExpression);
-    _visitNode(node.elseExpression, other.elseExpression);
-  }
-
-  @override
-  visitConfiguration(Configuration node) {
-    Configuration other = this.other;
-    _visitNode(node.name, other.name);
-    _visitNode(node.value, other.value);
-    _visitNode(node.libraryUri, other.libraryUri);
-  }
-
-  @override
-  visitConstructorDeclaration(ConstructorDeclaration node) {
-    ConstructorDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.returnType, other.returnType);
-    _visitNode(node.name, other.name);
-    _visitNode(node.parameters, other.parameters);
-    _visitNode(node.redirectedConstructor, other.redirectedConstructor);
-    _visitList(node.initializers, other.initializers);
-  }
-
-  @override
-  visitConstructorFieldInitializer(ConstructorFieldInitializer node) {
-    ConstructorFieldInitializer other = this.other;
-    _visitNode(node.fieldName, other.fieldName);
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitConstructorName(ConstructorName node) {
-    ConstructorName other = this.other;
-    _verifyElement(node.staticElement, other.staticElement);
-    _visitNode(node.type, other.type);
-    _visitNode(node.name, other.name);
-  }
-
-  @override
-  visitContinueStatement(ContinueStatement node) {
-    ContinueStatement other = this.other;
-    _visitNode(node.label, other.label);
-  }
-
-  @override
-  visitDeclaredIdentifier(DeclaredIdentifier node) {
-    DeclaredIdentifier other = this.other;
-    _visitNode(node.type, other.type);
-    _visitNode(node.identifier, other.identifier);
-  }
-
-  @override
-  visitDefaultFormalParameter(DefaultFormalParameter node) {
-    DefaultFormalParameter other = this.other;
-    _visitNode(node.parameter, other.parameter);
-    _visitNode(node.defaultValue, other.defaultValue);
-  }
-
-  @override
-  visitDoStatement(DoStatement node) {
-    DoStatement other = this.other;
-    _visitNode(node.condition, other.condition);
-    _visitNode(node.body, other.body);
-  }
-
-  @override
-  visitDottedName(DottedName node) {
-    DottedName other = this.other;
-    _visitList(node.components, other.components);
-  }
-
-  @override
-  visitDoubleLiteral(DoubleLiteral node) {
-    DoubleLiteral other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitEmptyFunctionBody(EmptyFunctionBody node) {}
-
-  @override
-  visitEmptyStatement(EmptyStatement node) {}
-
-  @override
-  visitEnumConstantDeclaration(EnumConstantDeclaration node) {
-    EnumConstantDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.name, other.name);
-  }
-
-  @override
-  visitEnumDeclaration(EnumDeclaration node) {
-    EnumDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.name, other.name);
-    _visitList(node.constants, other.constants);
-  }
-
-  @override
-  visitExportDirective(ExportDirective node) {
-    ExportDirective other = this.other;
-    _visitDirective(node, other);
-  }
-
-  @override
-  visitExpressionFunctionBody(ExpressionFunctionBody node) {
-    ExpressionFunctionBody other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitExpressionStatement(ExpressionStatement node) {
-    ExpressionStatement other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitExtendsClause(ExtendsClause node) {
-    ExtendsClause other = this.other;
-    _visitNode(node.superclass, other.superclass);
-  }
-
-  @override
-  visitFieldDeclaration(FieldDeclaration node) {
-    FieldDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.fields, other.fields);
-  }
-
-  @override
-  visitFieldFormalParameter(FieldFormalParameter node) {
-    FieldFormalParameter other = this.other;
-    _visitNormalFormalParameter(node, other);
-    _visitNode(node.type, other.type);
-    _visitNode(node.parameters, other.parameters);
-  }
-
-  @override
-  visitForEachStatement(ForEachStatement node) {
-    ForEachStatement other = this.other;
-    _visitNode(node.identifier, other.identifier);
-    _visitNode(node.loopVariable, other.loopVariable);
-    _visitNode(node.iterable, other.iterable);
-  }
-
-  @override
-  visitFormalParameterList(FormalParameterList node) {
-    FormalParameterList other = this.other;
-    _visitList(node.parameters, other.parameters);
-  }
-
-  @override
-  visitForStatement(ForStatement node) {
-    ForStatement other = this.other;
-    _visitNode(node.variables, other.variables);
-    _visitNode(node.initialization, other.initialization);
-    _visitNode(node.condition, other.condition);
-    _visitList(node.updaters, other.updaters);
-    _visitNode(node.body, other.body);
-  }
-
-  @override
-  visitFunctionDeclaration(FunctionDeclaration node) {
-    FunctionDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.returnType, other.returnType);
-    _visitNode(node.name, other.name);
-    _visitNode(node.functionExpression, other.functionExpression);
-  }
-
-  @override
-  visitFunctionDeclarationStatement(FunctionDeclarationStatement node) {
-    FunctionDeclarationStatement other = this.other;
-    _visitNode(node.functionDeclaration, other.functionDeclaration);
-  }
-
-  @override
-  visitFunctionExpression(FunctionExpression node) {
-    FunctionExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.element, other.element);
-    _visitNode(node.parameters, other.parameters);
-    _visitNode(node.body, other.body);
-  }
-
-  @override
-  visitFunctionExpressionInvocation(FunctionExpressionInvocation node) {
-    FunctionExpressionInvocation other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitNode(node.function, other.function);
-    _visitNode(node.argumentList, other.argumentList);
-  }
-
-  @override
-  visitFunctionTypeAlias(FunctionTypeAlias node) {
-    FunctionTypeAlias other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.returnType, other.returnType);
-    _visitNode(node.name, other.name);
-    _visitNode(node.typeParameters, other.typeParameters);
-    _visitNode(node.parameters, other.parameters);
-  }
-
-  @override
-  visitFunctionTypedFormalParameter(FunctionTypedFormalParameter node) {
-    FunctionTypedFormalParameter other = this.other;
-    _visitNormalFormalParameter(node, other);
-    _visitNode(node.returnType, other.returnType);
-    _visitNode(node.parameters, other.parameters);
-  }
-
-  @override
-  visitHideCombinator(HideCombinator node) {
-    HideCombinator other = this.other;
-    _visitList(node.hiddenNames, other.hiddenNames);
-  }
-
-  @override
-  visitIfStatement(IfStatement node) {
-    IfStatement other = this.other;
-    _visitNode(node.condition, other.condition);
-    _visitNode(node.thenStatement, other.thenStatement);
-    _visitNode(node.elseStatement, other.elseStatement);
-  }
-
-  @override
-  visitImplementsClause(ImplementsClause node) {
-    ImplementsClause other = this.other;
-    _visitList(node.interfaces, other.interfaces);
-  }
-
-  @override
-  visitImportDirective(ImportDirective node) {
-    ImportDirective other = this.other;
-    _visitDirective(node, other);
-    _visitNode(node.prefix, other.prefix);
-    _verifyElement(node.uriElement, other.uriElement);
-  }
-
-  @override
-  visitIndexExpression(IndexExpression node) {
-    IndexExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitNode(node.target, other.target);
-    _visitNode(node.index, other.index);
-  }
-
-  @override
-  visitInstanceCreationExpression(InstanceCreationExpression node) {
-    InstanceCreationExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _visitNode(node.constructorName, other.constructorName);
-    _visitNode(node.argumentList, other.argumentList);
-  }
-
-  @override
-  visitIntegerLiteral(IntegerLiteral node) {
-    IntegerLiteral other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitInterpolationExpression(InterpolationExpression node) {
-    InterpolationExpression other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitInterpolationString(InterpolationString node) {}
-
-  @override
-  visitIsExpression(IsExpression node) {
-    IsExpression other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.expression, other.expression);
-    _visitNode(node.type, other.type);
-  }
-
-  @override
-  visitLabel(Label node) {
-    Label other = this.other;
-    _visitNode(node.label, other.label);
-  }
-
-  @override
-  visitLabeledStatement(LabeledStatement node) {
-    LabeledStatement other = this.other;
-    _visitList(node.labels, other.labels);
-    _visitNode(node.statement, other.statement);
-  }
-
-  @override
-  visitLibraryDirective(LibraryDirective node) {
-    LibraryDirective other = this.other;
-    _visitDirective(node, other);
-    _visitNode(node.name, other.name);
-  }
-
-  @override
-  visitLibraryIdentifier(LibraryIdentifier node) {
-    LibraryIdentifier other = this.other;
-    _visitList(node.components, other.components);
-  }
-
-  @override
-  visitListLiteral(ListLiteral node) {
-    ListLiteral other = this.other;
-    _visitExpression(node, other);
-    _visitList(node.elements, other.elements);
-  }
-
-  @override
-  visitMapLiteral(MapLiteral node) {
-    MapLiteral other = this.other;
-    _visitExpression(node, other);
-    _visitList(node.entries, other.entries);
-  }
-
-  @override
-  visitMapLiteralEntry(MapLiteralEntry node) {
-    MapLiteralEntry other = this.other;
-    _visitNode(node.key, other.key);
-    _visitNode(node.value, other.value);
-  }
-
-  @override
-  visitMethodDeclaration(MethodDeclaration node) {
-    MethodDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.name, other.name);
-    _visitNode(node.parameters, other.parameters);
-    _visitNode(node.body, other.body);
-  }
-
-  @override
-  visitMethodInvocation(MethodInvocation node) {
-    MethodInvocation other = this.other;
-    _visitNode(node.target, other.target);
-    _visitNode(node.methodName, other.methodName);
-    _visitNode(node.argumentList, other.argumentList);
-  }
-
-  @override
-  visitNamedExpression(NamedExpression node) {
-    NamedExpression other = this.other;
-    _visitNode(node.name, other.name);
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitNativeClause(NativeClause node) {}
-
-  @override
-  visitNativeFunctionBody(NativeFunctionBody node) {}
-
-  @override
-  visitNullLiteral(NullLiteral node) {
-    NullLiteral other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitParenthesizedExpression(ParenthesizedExpression node) {
-    ParenthesizedExpression other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitPartDirective(PartDirective node) {
-    PartDirective other = this.other;
-    _visitDirective(node, other);
-  }
-
-  @override
-  visitPartOfDirective(PartOfDirective node) {
-    PartOfDirective other = this.other;
-    _visitDirective(node, other);
-    _visitNode(node.libraryName, other.libraryName);
-  }
-
-  @override
-  visitPostfixExpression(PostfixExpression node) {
-    PostfixExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitNode(node.operand, other.operand);
-  }
-
-  @override
-  visitPrefixedIdentifier(PrefixedIdentifier node) {
-    PrefixedIdentifier other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.prefix, other.prefix);
-    _visitNode(node.identifier, other.identifier);
-  }
-
-  @override
-  visitPrefixExpression(PrefixExpression node) {
-    PrefixExpression other = this.other;
-    _visitExpression(node, other);
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitNode(node.operand, other.operand);
-  }
-
-  @override
-  visitPropertyAccess(PropertyAccess node) {
-    PropertyAccess other = this.other;
-    _visitExpression(node, other);
-    _visitNode(node.target, other.target);
-    _visitNode(node.propertyName, other.propertyName);
-  }
-
-  @override
-  visitRedirectingConstructorInvocation(RedirectingConstructorInvocation node) {
-    RedirectingConstructorInvocation other = this.other;
-    _verifyElement(node.staticElement, other.staticElement);
-    _visitNode(node.constructorName, other.constructorName);
-    _visitNode(node.argumentList, other.argumentList);
-  }
-
-  @override
-  visitRethrowExpression(RethrowExpression node) {
-    RethrowExpression other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitReturnStatement(ReturnStatement node) {
-    ReturnStatement other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitScriptTag(ScriptTag node) {}
-
-  @override
-  visitShowCombinator(ShowCombinator node) {
-    ShowCombinator other = this.other;
-    _visitList(node.shownNames, other.shownNames);
-  }
-
-  @override
-  visitSimpleFormalParameter(SimpleFormalParameter node) {
-    SimpleFormalParameter other = this.other;
-    _visitNormalFormalParameter(node, other);
-    _visitNode(node.type, other.type);
-  }
-
-  @override
-  visitSimpleIdentifier(SimpleIdentifier node) {
-    SimpleIdentifier other = this.other;
-    _verifyElement(node.staticElement, other.staticElement);
-    _verifyElement(node.propagatedElement, other.propagatedElement);
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitSimpleStringLiteral(SimpleStringLiteral node) {}
-
-  @override
-  visitStringInterpolation(StringInterpolation node) {
-    StringInterpolation other = this.other;
-    _visitList(node.elements, other.elements);
-  }
-
-  @override
-  visitSuperConstructorInvocation(SuperConstructorInvocation node) {
-    SuperConstructorInvocation other = this.other;
-    _verifyElement(node.staticElement, other.staticElement);
-    _visitNode(node.constructorName, other.constructorName);
-    _visitNode(node.argumentList, other.argumentList);
-  }
-
-  @override
-  visitSuperExpression(SuperExpression node) {
-    SuperExpression other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitSwitchCase(SwitchCase node) {
-    SwitchCase other = this.other;
-    _visitList(node.labels, other.labels);
-    _visitNode(node.expression, other.expression);
-    _visitList(node.statements, other.statements);
-  }
-
-  @override
-  visitSwitchDefault(SwitchDefault node) {
-    SwitchDefault other = this.other;
-    _visitList(node.statements, other.statements);
-  }
-
-  @override
-  visitSwitchStatement(SwitchStatement node) {
-    SwitchStatement other = this.other;
-    _visitNode(node.expression, other.expression);
-    _visitList(node.members, other.members);
-  }
-
-  @override
-  visitSymbolLiteral(SymbolLiteral node) {}
-
-  @override
-  visitThisExpression(ThisExpression node) {
-    ThisExpression other = this.other;
-    _visitExpression(node, other);
-  }
-
-  @override
-  visitThrowExpression(ThrowExpression node) {
-    ThrowExpression other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  @override
-  visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) {
-    TopLevelVariableDeclaration other = this.other;
-    _visitNode(node.variables, other.variables);
-  }
-
-  @override
-  visitTryStatement(TryStatement node) {
-    TryStatement other = this.other;
-    _visitNode(node.body, other.body);
-    _visitList(node.catchClauses, other.catchClauses);
-    _visitNode(node.finallyBlock, other.finallyBlock);
-  }
-
-  @override
-  visitTypeArgumentList(TypeArgumentList node) {
-    TypeArgumentList other = this.other;
-    _visitList(node.arguments, other.arguments);
-  }
-
-  @override
-  visitTypeName(TypeName node) {
-    TypeName other = this.other;
-    _verifyType(node.type, other.type);
-    _visitNode(node.name, other.name);
-    _visitNode(node.typeArguments, other.typeArguments);
-  }
-
-  @override
-  visitTypeParameter(TypeParameter node) {
-    TypeParameter other = this.other;
-    _visitNode(node.name, other.name);
-    _visitNode(node.bound, other.bound);
-  }
-
-  @override
-  visitTypeParameterList(TypeParameterList node) {
-    TypeParameterList other = this.other;
-    _visitList(node.typeParameters, other.typeParameters);
-  }
-
-  @override
-  visitVariableDeclaration(VariableDeclaration node) {
-    VariableDeclaration other = this.other;
-    _visitDeclaration(node, other);
-    _visitNode(node.name, other.name);
-    _visitNode(node.initializer, other.initializer);
-  }
-
-  @override
-  visitVariableDeclarationList(VariableDeclarationList node) {
-    VariableDeclarationList other = this.other;
-    _visitNode(node.type, other.type);
-    _visitList(node.variables, other.variables);
-  }
-
-  @override
-  visitVariableDeclarationStatement(VariableDeclarationStatement node) {
-    VariableDeclarationStatement other = this.other;
-    _visitNode(node.variables, other.variables);
-  }
-
-  @override
-  visitWhileStatement(WhileStatement node) {
-    WhileStatement other = this.other;
-    _visitNode(node.condition, other.condition);
-    _visitNode(node.body, other.body);
-  }
-
-  @override
-  visitWithClause(WithClause node) {
-    WithClause other = this.other;
-    _visitList(node.mixinTypes, other.mixinTypes);
-  }
-
-  @override
-  visitYieldStatement(YieldStatement node) {
-    YieldStatement other = this.other;
-    _visitNode(node.expression, other.expression);
-  }
-
-  void _assertNode(AstNode a, AstNode b) {
-    _expectEquals(a.offset, b.offset);
-    _expectEquals(a.length, b.length);
-  }
-
-  void _expectEquals(actual, expected) {
-    if (actual != expected) {
-      String message = '';
-      message += 'Expected: $expected\n';
-      message += '  Actual: $actual\n';
-      _fail(message);
-    }
-  }
-
-  void _expectIsNull(obj) {
-    if (obj != null) {
-      String message = '';
-      message += 'Expected: null\n';
-      message += '  Actual: $obj\n';
-      _fail(message);
-    }
-  }
-
-  void _expectLength(List actualList, int expected) {
+  bool failDifferentLength(List expectedList, List actualList) {
+    int expectedLength = expectedList.length;
+    int actualLength = actualList.length;
     String message = '';
-    message += 'Expected length: $expected\n';
-    if (actualList == null) {
-      message += 'but null found.';
-      _fail(message);
+    message += 'Expected length: $expectedLength\n';
+    message += 'but $actualLength found\n';
+    message += 'in $actualList';
+    _fail(message);
+    return false;
+  }
+
+  @override
+  bool failIfNotNull(Object expected, Object actual) {
+    if (actual != null) {
+      _fail('Expected null, but found $actual');
+      return false;
     }
-    int actual = actualList.length;
-    if (actual != expected) {
-      message += 'but $actual found\n';
-      message += 'in $actualList';
-      _fail(message);
+    return true;
+  }
+
+  @override
+  bool failIsNull(Object expected, Object actual) {
+    _fail('Expected not null, but found null');
+    return false;
+  }
+
+  @override
+  bool failRuntimeType(Object expected, Object actual) {
+    _fail('Expected ${expected.runtimeType}, but found ${actual.runtimeType}');
+    return false;
+  }
+
+  @override
+  bool isEqualNodes(AstNode first, AstNode second) {
+    super.isEqualNodes(first, second);
+    if (first is SimpleIdentifier && second is SimpleIdentifier) {
+      int offset = first.offset;
+      _verifyElement(
+          first.staticElement, second.staticElement, 'staticElement[$offset]');
+      _verifyElement(first.propagatedElement, second.propagatedElement,
+          'propagatedElement[$offset]');
+    } else if (first is Declaration && second is Declaration) {
+      int offset = first.offset;
+      _verifyElement(first.element, second.element, 'declaration[$offset]');
+    } else if (first is Directive && second is Directive) {
+      int offset = first.offset;
+      _verifyElement(first.element, second.element, 'directive[$offset]');
+    } else if (first is Expression && second is Expression) {
+      int offset = first.offset;
+      _verifyType(first.staticType, second.staticType, 'staticType[$offset]');
+      _verifyType(first.propagatedType, second.propagatedType,
+          'propagatedType[$offset]');
+      _verifyElement(first.staticParameterElement,
+          second.staticParameterElement, 'staticParameterElement[$offset]');
+      _verifyElement(
+          first.propagatedParameterElement,
+          second.propagatedParameterElement,
+          'propagatedParameterElement[$offset]');
     }
+    return true;
+  }
+
+  @override
+  bool isEqualTokensNotNull(Token expected, Token actual) {
+    _verifyEqual('lexeme', expected.lexeme, actual.lexeme);
+    _verifyEqual('offset', expected.offset, actual.offset);
+    _verifyEqual('length', expected.length, actual.length);
+    return true;
   }
 
   void _fail(String message) {
     throw new IncrementalResolutionMismatch(message);
   }
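For readers following the verifier change above: the new code plugs into the comparator's `fail*` hooks and funnels every mismatch through `_fail`, which throws, so the first difference aborts the whole comparison. The snippet below is a minimal, self-contained sketch of that hook-and-throw pattern in plain Dart; the class and method names are illustrative stand-ins, not the analyzer's API.

```dart
// A toy comparator with an overridable failure hook; the verifying subclass
// turns the hook into an exception so the first mismatch stops everything.
class MismatchException implements Exception {
  final String message;
  MismatchException(this.message);
  @override
  String toString() => 'MismatchException: $message';
}

class ListComparator {
  // Hook invoked when the two lists have different lengths. The base class
  // just reports "not equal"; subclasses may throw instead.
  bool failDifferentLength(List expected, List actual) => false;

  bool isEqualLists(List expected, List actual) {
    if (expected.length != actual.length) {
      return failDifferentLength(expected, actual);
    }
    for (int i = 0; i < expected.length; i++) {
      if (expected[i] != actual[i]) {
        return false;
      }
    }
    return true;
  }
}

class ThrowingListComparator extends ListComparator {
  @override
  bool failDifferentLength(List expected, List actual) {
    throw new MismatchException('Expected length: ${expected.length}, '
        'but ${actual.length} found in $actual');
  }
}

void main() {
  try {
    new ThrowingListComparator().isEqualLists([1, 2, 3], [1, 2]);
  } on MismatchException catch (e) {
    print(e); // Reports the first structural difference and stops.
  }
}
```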
 
-  void _verifyElement(Element a, Element b) {
+  void _verifyElement(Element a, Element b, String desc) {
     if (a is Member && b is Member) {
       a = (a as Member).baseElement;
       b = (b as Member).baseElement;
@@ -842,8 +126,7 @@
     String locationA = _getElementLocationWithoutUri(a);
     String locationB = _getElementLocationWithoutUri(b);
     if (locationA != locationB) {
-      int offset = other.offset;
-      _fail('[$offset]\nExpected: $b ($locationB)\n  Actual: $a ($locationA)');
+      _fail('$desc\nExpected: $b ($locationB)\n  Actual: $a ($locationA)');
     }
     if (a == null && b == null) {
       return;
@@ -866,66 +149,15 @@
     }
   }
 
-  void _verifyType(DartType a, DartType b) {
+  void _verifyType(DartType a, DartType b, String desc) {
     if (!validateTypes) {
       return;
     }
     if (a != b) {
-      int offset = other.offset;
-      _fail('[$offset]\nExpected: $b\n  Actual: $a');
+      _fail('$desc\nExpected: $b\n  Actual: $a');
     }
   }
 
-  void _visitAnnotatedNode(AnnotatedNode node, AnnotatedNode other) {
-    _visitNode(node.documentationComment, other.documentationComment);
-    _visitList(node.metadata, other.metadata);
-  }
-
-  _visitDeclaration(Declaration node, Declaration other) {
-    _verifyElement(node.element, other.element);
-    _visitAnnotatedNode(node, other);
-  }
-
-  _visitDirective(Directive node, Directive other) {
-    _verifyElement(node.element, other.element);
-    _visitAnnotatedNode(node, other);
-  }
-
-  void _visitExpression(Expression a, Expression b) {
-//    print('[${a.offset}] |$a| vs. [${b.offset}] |$b|');
-    _verifyType(a.staticType, b.staticType);
-    _verifyType(a.propagatedType, b.propagatedType);
-    _verifyElement(a.staticParameterElement, b.staticParameterElement);
-    _verifyElement(a.propagatedParameterElement, b.propagatedParameterElement);
-    _assertNode(a, b);
-  }
-
-  void _visitList(NodeList nodeList, NodeList expected) {
-    int length = nodeList.length;
-    _expectLength(nodeList, expected.length);
-    for (int i = 0; i < length; i++) {
-      _visitNode(nodeList[i], expected[i]);
-    }
-  }
-
-  void _visitNode(AstNode node, AstNode other) {
-    if (node == null) {
-      _expectIsNull(other);
-    } else {
-      this.other = other;
-      _assertNode(node, other);
-      node.accept(this);
-    }
-  }
-
-  void _visitNormalFormalParameter(
-      NormalFormalParameter node, NormalFormalParameter other) {
-    _verifyElement(node.element, other.element);
-    _visitNode(node.documentationComment, other.documentationComment);
-    _visitList(node.metadata, other.metadata);
-    _visitNode(node.identifier, other.identifier);
-  }
-
   /**
    * Returns a URI-scheme-independent version of the [element] location.
    */
diff --git a/pkg/analyzer/lib/src/generated/incremental_resolver.dart b/pkg/analyzer/lib/src/generated/incremental_resolver.dart
index 04f9153..77907db 100644
--- a/pkg/analyzer/lib/src/generated/incremental_resolver.dart
+++ b/pkg/analyzer/lib/src/generated/incremental_resolver.dart
@@ -9,9 +9,7 @@
 
 import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/ast/token.dart';
-import 'package:analyzer/dart/ast/visitor.dart';
 import 'package:analyzer/dart/element/element.dart';
-import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/dart/element/visitor.dart';
 import 'package:analyzer/src/context/cache.dart';
 import 'package:analyzer/src/dart/ast/token.dart';
@@ -38,818 +36,6 @@
 import 'package:analyzer/task/model.dart';
 
 /**
- * If `true`, an attempt to resolve API-changing modifications is made.
- */
-bool _resolveApiChanges = false;
-
-/**
- * This method is used to enable/disable API-changing modifications resolution.
- */
-void set test_resolveApiChanges(bool value) {
-  _resolveApiChanges = value;
-}
-
-/**
- * Instances of the class [DeclarationMatcher] determine whether the element
- * model defined by a given AST structure matches an existing element model.
- */
-class DeclarationMatcher extends RecursiveAstVisitor {
-  /**
-   * The library containing the AST nodes being visited.
-   */
-  LibraryElement _enclosingLibrary;
-
-  /**
-   * The compilation unit containing the AST nodes being visited.
-   */
-  CompilationUnitElement _enclosingUnit;
-
-  /**
-   * The function type alias containing the AST nodes being visited, or `null` if we are not
-   * in the scope of a function type alias.
-   */
-  FunctionTypeAliasElement _enclosingAlias;
-
-  /**
-   * The class containing the AST nodes being visited, or `null` if we are not
-   * in the scope of a class.
-   */
-  ClassElementImpl _enclosingClass;
-
-  /**
-   * The parameter containing the AST nodes being visited, or `null` if we are not in the
-   * scope of a parameter.
-   */
-  ParameterElement _enclosingParameter;
-
-  FieldDeclaration _enclosingFieldNode = null;
-  bool _inTopLevelVariableDeclaration = false;
-
-  /**
-   * Is `true` if the current class declaration has a constructor.
-   */
-  bool _hasConstructor = false;
-
-  /**
-   * A set containing all of the elements in the element model that were defined by the old AST node
-   * corresponding to the AST node being visited.
-   */
-  HashSet<Element> _allElements = new HashSet<Element>();
-
-  /**
-   * A set containing all of the elements that were defined in the old
-   * element model, but are not defined in the new element model.
-   */
-  HashSet<Element> _removedElements = new HashSet<Element>();
-
-  /**
-   * A set containing all of the elements that are defined in the new
-   * element model, but were not defined in the old element model.
-   */
-  HashSet<Element> _addedElements = new HashSet<Element>();
-
-  /**
-   * Determines how the element model corresponding to the given [node] differs
-   * from the [element].
-   */
-  DeclarationMatchKind matches(AstNode node, Element element) {
-    logger.enter('match $element @ ${element.nameOffset}');
-    try {
-      _captureEnclosingElements(element);
-      _gatherElements(element);
-      node.accept(this);
-    } on _DeclarationMismatchException {
-      logger.log("mismatched");
-      return DeclarationMatchKind.MISMATCH;
-    } finally {
-      logger.exit();
-    }
-    // no API changes
-    if (_removedElements.isEmpty && _addedElements.isEmpty) {
-      logger.log("no API changes");
-      return DeclarationMatchKind.MATCH;
-    }
-    // simple API change
-    logger.log('_removedElements: $_removedElements');
-    logger.log('_addedElements: $_addedElements');
-    _removedElements.forEach(_removeElement);
-    if (_removedElements.length <= 1 && _addedElements.length == 1) {
-      return DeclarationMatchKind.MISMATCH_OK;
-    }
-    // something more complex
-    return DeclarationMatchKind.MISMATCH;
-  }
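The decision at the end of `matches()` can be summarized without any analyzer types: compare what disappeared from the old element model with what appeared in the new one, and treat only a single added declaration (with at most one removal) as an incrementally resolvable API change. A hedged, standalone sketch of that classification, using name sets in place of `Element` objects:

```dart
// Toy version of the MATCH / MISMATCH_OK / MISMATCH decision above.
enum MatchKind { match, mismatchOk, mismatch }

MatchKind classify(Set<String> oldNames, Set<String> newNames) {
  Set<String> removed = oldNames.difference(newNames);
  Set<String> added = newNames.difference(oldNames);
  if (removed.isEmpty && added.isEmpty) {
    return MatchKind.match; // no API changes
  }
  if (removed.length <= 1 && added.length == 1) {
    return MatchKind.mismatchOk; // simple API change, e.g. one renamed member
  }
  return MatchKind.mismatch; // something more complex
}

void main() {
  Set<String> oldApi = new Set<String>.from(['m1', 'm2']);
  print(classify(oldApi, new Set<String>.from(['m1', 'm2']))); // match
  print(classify(oldApi, new Set<String>.from(['m1', 'm3']))); // mismatchOk
  print(classify(oldApi, new Set<String>.from(['a', 'b', 'c']))); // mismatch
}
```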
-
-  @override
-  visitBlockFunctionBody(BlockFunctionBody node) {
-    // ignore bodies
-  }
-
-  @override
-  visitClassDeclaration(ClassDeclaration node) {
-    String name = node.name.name;
-    ClassElement element = _findElement(_enclosingUnit.types, name);
-    _enclosingClass = element;
-    _processElement(element);
-    _assertSameAnnotations(node, element);
-    _assertSameTypeParameters(node.typeParameters, element.typeParameters);
-    // check for missing clauses
-    if (node.extendsClause == null) {
-      _assertTrue(element.supertype.name == 'Object');
-    }
-    if (node.implementsClause == null) {
-      _assertTrue(element.interfaces.isEmpty);
-    }
-    if (node.withClause == null) {
-      _assertTrue(element.mixins.isEmpty);
-    }
-    // process clauses and members
-    _hasConstructor = false;
-    super.visitClassDeclaration(node);
-    // process default constructor
-    if (!_hasConstructor) {
-      ConstructorElement constructor = element.unnamedConstructor;
-      _processElement(constructor);
-      if (!constructor.isSynthetic) {
-        _assertEquals(constructor.parameters.length, 0);
-      }
-    }
-    // matches, set the element
-    node.name.staticElement = element;
-  }
-
-  @override
-  visitClassTypeAlias(ClassTypeAlias node) {
-    String name = node.name.name;
-    ClassElement element = _findElement(_enclosingUnit.types, name);
-    _enclosingClass = element;
-    _processElement(element);
-    _assertSameTypeParameters(node.typeParameters, element.typeParameters);
-    super.visitClassTypeAlias(node);
-  }
-
-  @override
-  visitCompilationUnit(CompilationUnit node) {
-    _processElement(_enclosingUnit);
-    super.visitCompilationUnit(node);
-  }
-
-  @override
-  visitConstructorDeclaration(ConstructorDeclaration node) {
-    _hasConstructor = true;
-    SimpleIdentifier constructorName = node.name;
-    ConstructorElementImpl element = constructorName == null
-        ? _enclosingClass.unnamedConstructor
-        : _enclosingClass.getNamedConstructor(constructorName.name);
-    _processElement(element);
-    _assertEquals(node.constKeyword != null, element.isConst);
-    _assertEquals(node.factoryKeyword != null, element.isFactory);
-    _assertCompatibleParameters(node.parameters, element.parameters);
-    // matches, update the existing element
-    ExecutableElement newElement = node.element;
-    node.element = element;
-    _setLocalElements(element, newElement);
-  }
-
-  @override
-  visitEnumConstantDeclaration(EnumConstantDeclaration node) {
-    String name = node.name.name;
-    FieldElement element = _findElement(_enclosingClass.fields, name);
-    _processElement(element);
-  }
-
-  @override
-  visitEnumDeclaration(EnumDeclaration node) {
-    String name = node.name.name;
-    ClassElement element = _findElement(_enclosingUnit.enums, name);
-    _enclosingClass = element;
-    _processElement(element);
-    _assertTrue(element.isEnum);
-    super.visitEnumDeclaration(node);
-  }
-
-  @override
-  visitExportDirective(ExportDirective node) {
-    String uri = _getStringValue(node.uri);
-    if (uri != null) {
-      ExportElement element =
-          _findUriReferencedElement(_enclosingLibrary.exports, uri);
-      _processElement(element);
-      _assertCombinators(node.combinators, element.combinators);
-    }
-  }
-
-  @override
-  visitExpressionFunctionBody(ExpressionFunctionBody node) {
-    // ignore bodies
-  }
-
-  @override
-  visitExtendsClause(ExtendsClause node) {
-    _assertSameType(node.superclass, _enclosingClass.supertype);
-  }
-
-  @override
-  visitFieldDeclaration(FieldDeclaration node) {
-    _enclosingFieldNode = node;
-    try {
-      super.visitFieldDeclaration(node);
-    } finally {
-      _enclosingFieldNode = null;
-    }
-  }
-
-  @override
-  visitFunctionDeclaration(FunctionDeclaration node) {
-    // prepare element name
-    String name = node.name.name;
-    if (node.isSetter) {
-      name += '=';
-    }
-    // prepare element
-    Token property = node.propertyKeyword;
-    ExecutableElementImpl element;
-    if (property == null) {
-      element = _findElement(_enclosingUnit.functions, name);
-    } else {
-      element = _findElement(_enclosingUnit.accessors, name);
-    }
-    // process element
-    _processElement(element);
-    _assertSameAnnotations(node, element);
-    _assertFalse(element.isSynthetic);
-    _assertSameType(node.returnType, element.returnType);
-    _assertCompatibleParameters(
-        node.functionExpression.parameters, element.parameters);
-    _assertBody(node.functionExpression.body, element);
-    // matches, update the existing element
-    ExecutableElement newElement = node.element;
-    node.name.staticElement = element;
-    node.functionExpression.element = element;
-    _setLocalElements(element, newElement);
-  }
-
-  @override
-  visitFunctionTypeAlias(FunctionTypeAlias node) {
-    String name = node.name.name;
-    FunctionTypeAliasElement element =
-        _findElement(_enclosingUnit.functionTypeAliases, name);
-    _processElement(element);
-    _assertSameTypeParameters(node.typeParameters, element.typeParameters);
-    _assertSameType(node.returnType, element.returnType);
-    _assertCompatibleParameters(node.parameters, element.parameters);
-  }
-
-  @override
-  visitImplementsClause(ImplementsClause node) {
-    List<TypeName> nodes = node.interfaces;
-    List<InterfaceType> types = _enclosingClass.interfaces;
-    _assertSameTypes(nodes, types);
-  }
-
-  @override
-  visitImportDirective(ImportDirective node) {
-    String uri = _getStringValue(node.uri);
-    if (uri != null) {
-      ImportElement element =
-          _findUriReferencedElement(_enclosingLibrary.imports, uri);
-      _processElement(element);
-      // match the prefix
-      SimpleIdentifier prefixNode = node.prefix;
-      PrefixElement prefixElement = element.prefix;
-      if (prefixNode == null) {
-        _assertNull(prefixElement);
-      } else {
-        _assertNotNull(prefixElement);
-        _assertEquals(prefixNode.name, prefixElement.name);
-      }
-      // match combinators
-      _assertCombinators(node.combinators, element.combinators);
-    }
-  }
-
-  @override
-  visitMethodDeclaration(MethodDeclaration node) {
-    // prepare element name
-    String name = node.name.name;
-    if (name == TokenType.MINUS.lexeme &&
-        node.parameters.parameters.length == 0) {
-      name = "unary-";
-    }
-    if (node.isSetter) {
-      name += '=';
-    }
-    // prepare element
-    Token property = node.propertyKeyword;
-    ExecutableElementImpl element;
-    if (property == null) {
-      element = _findElement(_enclosingClass.methods, name);
-    } else {
-      element = _findElement(_enclosingClass.accessors, name);
-    }
-    // process element
-    ExecutableElement newElement = node.element;
-    try {
-      _assertNotNull(element);
-      _assertSameAnnotations(node, element);
-      _assertEquals(node.isStatic, element.isStatic);
-      _assertSameType(node.returnType, element.returnType);
-      _assertCompatibleParameters(node.parameters, element.parameters);
-      _assertBody(node.body, element);
-      _removedElements.remove(element);
-      // matches, update the existing element
-      node.name.staticElement = element;
-      _setLocalElements(element, newElement);
-    } on _DeclarationMismatchException {
-      _removeElement(element);
-      // add new element
-      if (newElement != null) {
-        _addedElements.add(newElement);
-        if (newElement is MethodElement) {
-          List<MethodElement> methods = _enclosingClass.methods.toList();
-          methods.add(newElement);
-          _enclosingClass.methods = methods;
-        } else {
-          List<PropertyAccessorElement> accessors =
-              _enclosingClass.accessors.toList();
-          accessors.add(newElement);
-          _enclosingClass.accessors = accessors;
-        }
-      }
-    }
-  }
-
-  @override
-  visitPartDirective(PartDirective node) {
-    String uri = _getStringValue(node.uri);
-    if (uri != null) {
-      CompilationUnitElement element =
-          _findUriReferencedElement(_enclosingLibrary.parts, uri);
-      _processElement(element);
-    }
-    super.visitPartDirective(node);
-  }
-
-  @override
-  visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) {
-    _inTopLevelVariableDeclaration = true;
-    try {
-      super.visitTopLevelVariableDeclaration(node);
-    } finally {
-      _inTopLevelVariableDeclaration = false;
-    }
-  }
-
-  @override
-  visitVariableDeclaration(VariableDeclaration node) {
-    // prepare variable
-    String name = node.name.name;
-    PropertyInducingElement element;
-    if (_inTopLevelVariableDeclaration) {
-      element = _findElement(_enclosingUnit.topLevelVariables, name);
-    } else {
-      element = _findElement(_enclosingClass.fields, name);
-    }
-    // verify
-    PropertyInducingElement newElement = node.name.staticElement;
-    _processElement(element);
-    _assertSameAnnotations(node, element);
-    _assertEquals(node.isConst, element.isConst);
-    _assertEquals(node.isFinal, element.isFinal);
-    if (_enclosingFieldNode != null) {
-      _assertEquals(_enclosingFieldNode.isStatic, element.isStatic);
-    }
-    _assertSameType(
-        (node.parent as VariableDeclarationList).type, element.type);
-    // matches, restore the existing element
-    node.name.staticElement = element;
-    Element variable = element;
-    if (variable is VariableElementImpl) {
-      variable.initializer = newElement.initializer;
-    }
-  }
-
-  @override
-  visitWithClause(WithClause node) {
-    List<TypeName> nodes = node.mixinTypes;
-    List<InterfaceType> types = _enclosingClass.mixins;
-    _assertSameTypes(nodes, types);
-  }
-
-  /**
-   * Assert that the given [body] is compatible with the given [element].
-   * It should not be empty if the [element] is not an abstract class member.
-   * If it is present, it should have the same async / generator modifiers.
-   */
-  void _assertBody(FunctionBody body, ExecutableElementImpl element) {
-    if (body is EmptyFunctionBody) {
-      _assertTrue(element.isAbstract);
-    } else {
-      _assertFalse(element.isAbstract);
-      _assertEquals(body.isSynchronous, element.isSynchronous);
-      _assertEquals(body.isGenerator, element.isGenerator);
-    }
-  }
-
-  void _assertCombinators(List<Combinator> nodeCombinators,
-      List<NamespaceCombinator> elementCombinators) {
-    // prepare shown/hidden names in the element
-    Set<String> showNames = new Set<String>();
-    Set<String> hideNames = new Set<String>();
-    for (NamespaceCombinator combinator in elementCombinators) {
-      if (combinator is ShowElementCombinator) {
-        showNames.addAll(combinator.shownNames);
-      } else if (combinator is HideElementCombinator) {
-        hideNames.addAll(combinator.hiddenNames);
-      }
-    }
-    // match combinators with the node
-    for (Combinator combinator in nodeCombinators) {
-      if (combinator is ShowCombinator) {
-        for (SimpleIdentifier nameNode in combinator.shownNames) {
-          String name = nameNode.name;
-          _assertTrue(showNames.remove(name));
-        }
-      } else if (combinator is HideCombinator) {
-        for (SimpleIdentifier nameNode in combinator.hiddenNames) {
-          String name = nameNode.name;
-          _assertTrue(hideNames.remove(name));
-        }
-      }
-    }
-    _assertTrue(showNames.isEmpty);
-    _assertTrue(hideNames.isEmpty);
-  }
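The combinator check above works by consumption: every shown or hidden name written in the directive must remove a matching name from the sets built out of the element model, and both sets must end up empty. A minimal standalone sketch of the same idea, with plain strings standing in for `SimpleIdentifier` nodes:

```dart
// Each shown/hidden name in the directive must be matched (and consumed)
// by a name recorded in the element model, and nothing may be left over.
bool sameCombinators(List<String> nodeShown, List<String> nodeHidden,
    Iterable<String> elementShown, Iterable<String> elementHidden) {
  Set<String> show = new Set<String>.from(elementShown);
  Set<String> hide = new Set<String>.from(elementHidden);
  for (String name in nodeShown) {
    if (!show.remove(name)) return false;
  }
  for (String name in nodeHidden) {
    if (!hide.remove(name)) return false;
  }
  return show.isEmpty && hide.isEmpty;
}

void main() {
  print(sameCombinators(['max'], [], ['max'], []));        // true
  print(sameCombinators(['max'], [], ['max', 'min'], [])); // false: leftover
}
```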
-
-  void _assertCompatibleParameter(
-      FormalParameter node, ParameterElement element) {
-    _assertEquals(node.kind, element.parameterKind);
-    if (node.kind == ParameterKind.NAMED ||
-        element.enclosingElement is ConstructorElement) {
-      _assertEquals(node.identifier.name, element.name);
-    }
-    // check parameter type specific properties
-    if (node is DefaultFormalParameter) {
-      Expression nodeDefault = node.defaultValue;
-      if (nodeDefault == null) {
-        _assertNull(element.defaultValueCode);
-      } else {
-        _assertEquals(nodeDefault.toSource(), element.defaultValueCode);
-      }
-      _assertCompatibleParameter(node.parameter, element);
-    } else if (node is FieldFormalParameter) {
-      _assertTrue(element.isInitializingFormal);
-      DartType parameterType = element.type;
-      if (node.type == null && node.parameters == null) {
-        FieldFormalParameterElement parameterElement = element;
-        if (!parameterElement.hasImplicitType) {
-          _assertTrue(parameterType == null || parameterType.isDynamic);
-        }
-        if (parameterElement.field != null) {
-          _assertEquals(node.identifier.name, element.name);
-        }
-      } else {
-        if (node.parameters != null) {
-          _assertTrue(parameterType is FunctionType);
-          FunctionType parameterFunctionType = parameterType;
-          _assertSameType(node.type, parameterFunctionType.returnType);
-        } else {
-          _assertSameType(node.type, parameterType);
-        }
-      }
-      _assertCompatibleParameters(node.parameters, element.parameters);
-    } else if (node is FunctionTypedFormalParameter) {
-      _assertFalse(element.isInitializingFormal);
-      _assertTrue(element.type is FunctionType);
-      FunctionType elementType = element.type;
-      _assertCompatibleParameters(node.parameters, element.parameters);
-      _assertSameType(node.returnType, elementType.returnType);
-    } else if (node is SimpleFormalParameter) {
-      _assertFalse(element.isInitializingFormal);
-      _assertSameType(node.type, element.type);
-    }
-  }
-
-  void _assertCompatibleParameters(
-      FormalParameterList nodes, List<ParameterElement> elements) {
-    if (nodes == null) {
-      return _assertEquals(elements.length, 0);
-    }
-    List<FormalParameter> parameters = nodes.parameters;
-    int length = parameters.length;
-    _assertEquals(length, elements.length);
-    for (int i = 0; i < length; i++) {
-      _assertCompatibleParameter(parameters[i], elements[i]);
-    }
-  }
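`_assertCompatibleParameters` is essentially a positional zip: an absent parameter list must correspond to zero element parameters, otherwise the lengths must agree and each node/element pair is handed to `_assertCompatibleParameter`. The following simplified sketch shows just that shape; strings stand in for parameters, the per-pair check is passed in, and "no parameter list" is modeled as an empty list.

```dart
// Lengths must agree and each node/element pair must pass the supplied
// per-parameter compatibility check.
bool compatibleParameters(List<String> nodes, List<String> elements,
    bool compatible(String node, String element)) {
  if (nodes.length != elements.length) {
    return false;
  }
  for (int i = 0; i < nodes.length; i++) {
    if (!compatible(nodes[i], elements[i])) {
      return false;
    }
  }
  return true;
}

void main() {
  bool sameName(String a, String b) => a == b;
  print(compatibleParameters(['x', 'y'], ['x', 'y'], sameName)); // true
  print(compatibleParameters(['x'], ['x', 'y'], sameName));      // false
}
```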
-
-  /**
-   * Asserts that there is an import with the same prefix as the given
-   * [prefixNode], which exposes the given [element].
-   */
-  void _assertElementVisibleWithPrefix(
-      SimpleIdentifier prefixNode, Element element) {
-    if (prefixNode == null) {
-      return;
-    }
-    String prefixName = prefixNode.name;
-    for (ImportElement import in _enclosingLibrary.imports) {
-      if (import.prefix != null && import.prefix.name == prefixName) {
-        Namespace namespace =
-            new NamespaceBuilder().createImportNamespaceForDirective(import);
-        Iterable<Element> visibleElements = namespace.definedNames.values;
-        if (visibleElements.contains(element)) {
-          return;
-        }
-      }
-    }
-    _assertTrue(false);
-  }
-
-  void _assertEquals(Object a, Object b) {
-    if (a != b) {
-      throw new _DeclarationMismatchException();
-    }
-  }
-
-  void _assertFalse(bool condition) {
-    if (condition) {
-      throw new _DeclarationMismatchException();
-    }
-  }
-
-  void _assertNotNull(Object object) {
-    if (object == null) {
-      throw new _DeclarationMismatchException();
-    }
-  }
-
-  void _assertNull(Object object) {
-    if (object != null) {
-      throw new _DeclarationMismatchException();
-    }
-  }
-
-  void _assertSameAnnotation(Annotation node, ElementAnnotation annotation) {
-    Element element = annotation.element;
-    if (element is ConstructorElement) {
-      _assertTrue(node.name is SimpleIdentifier);
-      _assertNull(node.constructorName);
-      TypeName nodeType = new TypeName(node.name, null);
-      _assertSameType(nodeType, element.returnType);
-      // TODO(scheglov) validate arguments
-    }
-    if (element is PropertyAccessorElement) {
-      _assertTrue(node.name is SimpleIdentifier);
-      String nodeName = node.name.name;
-      String elementName = element.displayName;
-      _assertEquals(nodeName, elementName);
-    }
-  }
-
-  void _assertSameAnnotations(AnnotatedNode node, Element element) {
-    List<Annotation> nodeAnnotations = node.metadata;
-    List<ElementAnnotation> elementAnnotations = element.metadata;
-    int length = nodeAnnotations.length;
-    _assertEquals(elementAnnotations.length, length);
-    for (int i = 0; i < length; i++) {
-      _assertSameAnnotation(nodeAnnotations[i], elementAnnotations[i]);
-    }
-  }
-
-  void _assertSameType(TypeName node, DartType type) {
-    // no type == dynamic
-    if (node == null) {
-      return _assertTrue(type == null || type.isDynamic);
-    }
-    if (type == null) {
-      return _assertTrue(false);
-    }
-    // prepare name
-    SimpleIdentifier prefixIdentifier = null;
-    Identifier nameIdentifier = node.name;
-    if (nameIdentifier is PrefixedIdentifier) {
-      PrefixedIdentifier prefixedIdentifier = nameIdentifier;
-      prefixIdentifier = prefixedIdentifier.prefix;
-      nameIdentifier = prefixedIdentifier.identifier;
-    }
-    String nodeName = nameIdentifier.name;
-    // check specific type kinds
-    if (type is ParameterizedType) {
-      _assertEquals(nodeName, type.name);
-      _assertElementVisibleWithPrefix(prefixIdentifier, type.element);
-      // check arguments
-      TypeArgumentList nodeArgumentList = node.typeArguments;
-      List<DartType> typeArguments = type.typeArguments;
-      if (nodeArgumentList == null) {
-        // Node doesn't have type arguments, so all type arguments of the
-        // element must be "dynamic".
-        for (DartType typeArgument in typeArguments) {
-          _assertTrue(typeArgument.isDynamic);
-        }
-      } else {
-        List<TypeName> nodeArguments = nodeArgumentList.arguments;
-        _assertSameTypes(nodeArguments, typeArguments);
-      }
-    } else if (type is TypeParameterType) {
-      _assertEquals(nodeName, type.name);
-      // TODO(scheglov) it should be possible to rename type parameters
-    } else if (type.isVoid) {
-      _assertEquals(nodeName, 'void');
-    } else if (type.isDynamic) {
-      _assertEquals(nodeName, 'dynamic');
-    } else {
-      // TODO(scheglov) support other types
-      logger.log('node: $node type: $type  type.type: ${type.runtimeType}');
-      _assertTrue(false);
-    }
-  }
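The core of `_assertSameType` is that an omitted annotation means `dynamic`, and a type name written without explicit type arguments only matches an element type whose arguments are all `dynamic`; otherwise names and arguments are compared pairwise. Below is a self-contained toy model of that rule. `ToyType` is an illustrative stand-in for `TypeName`/`DartType`, not the analyzer's types, and an omitted annotation is represented by `dynamicType`.

```dart
class ToyType {
  final String name;
  final List<ToyType> arguments;
  ToyType(this.name, [this.arguments = const <ToyType>[]]);
  bool get isDynamic => name == 'dynamic';
}

final ToyType dynamicType = new ToyType('dynamic');

bool sameType(ToyType node, ToyType type) {
  if (node.isDynamic) {
    return type.isDynamic;
  }
  if (node.name != type.name) {
    return false;
  }
  if (node.arguments.isEmpty) {
    // No type arguments written in the source: every argument of the
    // element's type must be dynamic.
    return type.arguments.every((ToyType t) => t.isDynamic);
  }
  if (node.arguments.length != type.arguments.length) {
    return false;
  }
  for (int i = 0; i < node.arguments.length; i++) {
    if (!sameType(node.arguments[i], type.arguments[i])) {
      return false;
    }
  }
  return true;
}

void main() {
  ToyType listOfInt = new ToyType('List', [new ToyType('int')]);
  ToyType listOfDynamic = new ToyType('List', [dynamicType]);
  ToyType bareList = new ToyType('List');
  print(sameType(bareList, listOfDynamic)); // true: implicit dynamic argument
  print(sameType(bareList, listOfInt));     // false
  print(sameType(listOfInt, listOfInt));    // true
}
```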
-
-  void _assertSameTypeParameter(
-      TypeParameter node, TypeParameterElement element) {
-    _assertSameType(node.bound, element.bound);
-  }
-
-  void _assertSameTypeParameters(
-      TypeParameterList nodesList, List<TypeParameterElement> elements) {
-    if (nodesList == null) {
-      return _assertEquals(elements.length, 0);
-    }
-    List<TypeParameter> nodes = nodesList.typeParameters;
-    int length = nodes.length;
-    _assertEquals(length, elements.length);
-    for (int i = 0; i < length; i++) {
-      _assertSameTypeParameter(nodes[i], elements[i]);
-    }
-  }
-
-  void _assertSameTypes(List<TypeName> nodes, List<DartType> types) {
-    int length = nodes.length;
-    _assertEquals(length, types.length);
-    for (int i = 0; i < length; i++) {
-      _assertSameType(nodes[i], types[i]);
-    }
-  }
-
-  void _assertTrue(bool condition) {
-    if (!condition) {
-      throw new _DeclarationMismatchException();
-    }
-  }
-
-  /**
-   * Given that the comparison is to begin with the given [element], capture
-   * the enclosing elements that might be used while performing the comparison.
-   */
-  void _captureEnclosingElements(Element element) {
-    Element parent =
-        element is CompilationUnitElement ? element : element.enclosingElement;
-    while (parent != null) {
-      if (parent is CompilationUnitElement) {
-        _enclosingUnit = parent;
-        _enclosingLibrary = element.library;
-      } else if (parent is ClassElement) {
-        if (_enclosingClass == null) {
-          _enclosingClass = parent;
-        }
-      } else if (parent is FunctionTypeAliasElement) {
-        if (_enclosingAlias == null) {
-          _enclosingAlias = parent;
-        }
-      } else if (parent is ParameterElement) {
-        if (_enclosingParameter == null) {
-          _enclosingParameter = parent;
-        }
-      }
-      parent = parent.enclosingElement;
-    }
-  }
-
-  void _gatherElements(Element element) {
-    _ElementsGatherer gatherer = new _ElementsGatherer(this);
-    element.accept(gatherer);
-    // TODO(scheglov) what if a change in a directive?
-    if (identical(element, _enclosingLibrary.definingCompilationUnit)) {
-      gatherer.addElements(_enclosingLibrary.imports);
-      gatherer.addElements(_enclosingLibrary.exports);
-      gatherer.addElements(_enclosingLibrary.parts);
-    }
-  }
-
-  void _processElement(Element element) {
-    _assertNotNull(element);
-    if (!_allElements.contains(element)) {
-      throw new _DeclarationMismatchException();
-    }
-    _removedElements.remove(element);
-  }
-
-  void _removeElement(Element element) {
-    if (element != null) {
-      Element enclosingElement = element.enclosingElement;
-      if (element is MethodElement) {
-        ClassElement classElement = enclosingElement;
-        _removeIdenticalElement(classElement.methods, element);
-      } else if (element is PropertyAccessorElement) {
-        if (enclosingElement is ClassElement) {
-          _removeIdenticalElement(enclosingElement.accessors, element);
-        }
-        if (enclosingElement is CompilationUnitElement) {
-          _removeIdenticalElement(enclosingElement.accessors, element);
-        }
-      }
-    }
-  }
-
-  /**
-   * Return the [Element] in [elements] with the given [name].
-   */
-  static Element _findElement(List<Element> elements, String name) {
-    for (Element element in elements) {
-      if (element.name == name) {
-        return element;
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Return the [UriReferencedElement] from [elements] with the given [uri], or
-   * `null` if there is no such element.
-   */
-  static UriReferencedElement _findUriReferencedElement(
-      List<UriReferencedElement> elements, String uri) {
-    for (UriReferencedElement element in elements) {
-      if (element.uri == uri) {
-        return element;
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Return the value of [literal], or `null` if the string is not a constant
-   * string without any string interpolation.
-   */
-  static String _getStringValue(StringLiteral literal) {
-    if (literal is StringInterpolation) {
-      return null;
-    }
-    return literal.stringValue;
-  }
-
-  /**
-   * Removes the first element identical to the given [element] from [elements].
-   */
-  static void _removeIdenticalElement(List elements, Object element) {
-    int length = elements.length;
-    for (int i = 0; i < length; i++) {
-      if (identical(elements[i], element)) {
-        elements.removeAt(i);
-        return;
-      }
-    }
-  }
-
-  static void _setLocalElements(
-      ExecutableElementImpl to, ExecutableElement from) {
-    if (from != null) {
-      to.functions = from.functions;
-      to.labels = from.labels;
-      to.localVariables = from.localVariables;
-      to.parameters = from.parameters;
-    }
-  }
-}
-
-/**
- * Describes how declarations match an existing element model.
- */
-class DeclarationMatchKind {
-  /**
-   * Complete match, no API changes.
-   */
-  static const MATCH = const DeclarationMatchKind('MATCH');
-
-  /**
-   * Has API changes that we might be able to resolve incrementally.
-   */
-  static const MISMATCH_OK = const DeclarationMatchKind('MISMATCH_OK');
-
-  /**
-   * Has API changes that we cannot resolve incrementally.
-   */
-  static const MISMATCH = const DeclarationMatchKind('MISMATCH');
-
-  final String name;
-
-  const DeclarationMatchKind(this.name);
-
-  @override
-  String toString() => name;
-}
-
-/**
  * The [Delta] implementation used by the incremental resolver.
  * It keeps Dart results that either don't change or are updated.
  */
@@ -1056,103 +242,53 @@
         _updateDelta = updateEndNew - updateEndOld;
 
   /**
-   * Resolve [node], reporting any errors or warnings to the given listener.
+   * Resolve [body], reporting any errors or warnings to the given listener.
    *
-   * [node] - the root of the AST structure to be resolved.
-   *
-   * Returns `true` if resolution was successful.
+   * [body] - the root of the AST structure to be resolved.
    */
-  bool resolve(AstNode node) {
+  void resolve(BlockFunctionBody body) {
     logger.enter('resolve: $_definingUnit');
     try {
-      AstNode rootNode = _findResolutionRoot(node);
-      _prepareResolutionContext(rootNode);
+      Declaration executable = _findResolutionRoot(body);
+      _prepareResolutionContext(executable);
       // update elements
       _updateCache();
       _updateElementNameOffsets();
-      _buildElements(rootNode);
-      if (!_canBeIncrementallyResolved(rootNode)) {
-        return false;
-      }
+      _buildElements(executable, body);
       // resolve
-      _resolveReferences(rootNode);
-      _computeConstants(rootNode);
+      _resolveReferences(executable);
+      _computeConstants(executable);
       _resolveErrors = errorListener.getErrorsForSource(_source);
       // verify
-      _verify(rootNode);
+      _verify(executable);
       _context.invalidateLibraryHints(_librarySource);
       // update entry errors
       _updateEntry();
-      // OK
-      return true;
     } finally {
       logger.exit();
     }
   }
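A recurring shape in the resolver above is bracketing each phase with `logger.enter()`/`logger.exit()` (and `LoggingTimer` start/stop) inside `try`/`finally`, so the logs stay balanced even when a phase throws. The snippet below is a tiny, self-contained version of that pattern; `SectionLogger` is illustrative and not the analyzer's logger.

```dart
class SectionLogger {
  int _depth = 0;

  void enter(String name) {
    print('${'  ' * _depth}> $name');
    _depth++;
  }

  void exit() {
    _depth--;
    print('${'  ' * _depth}<');
  }

  // Runs [body] between enter/exit, keeping them paired even on errors.
  void section(String name, void body()) {
    enter(name);
    try {
      body();
    } finally {
      exit();
    }
  }
}

void main() {
  SectionLogger logger = new SectionLogger();
  logger.section('resolve', () {
    logger.section('build elements', () {});
    logger.section('resolve references', () {});
  });
}
```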
 
-  void _buildElements(AstNode node) {
+  void _buildElements(Declaration executable, AstNode node) {
     LoggingTimer timer = logger.startTimer();
     try {
       ElementHolder holder = new ElementHolder();
       ElementBuilder builder = new ElementBuilder(holder, _definingUnit);
-      if (_resolutionContext.enclosingClassDeclaration != null) {
-        builder.visitClassDeclarationIncrementally(
-            _resolutionContext.enclosingClassDeclaration);
-      }
+      builder.initForFunctionBodyIncrementalResolution();
       node.accept(builder);
+      // Move local elements into the ExecutableElementImpl.
+      ExecutableElementImpl executableElement =
+          executable.element as ExecutableElementImpl;
+      executableElement.localVariables = holder.localVariables;
+      executableElement.functions = holder.functions;
+      executableElement.labels = holder.labels;
+      holder.validate();
     } finally {
       timer.stop('build elements');
     }
   }
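The rewritten `_buildElements` above no longer matches declarations; it rebuilds the body's local declarations into a holder and hands them to the enclosing executable. The sketch below shows that hand-off in miniature; `LocalHolder` and `Executable` are toy stand-ins for `ElementHolder` and `ExecutableElementImpl`, not the analyzer's classes.

```dart
class LocalHolder {
  final List<String> localVariables = <String>[];
  final List<String> functions = <String>[];
  final List<String> labels = <String>[];
}

class Executable {
  List<String> localVariables = <String>[];
  List<String> functions = <String>[];
  List<String> labels = <String>[];
}

// Move the collected locals into the enclosing executable.
void adoptLocals(Executable executable, LocalHolder holder) {
  executable
    ..localVariables = holder.localVariables
    ..functions = holder.functions
    ..labels = holder.labels;
}

void main() {
  LocalHolder holder = new LocalHolder()
    ..localVariables.add('i')
    ..functions.add('helper');
  Executable executable = new Executable();
  adoptLocals(executable, holder);
  print(executable.localVariables); // [i]
  print(executable.functions);      // [helper]
}
```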
 
   /**
-   * Return `true` if [node] does not have element model changes, or these
-   * changes can be incrementally propagated.
-   */
-  bool _canBeIncrementallyResolved(AstNode node) {
-    // If we are replacing the whole declaration, this means that its signature
-    // is changed. It might be an API change, or not.
-    //
-    // If, for example, a required parameter is changed, it is not an API
-    // change, but we want to find the existing corresponding Element in the
-    // enclosing one, set it for the node and update as needed.
-    //
-    // If, for example, the name of a method is changed, it is an API change,
-    // we need to know the old Element and the new Element. Again, we need to
-    // check the whole enclosing Element.
-    if (node is Declaration) {
-      node = node.parent;
-    }
-    Element element = _getElement(node);
-    DeclarationMatcher matcher = new DeclarationMatcher();
-    DeclarationMatchKind matchKind = matcher.matches(node, element);
-    if (matchKind == DeclarationMatchKind.MATCH) {
-      return true;
-    }
-    // mismatch that cannot be incrementally fixed
-    return false;
-  }
-
-  /**
-   * Return `true` if the given node can be resolved independently of any other
-   * nodes.
-   *
-   * *Note*: This method needs to be kept in sync with
-   * [ScopeBuilder.ContextBuilder].
-   *
-   * [node] - the node being tested.
-   */
-  bool _canBeResolved(AstNode node) =>
-      node is ClassDeclaration ||
-      node is ClassTypeAlias ||
-      node is CompilationUnit ||
-      node is ConstructorDeclaration ||
-      node is FunctionDeclaration ||
-      node is FunctionTypeAlias ||
-      node is MethodDeclaration ||
-      node is TopLevelVariableDeclaration;
-
-  /**
    * Compute a value for all of the constants in the given [node].
    */
   void _computeConstants(AstNode node) {
@@ -1181,9 +317,11 @@
    *
    * Throws [AnalysisException] if there is no such node.
    */
-  AstNode _findResolutionRoot(AstNode node) {
+  Declaration _findResolutionRoot(AstNode node) {
     while (node != null) {
-      if (_canBeResolved(node)) {
+      if (node is ConstructorDeclaration ||
+          node is FunctionDeclaration ||
+          node is MethodDeclaration) {
         return node;
       }
       node = node.parent;
@@ -1191,19 +329,6 @@
     throw new AnalysisException("Cannot resolve node: no resolvable node");
   }
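With the change above, `_findResolutionRoot` walks outwards from the changed node and accepts only constructor, function, or method declarations as resolution roots, throwing if none is found. A toy standalone version of that walk, using a list of node kinds from innermost to outermost instead of real AST parent pointers:

```dart
const List<String> resolvableKinds = const [
  'ConstructorDeclaration',
  'FunctionDeclaration',
  'MethodDeclaration',
];

// Returns the first enclosing kind that can act as the resolution root.
String findResolutionRoot(List<String> nodeAndAncestors) {
  for (String kind in nodeAndAncestors) {
    if (resolvableKinds.contains(kind)) {
      return kind;
    }
  }
  throw new StateError('Cannot resolve node: no resolvable node');
}

void main() {
  print(findResolutionRoot([
    'Block',
    'BlockFunctionBody',
    'MethodDeclaration',
    'ClassDeclaration'
  ])); // MethodDeclaration
}
```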
 
-  /**
-   * Return the element defined by [node], or `null` if the node does not
-   * define an element.
-   */
-  Element _getElement(AstNode node) {
-    if (node is Declaration) {
-      return node.element;
-    } else if (node is CompilationUnit) {
-      return node.element;
-    }
-    return null;
-  }
-
   void _prepareResolutionContext(AstNode node) {
     if (_resolutionContext == null) {
       _resolutionContext =
@@ -1406,9 +531,7 @@
       this._sourceEntry,
       this._unitEntry,
       this._oldUnit,
-      bool resolveApiChanges) {
-    _resolveApiChanges = resolveApiChanges;
-  }
+      bool resolveApiChanges);
 
   /**
    * Attempts to update [_oldUnit] to the state corresponding to [newCode].
@@ -1526,13 +649,7 @@
                     newParent is ConstructorDeclaration ||
                 oldParent is MethodDeclaration &&
                     newParent is MethodDeclaration) {
-              Element oldElement = (oldParent as Declaration).element;
-              if (new DeclarationMatcher().matches(newParent, oldElement) ==
-                  DeclarationMatchKind.MATCH) {
-                oldNode = oldParent;
-                newNode = newParent;
-                found = true;
-              } else {
+              if (oldParents.length == i || newParents.length == i) {
                 return false;
               }
             } else if (oldParent is FunctionBody && newParent is FunctionBody) {
@@ -1597,12 +714,7 @@
             _updateOffset,
             _updateEndOld,
             _updateEndNew);
-        bool success = incrementalResolver.resolve(newNode);
-        // check if success
-        if (!success) {
-          logger.log('Failure: element model changed.');
-          return false;
-        }
+        incrementalResolver.resolve(newNode);
         // update DartEntry
         _updateEntry();
         logger.log('Success.');
@@ -1800,10 +912,7 @@
   }
 
   static _TokenPair _findFirstDifferentToken(Token oldToken, Token newToken) {
-    while (true) {
-      if (oldToken.type == TokenType.EOF && newToken.type == TokenType.EOF) {
-        return null;
-      }
+    while (oldToken.type != TokenType.EOF || newToken.type != TokenType.EOF) {
       if (oldToken.type == TokenType.EOF || newToken.type == TokenType.EOF) {
         return new _TokenPair(_TokenDifferenceKind.CONTENT, oldToken, newToken);
       }
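// Illustrative sketch only: walking two singly linked token streams in
// lock-step until both reach EOF, mirroring the rewritten loop condition
// above. `_Tok` is a hypothetical stand-in, not the analyzer's Token class.
class _Tok {
  final String lexeme;
  final bool isEof;
  _Tok next;
  _Tok(this.lexeme, {this.isEof: false});
}

_Tok firstDifference(_Tok a, _Tok b) {
  while (!a.isEof || !b.isEof) {
    if (a.isEof || b.isEof || a.lexeme != b.lexeme) {
      return a; // first point of divergence
    }
    a = a.next;
    b = b.next;
  }
  return null; // the streams are identical
}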
@@ -2069,13 +1178,6 @@
 }
 
 /**
- * Instances of the class [_DeclarationMismatchException] represent an exception
- * that is thrown when the element model defined by a given AST structure does
- * not match an existing element model.
- */
-class _DeclarationMismatchException {}
-
-/**
  * Adjusts the location of each Element that moved.
  *
  * Since `==` and `hashCode` of a local variable or function Element are based
@@ -2103,8 +1205,7 @@
         (element as ElementImpl).nameOffset = nameOffset + updateDelta;
       }
       if (element is ConstVariableElement) {
-        ConstVariableElement constVariable = element as ConstVariableElement;
-        Expression initializer = constVariable.constantInitializer;
+        Expression initializer = element.constantInitializer;
         if (initializer != null) {
           _shiftTokens(initializer.beginToken);
         }
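// Sketch of the type-promotion idiom the change above relies on: after an
// `is` check on a local variable or parameter, its members can be used
// without an explicit cast. `Shape` and `Circle` are hypothetical examples,
// not analyzer types.
abstract class Shape {}

class Circle extends Shape {
  final double radius;
  Circle(this.radius);
}

double describe(Shape shape) {
  if (shape is Circle) {
    // `shape` is promoted to Circle here; no `as Circle` cast is needed.
    return shape.radius * 2;
  }
  return 0.0;
}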
@@ -2171,60 +1272,6 @@
   }
 }
 
-class _ElementsGatherer extends GeneralizingElementVisitor {
-  final DeclarationMatcher matcher;
-
-  _ElementsGatherer(this.matcher);
-
-  void addElements(List<Element> elements) {
-    for (Element element in elements) {
-      if (!element.isSynthetic) {
-        _addElement(element);
-      }
-    }
-  }
-
-  @override
-  visitElement(Element element) {
-    _addElement(element);
-    super.visitElement(element);
-  }
-
-  @override
-  visitExecutableElement(ExecutableElement element) {
-    _addElement(element);
-  }
-
-  @override
-  visitParameterElement(ParameterElement element) {}
-
-  @override
-  visitPropertyAccessorElement(PropertyAccessorElement element) {
-    if (!element.isSynthetic) {
-      _addElement(element);
-    }
-    // Don't visit children (such as synthetic setter parameters).
-  }
-
-  @override
-  visitPropertyInducingElement(PropertyInducingElement element) {
-    if (!element.isSynthetic) {
-      _addElement(element);
-    }
-    // Don't visit children (such as property accessors).
-  }
-
-  @override
-  visitTypeParameterElement(TypeParameterElement element) {}
-
-  void _addElement(Element element) {
-    if (element != null) {
-      matcher._allElements.add(element);
-      matcher._removedElements.add(element);
-    }
-  }
-}
-
 /**
  * Describes how two [Token]s are different.
  */
diff --git a/pkg/analyzer/lib/src/generated/parser.dart b/pkg/analyzer/lib/src/generated/parser.dart
index f63edfa..ab3b05e 100644
--- a/pkg/analyzer/lib/src/generated/parser.dart
+++ b/pkg/analyzer/lib/src/generated/parser.dart
@@ -216,16 +216,19 @@
       8,
       (Parser target, arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7) => target
           ._parseConstructor(arg0, arg1, arg2, arg3, arg4, arg5, arg6, arg7)),
-  'parseConstructorFieldInitializer_0': new MethodTrampoline(
-      0, (Parser target) => target._parseConstructorFieldInitializer()),
+  'parseConstructorFieldInitializer_1': new MethodTrampoline(1,
+      (Parser target, arg0) => target._parseConstructorFieldInitializer(arg0)),
   'parseContinueStatement_0': new MethodTrampoline(
       0, (Parser target) => target._parseContinueStatement()),
   'parseDirective_1': new MethodTrampoline(
       1, (Parser target, arg0) => target._parseDirective(arg0)),
   'parseDirectives_0':
       new MethodTrampoline(0, (Parser target) => target._parseDirectives()),
-  'parseDocumentationComment_0': new MethodTrampoline(
-      0, (Parser target) => target._parseDocumentationComment()),
+  'parseDocumentationComment_0': new MethodTrampoline(0, (Parser target) {
+    List<DocumentationCommentToken> tokens =
+        target._parseDocumentationCommentTokens();
+    return target._parseDocumentationComment(tokens);
+  }),
   'parseDoStatement_0':
       new MethodTrampoline(0, (Parser target) => target._parseDoStatement()),
   'parseDottedName_0':
@@ -320,13 +323,15 @@
   'parseOptionalReturnType_0': new MethodTrampoline(
       0, (Parser target) => target._parseOptionalReturnType()),
   'parsePartDirective_1': new MethodTrampoline(
-      1, (Parser target, arg0) => target._parsePartDirective(arg0)),
+      1, (Parser target, arg0) => target._parsePartOrPartOfDirective(arg0)),
   'parsePostfixExpression_0': new MethodTrampoline(
       0, (Parser target) => target._parsePostfixExpression()),
   'parsePrimaryExpression_0': new MethodTrampoline(
       0, (Parser target) => target._parsePrimaryExpression()),
-  'parseRedirectingConstructorInvocation_0': new MethodTrampoline(
-      0, (Parser target) => target._parseRedirectingConstructorInvocation()),
+  'parseRedirectingConstructorInvocation_1': new MethodTrampoline(
+      1,
+      (Parser target, arg0) =>
+          target._parseRedirectingConstructorInvocation(arg0)),
   'parseRelationalExpression_0': new MethodTrampoline(
       0, (Parser target) => target._parseRelationalExpression()),
   'parseRethrowExpression_0': new MethodTrampoline(
@@ -493,7 +498,7 @@
   final Comment comment;
 
   /**
-   * The metadata that was parsed.
+   * The metadata that was parsed, or `null` if none was given.
    */
   final List<Annotation> metadata;
 
@@ -501,6 +506,11 @@
    * Initialize a newly created holder with the given [comment] and [metadata].
    */
   CommentAndMetadata(this.comment, this.metadata);
+
+  /**
+   * Return `true` if some metadata was parsed.
+   */
+  bool get hasMetadata => metadata != null && metadata.isNotEmpty;
 }
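// Sketch of the call-site simplification a getter like `hasMetadata`
// enables, using a hypothetical `_Holder` class as a stand-in for
// CommentAndMetadata.
class _Holder {
  final List<String> items;
  _Holder(this.items);

  /// Return `true` if any items were recorded; tolerates a null list.
  bool get hasItems => items != null && items.isNotEmpty;
}

void report(_Holder holder) {
  // Before: holder.items != null && !holder.items.isEmpty
  // After: a single, null-safe query.
  if (holder.hasItems) {
    print('found ${holder.items.length} item(s)');
  }
}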
 
 /**
@@ -2147,7 +2157,7 @@
 
   /**
    * A flag indicating whether the parser is currently in a function body marked
-   * as being 'async'.
+   * (by a star) as being a generator.
    */
   bool _inGenerator = false;
 
@@ -2163,7 +2173,7 @@
 
   /**
    * A flag indicating whether the parser is currently in a constructor field
-   * initializer, with no intervening parens, braces, or brackets.
+   * initializer, with no intervening parentheses, braces, or brackets.
    */
   bool _inInitializer = false;
 
@@ -2179,14 +2189,22 @@
   bool parseGenericMethodComments = false;
 
   /**
-   * Initialize a newly created parser to parse the content of the given
-   * [_source] and to report any errors that are found to the given
-   * [_errorListener].
+   * A flag indicating whether the parser is to parse trailing commas in
+   * parameter and argument lists (sdk#26647).
+   */
+  bool parseTrailingCommas = false;
+
+  /**
+   * Initialize a newly created parser to parse tokens in the given [_source]
+   * and to report any errors that are found to the given [_errorListener].
    */
   Parser(this._source, this._errorListener);
 
-  void set currentToken(Token currentToken) {
-    this._currentToken = currentToken;
+  /**
+   * Set the token with which the parse is to begin to the given [token].
+   */
+  void set currentToken(Token token) {
+    this._currentToken = token;
   }
 
   /**
@@ -2195,6 +2213,7 @@
    * parameters, followed by a left-parenthesis. This is used by
    * [_parseTypeAlias] to determine whether or not to parse a return type.
    */
+  @deprecated
   bool get hasReturnTypeInTypeAlias {
     Token next = _skipReturnType(_currentToken);
     if (next == null) {
@@ -2229,19 +2248,20 @@
    */
   Token getAndAdvance() {
     Token token = _currentToken;
-    _advance();
+    _currentToken = _currentToken.next;
     return token;
   }
 
   /**
    * Parse an annotation. Return the annotation that was parsed.
    *
+   * This method assumes that the current token matches [TokenType.AT].
+   *
    *     annotation ::=
    *         '@' qualified ('.' identifier)? arguments?
-   *
    */
   Annotation parseAnnotation() {
-    Token atSign = _expect(TokenType.AT);
+    Token atSign = getAndAdvance();
     Identifier name = parsePrefixedIdentifier();
     Token period = null;
     SimpleIdentifier constructorName = null;
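// Sketch of the caller/callee contract adopted above: once the caller has
// verified the token kind, the callee can consume it with getAndAdvance()
// instead of re-validating via _expect(). `_Cursor` and the string tokens
// are hypothetical stand-ins for the real parser state.
class _Cursor {
  final List<String> tokens;
  int index = 0;
  _Cursor(this.tokens);

  String get current => tokens[index];

  String getAndAdvance() {
    String token = tokens[index];
    index++;
    return token;
  }
}

String parseAtAnnotation(_Cursor cursor) {
  // The caller has already checked that `current` is '@', so no re-check here.
  String atSign = cursor.getAndAdvance();
  String name = cursor.getAndAdvance();
  return '$atSign$name';
}

void main() {
  _Cursor cursor = new _Cursor(['@', 'override']);
  if (cursor.current == '@') {
    print(parseAtAnnotation(cursor)); // prints: @override
  }
}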
@@ -2267,6 +2287,9 @@
    *         label expression
    */
   Expression parseArgument() {
+    // TODO(brianwilkerson) Consider returning a wrapper indicating whether the
+    // expression is a named expression in order to remove the 'is' check in
+    // 'parseArgumentList'.
     //
     // Both namedArgument and expression can start with an identifier, but only
     // namedArgument can have an identifier followed by a colon.
@@ -2281,6 +2304,8 @@
   /**
    * Parse a list of arguments. Return the argument list that was parsed.
    *
+   * This method assumes that the current token matches [TokenType.OPEN_PAREN].
+   *
    *     arguments ::=
    *         '(' argumentList? ')'
    *
@@ -2289,10 +2314,9 @@
    *       | expressionList (',' namedArgument)*
    */
   ArgumentList parseArgumentList() {
-    Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
-    List<Expression> arguments = new List<Expression>();
+    Token leftParenthesis = getAndAdvance();
     if (_matches(TokenType.CLOSE_PAREN)) {
-      return new ArgumentList(leftParenthesis, arguments, getAndAdvance());
+      return new ArgumentList(leftParenthesis, null, getAndAdvance());
     }
     //
     // Even though unnamed arguments must all appear before any named arguments,
@@ -2302,30 +2326,33 @@
     _inInitializer = false;
     try {
       Expression argument = parseArgument();
-      arguments.add(argument);
+      List<Expression> arguments = <Expression>[argument];
       bool foundNamedArgument = argument is NamedExpression;
       bool generatedError = false;
       while (_optional(TokenType.COMMA)) {
+        if (parseTrailingCommas && _matches(TokenType.CLOSE_PAREN)) {
+          break;
+        }
         argument = parseArgument();
         arguments.add(argument);
-        if (foundNamedArgument) {
-          bool blankArgument =
-              argument is SimpleIdentifier && argument.name.isEmpty;
-          if (!generatedError &&
-              !(argument is NamedExpression && !blankArgument)) {
-            // Report the error, once, but allow the arguments to be in any
-            // order in the AST.
-            _reportErrorForCurrentToken(
-                ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT);
-            generatedError = true;
-          }
-        } else if (argument is NamedExpression) {
+        if (argument is NamedExpression) {
           foundNamedArgument = true;
+        } else if (foundNamedArgument) {
+          if (!generatedError) {
+            if (!argument.isSynthetic) {
+              // Report the error, once, but allow the arguments to be in any
+              // order in the AST.
+              _reportErrorForCurrentToken(
+                  ParserErrorCode.POSITIONAL_AFTER_NAMED_ARGUMENT);
+              generatedError = true;
+            }
+          }
         }
       }
-      // TODO(brianwilkerson) Recovery: Look at the left parenthesis to see
-      // whether there is a matching right parenthesis. If there is, then we're
-      // more likely missing a comma and should go back to parsing arguments.
+      // Recovery: If the next token is not a right parenthesis, look at the
+      // left parenthesis to see whether there is a matching right parenthesis.
+      // If there is, then we're more likely missing a comma and should go back
+      // to parsing arguments.
       Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
       return new ArgumentList(leftParenthesis, arguments, rightParenthesis);
     } finally {
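// Sketch of the trailing-comma handling added above (sdk#26647), over a
// simplified token list: when the flag is set, a comma immediately followed
// by the closing parenthesis ends the list. All names here are hypothetical
// stand-ins, not the analyzer's API.
List<String> parseArguments(List<String> tokens,
    {bool allowTrailingComma: false}) {
  int i = 0;
  assert(tokens[i] == '(');
  i++; // consume '('
  List<String> arguments = <String>[];
  if (tokens[i] == ')') {
    return arguments;
  }
  arguments.add(tokens[i++]);
  while (tokens[i] == ',') {
    i++; // consume ','
    if (allowTrailingComma && tokens[i] == ')') {
      break; // trailing comma immediately before ')'
    }
    arguments.add(tokens[i++]);
  }
  assert(tokens[i] == ')');
  return arguments;
}

void main() {
  print(parseArguments(['(', 'a', ',', 'b', ',', ')'],
      allowTrailingComma: true)); // prints: [a, b]
}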
@@ -2343,16 +2370,15 @@
    */
   Expression parseBitwiseOrExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
-        _tokenMatches(_peek(), TokenType.BAR)) {
+    if (_currentToken.keyword == Keyword.SUPER &&
+        _currentToken.next.type == TokenType.BAR) {
       expression = new SuperExpression(getAndAdvance());
     } else {
       expression = _parseBitwiseXorExpression();
     }
-    while (_matches(TokenType.BAR)) {
-      Token operator = getAndAdvance();
+    while (_currentToken.type == TokenType.BAR) {
       expression = new BinaryExpression(
-          expression, operator, _parseBitwiseXorExpression());
+          expression, getAndAdvance(), _parseBitwiseXorExpression());
     }
     return expression;
   }
@@ -2360,27 +2386,37 @@
   /**
    * Parse a block. Return the block that was parsed.
    *
+   * This method assumes that the current token matches
+   * [TokenType.OPEN_CURLY_BRACKET].
+   *
    *     block ::=
    *         '{' statements '}'
    */
   Block parseBlock() {
-    Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
-    List<Statement> statements = new List<Statement>();
+    bool isEndOfBlock() {
+      TokenType type = _currentToken.type;
+      return type == TokenType.EOF || type == TokenType.CLOSE_CURLY_BRACKET;
+    }
+
+    Token leftBracket = getAndAdvance();
+    List<Statement> statements = <Statement>[];
     Token statementStart = _currentToken;
-    while (
-        !_matches(TokenType.EOF) && !_matches(TokenType.CLOSE_CURLY_BRACKET)) {
+    while (!isEndOfBlock()) {
       Statement statement = parseStatement2();
-      if (statement != null) {
-        statements.add(statement);
-      }
       if (identical(_currentToken, statementStart)) {
         // Ensure that we are making progress and report an error if we're not.
         _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
             [_currentToken.lexeme]);
         _advance();
+      } else if (statement != null) {
+        statements.add(statement);
       }
       statementStart = _currentToken;
     }
+    // Recovery: If the next token is not a right curly bracket, look at the
+    // left curly bracket to see whether there is a matching right bracket. If
+    // there is, then we're more likely missing a semi-colon and should go back
+    // to parsing statements.
     Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
     return new Block(leftBracket, statements, rightBracket);
   }
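// Sketch of the progress guard used in parseBlock above: if an iteration
// consumes no tokens, report the offending token and skip it so the loop
// cannot spin forever. The '?' token and names are hypothetical stand-ins.
List<String> parseItems(List<String> tokens) {
  int position = 0;
  List<String> items = <String>[];
  while (position < tokens.length && tokens[position] != '}') {
    int start = position;
    if (tokens[position] != '?') {
      items.add(tokens[position]);
      position++;
    }
    if (position == start) {
      print('unexpected token: ${tokens[position]}'); // report, then advance
      position++;
    }
  }
  return items;
}

void main() {
  print(parseItems(['a', '?', 'b', '}'])); // prints: [a, b]
}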
@@ -2397,21 +2433,25 @@
   ClassMember parseClassMember(String className) {
     CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
     Modifiers modifiers = _parseModifiers();
-    if (_matchesKeyword(Keyword.VOID)) {
-      TypeName returnType = parseReturnType();
-      if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) {
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.VOID) {
+      TypeName returnType =
+          new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+      keyword = _currentToken.keyword;
+      Token next = _peek();
+      bool isFollowedByIdentifier = _tokenMatchesIdentifier(next);
+      if (keyword == Keyword.GET && isFollowedByIdentifier) {
         _validateModifiersForGetterOrSetterOrMethod(modifiers);
         return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
             modifiers.staticKeyword, returnType);
-      } else if (_matchesKeyword(Keyword.SET) &&
-          _tokenMatchesIdentifier(_peek())) {
+      } else if (keyword == Keyword.SET && isFollowedByIdentifier) {
         _validateModifiersForGetterOrSetterOrMethod(modifiers);
         return _parseSetter(commentAndMetadata, modifiers.externalKeyword,
             modifiers.staticKeyword, returnType);
-      } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+      } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
         _validateModifiersForOperator(modifiers);
-        return _parseOperator(
-            commentAndMetadata, modifiers.externalKeyword, returnType);
+        return _parseOperatorAfterKeyword(commentAndMetadata,
+            modifiers.externalKeyword, returnType, getAndAdvance());
       } else if (_matchesIdentifier() &&
           _peek().matchesAny(const <TokenType>[
             TokenType.OPEN_PAREN,
@@ -2456,20 +2496,21 @@
             ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
         return null;
       }
-    } else if (_matchesKeyword(Keyword.GET) &&
-        _tokenMatchesIdentifier(_peek())) {
+    }
+    Token next = _peek();
+    bool isFollowedByIdentifier = _tokenMatchesIdentifier(next);
+    if (keyword == Keyword.GET && isFollowedByIdentifier) {
       _validateModifiersForGetterOrSetterOrMethod(modifiers);
       return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
           modifiers.staticKeyword, null);
-    } else if (_matchesKeyword(Keyword.SET) &&
-        _tokenMatchesIdentifier(_peek())) {
+    } else if (keyword == Keyword.SET && isFollowedByIdentifier) {
       _validateModifiersForGetterOrSetterOrMethod(modifiers);
       return _parseSetter(commentAndMetadata, modifiers.externalKeyword,
           modifiers.staticKeyword, null);
-    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
       _validateModifiersForOperator(modifiers);
-      return _parseOperator(
-          commentAndMetadata, modifiers.externalKeyword, null);
+      return _parseOperatorAfterKeyword(
+          commentAndMetadata, modifiers.externalKeyword, null, getAndAdvance());
     } else if (!_matchesIdentifier()) {
       //
       // Recover from an error.
@@ -2514,9 +2555,9 @@
         // We appear to have found an incomplete field declaration.
         //
         _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
-        List<VariableDeclaration> variables = new List<VariableDeclaration>();
-        variables.add(
-            new VariableDeclaration(_createSyntheticIdentifier(), null, null));
+        VariableDeclaration variable =
+            new VariableDeclaration(_createSyntheticIdentifier(), null, null);
+        List<VariableDeclaration> variables = <VariableDeclaration>[variable];
         return new FieldDeclaration(
             commentAndMetadata.comment,
             commentAndMetadata.metadata,
@@ -2527,7 +2568,7 @@
       _reportErrorForToken(
           ParserErrorCode.EXPECTED_CLASS_MEMBER, _currentToken);
       if (commentAndMetadata.comment != null ||
-          !commentAndMetadata.metadata.isEmpty) {
+          commentAndMetadata.hasMetadata) {
         //
         // We appear to have found an incomplete declaration at the end of the
         // class. At this point it consists of a metadata, which we don't want
@@ -2549,7 +2590,7 @@
             new EmptyFunctionBody(_createSyntheticToken(TokenType.SEMICOLON)));
       }
       return null;
-    } else if (_tokenMatches(_peek(), TokenType.PERIOD) &&
+    } else if (_tokenMatches(next, TokenType.PERIOD) &&
         _tokenMatchesIdentifier(_peekAt(2)) &&
         _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) {
       return _parseConstructor(
@@ -2561,7 +2602,7 @@
           getAndAdvance(),
           parseSimpleIdentifier(isDeclaration: true),
           parseFormalParameterList());
-    } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+    } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) {
       TypeName returnType = _parseOptionalTypeNameComment();
       SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true);
       TypeParameterList typeParameters = _parseGenericCommentTypeParameters();
@@ -2589,7 +2630,7 @@
           methodName,
           typeParameters,
           parameters);
-    } else if (_peek().matchesAny(const <TokenType>[
+    } else if (next.matchesAny(const <TokenType>[
       TokenType.EQ,
       TokenType.COMMA,
       TokenType.SEMICOLON
@@ -2602,7 +2643,7 @@
       }
       return _parseInitializedIdentifierList(commentAndMetadata,
           modifiers.staticKeyword, _validateModifiersForField(modifiers), null);
-    } else if (_matchesKeyword(Keyword.TYPEDEF)) {
+    } else if (keyword == Keyword.TYPEDEF) {
       _reportErrorForCurrentToken(ParserErrorCode.TYPEDEF_IN_CLASS);
       // TODO(brianwilkerson) We don't currently have any way to capture the
       // function type alias that was parsed.
@@ -2615,20 +2656,22 @@
             modifiers.externalKeyword, modifiers.staticKeyword, null);
       }
     }
-    TypeName type = parseTypeName();
-    if (_matchesKeyword(Keyword.GET) && _tokenMatchesIdentifier(_peek())) {
+    TypeName type = _parseTypeNameAfterIdentifier();
+    keyword = _currentToken.keyword;
+    next = _peek();
+    isFollowedByIdentifier = _tokenMatchesIdentifier(next);
+    if (keyword == Keyword.GET && isFollowedByIdentifier) {
       _validateModifiersForGetterOrSetterOrMethod(modifiers);
       return _parseGetter(commentAndMetadata, modifiers.externalKeyword,
           modifiers.staticKeyword, type);
-    } else if (_matchesKeyword(Keyword.SET) &&
-        _tokenMatchesIdentifier(_peek())) {
+    } else if (keyword == Keyword.SET && isFollowedByIdentifier) {
       _validateModifiersForGetterOrSetterOrMethod(modifiers);
       return _parseSetter(commentAndMetadata, modifiers.externalKeyword,
           modifiers.staticKeyword, type);
-    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
       _validateModifiersForOperator(modifiers);
-      return _parseOperator(
-          commentAndMetadata, modifiers.externalKeyword, type);
+      return _parseOperatorAfterKeyword(
+          commentAndMetadata, modifiers.externalKeyword, type, getAndAdvance());
     } else if (!_matchesIdentifier()) {
       if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
         //
@@ -2668,8 +2711,9 @@
       } finally {
         _unlockErrorListener();
       }
-    } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
-      SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true);
+    } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) {
+      SimpleIdentifier methodName =
+          _parseSimpleIdentifierUnchecked(isDeclaration: true);
       TypeParameterList typeParameters = _parseGenericCommentTypeParameters();
       FormalParameterList parameters = parseFormalParameterList();
       if (methodName.name == className) {
@@ -2694,10 +2738,10 @@
           methodName,
           typeParameters,
           parameters);
-    } else if (parseGenericMethods && _tokenMatches(_peek(), TokenType.LT)) {
+    } else if (parseGenericMethods && _tokenMatches(next, TokenType.LT)) {
       return _parseMethodDeclarationAfterReturnType(commentAndMetadata,
           modifiers.externalKeyword, modifiers.staticKeyword, type);
-    } else if (_tokenMatches(_peek(), TokenType.OPEN_CURLY_BRACKET)) {
+    } else if (_tokenMatches(next, TokenType.OPEN_CURLY_BRACKET)) {
       // We have found "TypeName identifier {", and are guessing that this is a
       // getter without the keyword 'get'.
       _validateModifiersForGetterOrSetterOrMethod(modifiers);
@@ -2720,14 +2764,10 @@
    *       | 'hide' identifier (',' identifier)*
    */
   Combinator parseCombinator() {
-    if (_matchesString(_SHOW) || _matchesString(_HIDE)) {
-      Token keyword = getAndAdvance();
-      List<SimpleIdentifier> names = _parseIdentifierList();
-      if (keyword.lexeme == _SHOW) {
-        return new ShowCombinator(keyword, names);
-      } else {
-        return new HideCombinator(keyword, names);
-      }
+    if (_matchesString(_SHOW)) {
+      return new ShowCombinator(getAndAdvance(), _parseIdentifierList());
+    } else if (_matchesString(_HIDE)) {
+      return new HideCombinator(getAndAdvance(), _parseIdentifierList());
     }
     return null;
   }
@@ -2773,70 +2813,72 @@
     bool partOfDirectiveFound = false;
     bool partDirectiveFound = false;
     bool directiveFoundAfterDeclaration = false;
-    List<Directive> directives = new List<Directive>();
-    List<CompilationUnitMember> declarations =
-        new List<CompilationUnitMember>();
+    List<Directive> directives = <Directive>[];
+    List<CompilationUnitMember> declarations = <CompilationUnitMember>[];
     Token memberStart = _currentToken;
-    while (!_matches(TokenType.EOF)) {
+    TokenType type = _currentToken.type;
+    while (type != TokenType.EOF) {
       CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
-      if ((_matchesKeyword(Keyword.IMPORT) ||
-              _matchesKeyword(Keyword.EXPORT) ||
-              _matchesKeyword(Keyword.LIBRARY) ||
-              _matchesKeyword(Keyword.PART)) &&
-          !_tokenMatches(_peek(), TokenType.PERIOD) &&
-          !_tokenMatches(_peek(), TokenType.LT) &&
-          !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
-        Directive directive = _parseDirective(commentAndMetadata);
+      Keyword keyword = _currentToken.keyword;
+      TokenType nextType = _currentToken.next.type;
+      if ((keyword == Keyword.IMPORT ||
+              keyword == Keyword.EXPORT ||
+              keyword == Keyword.LIBRARY ||
+              keyword == Keyword.PART) &&
+          nextType != TokenType.PERIOD &&
+          nextType != TokenType.LT &&
+          nextType != TokenType.OPEN_PAREN) {
+        Directive parseDirective() {
+          if (keyword == Keyword.IMPORT) {
+            if (partDirectiveFound) {
+              _reportErrorForCurrentToken(
+                  ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE);
+            }
+            return _parseImportDirective(commentAndMetadata);
+          } else if (keyword == Keyword.EXPORT) {
+            if (partDirectiveFound) {
+              _reportErrorForCurrentToken(
+                  ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE);
+            }
+            return _parseExportDirective(commentAndMetadata);
+          } else if (keyword == Keyword.LIBRARY) {
+            if (libraryDirectiveFound) {
+              _reportErrorForCurrentToken(
+                  ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES);
+            } else {
+              if (directives.length > 0) {
+                _reportErrorForCurrentToken(
+                    ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST);
+              }
+              libraryDirectiveFound = true;
+            }
+            return _parseLibraryDirective(commentAndMetadata);
+          } else if (keyword == Keyword.PART) {
+            if (_tokenMatchesString(_peek(), _OF)) {
+              partOfDirectiveFound = true;
+              return _parsePartOfDirective(commentAndMetadata);
+            } else {
+              partDirectiveFound = true;
+              return _parsePartDirective(commentAndMetadata);
+            }
+          } else {
+            // Internal error: this method should not have been invoked if the
+            // current token was something other than one of the above.
+            throw new IllegalStateException(
+                "parseDirective invoked in an invalid state (currentToken = $_currentToken)");
+          }
+        }
+        Directive directive = parseDirective();
         if (declarations.length > 0 && !directiveFoundAfterDeclaration) {
           _reportErrorForToken(ParserErrorCode.DIRECTIVE_AFTER_DECLARATION,
               directive.beginToken);
           directiveFoundAfterDeclaration = true;
         }
-        if (directive is LibraryDirective) {
-          if (libraryDirectiveFound) {
-            _reportErrorForCurrentToken(
-                ParserErrorCode.MULTIPLE_LIBRARY_DIRECTIVES);
-          } else {
-            if (directives.length > 0) {
-              _reportErrorForToken(ParserErrorCode.LIBRARY_DIRECTIVE_NOT_FIRST,
-                  directive.libraryKeyword);
-            }
-            libraryDirectiveFound = true;
-          }
-        } else if (directive is PartDirective) {
-          partDirectiveFound = true;
-        } else if (partDirectiveFound) {
-          if (directive is ExportDirective) {
-            _reportErrorForToken(
-                ParserErrorCode.EXPORT_DIRECTIVE_AFTER_PART_DIRECTIVE,
-                directive.keyword);
-          } else if (directive is ImportDirective) {
-            _reportErrorForToken(
-                ParserErrorCode.IMPORT_DIRECTIVE_AFTER_PART_DIRECTIVE,
-                directive.keyword);
-          }
-        }
-        if (directive is PartOfDirective) {
-          if (partOfDirectiveFound) {
-            _reportErrorForCurrentToken(
-                ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES);
-          } else {
-            int directiveCount = directives.length;
-            for (int i = 0; i < directiveCount; i++) {
-              _reportErrorForToken(
-                  ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART,
-                  directives[i].keyword);
-            }
-            partOfDirectiveFound = true;
-          }
-        } else {
-          if (partOfDirectiveFound) {
-            _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART,
-                directive.keyword);
-          }
-        }
         directives.add(directive);
-      } else if (_matches(TokenType.SEMICOLON)) {
+      } else if (type == TokenType.SEMICOLON) {
+        // TODO(brianwilkerson) Consider moving this error detection into
+        // _parseCompilationUnitMember (in the places where EXPECTED_EXECUTABLE
+        // is being generated).
         _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
             [_currentToken.lexeme]);
         _advance();
@@ -2857,6 +2899,38 @@
         }
       }
       memberStart = _currentToken;
+      type = _currentToken.type;
+    }
+    if (partOfDirectiveFound && directives.length > 1) {
+      // TODO(brianwilkerson) Improve error reporting when both a library and
+      // part-of directive are found.
+//      if (libraryDirectiveFound) {
+//        int directiveCount = directives.length;
+//        for (int i = 0; i < directiveCount; i++) {
+//          Directive directive = directives[i];
+//          if (directive is PartOfDirective) {
+//            _reportErrorForToken(
+//                ParserErrorCode.PART_OF_IN_LIBRARY, directive.partKeyword);
+//          }
+//        }
+//      } else {
+      bool firstPartOf = true;
+      int directiveCount = directives.length;
+      for (int i = 0; i < directiveCount; i++) {
+        Directive directive = directives[i];
+        if (directive is PartOfDirective) {
+          if (firstPartOf) {
+            firstPartOf = false;
+          } else {
+            _reportErrorForToken(ParserErrorCode.MULTIPLE_PART_OF_DIRECTIVES,
+                directive.partKeyword);
+          }
+        } else {
+          _reportErrorForToken(ParserErrorCode.NON_PART_OF_DIRECTIVE_IN_PART,
+              directives[i].keyword);
+        }
+//        }
+      }
     }
     return new CompilationUnit(
         firstToken, scriptTag, directives, declarations, _currentToken);
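// Simplified sketch of the directive-ordering checks performed above:
// `library` must come first, `import`/`export` may not follow `part`, and a
// `part of` directive excludes every other directive. The string-based model
// is a hypothetical stand-in for real Directive nodes and error codes.
List<String> validateDirectives(List<String> directives) {
  List<String> errors = <String>[];
  bool partSeen = false;
  for (int i = 0; i < directives.length; i++) {
    String directive = directives[i];
    if (directive == 'library' && i > 0) {
      errors.add('library directive must be first');
    } else if ((directive == 'import' || directive == 'export') && partSeen) {
      errors.add('$directive directive after part directive');
    } else if (directive == 'part') {
      partSeen = true;
    }
  }
  if (directives.contains('part of') && directives.length > 1) {
    errors.add('a part file may contain only a single part-of directive');
  }
  return errors;
}

void main() {
  print(validateDirectives(['import', 'part', 'export'])); // one error
}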
@@ -2871,7 +2945,7 @@
    */
   Expression parseConditionalExpression() {
     Expression condition = parseIfNullExpression();
-    if (!_matches(TokenType.QUESTION)) {
+    if (_currentToken.type != TokenType.QUESTION) {
       return condition;
     }
     Token question = getAndAdvance();
@@ -2932,9 +3006,10 @@
    *       | throwExpression
    */
   Expression parseExpression2() {
-    if (_matchesKeyword(Keyword.THROW)) {
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.THROW) {
       return _parseThrowExpression();
-    } else if (_matchesKeyword(Keyword.RETHROW)) {
+    } else if (keyword == Keyword.RETHROW) {
       // TODO(brianwilkerson) Rethrow is a statement again.
       return _parseRethrowExpression();
     }
@@ -2945,18 +3020,17 @@
     // grammar after making that determination.
     //
     Expression expression = parseConditionalExpression();
-    TokenType tokenType = _currentToken.type;
-    if (tokenType == TokenType.PERIOD_PERIOD) {
-      List<Expression> cascadeSections = new List<Expression>();
-      while (tokenType == TokenType.PERIOD_PERIOD) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.PERIOD_PERIOD) {
+      List<Expression> cascadeSections = <Expression>[];
+      do {
         Expression section = _parseCascadeSection();
         if (section != null) {
           cascadeSections.add(section);
         }
-        tokenType = _currentToken.type;
-      }
+      } while (_currentToken.type == TokenType.PERIOD_PERIOD);
       return new CascadeExpression(expression, cascadeSections);
-    } else if (tokenType.isAssignmentOperator) {
+    } else if (type.isAssignmentOperator) {
       Token operator = getAndAdvance();
       _ensureAssignable(expression);
       return new AssignmentExpression(expression, operator, parseExpression2());
@@ -2999,11 +3073,13 @@
    * Parse a class extends clause. Return the class extends clause that was
    * parsed.
    *
+   * This method assumes that the current token matches `Keyword.EXTENDS`.
+   *
    *     classExtendsClause ::=
    *         'extends' type
    */
   ExtendsClause parseExtendsClause() {
-    Token keyword = _expectKeyword(Keyword.EXTENDS);
+    Token keyword = getAndAdvance();
     TypeName superclass = parseTypeName();
     return new ExtendsClause(keyword, superclass);
   }
@@ -3031,145 +3107,16 @@
    *         '{' defaultNamedParameter (',' defaultNamedParameter)* '}'
    */
   FormalParameterList parseFormalParameterList() {
-    Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
-    if (_matches(TokenType.CLOSE_PAREN)) {
-      return new FormalParameterList(
-          leftParenthesis, null, null, null, getAndAdvance());
+    if (_matches(TokenType.OPEN_PAREN)) {
+      return _parseFormalParameterListUnchecked();
     }
-    //
-    // Even though it is invalid to have default parameters outside of brackets,
-    // required parameters inside of brackets, or multiple groups of default and
-    // named parameters, we allow all of these cases so that we can recover
-    // better.
-    //
-    List<FormalParameter> parameters = new List<FormalParameter>();
-    Token leftSquareBracket = null;
-    Token rightSquareBracket = null;
-    Token leftCurlyBracket = null;
-    Token rightCurlyBracket = null;
-    ParameterKind kind = ParameterKind.REQUIRED;
-    bool firstParameter = true;
-    bool reportedMultiplePositionalGroups = false;
-    bool reportedMultipleNamedGroups = false;
-    bool reportedMixedGroups = false;
-    bool wasOptionalParameter = false;
-    Token initialToken = null;
-    do {
-      if (firstParameter) {
-        firstParameter = false;
-      } else if (!_optional(TokenType.COMMA)) {
-        // TODO(brianwilkerson) The token is wrong, we need to recover from this
-        // case.
-        if (_getEndToken(leftParenthesis) != null) {
-          _reportErrorForCurrentToken(
-              ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]);
-        } else {
-          _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS,
-              _currentToken.previous);
-          break;
-        }
-      }
-      initialToken = _currentToken;
-      //
-      // Handle the beginning of parameter groups.
-      //
-      if (_matches(TokenType.OPEN_SQUARE_BRACKET)) {
-        wasOptionalParameter = true;
-        if (leftSquareBracket != null && !reportedMultiplePositionalGroups) {
-          _reportErrorForCurrentToken(
-              ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS);
-          reportedMultiplePositionalGroups = true;
-        }
-        if (leftCurlyBracket != null && !reportedMixedGroups) {
-          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
-          reportedMixedGroups = true;
-        }
-        leftSquareBracket = getAndAdvance();
-        kind = ParameterKind.POSITIONAL;
-      } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
-        wasOptionalParameter = true;
-        if (leftCurlyBracket != null && !reportedMultipleNamedGroups) {
-          _reportErrorForCurrentToken(
-              ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS);
-          reportedMultipleNamedGroups = true;
-        }
-        if (leftSquareBracket != null && !reportedMixedGroups) {
-          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
-          reportedMixedGroups = true;
-        }
-        leftCurlyBracket = getAndAdvance();
-        kind = ParameterKind.NAMED;
-      }
-      //
-      // Parse and record the parameter.
-      //
-      FormalParameter parameter = _parseFormalParameter(kind);
-      parameters.add(parameter);
-      if (kind == ParameterKind.REQUIRED && wasOptionalParameter) {
-        _reportErrorForNode(
-            ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter);
-      }
-      //
-      // Handle the end of parameter groups.
-      //
-      // TODO(brianwilkerson) Improve the detection and reporting of missing and
-      // mismatched delimiters.
-      if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) {
-        rightSquareBracket = getAndAdvance();
-        if (leftSquareBracket == null) {
-          if (leftCurlyBracket != null) {
-            _reportErrorForCurrentToken(
-                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
-            rightCurlyBracket = rightSquareBracket;
-            rightSquareBracket = null;
-          } else {
-            _reportErrorForCurrentToken(
-                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
-                ["["]);
-          }
-        }
-        kind = ParameterKind.REQUIRED;
-      } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
-        rightCurlyBracket = getAndAdvance();
-        if (leftCurlyBracket == null) {
-          if (leftSquareBracket != null) {
-            _reportErrorForCurrentToken(
-                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
-            rightSquareBracket = rightCurlyBracket;
-            rightCurlyBracket = null;
-          } else {
-            _reportErrorForCurrentToken(
-                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
-                ["{"]);
-          }
-        }
-        kind = ParameterKind.REQUIRED;
-      }
-    } while (!_matches(TokenType.CLOSE_PAREN) &&
-        !identical(initialToken, _currentToken));
-    Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
-    //
-    // Check that the groups were closed correctly.
-    //
-    if (leftSquareBracket != null && rightSquareBracket == null) {
-      _reportErrorForCurrentToken(
-          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
-    }
-    if (leftCurlyBracket != null && rightCurlyBracket == null) {
-      _reportErrorForCurrentToken(
-          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
-    }
-    //
-    // Build the parameter list.
-    //
-    if (leftSquareBracket == null) {
-      leftSquareBracket = leftCurlyBracket;
-    }
-    if (rightSquareBracket == null) {
-      rightSquareBracket = rightCurlyBracket;
-    }
-    return new FormalParameterList(leftParenthesis, parameters,
-        leftSquareBracket, rightSquareBracket, rightParenthesis);
+    // TODO(brianwilkerson) Improve the error message.
+    _reportErrorForCurrentToken(
+        ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]);
+    // Recovery: Check for an unmatched closing paren and parse parameters until
+    // it is reached.
+    return _parseFormalParameterListAfterParen(
+        _createSyntheticToken(TokenType.OPEN_PAREN));
   }
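// Sketch of the checked/unchecked split used above: the public entry point
// verifies the opening token (recovering with a synthetic one when it is
// missing) and an internal helper assumes that check already happened.
// `_MiniParser` and its members are hypothetical stand-ins for the
// analyzer's parser internals.
class _MiniParser {
  final List<String> tokens;
  int index = 0;
  final List<String> errors = <String>[];
  _MiniParser(this.tokens);

  List<String> parseList() {
    if (tokens[index] == '(') {
      index++; // consume '(' -- the "unchecked" path assumes this was done
      return _parseListAfterParen();
    }
    errors.add("expected '('");
    // Recovery: behave as if a synthetic '(' had been seen.
    return _parseListAfterParen();
  }

  List<String> _parseListAfterParen() {
    List<String> items = <String>[];
    while (index < tokens.length && tokens[index] != ')') {
      items.add(tokens[index++]);
    }
    return items;
  }
}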
 
   /**
@@ -3196,10 +3143,9 @@
    */
   Expression parseIfNullExpression() {
     Expression expression = parseLogicalOrExpression();
-    while (_matches(TokenType.QUESTION_QUESTION)) {
-      Token operator = getAndAdvance();
+    while (_currentToken.type == TokenType.QUESTION_QUESTION) {
       expression = new BinaryExpression(
-          expression, operator, parseLogicalOrExpression());
+          expression, getAndAdvance(), parseLogicalOrExpression());
     }
     return expression;
   }
@@ -3207,12 +3153,14 @@
   /**
    * Parse an implements clause. Return the implements clause that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.IMPLEMENTS`.
+   *
    *     implementsClause ::=
    *         'implements' type (',' type)*
    */
   ImplementsClause parseImplementsClause() {
-    Token keyword = _expectKeyword(Keyword.IMPLEMENTS);
-    List<TypeName> interfaces = new List<TypeName>();
+    Token keyword = getAndAdvance();
+    List<TypeName> interfaces = <TypeName>[];
     interfaces.add(parseTypeName());
     while (_optional(TokenType.COMMA)) {
       interfaces.add(parseTypeName());
@@ -3223,13 +3171,16 @@
   /**
    * Parse a label. Return the label that was parsed.
    *
+   * This method assumes that the current token matches an identifier and that
+   * the following token matches `TokenType.COLON`.
+   *
    *     label ::=
    *         identifier ':'
    */
   Label parseLabel({bool isDeclaration: false}) {
     SimpleIdentifier label =
-        parseSimpleIdentifier(isDeclaration: isDeclaration);
-    Token colon = _expect(TokenType.COLON);
+        _parseSimpleIdentifierUnchecked(isDeclaration: isDeclaration);
+    Token colon = getAndAdvance();
     return new Label(label, colon);
   }
 
@@ -3240,10 +3191,9 @@
    *         identifier ('.' identifier)*
    */
   LibraryIdentifier parseLibraryIdentifier() {
-    List<SimpleIdentifier> components = new List<SimpleIdentifier>();
+    List<SimpleIdentifier> components = <SimpleIdentifier>[];
     components.add(parseSimpleIdentifier());
-    while (_matches(TokenType.PERIOD)) {
-      _advance();
+    while (_optional(TokenType.PERIOD)) {
       components.add(parseSimpleIdentifier());
     }
     return new LibraryIdentifier(components);
@@ -3258,10 +3208,9 @@
    */
   Expression parseLogicalOrExpression() {
     Expression expression = _parseLogicalAndExpression();
-    while (_matches(TokenType.BAR_BAR)) {
-      Token operator = getAndAdvance();
+    while (_currentToken.type == TokenType.BAR_BAR) {
       expression = new BinaryExpression(
-          expression, operator, _parseLogicalAndExpression());
+          expression, getAndAdvance(), _parseLogicalAndExpression());
     }
     return expression;
   }
@@ -3310,7 +3259,7 @@
     SimpleIdentifier identifier = parseSimpleIdentifier();
     TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
     if (_matches(TokenType.OPEN_PAREN)) {
-      FormalParameterList parameters = parseFormalParameterList();
+      FormalParameterList parameters = _parseFormalParameterListUnchecked();
       if (thisKeyword == null) {
         if (holder.keyword != null) {
           _reportErrorForToken(
@@ -3381,13 +3330,7 @@
    *         identifier ('.' identifier)?
    */
   Identifier parsePrefixedIdentifier() {
-    SimpleIdentifier qualifier = parseSimpleIdentifier();
-    if (!_matches(TokenType.PERIOD) || _injectGenericCommentTypeList()) {
-      return qualifier;
-    }
-    Token period = getAndAdvance();
-    SimpleIdentifier qualified = parseSimpleIdentifier();
-    return new PrefixedIdentifier(qualifier, period, qualified);
+    return _parsePrefixedIdentifierAfterIdentifier(parseSimpleIdentifier());
   }
 
   /**
@@ -3398,7 +3341,7 @@
    *       | type
    */
   TypeName parseReturnType() {
-    if (_matchesKeyword(Keyword.VOID)) {
+    if (_currentToken.keyword == Keyword.VOID) {
       return new TypeName(new SimpleIdentifier(getAndAdvance()), null);
     } else {
       return parseTypeName();
@@ -3413,14 +3356,7 @@
    */
   SimpleIdentifier parseSimpleIdentifier({bool isDeclaration: false}) {
     if (_matchesIdentifier()) {
-      String lexeme = _currentToken.lexeme;
-      if ((_inAsync || _inGenerator) &&
-          (lexeme == 'async' || lexeme == 'await' || lexeme == 'yield')) {
-        _reportErrorForCurrentToken(
-            ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER);
-      }
-      return new SimpleIdentifier(getAndAdvance(),
-          isDeclaration: isDeclaration);
+      return _parseSimpleIdentifierUnchecked(isDeclaration: isDeclaration);
     }
     _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
     return _createSyntheticIdentifier(isDeclaration: isDeclaration);
@@ -3444,7 +3380,7 @@
    */
   Statement parseStatement2() {
     List<Label> labels = null;
-    while (_matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) {
+    while (_matchesIdentifier() && _currentToken.next.type == TokenType.COLON) {
       Label label = parseLabel(isDeclaration: true);
       if (labels == null) {
         labels = <Label>[label];
@@ -3477,31 +3413,19 @@
    *       | SINGLE_LINE_STRING+
    */
   StringLiteral parseStringLiteral() {
-    List<StringLiteral> strings = new List<StringLiteral>();
-    while (_matches(TokenType.STRING)) {
-      Token string = getAndAdvance();
-      if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
-          _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) {
-        strings.add(_parseStringInterpolation(string));
-      } else {
-        strings.add(new SimpleStringLiteral(
-            string, _computeStringValue(string.lexeme, true, true)));
-      }
+    if (_matches(TokenType.STRING)) {
+      return _parseStringLiteralUnchecked();
     }
-    if (strings.length < 1) {
-      _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL);
-      return _createSyntheticStringLiteral();
-    } else if (strings.length == 1) {
-      return strings[0];
-    } else {
-      return new AdjacentStrings(strings);
-    }
+    _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_STRING_LITERAL);
+    return _createSyntheticStringLiteral();
   }
 
   /**
    * Parse a list of type arguments. Return the type argument list that was
    * parsed.
    *
+   * This method assumes that the current token matches `TokenType.LT`.
+   *
    *     typeArguments ::=
    *         '<' typeList '>'
    *
@@ -3509,9 +3433,8 @@
    *         type (',' type)*
    */
   TypeArgumentList parseTypeArgumentList() {
-    Token leftBracket = _expect(TokenType.LT);
-    List<TypeName> arguments = new List<TypeName>();
-    arguments.add(parseTypeName());
+    Token leftBracket = getAndAdvance();
+    List<TypeName> arguments = <TypeName>[parseTypeName()];
     while (_optional(TokenType.COMMA)) {
       arguments.add(parseTypeName());
     }
@@ -3531,8 +3454,8 @@
     // type to replace the real type name.
     // TODO(jmesserly): this feels like a big hammer. Can we restrict it to
     // only work inside generic methods?
-    TypeName typeComment = _parseOptionalTypeNameComment();
-    return typeComment ?? realType;
+    TypeName typeFromComment = _parseOptionalTypeNameComment();
+    return typeFromComment ?? realType;
   }
 
   /**
@@ -3558,13 +3481,14 @@
    * Parse a list of type parameters. Return the list of type parameters that
    * were parsed.
    *
+   * This method assumes that the current token matches `TokenType.LT`.
+   *
    *     typeParameterList ::=
    *         '<' typeParameter (',' typeParameter)* '>'
    */
   TypeParameterList parseTypeParameterList() {
-    Token leftBracket = _expect(TokenType.LT);
-    List<TypeParameter> typeParameters = new List<TypeParameter>();
-    typeParameters.add(parseTypeParameter());
+    Token leftBracket = getAndAdvance();
+    List<TypeParameter> typeParameters = <TypeParameter>[parseTypeParameter()];
     while (_optional(TokenType.COMMA)) {
       typeParameters.add(parseTypeParameter());
     }
@@ -3575,17 +3499,18 @@
   /**
    * Parse a with clause. Return the with clause that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.WITH`.
+   *
    *     withClause ::=
    *         'with' typeName (',' typeName)*
    */
   WithClause parseWithClause() {
-    Token with2 = _expectKeyword(Keyword.WITH);
-    List<TypeName> types = new List<TypeName>();
-    types.add(parseTypeName());
+    Token withKeyword = getAndAdvance();
+    List<TypeName> types = <TypeName>[parseTypeName()];
     while (_optional(TokenType.COMMA)) {
       types.add(parseTypeName());
     }
-    return new WithClause(with2, types);
+    return new WithClause(withKeyword, types);
   }
 
   /**
@@ -3669,30 +3594,33 @@
    * member.
    */
   bool _couldBeStartOfCompilationUnitMember() {
-    if ((_matchesKeyword(Keyword.IMPORT) ||
-            _matchesKeyword(Keyword.EXPORT) ||
-            _matchesKeyword(Keyword.LIBRARY) ||
-            _matchesKeyword(Keyword.PART)) &&
-        !_tokenMatches(_peek(), TokenType.PERIOD) &&
-        !_tokenMatches(_peek(), TokenType.LT)) {
+    Keyword keyword = _currentToken.keyword;
+    Token next = _currentToken.next;
+    TokenType nextType = next.type;
+    if ((keyword == Keyword.IMPORT ||
+            keyword == Keyword.EXPORT ||
+            keyword == Keyword.LIBRARY ||
+            keyword == Keyword.PART) &&
+        nextType != TokenType.PERIOD &&
+        nextType != TokenType.LT) {
       // This looks like the start of a directive
       return true;
-    } else if (_matchesKeyword(Keyword.CLASS)) {
+    } else if (keyword == Keyword.CLASS) {
       // This looks like the start of a class definition
       return true;
-    } else if (_matchesKeyword(Keyword.TYPEDEF) &&
-        !_tokenMatches(_peek(), TokenType.PERIOD) &&
-        !_tokenMatches(_peek(), TokenType.LT)) {
+    } else if (keyword == Keyword.TYPEDEF &&
+        nextType != TokenType.PERIOD &&
+        nextType != TokenType.LT) {
       // This looks like the start of a typedef
       return true;
-    } else if (_matchesKeyword(Keyword.VOID) ||
-        ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
-            _tokenMatchesIdentifier(_peek())) ||
-        (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek()))) {
+    } else if (keyword == Keyword.VOID ||
+        ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+            _tokenMatchesIdentifier(next)) ||
+        (keyword == Keyword.OPERATOR && _isOperator(next))) {
       // This looks like the start of a function
       return true;
     } else if (_matchesIdentifier()) {
-      if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+      if (nextType == TokenType.OPEN_PAREN) {
         // This looks like the start of a function
         return true;
       }
@@ -3700,9 +3628,10 @@
       if (token == null) {
         return false;
       }
-      if (_matchesKeyword(Keyword.GET) ||
-          _matchesKeyword(Keyword.SET) ||
-          (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) ||
+      // TODO(brianwilkerson) This looks wrong; should we be checking 'token'?
+      if (keyword == Keyword.GET ||
+          keyword == Keyword.SET ||
+          (keyword == Keyword.OPERATOR && _isOperator(next)) ||
           _matchesIdentifier()) {
         return true;
       }
@@ -3872,7 +3801,7 @@
    * should be treated as code blocks.
    */
   List<List<int>> _getCodeBlockRanges(String comment) {
-    List<List<int>> ranges = new List<List<int>>();
+    List<List<int>> ranges = <List<int>>[];
     int length = comment.length;
     if (length < 3) {
       return ranges;
@@ -4015,7 +3944,8 @@
    * function declaration.
    */
   bool _isFunctionDeclaration() {
-    if (_matchesKeyword(Keyword.VOID)) {
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.VOID) {
       return true;
     }
     Token afterReturnType = _skipTypeName(_currentToken);
@@ -4038,20 +3968,20 @@
     }
     // It's possible that we have found a getter. While this isn't valid at this
     // point we test for it in order to recover better.
-    if (_matchesKeyword(Keyword.GET)) {
+    if (keyword == Keyword.GET) {
       Token afterName = _skipSimpleIdentifier(_currentToken.next);
       if (afterName == null) {
         return false;
       }
-      return _tokenMatches(afterName, TokenType.FUNCTION) ||
-          _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET);
+      TokenType type = afterName.type;
+      return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET;
     } else if (_tokenMatchesKeyword(afterReturnType, Keyword.GET)) {
       Token afterName = _skipSimpleIdentifier(afterReturnType.next);
       if (afterName == null) {
         return false;
       }
-      return _tokenMatches(afterName, TokenType.FUNCTION) ||
-          _tokenMatches(afterName, TokenType.OPEN_CURLY_BRACKET);
+      TokenType type = afterName.type;
+      return type == TokenType.FUNCTION || type == TokenType.OPEN_CURLY_BRACKET;
     }
     return false;
   }
@@ -4114,12 +4044,13 @@
    *         identifier ('=' expression)?
    */
   bool _isInitializedVariableDeclaration() {
-    if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.VAR)) {
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.FINAL || keyword == Keyword.VAR) {
       // An expression cannot start with a keyword other than 'const',
       // 'rethrow', or 'throw'.
       return true;
     }
-    if (_matchesKeyword(Keyword.CONST)) {
+    if (keyword == Keyword.CONST) {
       // Look to see whether we might be at the start of a list or map literal,
       // otherwise this should be the start of a variable declaration.
       return !_peek().matchesAny(const <TokenType>[
@@ -4156,7 +4087,7 @@
     if (type == TokenType.EQ ||
         type == TokenType.COMMA ||
         type == TokenType.SEMICOLON ||
-        _tokenMatchesKeyword(token, Keyword.IN)) {
+        token.keyword == Keyword.IN) {
       return true;
     }
     // It is OK to parse as a variable declaration in these cases:
@@ -4178,6 +4109,8 @@
   }
 
   bool _isLikelyArgumentList() {
+    // Try to reduce the amount of lookahead required here before enabling
+    // generic methods.
     if (_matches(TokenType.OPEN_PAREN)) {
       return true;
     }
@@ -4228,7 +4161,7 @@
     if (!startToken.isOperator) {
       return false;
     }
-    // Token "=" means that it is actually field initializer.
+    // Token "=" means that it is actually a field initializer.
     if (startToken.type == TokenType.EQ) {
       return false;
     }
@@ -4372,7 +4305,7 @@
    * should not be invoked with an argument value of [TokenType.GT].
    */
   bool _optional(TokenType type) {
-    if (_matches(type)) {
+    if (_currentToken.type == type) {
       _advance();
       return true;
     }
@@ -4389,28 +4322,45 @@
    */
   Expression _parseAdditiveExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
+    if (_currentToken.keyword == Keyword.SUPER &&
         _currentToken.next.type.isAdditiveOperator) {
       expression = new SuperExpression(getAndAdvance());
     } else {
       expression = _parseMultiplicativeExpression();
     }
     while (_currentToken.type.isAdditiveOperator) {
-      Token operator = getAndAdvance();
       expression = new BinaryExpression(
-          expression, operator, _parseMultiplicativeExpression());
+          expression, getAndAdvance(), _parseMultiplicativeExpression());
     }
     return expression;
   }
 
   /**
+   * Parse an argument list when we need to check for an open paren and recover
+   * when there isn't one. Return the argument list that was parsed.
+   */
+  ArgumentList _parseArgumentListChecked() {
+    if (_matches(TokenType.OPEN_PAREN)) {
+      return parseArgumentList();
+    }
+    _reportErrorForCurrentToken(
+        ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_PAREN.lexeme]);
+    // Recovery: Look to see whether there is a close paren that isn't matched
+    // to an open paren and if so parse the list of arguments as normal.
+    return new ArgumentList(_createSyntheticToken(TokenType.OPEN_PAREN), null,
+        _createSyntheticToken(TokenType.CLOSE_PAREN));
+  }
+
+  /**
    * Parse an assert statement. Return the assert statement.
    *
+   * This method assumes that the current token matches `Keyword.ASSERT`.
+   *
    *     assertStatement ::=
    *         'assert' '(' conditionalExpression ')' ';'
    */
   AssertStatement _parseAssertStatement() {
-    Token keyword = _expectKeyword(Keyword.ASSERT);
+    Token keyword = getAndAdvance();
     Token leftParen = _expect(TokenType.OPEN_PAREN);
     Expression expression = parseExpression2();
     if (expression is AssignmentExpression) {
@@ -4454,6 +4404,17 @@
           new SuperExpression(getAndAdvance()), false,
           allowConditional: false);
     }
+    return _parseAssignableExpressionNotStartingWithSuper(primaryAllowed);
+  }
+
+  /**
+   * Parse an assignable expression given that the current token is not 'super'.
+   * The [primaryAllowed] is `true` if the expression is allowed to be a primary
+   * without any assignable selector. Return the assignable expression that was
+   * parsed.
+   */
+  Expression _parseAssignableExpressionNotStartingWithSuper(
+      bool primaryAllowed) {
     //
     // A primary expression can start with an identifier. We resolve the
     // ambiguity by determining whether the primary consists of anything other
@@ -4523,7 +4484,8 @@
    */
   Expression _parseAssignableSelector(Expression prefix, bool optional,
       {bool allowConditional: true}) {
-    if (_matches(TokenType.OPEN_SQUARE_BRACKET)) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.OPEN_SQUARE_BRACKET) {
       Token leftBracket = getAndAdvance();
       bool wasInInitializer = _inInitializer;
       _inInitializer = false;
@@ -4535,27 +4497,32 @@
       } finally {
         _inInitializer = wasInInitializer;
       }
-    } else if (_matches(TokenType.PERIOD) ||
-        _matches(TokenType.QUESTION_PERIOD)) {
-      if (_matches(TokenType.QUESTION_PERIOD) && !allowConditional) {
-        _reportErrorForCurrentToken(
-            ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [_currentToken.lexeme]);
-      }
-      Token operator = getAndAdvance();
-      return new PropertyAccess(prefix, operator, parseSimpleIdentifier());
     } else {
-      if (!optional) {
-        // Report the missing selector.
-        _reportErrorForCurrentToken(
-            ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR);
+      bool isQuestionPeriod = type == TokenType.QUESTION_PERIOD;
+      if (type == TokenType.PERIOD || isQuestionPeriod) {
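+        // Report '?.' as an error when conditional selectors are not allowed
+        // (for example, after 'super').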
+        if (isQuestionPeriod && !allowConditional) {
+          _reportErrorForCurrentToken(
+              ParserErrorCode.INVALID_OPERATOR_FOR_SUPER,
+              [_currentToken.lexeme]);
+        }
+        Token operator = getAndAdvance();
+        return new PropertyAccess(prefix, operator, parseSimpleIdentifier());
+      } else {
+        if (!optional) {
+          // Report the missing selector.
+          _reportErrorForCurrentToken(
+              ParserErrorCode.MISSING_ASSIGNABLE_SELECTOR);
+        }
+        return prefix;
       }
-      return prefix;
     }
   }
 
   /**
    * Parse an await expression. Return the await expression that was parsed.
    *
+   * This method assumes that the current token matches `_AWAIT`.
+   *
    *     awaitExpression ::=
    *         'await' unaryExpression
    */
@@ -4575,16 +4542,15 @@
    */
   Expression _parseBitwiseAndExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
-        _tokenMatches(_peek(), TokenType.AMPERSAND)) {
+    if (_currentToken.keyword == Keyword.SUPER &&
+        _currentToken.next.type == TokenType.AMPERSAND) {
       expression = new SuperExpression(getAndAdvance());
     } else {
       expression = _parseShiftExpression();
     }
-    while (_matches(TokenType.AMPERSAND)) {
-      Token operator = getAndAdvance();
-      expression =
-          new BinaryExpression(expression, operator, _parseShiftExpression());
+    while (_currentToken.type == TokenType.AMPERSAND) {
+      expression = new BinaryExpression(
+          expression, getAndAdvance(), _parseShiftExpression());
     }
     return expression;
   }
@@ -4599,31 +4565,52 @@
    */
   Expression _parseBitwiseXorExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
-        _tokenMatches(_peek(), TokenType.CARET)) {
+    if (_currentToken.keyword == Keyword.SUPER &&
+        _currentToken.next.type == TokenType.CARET) {
       expression = new SuperExpression(getAndAdvance());
     } else {
       expression = _parseBitwiseAndExpression();
     }
-    while (_matches(TokenType.CARET)) {
-      Token operator = getAndAdvance();
+    while (_currentToken.type == TokenType.CARET) {
       expression = new BinaryExpression(
-          expression, operator, _parseBitwiseAndExpression());
+          expression, getAndAdvance(), _parseBitwiseAndExpression());
     }
     return expression;
   }
 
   /**
+   * Parse a block when we need to check for an open curly brace and recover
+   * when there isn't one. Return the block that was parsed.
+   *
+   *     block ::=
+   *         '{' statements '}'
+   */
+  Block _parseBlockChecked() {
+    if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+      return parseBlock();
+    }
+    // TODO(brianwilkerson) Improve the error message.
+    _reportErrorForCurrentToken(
+        ParserErrorCode.EXPECTED_TOKEN, [TokenType.OPEN_CURLY_BRACKET.lexeme]);
+    // Recovery: Check for an unmatched closing curly bracket and parse
+    // statements until it is reached.
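+    // For now, fabricate an empty block with synthetic braces so that parsing
+    // can continue.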
+    return new Block(_createSyntheticToken(TokenType.OPEN_CURLY_BRACKET), null,
+        _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET));
+  }
+
+  /**
    * Parse a break statement. Return the break statement that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.BREAK`.
+   *
    *     breakStatement ::=
    *         'break' identifier? ';'
    */
   Statement _parseBreakStatement() {
-    Token breakKeyword = _expectKeyword(Keyword.BREAK);
+    Token breakKeyword = getAndAdvance();
     SimpleIdentifier label = null;
     if (_matchesIdentifier()) {
-      label = parseSimpleIdentifier();
+      label = _parseSimpleIdentifierUnchecked();
     }
     if (!_inLoop && !_inSwitch && label == null) {
       _reportErrorForToken(ParserErrorCode.BREAK_OUTSIDE_OF_LOOP, breakKeyword);
@@ -4636,6 +4623,9 @@
    * Parse a cascade section. Return the expression representing the cascaded
    * method invocation.
    *
+   * This method assumes that the current token matches
+   * `TokenType.PERIOD_PERIOD`.
+   *
    *     cascadeSection ::=
    *         '..' (cascadeSelector typeArguments? arguments*)
    *         (assignableSelector typeArguments? arguments*)* cascadeAssignment?
@@ -4648,11 +4638,11 @@
    *         assignmentOperator expressionWithoutCascade
    */
   Expression _parseCascadeSection() {
-    Token period = _expect(TokenType.PERIOD_PERIOD);
+    Token period = getAndAdvance();
     Expression expression = null;
     SimpleIdentifier functionName = null;
     if (_matchesIdentifier()) {
-      functionName = parseSimpleIdentifier();
+      functionName = _parseSimpleIdentifierUnchecked();
     } else if (_currentToken.type == TokenType.OPEN_SQUARE_BRACKET) {
       Token leftBracket = getAndAdvance();
       bool wasInInitializer = _inInitializer;
@@ -4674,7 +4664,7 @@
     assert((expression == null && functionName != null) ||
         (expression != null && functionName == null));
     if (_isLikelyArgumentList()) {
-      while (_isLikelyArgumentList()) {
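+      // At least one argument list is known to be present (checked above), so
+      // consume it and any that follow.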
+      do {
         TypeArgumentList typeArguments = _parseOptionalTypeArguments();
         if (functionName != null) {
           expression = new MethodInvocation(expression, period, functionName,
@@ -4689,7 +4679,7 @@
           expression = new FunctionExpressionInvocation(
               expression, typeArguments, parseArgumentList());
         }
-      }
+      } while (_isLikelyArgumentList());
     } else if (functionName != null) {
       expression = new PropertyAccess(expression, period, functionName);
       period = null;
@@ -4734,31 +4724,33 @@
    * keyword 'abstract', or `null` if the keyword was not given. Return the
    * class declaration that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.CLASS`.
+   *
    *     classDeclaration ::=
    *         metadata 'abstract'? 'class' name typeParameterList? (extendsClause withClause?)? implementsClause? '{' classMembers '}' |
    *         metadata 'abstract'? 'class' mixinApplicationClass
    */
   CompilationUnitMember _parseClassDeclaration(
       CommentAndMetadata commentAndMetadata, Token abstractKeyword) {
-    Token keyword = _expectKeyword(Keyword.CLASS);
-    if (_matchesIdentifier()) {
-      Token next = _peek();
-      if (_tokenMatches(next, TokenType.LT)) {
-        next = _skipTypeParameterList(next);
-        if (next != null && _tokenMatches(next, TokenType.EQ)) {
-          return _parseClassTypeAlias(
-              commentAndMetadata, abstractKeyword, keyword);
-        }
-      } else if (_tokenMatches(next, TokenType.EQ)) {
-        return _parseClassTypeAlias(
-            commentAndMetadata, abstractKeyword, keyword);
-      }
-    }
+    //
+    // Parse the name and type parameters.
+    //
+    Token keyword = getAndAdvance();
     SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
     String className = name.name;
     TypeParameterList typeParameters = null;
-    if (_matches(TokenType.LT)) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.LT) {
       typeParameters = parseTypeParameterList();
+      type = _currentToken.type;
+    }
+    //
+    // Check to see whether this might be a class type alias rather than a class
+    // declaration.
+    //
+    if (type == TokenType.EQ) {
+      return _parseClassTypeAliasAfterName(
+          commentAndMetadata, abstractKeyword, keyword, name, typeParameters);
     }
     //
     // Parse the clauses. The parser accepts clauses in any order, but will
@@ -4770,7 +4762,8 @@
     ImplementsClause implementsClause = null;
     bool foundClause = true;
     while (foundClause) {
-      if (_matchesKeyword(Keyword.EXTENDS)) {
+      Keyword keyword = _currentToken.keyword;
+      if (keyword == Keyword.EXTENDS) {
         if (extendsClause == null) {
           extendsClause = parseExtendsClause();
           if (withClause != null) {
@@ -4785,7 +4778,7 @@
               extendsClause.extendsKeyword);
           parseExtendsClause();
         }
-      } else if (_matchesKeyword(Keyword.WITH)) {
+      } else if (keyword == Keyword.WITH) {
         if (withClause == null) {
           withClause = parseWithClause();
           if (implementsClause != null) {
@@ -4799,7 +4792,7 @@
           // TODO(brianwilkerson) Should we merge the list of applied mixins
           // into a single list?
         }
-      } else if (_matchesKeyword(Keyword.IMPLEMENTS)) {
+      } else if (keyword == Keyword.IMPLEMENTS) {
         if (implementsClause == null) {
           implementsClause = parseImplementsClause();
         } else {
@@ -4831,10 +4824,12 @@
     List<ClassMember> members = null;
     Token rightBracket = null;
     if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
-      leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
+      leftBracket = getAndAdvance();
       members = _parseClassMembers(className, _getEndToken(leftBracket));
       rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
     } else {
+      // Recovery: Check for an unmatched closing curly bracket and parse
+      // members until it is reached.
       leftBracket = _createSyntheticToken(TokenType.OPEN_CURLY_BRACKET);
       rightBracket = _createSyntheticToken(TokenType.CLOSE_CURLY_BRACKET);
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_CLASS_BODY);
@@ -4866,14 +4861,15 @@
    *         (metadata memberDefinition)*
    */
   List<ClassMember> _parseClassMembers(String className, Token closingBracket) {
-    List<ClassMember> members = new List<ClassMember>();
+    List<ClassMember> members = <ClassMember>[];
     Token memberStart = _currentToken;
-    while (!_matches(TokenType.EOF) &&
-        !_matches(TokenType.CLOSE_CURLY_BRACKET) &&
+    TokenType type = _currentToken.type;
+    Keyword keyword = _currentToken.keyword;
+    while (type != TokenType.EOF &&
+        type != TokenType.CLOSE_CURLY_BRACKET &&
         (closingBracket != null ||
-            (!_matchesKeyword(Keyword.CLASS) &&
-                !_matchesKeyword(Keyword.TYPEDEF)))) {
-      if (_matches(TokenType.SEMICOLON)) {
+            (keyword != Keyword.CLASS && keyword != Keyword.TYPEDEF))) {
+      if (type == TokenType.SEMICOLON) {
         _reportErrorForToken(ParserErrorCode.UNEXPECTED_TOKEN, _currentToken,
             [_currentToken.lexeme]);
         _advance();
@@ -4889,6 +4885,8 @@
         _advance();
       }
       memberStart = _currentToken;
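+      // Refresh the cached token type and keyword for the next iteration.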
+      type = _currentToken.type;
+      keyword = _currentToken.keyword;
     }
     return members;
   }
@@ -4899,6 +4897,8 @@
    * the 'abstract' keyword. The [classKeyword] is the token representing the
    * 'class' keyword. Return the class type alias that was parsed.
    *
+   * This method assumes that the current token matches an identifier.
+   *
    *     classTypeAlias ::=
    *         identifier typeParameters? '=' 'abstract'? mixinApplication
    *
@@ -4907,11 +4907,36 @@
    */
   ClassTypeAlias _parseClassTypeAlias(CommentAndMetadata commentAndMetadata,
       Token abstractKeyword, Token classKeyword) {
-    SimpleIdentifier className = parseSimpleIdentifier(isDeclaration: true);
+    SimpleIdentifier className =
+        _parseSimpleIdentifierUnchecked(isDeclaration: true);
     TypeParameterList typeParameters = null;
     if (_matches(TokenType.LT)) {
       typeParameters = parseTypeParameterList();
     }
+    return _parseClassTypeAliasAfterName(commentAndMetadata, abstractKeyword,
+        classKeyword, className, typeParameters);
+  }
+
+  /**
+   * Parse a class type alias. The [commentAndMetadata] is the metadata to be
+   * associated with the member. The [abstractKeyword] is the token representing
+   * the 'abstract' keyword. The [classKeyword] is the token representing the
+   * 'class' keyword. The [className] is the name of the alias, and the
+   * [typeParameters] are the type parameters following the name. Return the
+   * class type alias that was parsed.
+   *
+   *     classTypeAlias ::=
+   *         identifier typeParameters? '=' 'abstract'? mixinApplication
+   *
+   *     mixinApplication ::=
+   *         type withClause implementsClause? ';'
+   */
+  ClassTypeAlias _parseClassTypeAliasAfterName(
+      CommentAndMetadata commentAndMetadata,
+      Token abstractKeyword,
+      Token classKeyword,
+      SimpleIdentifier className,
+      TypeParameterList typeParameters) {
     Token equals = _expect(TokenType.EQ);
     TypeName superclass = parseTypeName();
     WithClause withClause = null;
@@ -4957,19 +4982,20 @@
 
   /**
    * Parse a list of combinators in a directive. Return the combinators that
-   * were parsed.
+   * were parsed, or `null` if there are no combinators.
    *
    *     combinator ::=
    *         'show' identifier (',' identifier)*
    *       | 'hide' identifier (',' identifier)*
    */
   List<Combinator> _parseCombinators() {
-    List<Combinator> combinators = new List<Combinator>();
+    List<Combinator> combinators = null;
     while (true) {
       Combinator combinator = parseCombinator();
       if (combinator == null) {
         break;
       }
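+      // Create the list lazily so that `null` can be returned when there are
+      // no combinators.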
+      combinators ??= <Combinator>[];
       combinators.add(combinator);
     }
     return combinators;
@@ -4986,18 +5012,20 @@
    *         annotation*
    */
   CommentAndMetadata _parseCommentAndMetadata() {
-    Comment comment = _parseDocumentationComment();
+    // TODO(brianwilkerson) Consider making the creation of documentation
+    // comments lazy.
+    List<DocumentationCommentToken> tokens = _parseDocumentationCommentTokens();
     List<Annotation> metadata = null;
     while (_matches(TokenType.AT)) {
-      metadata ??= new List<Annotation>();
+      metadata ??= <Annotation>[];
       metadata.add(parseAnnotation());
-      Comment optionalComment = _parseDocumentationComment();
-      if (optionalComment != null) {
-        comment = optionalComment;
+      List<DocumentationCommentToken> optionalTokens =
+          _parseDocumentationCommentTokens();
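+      // A documentation comment found after an annotation replaces any
+      // comment that preceded it.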
+      if (optionalTokens != null) {
+        tokens = optionalTokens;
       }
     }
-    metadata ??= const <Annotation>[];
-    return new CommentAndMetadata(comment, metadata);
+    return new CommentAndMetadata(_parseDocumentationComment(tokens), metadata);
   }
 
   /**
@@ -5014,11 +5042,6 @@
       String referenceSource, int sourceOffset) {
     // TODO(brianwilkerson) The errors are not getting the right offset/length
     // and are being duplicated.
-    if (referenceSource.length == 0) {
-      Token syntheticToken =
-          new SyntheticStringToken(TokenType.IDENTIFIER, "", sourceOffset);
-      return new CommentReference(null, new SimpleIdentifier(syntheticToken));
-    }
     try {
       BooleanErrorListener listener = new BooleanErrorListener();
       Scanner scanner = new Scanner(
@@ -5028,6 +5051,12 @@
       if (listener.errorReported) {
         return null;
       }
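+      // An empty reference scans to a bare EOF token; represent it with a
+      // synthetic empty identifier.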
+      if (firstToken.type == TokenType.EOF) {
+        Token syntheticToken =
+            new SyntheticStringToken(TokenType.IDENTIFIER, "", sourceOffset);
+        syntheticToken.setNext(firstToken);
+        return new CommentReference(null, new SimpleIdentifier(syntheticToken));
+      }
       Token newKeyword = null;
       if (_tokenMatchesKeyword(firstToken, Keyword.NEW)) {
         newKeyword = firstToken;
@@ -5051,16 +5080,19 @@
           return null;
         }
         return new CommentReference(newKeyword, identifier);
-      } else if (_tokenMatchesKeyword(firstToken, Keyword.THIS) ||
-          _tokenMatchesKeyword(firstToken, Keyword.NULL) ||
-          _tokenMatchesKeyword(firstToken, Keyword.TRUE) ||
-          _tokenMatchesKeyword(firstToken, Keyword.FALSE)) {
-        // TODO(brianwilkerson) If we want to support this we will need to
-        // extend the definition of CommentReference to take an expression
-        // rather than an identifier. For now we just ignore it to reduce the
-        // number of errors produced, but that's probably not a valid long term
-        // approach.
-        return null;
+      } else {
+        Keyword keyword = firstToken.keyword;
+        if (keyword == Keyword.THIS ||
+            keyword == Keyword.NULL ||
+            keyword == Keyword.TRUE ||
+            keyword == Keyword.FALSE) {
+          // TODO(brianwilkerson) If we want to support this we will need to
+          // extend the definition of CommentReference to take an expression
+          // rather than an identifier. For now we just ignore it to reduce the
+          // number of errors produced, but that's probably not a valid long term
+          // approach.
+          return null;
+        }
       }
     } catch (exception) {
       // Ignored because we assume that it wasn't a real comment reference.
@@ -5082,7 +5114,7 @@
    */
   List<CommentReference> _parseCommentReferences(
       List<DocumentationCommentToken> tokens) {
-    List<CommentReference> references = new List<CommentReference>();
+    List<CommentReference> references = <CommentReference>[];
     for (DocumentationCommentToken token in tokens) {
       String comment = token.lexeme;
       comment = _removeCodeBlocksGitHub(comment);
@@ -5112,20 +5144,21 @@
           } else {
             // terminating ']' is not typed yet
             int charAfterLeft = comment.codeUnitAt(leftIndex + 1);
+            Token nameToken;
             if (Character.isLetterOrDigit(charAfterLeft)) {
               int nameEnd = StringUtilities.indexOfFirstNotLetterDigit(
                   comment, leftIndex + 1);
               String name = comment.substring(leftIndex + 1, nameEnd);
-              Token nameToken =
+              nameToken =
                   new StringToken(TokenType.IDENTIFIER, name, nameOffset);
-              references.add(
-                  new CommentReference(null, new SimpleIdentifier(nameToken)));
             } else {
-              Token nameToken = new SyntheticStringToken(
-                  TokenType.IDENTIFIER, "", nameOffset);
-              references.add(
-                  new CommentReference(null, new SimpleIdentifier(nameToken)));
+              nameToken = new SyntheticStringToken(
+                  TokenType.IDENTIFIER, '', nameOffset);
             }
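+            // Give the name token a terminating EOF so that it forms a
+            // well-formed token stream.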
+            nameToken.setNext(new SimpleToken(TokenType.EOF, nameToken.end));
+            references.add(
+                new CommentReference(null, new SimpleIdentifier(nameToken)));
+            token.references.add(nameToken);
             // next character
             rightIndex = leftIndex + 1;
           }
@@ -5158,32 +5191,41 @@
   CompilationUnitMember _parseCompilationUnitMember(
       CommentAndMetadata commentAndMetadata) {
     Modifiers modifiers = _parseModifiers();
-    if (_matchesKeyword(Keyword.CLASS)) {
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.CLASS) {
       return _parseClassDeclaration(
           commentAndMetadata, _validateModifiersForClass(modifiers));
-    } else if (_matchesKeyword(Keyword.TYPEDEF) &&
-        !_tokenMatches(_peek(), TokenType.PERIOD) &&
-        !_tokenMatches(_peek(), TokenType.LT) &&
-        !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+    }
+    Token next = _peek();
+    TokenType nextType = next.type;
+    if (keyword == Keyword.TYPEDEF &&
+        nextType != TokenType.PERIOD &&
+        nextType != TokenType.LT &&
+        nextType != TokenType.OPEN_PAREN) {
       _validateModifiersForTypedef(modifiers);
       return _parseTypeAlias(commentAndMetadata);
-    } else if (_matchesKeyword(Keyword.ENUM)) {
+    } else if (keyword == Keyword.ENUM) {
       _validateModifiersForEnum(modifiers);
       return _parseEnumDeclaration(commentAndMetadata);
-    }
-    if (_matchesKeyword(Keyword.VOID)) {
-      TypeName returnType = parseReturnType();
-      if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
-          _tokenMatchesIdentifier(_peek())) {
+    } else if (keyword == Keyword.VOID) {
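+      // Build the 'void' return type directly from the keyword token.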
+      TypeName returnType =
+          new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+      keyword = _currentToken.keyword;
+      next = _peek();
+      if ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+          _tokenMatchesIdentifier(next)) {
         _validateModifiersForTopLevelFunction(modifiers);
         return _parseFunctionDeclaration(
             commentAndMetadata, modifiers.externalKeyword, returnType);
-      } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+      } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
         _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
-        return _convertToFunctionDeclaration(_parseOperator(
-            commentAndMetadata, modifiers.externalKeyword, returnType));
+        return _convertToFunctionDeclaration(_parseOperatorAfterKeyword(
+            commentAndMetadata,
+            modifiers.externalKeyword,
+            returnType,
+            getAndAdvance()));
       } else if (_matchesIdentifier() &&
-          _peek().matchesAny(const <TokenType>[
+          next.matchesAny(const <TokenType>[
             TokenType.OPEN_PAREN,
             TokenType.OPEN_CURLY_BRACKET,
             TokenType.FUNCTION,
@@ -5197,7 +5239,7 @@
         // We have found an error of some kind. Try to recover.
         //
         if (_matchesIdentifier()) {
-          if (_peek().matchesAny(const <TokenType>[
+          if (next.matchesAny(const <TokenType>[
             TokenType.EQ,
             TokenType.COMMA,
             TokenType.SEMICOLON
@@ -5218,15 +5260,18 @@
             ParserErrorCode.EXPECTED_EXECUTABLE, _currentToken);
         return null;
       }
-    } else if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
-        _tokenMatchesIdentifier(_peek())) {
+    } else if ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+        _tokenMatchesIdentifier(next)) {
       _validateModifiersForTopLevelFunction(modifiers);
       return _parseFunctionDeclaration(
           commentAndMetadata, modifiers.externalKeyword, null);
-    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
       _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
-      return _convertToFunctionDeclaration(
-          _parseOperator(commentAndMetadata, modifiers.externalKeyword, null));
+      return _convertToFunctionDeclaration(_parseOperatorAfterKeyword(
+          commentAndMetadata,
+          modifiers.externalKeyword,
+          null,
+          getAndAdvance()));
     } else if (!_matchesIdentifier()) {
       Token keyword = modifiers.varKeyword;
       if (keyword == null) {
@@ -5240,9 +5285,9 @@
         // We appear to have found an incomplete top-level variable declaration.
         //
         _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
-        List<VariableDeclaration> variables = new List<VariableDeclaration>();
-        variables.add(
-            new VariableDeclaration(_createSyntheticIdentifier(), null, null));
+        VariableDeclaration variable =
+            new VariableDeclaration(_createSyntheticIdentifier(), null, null);
+        List<VariableDeclaration> variables = <VariableDeclaration>[variable];
         return new TopLevelVariableDeclaration(
             commentAndMetadata.comment,
             commentAndMetadata.metadata,
@@ -5254,12 +5299,12 @@
     } else if (_isPeekGenericTypeParametersAndOpenParen()) {
       return _parseFunctionDeclaration(
           commentAndMetadata, modifiers.externalKeyword, null);
-    } else if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+    } else if (_tokenMatches(next, TokenType.OPEN_PAREN)) {
       TypeName returnType = _parseOptionalTypeNameComment();
       _validateModifiersForTopLevelFunction(modifiers);
       return _parseFunctionDeclaration(
           commentAndMetadata, modifiers.externalKeyword, returnType);
-    } else if (_peek().matchesAny(const <TokenType>[
+    } else if (next.matchesAny(const <TokenType>[
       TokenType.EQ,
       TokenType.COMMA,
       TokenType.SEMICOLON
@@ -5278,15 +5323,20 @@
           _expect(TokenType.SEMICOLON));
     }
     TypeName returnType = parseReturnType();
-    if ((_matchesKeyword(Keyword.GET) || _matchesKeyword(Keyword.SET)) &&
-        _tokenMatchesIdentifier(_peek())) {
+    keyword = _currentToken.keyword;
+    next = _peek();
+    if ((keyword == Keyword.GET || keyword == Keyword.SET) &&
+        _tokenMatchesIdentifier(next)) {
       _validateModifiersForTopLevelFunction(modifiers);
       return _parseFunctionDeclaration(
           commentAndMetadata, modifiers.externalKeyword, returnType);
-    } else if (_matchesKeyword(Keyword.OPERATOR) && _isOperator(_peek())) {
+    } else if (keyword == Keyword.OPERATOR && _isOperator(next)) {
       _reportErrorForToken(ParserErrorCode.TOP_LEVEL_OPERATOR, _currentToken);
-      return _convertToFunctionDeclaration(_parseOperator(
-          commentAndMetadata, modifiers.externalKeyword, returnType));
+      return _convertToFunctionDeclaration(_parseOperatorAfterKeyword(
+          commentAndMetadata,
+          modifiers.externalKeyword,
+          returnType,
+          getAndAdvance()));
     } else if (_matches(TokenType.AT)) {
       return new TopLevelVariableDeclaration(
           commentAndMetadata.comment,
@@ -5304,16 +5354,15 @@
       } else {
         semicolon = _createSyntheticToken(TokenType.SEMICOLON);
       }
-      List<VariableDeclaration> variables = new List<VariableDeclaration>();
-      variables.add(
-          new VariableDeclaration(_createSyntheticIdentifier(), null, null));
+      VariableDeclaration variable =
+          new VariableDeclaration(_createSyntheticIdentifier(), null, null);
+      List<VariableDeclaration> variables = <VariableDeclaration>[variable];
       return new TopLevelVariableDeclaration(
           commentAndMetadata.comment,
           commentAndMetadata.metadata,
           new VariableDeclarationList(null, null, null, returnType, variables),
           semicolon);
-    }
-    if (_peek().matchesAny(const <TokenType>[
+    } else if (next.matchesAny(const <TokenType>[
       TokenType.OPEN_PAREN,
       TokenType.FUNCTION,
       TokenType.OPEN_CURLY_BRACKET,
@@ -5334,6 +5383,8 @@
   /**
    * Parse a configuration in either an import or export directive.
    *
+   * This method assumes that the current token matches `Keyword.IF`.
+   *
    *     configuration ::=
    *         'if' '(' test ')' uri
    *
@@ -5344,7 +5395,7 @@
    *         identifier ('.' identifier)*
    */
   Configuration _parseConfiguration() {
-    Token ifKeyword = _expectKeyword(Keyword.IF);
+    Token ifKeyword = getAndAdvance();
     Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
     DottedName name = _parseDottedName();
     Token equalToken = null;
@@ -5364,12 +5415,13 @@
   }
 
   /**
-   * Parse a list of configurations. If conditional directives are not
-   * supported, return an empty list without attempting to parse anything.
+   * Parse a list of configurations. Return the configurations that were parsed,
+   * or `null` if there are no configurations.
    */
   List<Configuration> _parseConfigurations() {
-    List<Configuration> configurations = <Configuration>[];
+    List<Configuration> configurations = null;
     while (_matchesKeyword(Keyword.IF)) {
+      configurations ??= <Configuration>[];
       configurations.add(_parseConfiguration());
     }
     return configurations;
@@ -5378,19 +5430,22 @@
   /**
    * Parse a const expression. Return the const expression that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.CONST`.
+   *
    *     constExpression ::=
    *         instanceCreationExpression
    *       | listLiteral
    *       | mapLiteral
    */
   Expression _parseConstExpression() {
-    Token keyword = _expectKeyword(Keyword.CONST);
-    if (_matches(TokenType.LT) || _injectGenericCommentTypeList()) {
+    Token keyword = getAndAdvance();
+    TokenType type = _currentToken.type;
+    if (type == TokenType.LT || _injectGenericCommentTypeList()) {
       return _parseListOrMapLiteral(keyword);
-    } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
-        _matches(TokenType.INDEX)) {
+    } else if (type == TokenType.OPEN_SQUARE_BRACKET ||
+        type == TokenType.INDEX) {
       return _parseListLiteral(keyword, null);
-    } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+    } else if (type == TokenType.OPEN_CURLY_BRACKET) {
       return _parseMapLiteral(keyword, null);
     }
     return _parseInstanceCreationExpression(keyword);
@@ -5410,26 +5465,28 @@
     List<ConstructorInitializer> initializers = null;
     if (_matches(TokenType.COLON)) {
       separator = getAndAdvance();
-      initializers = new List<ConstructorInitializer>();
+      initializers = <ConstructorInitializer>[];
       do {
-        if (_matchesKeyword(Keyword.THIS)) {
-          if (_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+        Keyword keyword = _currentToken.keyword;
+        if (keyword == Keyword.THIS) {
+          TokenType nextType = _peek().type;
+          if (nextType == TokenType.OPEN_PAREN) {
             bodyAllowed = false;
-            initializers.add(_parseRedirectingConstructorInvocation());
-          } else if (_tokenMatches(_peek(), TokenType.PERIOD) &&
+            initializers.add(_parseRedirectingConstructorInvocation(false));
+          } else if (nextType == TokenType.PERIOD &&
               _tokenMatches(_peekAt(3), TokenType.OPEN_PAREN)) {
             bodyAllowed = false;
-            initializers.add(_parseRedirectingConstructorInvocation());
+            initializers.add(_parseRedirectingConstructorInvocation(true));
           } else {
-            initializers.add(_parseConstructorFieldInitializer());
+            initializers.add(_parseConstructorFieldInitializer(true));
           }
-        } else if (_matchesKeyword(Keyword.SUPER)) {
+        } else if (keyword == Keyword.SUPER) {
           initializers.add(_parseSuperConstructorInvocation());
         } else if (_matches(TokenType.OPEN_CURLY_BRACKET) ||
             _matches(TokenType.FUNCTION)) {
           _reportErrorForCurrentToken(ParserErrorCode.MISSING_INITIALIZER);
         } else {
-          initializers.add(_parseConstructorFieldInitializer());
+          initializers.add(_parseConstructorFieldInitializer(false));
         }
       } while (_optional(TokenType.COMMA));
       if (factoryKeyword != null) {
@@ -5492,54 +5549,55 @@
   }
 
   /**
-   * Parse a field initializer within a constructor. Return the field
-   * initializer that was parsed.
+   * Parse a field initializer within a constructor. The flag [hasThis] should
+   * be `true` if the current token is `this`. Return the field initializer
+   * that was parsed.
    *
    *     fieldInitializer:
    *         ('this' '.')? identifier '=' conditionalExpression cascadeSection*
    */
-  ConstructorFieldInitializer _parseConstructorFieldInitializer() {
-    Token keyword = null;
+  ConstructorFieldInitializer _parseConstructorFieldInitializer(bool hasThis) {
+    Token keywordToken = null;
     Token period = null;
-    if (_matchesKeyword(Keyword.THIS)) {
-      keyword = getAndAdvance();
+    if (hasThis) {
+      keywordToken = getAndAdvance();
       period = _expect(TokenType.PERIOD);
     }
     SimpleIdentifier fieldName = parseSimpleIdentifier();
     Token equals = null;
-    if (_matches(TokenType.EQ)) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.EQ) {
       equals = getAndAdvance();
-    } else if (!_matchesKeyword(Keyword.THIS) &&
-        !_matchesKeyword(Keyword.SUPER) &&
-        !_matches(TokenType.OPEN_CURLY_BRACKET) &&
-        !_matches(TokenType.FUNCTION)) {
-      _reportErrorForCurrentToken(
-          ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER);
-      equals = _createSyntheticToken(TokenType.EQ);
     } else {
       _reportErrorForCurrentToken(
           ParserErrorCode.MISSING_ASSIGNMENT_IN_INITIALIZER);
-      return new ConstructorFieldInitializer(keyword, period, fieldName,
-          _createSyntheticToken(TokenType.EQ), _createSyntheticIdentifier());
+      Keyword keyword = _currentToken.keyword;
+      if (keyword != Keyword.THIS &&
+          keyword != Keyword.SUPER &&
+          type != TokenType.OPEN_CURLY_BRACKET &&
+          type != TokenType.FUNCTION) {
+        equals = _createSyntheticToken(TokenType.EQ);
+      } else {
+        return new ConstructorFieldInitializer(keywordToken, period, fieldName,
+            _createSyntheticToken(TokenType.EQ), _createSyntheticIdentifier());
+      }
     }
     bool wasInInitializer = _inInitializer;
     _inInitializer = true;
     try {
       Expression expression = parseConditionalExpression();
-      TokenType tokenType = _currentToken.type;
-      if (tokenType == TokenType.PERIOD_PERIOD) {
-        List<Expression> cascadeSections = new List<Expression>();
-        while (tokenType == TokenType.PERIOD_PERIOD) {
+      if (_matches(TokenType.PERIOD_PERIOD)) {
+        List<Expression> cascadeSections = <Expression>[];
+        do {
           Expression section = _parseCascadeSection();
           if (section != null) {
             cascadeSections.add(section);
           }
-          tokenType = _currentToken.type;
-        }
+        } while (_matches(TokenType.PERIOD_PERIOD));
         expression = new CascadeExpression(expression, cascadeSections);
       }
       return new ConstructorFieldInitializer(
-          keyword, period, fieldName, equals, expression);
+          keywordToken, period, fieldName, equals, expression);
     } finally {
       _inInitializer = wasInInitializer;
     }
@@ -5548,18 +5606,20 @@
   /**
    * Parse a continue statement. Return the continue statement that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.CONTINUE`.
+   *
    *     continueStatement ::=
    *         'continue' identifier? ';'
    */
   Statement _parseContinueStatement() {
-    Token continueKeyword = _expectKeyword(Keyword.CONTINUE);
+    Token continueKeyword = getAndAdvance();
     if (!_inLoop && !_inSwitch) {
       _reportErrorForToken(
           ParserErrorCode.CONTINUE_OUTSIDE_OF_LOOP, continueKeyword);
     }
     SimpleIdentifier label = null;
     if (_matchesIdentifier()) {
-      label = parseSimpleIdentifier();
+      label = _parseSimpleIdentifierUnchecked();
     }
     if (_inSwitch && !_inLoop && label == null) {
       _reportErrorForToken(
@@ -5587,7 +5647,7 @@
     } else if (_matchesKeyword(Keyword.LIBRARY)) {
       return _parseLibraryDirective(commentAndMetadata);
     } else if (_matchesKeyword(Keyword.PART)) {
-      return _parsePartDirective(commentAndMetadata);
+      return _parsePartOrPartOfDirective(commentAndMetadata);
     } else {
       // Internal error: this method should not have been invoked if the current
       // token was something other than one of the above.
@@ -5609,16 +5669,18 @@
     if (_matches(TokenType.SCRIPT_TAG)) {
       scriptTag = new ScriptTag(getAndAdvance());
     }
-    List<Directive> directives = new List<Directive>();
+    List<Directive> directives = <Directive>[];
     while (!_matches(TokenType.EOF)) {
       CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
-      if ((_matchesKeyword(Keyword.IMPORT) ||
-              _matchesKeyword(Keyword.EXPORT) ||
-              _matchesKeyword(Keyword.LIBRARY) ||
-              _matchesKeyword(Keyword.PART)) &&
-          !_tokenMatches(_peek(), TokenType.PERIOD) &&
-          !_tokenMatches(_peek(), TokenType.LT) &&
-          !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+      Keyword keyword = _currentToken.keyword;
+      TokenType type = _peek().type;
+      if ((keyword == Keyword.IMPORT ||
+              keyword == Keyword.EXPORT ||
+              keyword == Keyword.LIBRARY ||
+              keyword == Keyword.PART) &&
+          type != TokenType.PERIOD &&
+          type != TokenType.LT &&
+          type != TokenType.OPEN_PAREN) {
         directives.add(_parseDirective(commentAndMetadata));
       } else if (_matches(TokenType.SEMICOLON)) {
         _advance();
@@ -5626,12 +5688,29 @@
         while (!_matches(TokenType.EOF)) {
           _advance();
         }
-        return new CompilationUnit(firstToken, scriptTag, directives,
-            new List<CompilationUnitMember>(), _currentToken);
+        return new CompilationUnit(
+            firstToken, scriptTag, directives, null, _currentToken);
       }
     }
-    return new CompilationUnit(firstToken, scriptTag, directives,
-        new List<CompilationUnitMember>(), _currentToken);
+    return new CompilationUnit(
+        firstToken, scriptTag, directives, null, _currentToken);
+  }
+
+  /**
+   * Parse a documentation comment based on the given list of documentation
+   * comment tokens. Return the documentation comment that was parsed, or `null`
+   * if there was no comment.
+   *
+   *     documentationComment ::=
+   *         multiLineComment?
+   *       | singleLineComment*
+   */
+  Comment _parseDocumentationComment(List<DocumentationCommentToken> tokens) {
+    if (tokens == null) {
+      return null;
+    }
+    List<CommentReference> references = _parseCommentReferences(tokens);
+    return Comment.createDocumentationCommentWithReferences(tokens, references);
   }
 
   /**
@@ -5642,37 +5721,32 @@
    *         multiLineComment?
    *       | singleLineComment*
    */
-  Comment _parseDocumentationComment() {
-    List<DocumentationCommentToken> documentationTokens =
-        <DocumentationCommentToken>[];
+  List<DocumentationCommentToken> _parseDocumentationCommentTokens() {
+    List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[];
     CommentToken commentToken = _currentToken.precedingComments;
     while (commentToken != null) {
       if (commentToken is DocumentationCommentToken) {
-        if (documentationTokens.isNotEmpty) {
+        if (tokens.isNotEmpty) {
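+          // The effective documentation comment is either the most recent
+          // block comment or the single-line comments that follow it, so
+          // discard any earlier tokens.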
           if (commentToken.type == TokenType.SINGLE_LINE_COMMENT) {
-            if (documentationTokens[0].type != TokenType.SINGLE_LINE_COMMENT) {
-              documentationTokens.clear();
+            if (tokens[0].type != TokenType.SINGLE_LINE_COMMENT) {
+              tokens.clear();
             }
           } else {
-            documentationTokens.clear();
+            tokens.clear();
           }
         }
-        documentationTokens.add(commentToken);
+        tokens.add(commentToken);
       }
       commentToken = commentToken.next;
     }
-    if (documentationTokens.isEmpty) {
-      return null;
-    }
-    List<CommentReference> references =
-        _parseCommentReferences(documentationTokens);
-    return Comment.createDocumentationCommentWithReferences(
-        documentationTokens, references);
+    return tokens.isEmpty ? null : tokens;
   }
 
   /**
    * Parse a do statement. Return the do statement that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.DO`.
+   *
    *     doStatement ::=
    *         'do' statement 'while' '(' expression ')' ';'
    */
@@ -5680,7 +5754,7 @@
     bool wasInLoop = _inLoop;
     _inLoop = true;
     try {
-      Token doKeyword = _expectKeyword(Keyword.DO);
+      Token doKeyword = getAndAdvance();
       Statement body = parseStatement2();
       Token whileKeyword = _expectKeyword(Keyword.WHILE);
       Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
@@ -5701,10 +5775,10 @@
    *         identifier ('.' identifier)*
    */
   DottedName _parseDottedName() {
-    List<SimpleIdentifier> components = new List<SimpleIdentifier>();
-    components.add(parseSimpleIdentifier());
-    while (_matches(TokenType.PERIOD)) {
-      _advance();
+    List<SimpleIdentifier> components = <SimpleIdentifier>[
+      parseSimpleIdentifier()
+    ];
+    while (_optional(TokenType.PERIOD)) {
       components.add(parseSimpleIdentifier());
     }
     return new DottedName(components);
@@ -5713,20 +5787,36 @@
   /**
    * Parse an empty statement. Return the empty statement that was parsed.
    *
+   * This method assumes that the current token matches `TokenType.SEMICOLON`.
+   *
    *     emptyStatement ::=
    *         ';'
    */
   Statement _parseEmptyStatement() => new EmptyStatement(getAndAdvance());
 
+  /**
+   * Parse an enum constant declaration. Return the enum constant declaration
+   * that was parsed.
+   *
+   * Specified:
+   *
+   *     enumConstant ::=
+   *         id
+   *
+   * Actual:
+   *
+   *     enumConstant ::=
+   *         metadata id
+   */
   EnumConstantDeclaration _parseEnumConstantDeclaration() {
     CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
     SimpleIdentifier name;
     if (_matchesIdentifier()) {
-      name = parseSimpleIdentifier(isDeclaration: true);
+      name = _parseSimpleIdentifierUnchecked(isDeclaration: true);
     } else {
       name = _createSyntheticIdentifier();
     }
-    if (commentAndMetadata.metadata.isNotEmpty) {
+    if (commentAndMetadata.hasMetadata) {
       _reportErrorForNode(ParserErrorCode.ANNOTATION_ON_ENUM_CONSTANT,
           commentAndMetadata.metadata[0]);
     }
@@ -5738,18 +5828,19 @@
    * Parse an enum declaration. The [commentAndMetadata] is the metadata to be
    * associated with the member. Return the enum declaration that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.ENUM`.
+   *
    *     enumType ::=
    *         metadata 'enum' id '{' id (',' id)* (',')? '}'
    */
   EnumDeclaration _parseEnumDeclaration(CommentAndMetadata commentAndMetadata) {
-    Token keyword = _expectKeyword(Keyword.ENUM);
+    Token keyword = getAndAdvance();
     SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
     Token leftBracket = null;
-    List<EnumConstantDeclaration> constants =
-        new List<EnumConstantDeclaration>();
+    List<EnumConstantDeclaration> constants = <EnumConstantDeclaration>[];
     Token rightBracket = null;
     if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
-      leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
+      leftBracket = getAndAdvance();
       if (_matchesIdentifier() || _matches(TokenType.AT)) {
         constants.add(_parseEnumConstantDeclaration());
       } else if (_matches(TokenType.COMMA) &&
@@ -5792,7 +5883,7 @@
    */
   Expression _parseEqualityExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
+    if (_currentToken.keyword == Keyword.SUPER &&
         _currentToken.next.type.isEqualityOperator) {
       expression = new SuperExpression(getAndAdvance());
     } else {
@@ -5800,13 +5891,12 @@
     }
     bool leftEqualityExpression = false;
     while (_currentToken.type.isEqualityOperator) {
-      Token operator = getAndAdvance();
       if (leftEqualityExpression) {
         _reportErrorForNode(
             ParserErrorCode.EQUALITY_CANNOT_BE_EQUALITY_OPERAND, expression);
       }
       expression = new BinaryExpression(
-          expression, operator, _parseRelationalExpression());
+          expression, getAndAdvance(), _parseRelationalExpression());
       leftEqualityExpression = true;
     }
     return expression;
@@ -5816,11 +5906,13 @@
    * Parse an export directive. The [commentAndMetadata] is the metadata to be
    * associated with the directive. Return the export directive that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.EXPORT`.
+   *
    *     exportDirective ::=
    *         metadata 'export' stringLiteral configuration* combinator*';'
    */
   ExportDirective _parseExportDirective(CommentAndMetadata commentAndMetadata) {
-    Token exportKeyword = _expectKeyword(Keyword.EXPORT);
+    Token exportKeyword = getAndAdvance();
     StringLiteral libraryUri = _parseUri();
     List<Configuration> configurations = _parseConfigurations();
     List<Combinator> combinators = _parseCombinators();
@@ -5842,8 +5934,7 @@
    *         expression (',' expression)*
    */
   List<Expression> _parseExpressionList() {
-    List<Expression> expressions = new List<Expression>();
-    expressions.add(parseExpression2());
+    List<Expression> expressions = <Expression>[parseExpression2()];
     while (_optional(TokenType.COMMA)) {
       expressions.add(parseExpression2());
     }
@@ -5862,23 +5953,24 @@
    *       | type
    */
   FinalConstVarOrType _parseFinalConstVarOrType(bool optional) {
-    Token keyword = null;
+    Token keywordToken = null;
     TypeName type = null;
-    if (_matchesKeyword(Keyword.FINAL) || _matchesKeyword(Keyword.CONST)) {
-      keyword = getAndAdvance();
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.FINAL || keyword == Keyword.CONST) {
+      keywordToken = getAndAdvance();
       if (_isTypedIdentifier(_currentToken)) {
         type = parseTypeName();
       } else {
         // Support `final/*=T*/ x;`
         type = _parseOptionalTypeNameComment();
       }
-    } else if (_matchesKeyword(Keyword.VAR)) {
-      keyword = getAndAdvance();
+    } else if (keyword == Keyword.VAR) {
+      keywordToken = getAndAdvance();
       // Support `var/*=T*/ x;`
       type = _parseOptionalTypeNameComment();
       if (type != null) {
         // Clear the keyword to prevent an error.
-        keyword = null;
+        keywordToken = null;
       }
     } else if (_isTypedIdentifier(_currentToken)) {
       type = parseReturnType();
@@ -5890,7 +5982,7 @@
       // This is not supported if the type is required.
       type = _parseOptionalTypeNameComment();
     }
-    return new FinalConstVarOrType(keyword, type);
+    return new FinalConstVarOrType(keywordToken, type);
   }
 
   /**
@@ -5907,31 +5999,32 @@
    */
   FormalParameter _parseFormalParameter(ParameterKind kind) {
     NormalFormalParameter parameter = parseNormalFormalParameter();
-    if (_matches(TokenType.EQ)) {
-      Token seperator = getAndAdvance();
+    TokenType type = _currentToken.type;
+    if (type == TokenType.EQ) {
+      Token separator = getAndAdvance();
       Expression defaultValue = parseExpression2();
       if (kind == ParameterKind.NAMED) {
         _reportErrorForToken(
-            ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, seperator);
+            ParserErrorCode.WRONG_SEPARATOR_FOR_NAMED_PARAMETER, separator);
       } else if (kind == ParameterKind.REQUIRED) {
         _reportErrorForNode(
             ParserErrorCode.POSITIONAL_PARAMETER_OUTSIDE_GROUP, parameter);
       }
       return new DefaultFormalParameter(
-          parameter, kind, seperator, defaultValue);
-    } else if (_matches(TokenType.COLON)) {
-      Token seperator = getAndAdvance();
+          parameter, kind, separator, defaultValue);
+    } else if (type == TokenType.COLON) {
+      Token separator = getAndAdvance();
       Expression defaultValue = parseExpression2();
       if (kind == ParameterKind.POSITIONAL) {
         _reportErrorForToken(
             ParserErrorCode.WRONG_SEPARATOR_FOR_POSITIONAL_PARAMETER,
-            seperator);
+            separator);
       } else if (kind == ParameterKind.REQUIRED) {
         _reportErrorForNode(
             ParserErrorCode.NAMED_PARAMETER_OUTSIDE_GROUP, parameter);
       }
       return new DefaultFormalParameter(
-          parameter, kind, seperator, defaultValue);
+          parameter, kind, separator, defaultValue);
     } else if (kind != ParameterKind.REQUIRED) {
       return new DefaultFormalParameter(parameter, kind, null, null);
     }
@@ -5939,6 +6032,175 @@
   }
 
   /**
+   * Parse a list of formal parameters given that the list starts with the given
+   * [leftParenthesis]. Return the formal parameters that were parsed.
+   */
+  FormalParameterList _parseFormalParameterListAfterParen(
+      Token leftParenthesis) {
+    if (_matches(TokenType.CLOSE_PAREN)) {
+      return new FormalParameterList(
+          leftParenthesis, null, null, null, getAndAdvance());
+    }
+    //
+    // Even though it is invalid to have default parameters outside of brackets,
+    // required parameters inside of brackets, or multiple groups of default and
+    // named parameters, we allow all of these cases so that we can recover
+    // better.
+    //
+    List<FormalParameter> parameters = <FormalParameter>[];
+    Token leftSquareBracket = null;
+    Token rightSquareBracket = null;
+    Token leftCurlyBracket = null;
+    Token rightCurlyBracket = null;
+    ParameterKind kind = ParameterKind.REQUIRED;
+    bool firstParameter = true;
+    bool reportedMultiplePositionalGroups = false;
+    bool reportedMultipleNamedGroups = false;
+    bool reportedMixedGroups = false;
+    bool wasOptionalParameter = false;
+    Token initialToken = null;
+    do {
+      if (firstParameter) {
+        firstParameter = false;
+      } else if (!_optional(TokenType.COMMA)) {
+        // TODO(brianwilkerson) The token is wrong; we need to recover from this
+        // case.
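+        // If a matching close paren exists, assume that only the comma is
+        // missing; otherwise, report the missing paren and stop parsing the
+        // list.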
+        if (_getEndToken(leftParenthesis) != null) {
+          _reportErrorForCurrentToken(
+              ParserErrorCode.EXPECTED_TOKEN, [TokenType.COMMA.lexeme]);
+        } else {
+          _reportErrorForToken(ParserErrorCode.MISSING_CLOSING_PARENTHESIS,
+              _currentToken.previous);
+          break;
+        }
+      }
+      initialToken = _currentToken;
+      //
+      // Handle the beginning of parameter groups.
+      //
+      TokenType type = _currentToken.type;
+      if (type == TokenType.OPEN_SQUARE_BRACKET) {
+        wasOptionalParameter = true;
+        if (leftSquareBracket != null && !reportedMultiplePositionalGroups) {
+          _reportErrorForCurrentToken(
+              ParserErrorCode.MULTIPLE_POSITIONAL_PARAMETER_GROUPS);
+          reportedMultiplePositionalGroups = true;
+        }
+        if (leftCurlyBracket != null && !reportedMixedGroups) {
+          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
+          reportedMixedGroups = true;
+        }
+        leftSquareBracket = getAndAdvance();
+        kind = ParameterKind.POSITIONAL;
+      } else if (type == TokenType.OPEN_CURLY_BRACKET) {
+        wasOptionalParameter = true;
+        if (leftCurlyBracket != null && !reportedMultipleNamedGroups) {
+          _reportErrorForCurrentToken(
+              ParserErrorCode.MULTIPLE_NAMED_PARAMETER_GROUPS);
+          reportedMultipleNamedGroups = true;
+        }
+        if (leftSquareBracket != null && !reportedMixedGroups) {
+          _reportErrorForCurrentToken(ParserErrorCode.MIXED_PARAMETER_GROUPS);
+          reportedMixedGroups = true;
+        }
+        leftCurlyBracket = getAndAdvance();
+        kind = ParameterKind.NAMED;
+      }
+      //
+      // Parse and record the parameter.
+      //
+      FormalParameter parameter = _parseFormalParameter(kind);
+      parameters.add(parameter);
+      if (kind == ParameterKind.REQUIRED && wasOptionalParameter) {
+        _reportErrorForNode(
+            ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS, parameter);
+      }
+      //
+      // Handle the end of parameter groups.
+      //
+      // TODO(brianwilkerson) Improve the detection and reporting of missing and
+      // mismatched delimiters.
+      type = _currentToken.type;
+
+      // Advance past trailing commas as appropriate.
+      if (parseTrailingCommas && type == TokenType.COMMA) {
+        // Only parse commas trailing normal (non-positional/named) params.
+        if (rightSquareBracket == null && rightCurlyBracket == null) {
+          Token next = _peek();
+          if (next.type == TokenType.CLOSE_PAREN ||
+              next.type == TokenType.CLOSE_CURLY_BRACKET ||
+              next.type == TokenType.CLOSE_SQUARE_BRACKET) {
+            _advance();
+            type = _currentToken.type;
+          }
+        }
+      }
+
+      if (type == TokenType.CLOSE_SQUARE_BRACKET) {
+        rightSquareBracket = getAndAdvance();
+        if (leftSquareBracket == null) {
+          if (leftCurlyBracket != null) {
+            _reportErrorForCurrentToken(
+                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
+            rightCurlyBracket = rightSquareBracket;
+            rightSquareBracket = null;
+          } else {
+            _reportErrorForCurrentToken(
+                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
+                ["["]);
+          }
+        }
+        kind = ParameterKind.REQUIRED;
+      } else if (type == TokenType.CLOSE_CURLY_BRACKET) {
+        rightCurlyBracket = getAndAdvance();
+        if (leftCurlyBracket == null) {
+          if (leftSquareBracket != null) {
+            _reportErrorForCurrentToken(
+                ParserErrorCode.WRONG_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
+            rightSquareBracket = rightCurlyBracket;
+            rightCurlyBracket = null;
+          } else {
+            _reportErrorForCurrentToken(
+                ParserErrorCode.UNEXPECTED_TERMINATOR_FOR_PARAMETER_GROUP,
+                ["{"]);
+          }
+        }
+        kind = ParameterKind.REQUIRED;
+      }
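+      // The identity check below guards against an infinite loop when no
+      // tokens were consumed during an iteration.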
+    } while (!_matches(TokenType.CLOSE_PAREN) &&
+        !identical(initialToken, _currentToken));
+    Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
+    //
+    // Check that the groups were closed correctly.
+    //
+    if (leftSquareBracket != null && rightSquareBracket == null) {
+      _reportErrorForCurrentToken(
+          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["]"]);
+    }
+    if (leftCurlyBracket != null && rightCurlyBracket == null) {
+      _reportErrorForCurrentToken(
+          ParserErrorCode.MISSING_TERMINATOR_FOR_PARAMETER_GROUP, ["}"]);
+    }
+    //
+    // Build the parameter list.
+    //
+    leftSquareBracket ??= leftCurlyBracket;
+    rightSquareBracket ??= rightCurlyBracket;
+    return new FormalParameterList(leftParenthesis, parameters,
+        leftSquareBracket, rightSquareBracket, rightParenthesis);
+  }
+
+  /**
+   * Parse a list of formal parameters. Return the formal parameters that were
+   * parsed.
+   *
+   * This method assumes that the current token matches `TokenType.OPEN_PAREN`.
+   */
+  FormalParameterList _parseFormalParameterListUnchecked() {
+    return _parseFormalParameterListAfterParen(getAndAdvance());
+  }
+
+  /**
    * Parse a for statement. Return the for statement that was parsed.
    *
    *     forStatement ::=
@@ -5970,19 +6232,20 @@
         if (_matchesIdentifier() &&
             (_tokenMatchesKeyword(_peek(), Keyword.IN) ||
                 _tokenMatches(_peek(), TokenType.COLON))) {
-          List<VariableDeclaration> variables = new List<VariableDeclaration>();
-          SimpleIdentifier variableName = parseSimpleIdentifier();
-          variables.add(new VariableDeclaration(variableName, null, null));
+          SimpleIdentifier variableName = _parseSimpleIdentifierUnchecked();
           variableList = new VariableDeclarationList(commentAndMetadata.comment,
-              commentAndMetadata.metadata, null, null, variables);
+              commentAndMetadata.metadata, null, null, <VariableDeclaration>[
+            new VariableDeclaration(variableName, null, null)
+          ]);
         } else if (_isInitializedVariableDeclaration()) {
           variableList =
               _parseVariableDeclarationListAfterMetadata(commentAndMetadata);
         } else {
           initialization = parseExpression2();
         }
-        if (_matchesKeyword(Keyword.IN) || _matches(TokenType.COLON)) {
-          if (_matches(TokenType.COLON)) {
+        TokenType type = _currentToken.type;
+        if (_matchesKeyword(Keyword.IN) || type == TokenType.COLON) {
+          if (type == TokenType.COLON) {
             _reportErrorForCurrentToken(ParserErrorCode.COLON_IN_PLACE_OF_IN);
           }
           DeclaredIdentifier loopVariable = null;
@@ -6014,7 +6277,7 @@
                   new SimpleIdentifier(variable.name.token,
                       isDeclaration: true));
             } else {
-              if (!commentAndMetadata.metadata.isEmpty) {
+              if (commentAndMetadata.hasMetadata) {
                 // TODO(jwren) metadata isn't allowed before the identifier in
                 // "identifier in expression", add warning if commentAndMetadata
                 // has content
@@ -6106,45 +6369,47 @@
     _inLoop = false;
     _inSwitch = false;
     try {
-      if (_matches(TokenType.SEMICOLON)) {
+      TokenType type = _currentToken.type;
+      if (type == TokenType.SEMICOLON) {
         if (!mayBeEmpty) {
           _reportErrorForCurrentToken(emptyErrorCode);
         }
         return new EmptyFunctionBody(getAndAdvance());
-      } else if (_matchesString(_NATIVE)) {
-        Token nativeToken = getAndAdvance();
-        StringLiteral stringLiteral = null;
-        if (_matches(TokenType.STRING)) {
-          stringLiteral = parseStringLiteral();
-        }
-        return new NativeFunctionBody(
-            nativeToken, stringLiteral, _expect(TokenType.SEMICOLON));
       }
       Token keyword = null;
       Token star = null;
-      if (_matchesString(ASYNC)) {
-        keyword = getAndAdvance();
-        if (!_parseAsync) {
-          _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword);
-        }
-        if (_matches(TokenType.STAR)) {
-          star = getAndAdvance();
-          _inGenerator = true;
-        }
-        _inAsync = true;
-      } else if (_matchesString(SYNC)) {
-        keyword = getAndAdvance();
-        if (!_parseAsync) {
-          _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword);
-        }
-        if (_matches(TokenType.STAR)) {
-          star = getAndAdvance();
-          _inGenerator = true;
+      bool foundAsync = false;
+      bool foundSync = false;
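+      // Remember which keyword was found so that later checks do not need to
+      // re-examine the keyword token's lexeme.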
+      if (type == TokenType.IDENTIFIER) {
+        String lexeme = _currentToken.lexeme;
+        if (lexeme == ASYNC) {
+          foundAsync = true;
+          keyword = getAndAdvance();
+          if (!_parseAsync) {
+            _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword);
+          }
+          if (_matches(TokenType.STAR)) {
+            star = getAndAdvance();
+            _inGenerator = true;
+          }
+          type = _currentToken.type;
+          _inAsync = true;
+        } else if (lexeme == SYNC) {
+          foundSync = true;
+          keyword = getAndAdvance();
+          if (!_parseAsync) {
+            _reportErrorForToken(ParserErrorCode.ASYNC_NOT_SUPPORTED, keyword);
+          }
+          if (_matches(TokenType.STAR)) {
+            star = getAndAdvance();
+            _inGenerator = true;
+          }
+          type = _currentToken.type;
         }
       }
-      if (_matches(TokenType.FUNCTION)) {
+      if (type == TokenType.FUNCTION) {
         if (keyword != null) {
-          if (!_tokenMatchesString(keyword, ASYNC)) {
+          if (!foundAsync) {
             _reportErrorForToken(ParserErrorCode.INVALID_SYNC, keyword);
             keyword = null;
           } else if (star != null) {
@@ -6169,9 +6434,9 @@
         }
         return new ExpressionFunctionBody(
             keyword, functionDefinition, expression, semicolon);
-      } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+      } else if (type == TokenType.OPEN_CURLY_BRACKET) {
         if (keyword != null) {
-          if (_tokenMatchesString(keyword, SYNC) && star == null) {
+          if (foundSync && star == null) {
             _reportErrorForToken(
                 ParserErrorCode.MISSING_STAR_AFTER_SYNC, keyword);
           }
@@ -6182,6 +6447,14 @@
               _createSyntheticToken(TokenType.SEMICOLON));
         }
         return new BlockFunctionBody(keyword, star, parseBlock());
+      } else if (_matchesString(_NATIVE)) {
+        Token nativeToken = getAndAdvance();
+        StringLiteral stringLiteral = null;
+        if (_matches(TokenType.STRING)) {
+          stringLiteral = _parseStringLiteralUnchecked();
+        }
+        return new NativeFunctionBody(
+            nativeToken, stringLiteral, _expect(TokenType.SEMICOLON));
       } else {
         // Invalid function body
         _reportErrorForCurrentToken(emptyErrorCode);
@@ -6213,22 +6486,28 @@
       CommentAndMetadata commentAndMetadata,
       Token externalKeyword,
       TypeName returnType) {
-    Token keyword = null;
+    Token keywordToken = null;
     bool isGetter = false;
-    if (_matchesKeyword(Keyword.GET) &&
-        !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
-      keyword = getAndAdvance();
+    Keyword keyword = _currentToken.keyword;
+    SimpleIdentifier name = null;
+    if (keyword == Keyword.GET) {
+      keywordToken = getAndAdvance();
       isGetter = true;
-    } else if (_matchesKeyword(Keyword.SET) &&
-        !_tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
-      keyword = getAndAdvance();
+    } else if (keyword == Keyword.SET) {
+      keywordToken = getAndAdvance();
     }
-    SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
+    if (keywordToken != null && _matches(TokenType.OPEN_PAREN)) {
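+      // The 'get' or 'set' is followed by '(', so it is actually the name of
+      // the function rather than a property keyword.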
+      name = new SimpleIdentifier(keywordToken, isDeclaration: true);
+      keywordToken = null;
+      isGetter = false;
+    } else {
+      name = parseSimpleIdentifier(isDeclaration: true);
+    }
     TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
     FormalParameterList parameters = null;
     if (!isGetter) {
       if (_matches(TokenType.OPEN_PAREN)) {
-        parameters = parseFormalParameterList();
+        parameters = _parseFormalParameterListUnchecked();
         _validateFormalParameterList(parameters);
       } else {
         _reportErrorForCurrentToken(
@@ -6242,7 +6521,7 @@
       }
     } else if (_matches(TokenType.OPEN_PAREN)) {
       _reportErrorForCurrentToken(ParserErrorCode.GETTER_WITH_PARAMETERS);
-      parseFormalParameterList();
+      _parseFormalParameterListUnchecked();
     }
     FunctionBody body;
     if (externalKeyword == null) {
@@ -6261,7 +6540,7 @@
         commentAndMetadata.metadata,
         externalKeyword,
         returnType,
-        keyword,
+        keywordToken,
         name,
         new FunctionExpression(typeParameters, parameters, body));
   }
@@ -6328,7 +6607,8 @@
     if (_matches(TokenType.LT)) {
       typeParameters = parseTypeParameterList();
     }
-    if (_matches(TokenType.SEMICOLON) || _matches(TokenType.EOF)) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.SEMICOLON || type == TokenType.EOF) {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
       FormalParameterList parameters = new FormalParameterList(
           _createSyntheticToken(TokenType.OPEN_PAREN),
@@ -6346,12 +6626,24 @@
           typeParameters,
           parameters,
           semicolon);
-    } else if (!_matches(TokenType.OPEN_PAREN)) {
+    } else if (type == TokenType.OPEN_PAREN) {
+      FormalParameterList parameters = _parseFormalParameterListUnchecked();
+      _validateFormalParameterList(parameters);
+      Token semicolon = _expect(TokenType.SEMICOLON);
+      return new FunctionTypeAlias(
+          commentAndMetadata.comment,
+          commentAndMetadata.metadata,
+          keyword,
+          returnType,
+          name,
+          typeParameters,
+          parameters,
+          semicolon);
+    } else {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_TYPEDEF_PARAMETERS);
-      // TODO(brianwilkerson) Recover from this error. At the very least we
-      // should skip to the start of the next valid compilation unit member,
-      // allowing for the possibility of finding the typedef parameters before
-      // that point.
+      // Recovery: At the very least we should skip to the start of the next
+      // valid compilation unit member, allowing for the possibility of finding
+      // the typedef parameters before that point.
       return new FunctionTypeAlias(
           commentAndMetadata.comment,
           commentAndMetadata.metadata,
@@ -6363,18 +6655,6 @@
               null, null, null, _createSyntheticToken(TokenType.CLOSE_PAREN)),
           _createSyntheticToken(TokenType.SEMICOLON));
     }
-    FormalParameterList parameters = parseFormalParameterList();
-    _validateFormalParameterList(parameters);
-    Token semicolon = _expect(TokenType.SEMICOLON);
-    return new FunctionTypeAlias(
-        commentAndMetadata.comment,
-        commentAndMetadata.metadata,
-        keyword,
-        returnType,
-        name,
-        typeParameters,
-        parameters,
-        semicolon);
   }
 
   /**
@@ -6416,6 +6696,8 @@
    * been parsed, or `null` if there was no return type. Return the getter that
    * was parsed.
    *
+   * This method assumes that the current token matches `Keyword.GET`.
+   *
    *     getter ::=
    *         getterSignature functionBody?
    *
@@ -6424,7 +6706,7 @@
    */
   MethodDeclaration _parseGetter(CommentAndMetadata commentAndMetadata,
       Token externalKeyword, Token staticKeyword, TypeName returnType) {
-    Token propertyKeyword = _expectKeyword(Keyword.GET);
+    Token propertyKeyword = getAndAdvance();
     SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
     if (_matches(TokenType.OPEN_PAREN) &&
         _tokenMatches(_peek(), TokenType.CLOSE_PAREN)) {
@@ -6461,10 +6743,10 @@
    *         identifier (',' identifier)*
    */
   List<SimpleIdentifier> _parseIdentifierList() {
-    List<SimpleIdentifier> identifiers = new List<SimpleIdentifier>();
-    identifiers.add(parseSimpleIdentifier());
-    while (_matches(TokenType.COMMA)) {
-      _advance();
+    List<SimpleIdentifier> identifiers = <SimpleIdentifier>[
+      parseSimpleIdentifier()
+    ];
+    while (_optional(TokenType.COMMA)) {
       identifiers.add(parseSimpleIdentifier());
     }
     return identifiers;
@@ -6473,11 +6755,13 @@
   /**
    * Parse an if statement. Return the if statement that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.IF`.
+   *
    *     ifStatement ::=
    *         'if' '(' expression ')' statement ('else' statement)?
    */
   Statement _parseIfStatement() {
-    Token ifKeyword = _expectKeyword(Keyword.IF);
+    Token ifKeyword = getAndAdvance();
     Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
     Expression condition = parseExpression2();
     Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
@@ -6496,11 +6780,13 @@
    * Parse an import directive. The [commentAndMetadata] is the metadata to be
    * associated with the directive. Return the import directive that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.IMPORT`.
+   *
    *     importDirective ::=
    *         metadata 'import' stringLiteral configuration* (deferred)? ('as' identifier)? combinator*';'
    */
   ImportDirective _parseImportDirective(CommentAndMetadata commentAndMetadata) {
-    Token importKeyword = _expectKeyword(Keyword.IMPORT);
+    Token importKeyword = getAndAdvance();
     StringLiteral libraryUri = _parseUri();
     List<Configuration> configurations = _parseConfigurations();
     Token deferredToken = null;
@@ -6590,7 +6876,7 @@
    */
   InstanceCreationExpression _parseInstanceCreationExpression(Token keyword) {
     ConstructorName constructorName = parseConstructorName();
-    ArgumentList argumentList = parseArgumentList();
+    ArgumentList argumentList = _parseArgumentListChecked();
     return new InstanceCreationExpression(
         keyword, constructorName, argumentList);
   }
@@ -6600,12 +6886,14 @@
    * associated with the directive. Return the library directive that was
    * parsed.
    *
+   * This method assumes that the current token matches `Keyword.LIBRARY`.
+   *
    *     libraryDirective ::=
    *         metadata 'library' identifier ';'
    */
   LibraryDirective _parseLibraryDirective(
       CommentAndMetadata commentAndMetadata) {
-    Token keyword = _expectKeyword(Keyword.LIBRARY);
+    Token keyword = getAndAdvance();
     LibraryIdentifier libraryName = _parseLibraryName(
         ParserErrorCode.MISSING_NAME_IN_LIBRARY_DIRECTIVE, keyword);
     Token semicolon = _expect(TokenType.SEMICOLON);
@@ -6627,17 +6915,15 @@
     if (_matchesIdentifier()) {
       return parseLibraryIdentifier();
     } else if (_matches(TokenType.STRING)) {
-      // TODO(brianwilkerson) Recovery: This should be extended to handle
-      // arbitrary tokens until we can find a token that can start a compilation
-      // unit member.
+      // Recovery: This should be extended to handle arbitrary tokens until we
+      // can find a token that can start a compilation unit member.
       StringLiteral string = parseStringLiteral();
       _reportErrorForNode(ParserErrorCode.NON_IDENTIFIER_LIBRARY_NAME, string);
     } else {
       _reportErrorForToken(missingNameError, missingNameToken);
     }
-    List<SimpleIdentifier> components = new List<SimpleIdentifier>();
-    components.add(_createSyntheticIdentifier());
-    return new LibraryIdentifier(components);
+    return new LibraryIdentifier(
+        <SimpleIdentifier>[_createSyntheticIdentifier()]);
   }
 
   /**
@@ -6646,13 +6932,16 @@
    * is the type arguments appearing before the literal, or `null` if there are
    * no type arguments. Return the list literal that was parsed.
    *
+   * This method assumes that the current token matches either
+   * `TokenType.OPEN_SQUARE_BRACKET` or `TokenType.INDEX`.
+   *
    *     listLiteral ::=
    *         'const'? typeArguments? '[' (expressionList ','?)? ']'
    */
   ListLiteral _parseListLiteral(
       Token modifier, TypeArgumentList typeArguments) {
-    // may be empty list literal
     if (_matches(TokenType.INDEX)) {
+      // Split the token into two separate tokens.
       BeginToken leftBracket = _createToken(
           _currentToken, TokenType.OPEN_SQUARE_BRACKET,
           isBegin: true);
@@ -6666,8 +6955,7 @@
       return new ListLiteral(
           modifier, typeArguments, leftBracket, null, rightBracket);
     }
-    // open
-    Token leftBracket = _expect(TokenType.OPEN_SQUARE_BRACKET);
+    Token leftBracket = getAndAdvance();
     if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) {
       return new ListLiteral(
           modifier, typeArguments, leftBracket, null, getAndAdvance());
@@ -6675,8 +6963,7 @@
     bool wasInInitializer = _inInitializer;
     _inInitializer = false;
     try {
-      List<Expression> elements = new List<Expression>();
-      elements.add(parseExpression2());
+      List<Expression> elements = <Expression>[parseExpression2()];
       while (_optional(TokenType.COMMA)) {
         if (_matches(TokenType.CLOSE_SQUARE_BRACKET)) {
           return new ListLiteral(
@@ -6727,10 +7014,9 @@
    */
   Expression _parseLogicalAndExpression() {
     Expression expression = _parseEqualityExpression();
-    while (_matches(TokenType.AMPERSAND_AMPERSAND)) {
-      Token operator = getAndAdvance();
+    while (_currentToken.type == TokenType.AMPERSAND_AMPERSAND) {
       expression = new BinaryExpression(
-          expression, operator, _parseEqualityExpression());
+          expression, getAndAdvance(), _parseEqualityExpression());
     }
     return expression;
   }
@@ -6741,20 +7027,22 @@
    * is the type arguments that were declared, or `null` if there are no type
    * arguments. Return the map literal that was parsed.
    *
+   * This method assumes that the current token matches
+   * `TokenType.OPEN_CURLY_BRACKET`.
+   *
    *     mapLiteral ::=
    *         'const'? typeArguments? '{' (mapLiteralEntry (',' mapLiteralEntry)* ','?)? '}'
    */
   MapLiteral _parseMapLiteral(Token modifier, TypeArgumentList typeArguments) {
-    Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
-    List<MapLiteralEntry> entries = new List<MapLiteralEntry>();
+    Token leftBracket = getAndAdvance();
     if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
       return new MapLiteral(
-          modifier, typeArguments, leftBracket, entries, getAndAdvance());
+          modifier, typeArguments, leftBracket, null, getAndAdvance());
     }
     bool wasInInitializer = _inInitializer;
     _inInitializer = false;
     try {
-      entries.add(parseMapLiteralEntry());
+      List<MapLiteralEntry> entries = <MapLiteralEntry>[parseMapLiteralEntry()];
       while (_optional(TokenType.COMMA)) {
         if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
           return new MapLiteral(
@@ -6838,9 +7126,11 @@
     SimpleIdentifier methodName = parseSimpleIdentifier(isDeclaration: true);
     TypeParameterList typeParameters = _parseGenericMethodTypeParameters();
     FormalParameterList parameters;
-    if (!_matches(TokenType.OPEN_PAREN) &&
-        (_matches(TokenType.OPEN_CURLY_BRACKET) ||
-            _matches(TokenType.FUNCTION))) {
+    TokenType type = _currentToken.type;
+    // TODO(brianwilkerson) Figure out why we care what the current token is if
+    // it isn't a paren.
+    if (type != TokenType.OPEN_PAREN &&
+        (type == TokenType.OPEN_CURLY_BRACKET || type == TokenType.FUNCTION)) {
       _reportErrorForToken(
           ParserErrorCode.MISSING_METHOD_PARAMETERS, _currentToken.previous);
       parameters = new FormalParameterList(
@@ -6878,12 +7168,14 @@
     Modifiers modifiers = new Modifiers();
     bool progress = true;
     while (progress) {
-      if (_tokenMatches(_peek(), TokenType.PERIOD) ||
-          _tokenMatches(_peek(), TokenType.LT) ||
-          _tokenMatches(_peek(), TokenType.OPEN_PAREN)) {
+      TokenType nextType = _peek().type;
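+      // If the next token is '.', '<', or '(', the current token is not a
+      // modifier; it is part of the declaration itself (for example, a
+      // method named 'factory').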
+      if (nextType == TokenType.PERIOD ||
+          nextType == TokenType.LT ||
+          nextType == TokenType.OPEN_PAREN) {
         return modifiers;
       }
-      if (_matchesKeyword(Keyword.ABSTRACT)) {
+      Keyword keyword = _currentToken.keyword;
+      if (keyword == Keyword.ABSTRACT) {
         if (modifiers.abstractKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6891,7 +7183,7 @@
         } else {
           modifiers.abstractKeyword = getAndAdvance();
         }
-      } else if (_matchesKeyword(Keyword.CONST)) {
+      } else if (keyword == Keyword.CONST) {
         if (modifiers.constKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6899,9 +7191,7 @@
         } else {
           modifiers.constKeyword = getAndAdvance();
         }
-      } else if (_matchesKeyword(Keyword.EXTERNAL) &&
-          !_tokenMatches(_peek(), TokenType.PERIOD) &&
-          !_tokenMatches(_peek(), TokenType.LT)) {
+      } else if (keyword == Keyword.EXTERNAL) {
         if (modifiers.externalKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6909,9 +7199,7 @@
         } else {
           modifiers.externalKeyword = getAndAdvance();
         }
-      } else if (_matchesKeyword(Keyword.FACTORY) &&
-          !_tokenMatches(_peek(), TokenType.PERIOD) &&
-          !_tokenMatches(_peek(), TokenType.LT)) {
+      } else if (keyword == Keyword.FACTORY) {
         if (modifiers.factoryKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6919,7 +7207,7 @@
         } else {
           modifiers.factoryKeyword = getAndAdvance();
         }
-      } else if (_matchesKeyword(Keyword.FINAL)) {
+      } else if (keyword == Keyword.FINAL) {
         if (modifiers.finalKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6927,9 +7215,7 @@
         } else {
           modifiers.finalKeyword = getAndAdvance();
         }
-      } else if (_matchesKeyword(Keyword.STATIC) &&
-          !_tokenMatches(_peek(), TokenType.PERIOD) &&
-          !_tokenMatches(_peek(), TokenType.LT)) {
+      } else if (keyword == Keyword.STATIC) {
         if (modifiers.staticKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6937,7 +7223,7 @@
         } else {
           modifiers.staticKeyword = getAndAdvance();
         }
-      } else if (_matchesKeyword(Keyword.VAR)) {
+      } else if (keyword == Keyword.VAR) {
         if (modifiers.varKeyword != null) {
           _reportErrorForCurrentToken(
               ParserErrorCode.DUPLICATED_MODIFIER, [_currentToken.lexeme]);
@@ -6962,16 +7248,15 @@
    */
   Expression _parseMultiplicativeExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
+    if (_currentToken.keyword == Keyword.SUPER &&
         _currentToken.next.type.isMultiplicativeOperator) {
       expression = new SuperExpression(getAndAdvance());
     } else {
       expression = _parseUnaryExpression();
     }
     while (_currentToken.type.isMultiplicativeOperator) {
-      Token operator = getAndAdvance();
-      expression =
-          new BinaryExpression(expression, operator, _parseUnaryExpression());
+      expression = new BinaryExpression(
+          expression, getAndAdvance(), _parseUnaryExpression());
     }
     return expression;
   }
@@ -6979,6 +7264,8 @@
   /**
    * Parse a class native clause. Return the native clause that was parsed.
    *
+   * This method assumes that the current token matches `_NATIVE`.
+   *
    *     classNativeClause ::=
    *         'native' name
    */
@@ -6991,11 +7278,13 @@
   /**
    * Parse a new expression. Return the new expression that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.NEW`.
+   *
    *     newExpression ::=
    *         instanceCreationExpression
    */
   InstanceCreationExpression _parseNewExpression() =>
-      _parseInstanceCreationExpression(_expectKeyword(Keyword.NEW));
+      _parseInstanceCreationExpression(getAndAdvance());
 
   /**
    * Parse a non-labeled statement. Return the non-labeled statement that was
@@ -7020,7 +7309,8 @@
   Statement _parseNonLabeledStatement() {
     // TODO(brianwilkerson) Pass the comment and metadata on where appropriate.
     CommentAndMetadata commentAndMetadata = _parseCommentAndMetadata();
-    if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.OPEN_CURLY_BRACKET) {
       if (_tokenMatches(_peek(), TokenType.STRING)) {
         Token afterString = _skipStringLiteral(_currentToken.next);
         if (afterString != null && afterString.type == TokenType.COLON) {
@@ -7029,7 +7319,7 @@
         }
       }
       return parseBlock();
-    } else if (_matches(TokenType.KEYWORD) &&
+    } else if (type == TokenType.KEYWORD &&
         !_currentToken.keyword.isPseudoKeyword) {
       Keyword keyword = _currentToken.keyword;
       // TODO(jwren) compute some metrics to figure out a better order for this
@@ -7064,9 +7354,11 @@
         return _parseVariableDeclarationStatementAfterMetadata(
             commentAndMetadata);
       } else if (keyword == Keyword.VOID) {
-        TypeName returnType = parseReturnType();
+        TypeName returnType =
+            new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+        Token next = _currentToken.next;
         if (_matchesIdentifier() &&
-            _peek().matchesAny(const <TokenType>[
+            next.matchesAny(const <TokenType>[
               TokenType.OPEN_PAREN,
               TokenType.OPEN_CURLY_BRACKET,
               TokenType.FUNCTION,
@@ -7079,7 +7371,7 @@
           // We have found an error of some kind. Try to recover.
           //
           if (_matchesIdentifier()) {
-            if (_peek().matchesAny(const <TokenType>[
+            if (next.matchesAny(const <TokenType>[
               TokenType.EQ,
               TokenType.COMMA,
               TokenType.SEMICOLON
@@ -7104,7 +7396,8 @@
           return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
         }
       } else if (keyword == Keyword.CONST) {
-        if (_peek().matchesAny(const <TokenType>[
+        Token next = _currentToken.next;
+        if (next.matchesAny(const <TokenType>[
           TokenType.LT,
           TokenType.OPEN_CURLY_BRACKET,
           TokenType.OPEN_SQUARE_BRACKET,
@@ -7112,8 +7405,8 @@
         ])) {
           return new ExpressionStatement(
               parseExpression2(), _expect(TokenType.SEMICOLON));
-        } else if (_tokenMatches(_peek(), TokenType.IDENTIFIER)) {
-          Token afterType = _skipTypeName(_peek());
+        } else if (_tokenMatches(next, TokenType.IDENTIFIER)) {
+          Token afterType = _skipTypeName(next);
           if (afterType != null) {
             if (_tokenMatches(afterType, TokenType.OPEN_PAREN) ||
                 (_tokenMatches(afterType, TokenType.PERIOD) &&
@@ -7158,14 +7451,14 @@
             CompileTimeErrorCode.ASYNC_FOR_IN_WRONG_CONTEXT, awaitToken);
       }
       return statement;
-    } else if (_matches(TokenType.SEMICOLON)) {
+    } else if (type == TokenType.SEMICOLON) {
       return _parseEmptyStatement();
     } else if (_isInitializedVariableDeclaration()) {
       return _parseVariableDeclarationStatementAfterMetadata(
           commentAndMetadata);
     } else if (_isFunctionDeclaration()) {
       return _parseFunctionDeclarationStatement();
-    } else if (_matches(TokenType.CLOSE_CURLY_BRACKET)) {
+    } else if (type == TokenType.CLOSE_CURLY_BRACKET) {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_STATEMENT);
       return new EmptyStatement(_createSyntheticToken(TokenType.SEMICOLON));
     } else {
@@ -7197,6 +7490,29 @@
           ParserErrorCode.MISSING_KEYWORD_OPERATOR, _currentToken);
       operatorKeyword = _createSyntheticKeyword(Keyword.OPERATOR);
     }
+    return _parseOperatorAfterKeyword(
+        commentAndMetadata, externalKeyword, returnType, operatorKeyword);
+  }
+
+  /**
+   * Parse an operator declaration starting after the 'operator' keyword. The
+   * [commentAndMetadata] is the documentation comment and metadata to be
+   * associated with the declaration. The [externalKeyword] is the 'external'
+   * token. The [returnType] is the return type that has already been parsed, or
+   * `null` if there was no return type. The [operatorKeyword] is the 'operator'
+   * keyword. Return the operator declaration that was parsed.
+   *
+   *     operatorDeclaration ::=
+   *         operatorSignature (';' | functionBody)
+   *
+   *     operatorSignature ::=
+   *         'external'? returnType? 'operator' operator formalParameterList
+   */
+  MethodDeclaration _parseOperatorAfterKeyword(
+      CommentAndMetadata commentAndMetadata,
+      Token externalKeyword,
+      TypeName returnType,
+      Token operatorKeyword) {
     if (!_currentToken.isUserDefinableOperator) {
       _reportErrorForCurrentToken(
           ParserErrorCode.NON_USER_DEFINABLE_OPERATOR, [_currentToken.lexeme]);
@@ -7242,21 +7558,27 @@
     TypeName typeComment = _parseOptionalTypeNameComment();
     if (typeComment != null) {
       return typeComment;
-    } else if (_matchesKeyword(Keyword.VOID)) {
-      return parseReturnType();
-    } else if (_matchesIdentifier() &&
-        !_matchesKeyword(Keyword.GET) &&
-        !_matchesKeyword(Keyword.SET) &&
-        !_matchesKeyword(Keyword.OPERATOR) &&
-        (_tokenMatchesIdentifier(_peek()) ||
-            _tokenMatches(_peek(), TokenType.LT))) {
-      return parseReturnType();
-    } else if (_matchesIdentifier() &&
-        _tokenMatches(_peek(), TokenType.PERIOD) &&
-        _tokenMatchesIdentifier(_peekAt(2)) &&
-        (_tokenMatchesIdentifier(_peekAt(3)) ||
-            _tokenMatches(_peekAt(3), TokenType.LT))) {
-      return parseReturnType();
+    }
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.VOID) {
+      return new TypeName(new SimpleIdentifier(getAndAdvance()), null);
+    } else if (_matchesIdentifier()) {
+      Token next = _peek();
+      if (keyword != Keyword.GET &&
+          keyword != Keyword.SET &&
+          keyword != Keyword.OPERATOR &&
+          (_tokenMatchesIdentifier(next) ||
+              _tokenMatches(next, TokenType.LT))) {
+        return parseReturnType();
+      }
+      Token next2 = next.next;
+      Token next3 = next2.next;
+      if (_tokenMatches(next, TokenType.PERIOD) &&
+          _tokenMatchesIdentifier(next2) &&
+          (_tokenMatchesIdentifier(next3) ||
+              _tokenMatches(next3, TokenType.LT))) {
+        return parseReturnType();
+      }
     }
     return null;
   }
@@ -7280,35 +7602,67 @@
   }
 
   /**
+   * Parse a part directive. The [commentAndMetadata] is the metadata to be
+   * associated with the directive. Return the part directive that was parsed.
+   *
+   * This method assumes that the current token matches `Keyword.PART`.
+   *
+   *     partDirective ::=
+   *         metadata 'part' stringLiteral ';'
+   */
+  Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) {
+    Token partKeyword = getAndAdvance();
+    StringLiteral partUri = _parseUri();
+    Token semicolon = _expect(TokenType.SEMICOLON);
+    return new PartDirective(commentAndMetadata.comment,
+        commentAndMetadata.metadata, partKeyword, partUri, semicolon);
+  }
+
+  /**
+   * Parse a part-of directive. The [commentAndMetadata] is the metadata to be
+   * associated with the directive. Return the part-of directive that was
+   * parsed.
+   *
+   * This method assumes that the current token matches [Keyword.PART] and that
+   * the following token matches the identifier 'of'.
+   *
+   *     partOfDirective ::=
+   *         metadata 'part' 'of' identifier ';'
+   */
+  Directive _parsePartOfDirective(CommentAndMetadata commentAndMetadata) {
+    Token partKeyword = getAndAdvance();
+    Token ofKeyword = getAndAdvance();
+    LibraryIdentifier libraryName = _parseLibraryName(
+        ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword);
+    Token semicolon = _expect(TokenType.SEMICOLON);
+    return new PartOfDirective(
+        commentAndMetadata.comment,
+        commentAndMetadata.metadata,
+        partKeyword,
+        ofKeyword,
+        libraryName,
+        semicolon);
+  }
+
+  /**
    * Parse a part or part-of directive. The [commentAndMetadata] is the metadata
    * to be associated with the directive. Return the part or part-of directive
    * that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.PART`.
+   *
    *     partDirective ::=
    *         metadata 'part' stringLiteral ';'
    *
    *     partOfDirective ::=
    *         metadata 'part' 'of' identifier ';'
    */
-  Directive _parsePartDirective(CommentAndMetadata commentAndMetadata) {
-    Token partKeyword = _expectKeyword(Keyword.PART);
-    if (_matchesString(_OF)) {
-      Token ofKeyword = getAndAdvance();
-      LibraryIdentifier libraryName = _parseLibraryName(
-          ParserErrorCode.MISSING_NAME_IN_PART_OF_DIRECTIVE, ofKeyword);
-      Token semicolon = _expect(TokenType.SEMICOLON);
-      return new PartOfDirective(
-          commentAndMetadata.comment,
-          commentAndMetadata.metadata,
-          partKeyword,
-          ofKeyword,
-          libraryName,
-          semicolon);
+  Directive _parsePartOrPartOfDirective(CommentAndMetadata commentAndMetadata) {
+    if (_tokenMatchesString(_peek(), _OF)) {
+      return _parsePartOfDirective(commentAndMetadata);
     }
-    StringLiteral partUri = _parseUri();
-    Token semicolon = _expect(TokenType.SEMICOLON);
-    return new PartDirective(commentAndMetadata.comment,
-        commentAndMetadata.metadata, partKeyword, partUri, semicolon);
+    return _parsePartDirective(commentAndMetadata);
   }
 
   /**
@@ -7324,11 +7678,12 @@
    */
   Expression _parsePostfixExpression() {
     Expression operand = _parseAssignableExpression(true);
-    if (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
-        _matches(TokenType.PERIOD) ||
-        _matches(TokenType.QUESTION_PERIOD) ||
-        _matches(TokenType.OPEN_PAREN) ||
-        (parseGenericMethods && _matches(TokenType.LT))) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.OPEN_SQUARE_BRACKET ||
+        type == TokenType.PERIOD ||
+        type == TokenType.QUESTION_PERIOD ||
+        type == TokenType.OPEN_PAREN ||
+        (parseGenericMethods && type == TokenType.LT)) {
       do {
         if (_isLikelyArgumentList()) {
           TypeArgumentList typeArguments = _parseOptionalTypeArguments();
@@ -7348,10 +7703,11 @@
         } else {
           operand = _parseAssignableSelector(operand, true);
         }
-      } while (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
-          _matches(TokenType.PERIOD) ||
-          _matches(TokenType.QUESTION_PERIOD) ||
-          _matches(TokenType.OPEN_PAREN));
+        type = _currentToken.type;
+      } while (type == TokenType.OPEN_SQUARE_BRACKET ||
+          type == TokenType.PERIOD ||
+          type == TokenType.QUESTION_PERIOD ||
+          type == TokenType.OPEN_PAREN);
       return operand;
     }
     if (!_currentToken.type.isIncrementOperator) {
@@ -7363,6 +7719,37 @@
   }
 
   /**
+   * Parse a prefixed identifier given that the given [qualifier] was already
+   * parsed. Return the prefixed identifier that was parsed.
+   *
+   *     prefixedIdentifier ::=
+   *         identifier ('.' identifier)?
+   */
+  Identifier _parsePrefixedIdentifierAfterIdentifier(
+      SimpleIdentifier qualifier) {
+    if (!_matches(TokenType.PERIOD) || _injectGenericCommentTypeList()) {
+      return qualifier;
+    }
+    Token period = getAndAdvance();
+    SimpleIdentifier qualified = parseSimpleIdentifier();
+    return new PrefixedIdentifier(qualifier, period, qualified);
+  }
+
+  /**
+   * Parse a prefixed identifier. Return the prefixed identifier that was
+   * parsed.
+   *
+   * This method assumes that the current token matches an identifier.
+   *
+   *     prefixedIdentifier ::=
+   *         identifier ('.' identifier)?
+   */
+  Identifier _parsePrefixedIdentifierUnchecked() {
+    return _parsePrefixedIdentifierAfterIdentifier(
+        _parseSimpleIdentifierUnchecked());
+  }
+
+  /**
    * Parse a primary expression. Return the primary expression that was parsed.
    *
    *     primary ::=
@@ -7386,39 +7773,24 @@
    *       | listLiteral
    */
   Expression _parsePrimaryExpression() {
-    if (_matchesKeyword(Keyword.THIS)) {
-      return new ThisExpression(getAndAdvance());
-    } else if (_matchesKeyword(Keyword.SUPER)) {
-      // TODO(paulberry): verify with Gilad that "super" must be followed by
-      // unconditionalAssignableSelector in this case.
-      return _parseAssignableSelector(
-          new SuperExpression(getAndAdvance()), false,
-          allowConditional: false);
-    } else if (_matchesKeyword(Keyword.NULL)) {
-      return new NullLiteral(getAndAdvance());
-    } else if (_matchesKeyword(Keyword.FALSE)) {
-      return new BooleanLiteral(getAndAdvance(), false);
-    } else if (_matchesKeyword(Keyword.TRUE)) {
-      return new BooleanLiteral(getAndAdvance(), true);
-    } else if (_matches(TokenType.DOUBLE)) {
-      Token token = getAndAdvance();
-      double value = 0.0;
-      try {
-        value = double.parse(token.lexeme);
-      } on FormatException {
-        // The invalid format should have been reported by the scanner.
-      }
-      return new DoubleLiteral(token, value);
-    } else if (_matches(TokenType.HEXADECIMAL)) {
-      Token token = getAndAdvance();
-      int value = null;
-      try {
-        value = int.parse(token.lexeme.substring(2), radix: 16);
-      } on FormatException {
-        // The invalid format should have been reported by the scanner.
-      }
-      return new IntegerLiteral(token, value);
-    } else if (_matches(TokenType.INT)) {
+    if (_matchesIdentifier()) {
+      // TODO(brianwilkerson) The code below was an attempt to recover from an
+      // error case, but it needs to be applied as a recovery only after we
+      // know that parsing it as an identifier doesn't work. Leaving the code as
+      // a reminder of how to recover.
+//      if (isFunctionExpression(_peek())) {
+//        //
+//        // Function expressions were allowed to have names at one point, but this is now illegal.
+//        //
+//        reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdvance());
+//        return parseFunctionExpression();
+//      }
+      return _parsePrefixedIdentifierUnchecked();
+    }
+    TokenType type = _currentToken.type;
+    if (type == TokenType.STRING) {
+      return parseStringLiteral();
+    } else if (type == TokenType.INT) {
       Token token = getAndAdvance();
       int value = null;
       try {
@@ -7427,26 +7799,46 @@
         // The invalid format should have been reported by the scanner.
       }
       return new IntegerLiteral(token, value);
-    } else if (_matches(TokenType.STRING)) {
-      return parseStringLiteral();
-    } else if (_matchesIdentifier()) {
-      // TODO(brianwilkerson) The code below was an attempt to recover from an
-      // error case, but it needs to be applied as a recovery only after we
-      // know that parsing it as an identifier doesn't work. Leaving the code as
-      // a reminder of how to recover.
-//            if (isFunctionExpression(peek())) {
-//              //
-//              // Function expressions were allowed to have names at one point, but this is now illegal.
-//              //
-//              reportError(ParserErrorCode.NAMED_FUNCTION_EXPRESSION, getAndAdvance());
-//              return parseFunctionExpression();
-//            }
-      return parsePrefixedIdentifier();
-    } else if (_matchesKeyword(Keyword.NEW)) {
+    }
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.NULL) {
+      return new NullLiteral(getAndAdvance());
+    } else if (keyword == Keyword.NEW) {
       return _parseNewExpression();
-    } else if (_matchesKeyword(Keyword.CONST)) {
+    } else if (keyword == Keyword.THIS) {
+      return new ThisExpression(getAndAdvance());
+    } else if (keyword == Keyword.SUPER) {
+      // TODO(paulberry): verify with Gilad that "super" must be followed by
+      // unconditionalAssignableSelector in this case.
+      return _parseAssignableSelector(
+          new SuperExpression(getAndAdvance()), false,
+          allowConditional: false);
+    } else if (keyword == Keyword.FALSE) {
+      return new BooleanLiteral(getAndAdvance(), false);
+    } else if (keyword == Keyword.TRUE) {
+      return new BooleanLiteral(getAndAdvance(), true);
+    }
+    if (type == TokenType.DOUBLE) {
+      Token token = getAndAdvance();
+      double value = 0.0;
+      try {
+        value = double.parse(token.lexeme);
+      } on FormatException {
+        // The invalid format should have been reported by the scanner.
+      }
+      return new DoubleLiteral(token, value);
+    } else if (type == TokenType.HEXADECIMAL) {
+      Token token = getAndAdvance();
+      int value = null;
+      try {
+        value = int.parse(token.lexeme.substring(2), radix: 16);
+      } on FormatException {
+        // The invalid format should have been reported by the scanner.
+      }
+      return new IntegerLiteral(token, value);
+    } else if (keyword == Keyword.CONST) {
       return _parseConstExpression();
-    } else if (_matches(TokenType.OPEN_PAREN)) {
+    } else if (type == TokenType.OPEN_PAREN) {
       if (_isFunctionExpression(_currentToken)) {
         return parseFunctionExpression();
       }
@@ -7461,20 +7853,20 @@
       } finally {
         _inInitializer = wasInInitializer;
       }
-    } else if (_matches(TokenType.LT) || _injectGenericCommentTypeList()) {
+    } else if (type == TokenType.LT || _injectGenericCommentTypeList()) {
       return _parseListOrMapLiteral(null);
-    } else if (_matches(TokenType.OPEN_CURLY_BRACKET)) {
+    } else if (type == TokenType.OPEN_CURLY_BRACKET) {
       return _parseMapLiteral(null, null);
-    } else if (_matches(TokenType.OPEN_SQUARE_BRACKET) ||
-        _matches(TokenType.INDEX)) {
+    } else if (type == TokenType.OPEN_SQUARE_BRACKET ||
+        type == TokenType.INDEX) {
       return _parseListLiteral(null, null);
-    } else if (_matches(TokenType.QUESTION) &&
+    } else if (type == TokenType.QUESTION &&
         _tokenMatches(_peek(), TokenType.IDENTIFIER)) {
       _reportErrorForCurrentToken(
           ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]);
       _advance();
       return _parsePrimaryExpression();
-    } else if (_matchesKeyword(Keyword.VOID)) {
+    } else if (keyword == Keyword.VOID) {
       //
       // Recover from having a return type of "void" where a return type is not
       // expected.
@@ -7484,7 +7876,7 @@
           ParserErrorCode.UNEXPECTED_TOKEN, [_currentToken.lexeme]);
       _advance();
       return _parsePrimaryExpression();
-    } else if (_matches(TokenType.HASH)) {
+    } else if (type == TokenType.HASH) {
       return _parseSymbolLiteral();
     } else {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
@@ -7493,21 +7885,31 @@
   }
 
   /**
-   * Parse a redirecting constructor invocation. Return the redirecting
+   * Parse a redirecting constructor invocation. The flag [hasPeriod] should be
+   * `true` if `this` is followed by a period. Return the redirecting
    * constructor invocation that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.THIS`.
+   *
    *     redirectingConstructorInvocation ::=
    *         'this' ('.' identifier)? arguments
    */
-  RedirectingConstructorInvocation _parseRedirectingConstructorInvocation() {
-    Token keyword = _expectKeyword(Keyword.THIS);
+  RedirectingConstructorInvocation _parseRedirectingConstructorInvocation(
+      bool hasPeriod) {
+    Token keyword = getAndAdvance();
     Token period = null;
     SimpleIdentifier constructorName = null;
-    if (_matches(TokenType.PERIOD)) {
+    if (hasPeriod) {
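+      // The caller has already seen the period, so consume it and parse the
+      // constructor name that should follow it.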
       period = getAndAdvance();
-      constructorName = parseSimpleIdentifier();
+      if (_matchesIdentifier()) {
+        constructorName = _parseSimpleIdentifierUnchecked(isDeclaration: false);
+      } else {
+        _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
+        constructorName = _createSyntheticIdentifier(isDeclaration: false);
+        _advance();
+      }
     }
-    ArgumentList argumentList = parseArgumentList();
+    ArgumentList argumentList = _parseArgumentListChecked();
     return new RedirectingConstructorInvocation(
         keyword, period, constructorName, argumentList);
   }
@@ -7521,29 +7923,29 @@
    *       | 'super' relationalOperator bitwiseOrExpression
    */
   Expression _parseRelationalExpression() {
-    if (_matchesKeyword(Keyword.SUPER) &&
+    if (_currentToken.keyword == Keyword.SUPER &&
         _currentToken.next.type.isRelationalOperator) {
       Expression expression = new SuperExpression(getAndAdvance());
       Token operator = getAndAdvance();
-      expression = new BinaryExpression(
+      return new BinaryExpression(
           expression, operator, parseBitwiseOrExpression());
-      return expression;
     }
     Expression expression = parseBitwiseOrExpression();
-    if (_matchesKeyword(Keyword.AS)) {
+    Keyword keyword = _currentToken.keyword;
+    if (keyword == Keyword.AS) {
       Token asOperator = getAndAdvance();
-      expression = new AsExpression(expression, asOperator, parseTypeName());
-    } else if (_matchesKeyword(Keyword.IS)) {
+      return new AsExpression(expression, asOperator, parseTypeName());
+    } else if (keyword == Keyword.IS) {
       Token isOperator = getAndAdvance();
       Token notOperator = null;
       if (_matches(TokenType.BANG)) {
         notOperator = getAndAdvance();
       }
-      expression = new IsExpression(
+      return new IsExpression(
           expression, isOperator, notOperator, parseTypeName());
     } else if (_currentToken.type.isRelationalOperator) {
       Token operator = getAndAdvance();
-      expression = new BinaryExpression(
+      return new BinaryExpression(
           expression, operator, parseBitwiseOrExpression());
     }
     return expression;
@@ -7552,20 +7954,24 @@
   /**
    * Parse a rethrow expression. Return the rethrow expression that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.RETHROW`.
+   *
    *     rethrowExpression ::=
    *         'rethrow'
    */
   Expression _parseRethrowExpression() =>
-      new RethrowExpression(_expectKeyword(Keyword.RETHROW));
+      new RethrowExpression(getAndAdvance());
 
   /**
    * Parse a return statement. Return the return statement that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.RETURN`.
+   *
    *     returnStatement ::=
    *         'return' expression? ';'
    */
   Statement _parseReturnStatement() {
-    Token returnKeyword = _expectKeyword(Keyword.RETURN);
+    Token returnKeyword = getAndAdvance();
     if (_matches(TokenType.SEMICOLON)) {
       return new ReturnStatement(returnKeyword, null, getAndAdvance());
     }
@@ -7582,6 +7988,8 @@
    * already been parsed, or `null` if there was no return type. Return the
    * setter that was parsed.
    *
+   * This method assumes that the current token matches `Keyword.SET`.
+   *
    *     setter ::=
    *         setterSignature functionBody?
    *
@@ -7590,7 +7998,7 @@
    */
   MethodDeclaration _parseSetter(CommentAndMetadata commentAndMetadata,
       Token externalKeyword, Token staticKeyword, TypeName returnType) {
-    Token propertyKeyword = _expectKeyword(Keyword.SET);
+    Token propertyKeyword = getAndAdvance();
     SimpleIdentifier name = parseSimpleIdentifier(isDeclaration: true);
     FormalParameterList parameters = parseFormalParameterList();
     _validateFormalParameterList(parameters);
@@ -7624,21 +8032,39 @@
    */
   Expression _parseShiftExpression() {
     Expression expression;
-    if (_matchesKeyword(Keyword.SUPER) &&
+    if (_currentToken.keyword == Keyword.SUPER &&
         _currentToken.next.type.isShiftOperator) {
       expression = new SuperExpression(getAndAdvance());
     } else {
       expression = _parseAdditiveExpression();
     }
     while (_currentToken.type.isShiftOperator) {
-      Token operator = getAndAdvance();
       expression = new BinaryExpression(
-          expression, operator, _parseAdditiveExpression());
+          expression, getAndAdvance(), _parseAdditiveExpression());
     }
     return expression;
   }
 
   /**
+   * Parse a simple identifier. Return the simple identifier that was parsed.
+   *
+   * This method assumes that the current token matches an identifier.
+   *
+   *     identifier ::=
+   *         IDENTIFIER
+   */
+  SimpleIdentifier _parseSimpleIdentifierUnchecked(
+      {bool isDeclaration: false}) {
+    String lexeme = _currentToken.lexeme;
+    if ((_inAsync || _inGenerator) &&
+        (lexeme == ASYNC || lexeme == _AWAIT || lexeme == _YIELD)) {
+      _reportErrorForCurrentToken(
+          ParserErrorCode.ASYNC_KEYWORD_USED_AS_IDENTIFIER);
+    }
+    return new SimpleIdentifier(getAndAdvance(), isDeclaration: isDeclaration);
+  }
+
+  /**
    * Parse a list of statements within a switch statement. Return the statements
    * that were parsed.
    *
@@ -7646,10 +8072,11 @@
    *         statement*
    */
   List<Statement> _parseStatementList() {
-    List<Statement> statements = new List<Statement>();
+    List<Statement> statements = <Statement>[];
     Token statementStart = _currentToken;
-    while (!_matches(TokenType.EOF) &&
-        !_matches(TokenType.CLOSE_CURLY_BRACKET) &&
+    TokenType type = _currentToken.type;
+    while (type != TokenType.EOF &&
+        type != TokenType.CLOSE_CURLY_BRACKET &&
         !_isSwitchMember()) {
       statements.add(parseStatement2());
       if (identical(_currentToken, statementStart)) {
@@ -7658,6 +8085,7 @@
         _advance();
       }
       statementStart = _currentToken;
+      type = _currentToken.type;
     }
     return statements;
   }
@@ -7665,15 +8093,20 @@
   /**
    * Parse a string literal that contains interpolations. Return the string
    * literal that was parsed.
+   *
+   * This method assumes that the current token matches either
+   * [TokenType.STRING_INTERPOLATION_EXPRESSION] or
+   * [TokenType.STRING_INTERPOLATION_IDENTIFIER].
    */
   StringInterpolation _parseStringInterpolation(Token string) {
-    List<InterpolationElement> elements = new List<InterpolationElement>();
-    bool hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
-        _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER);
-    elements.add(new InterpolationString(
-        string, _computeStringValue(string.lexeme, true, !hasMore)));
+    List<InterpolationElement> elements = <InterpolationElement>[
+      new InterpolationString(
+          string, _computeStringValue(string.lexeme, true, false))
+    ];
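+    // This method is only invoked when the current token begins an
+    // interpolation, so there is always at least one more element to parse.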
+    bool hasMore = true;
+    bool isExpression = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION);
     while (hasMore) {
-      if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION)) {
+      if (isExpression) {
         Token openToken = getAndAdvance();
         bool wasInInitializer = _inInitializer;
         _inInitializer = false;
@@ -7697,8 +8130,9 @@
       }
       if (_matches(TokenType.STRING)) {
         string = getAndAdvance();
-        hasMore = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
-            _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER);
+        isExpression = _matches(TokenType.STRING_INTERPOLATION_EXPRESSION);
+        hasMore =
+            isExpression || _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER);
         elements.add(new InterpolationString(
             string, _computeStringValue(string.lexeme, false, !hasMore)));
       } else {
@@ -7709,21 +8143,47 @@
   }
 
   /**
+   * Parse a string literal. Return the string literal that was parsed.
+   *
+   * This method assumes that the current token matches `TokenType.STRING`.
+   *
+   *     stringLiteral ::=
+   *         MULTI_LINE_STRING+
+   *       | SINGLE_LINE_STRING+
+   */
+  StringLiteral _parseStringLiteralUnchecked() {
+    List<StringLiteral> strings = <StringLiteral>[];
+    do {
+      Token string = getAndAdvance();
+      if (_matches(TokenType.STRING_INTERPOLATION_EXPRESSION) ||
+          _matches(TokenType.STRING_INTERPOLATION_IDENTIFIER)) {
+        strings.add(_parseStringInterpolation(string));
+      } else {
+        strings.add(new SimpleStringLiteral(
+            string, _computeStringValue(string.lexeme, true, true)));
+      }
+    } while (_matches(TokenType.STRING));
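+    // Adjacent string tokens are combined into a single AdjacentStrings node.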
+    return strings.length == 1 ? strings[0] : new AdjacentStrings(strings);
+  }
+
+  /**
    * Parse a super constructor invocation. Return the super constructor
    * invocation that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.SUPER].
+   *
    *     superConstructorInvocation ::=
    *         'super' ('.' identifier)? arguments
    */
   SuperConstructorInvocation _parseSuperConstructorInvocation() {
-    Token keyword = _expectKeyword(Keyword.SUPER);
+    Token keyword = getAndAdvance();
     Token period = null;
     SimpleIdentifier constructorName = null;
     if (_matches(TokenType.PERIOD)) {
       period = getAndAdvance();
       constructorName = parseSimpleIdentifier();
     }
-    ArgumentList argumentList = parseArgumentList();
+    ArgumentList argumentList = _parseArgumentListChecked();
     return new SuperConstructorInvocation(
         keyword, period, constructorName, argumentList);
   }
@@ -7751,14 +8211,14 @@
       Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
       Token leftBracket = _expect(TokenType.OPEN_CURLY_BRACKET);
       Token defaultKeyword = null;
-      List<SwitchMember> members = new List<SwitchMember>();
-      while (!_matches(TokenType.EOF) &&
-          !_matches(TokenType.CLOSE_CURLY_BRACKET)) {
-        List<Label> labels = new List<Label>();
+      List<SwitchMember> members = <SwitchMember>[];
+      TokenType type = _currentToken.type;
+      while (type != TokenType.EOF && type != TokenType.CLOSE_CURLY_BRACKET) {
+        List<Label> labels = <Label>[];
         while (
             _matchesIdentifier() && _tokenMatches(_peek(), TokenType.COLON)) {
           SimpleIdentifier identifier =
-              parseSimpleIdentifier(isDeclaration: true);
+              _parseSimpleIdentifierUnchecked(isDeclaration: true);
           String label = identifier.token.lexeme;
           if (definedLabels.contains(label)) {
             _reportErrorForToken(
@@ -7768,10 +8228,11 @@
           } else {
             definedLabels.add(label);
           }
-          Token colon = _expect(TokenType.COLON);
+          Token colon = getAndAdvance();
           labels.add(new Label(identifier, colon));
         }
-        if (_matchesKeyword(Keyword.CASE)) {
+        Keyword keyword = _currentToken.keyword;
+        if (keyword == Keyword.CASE) {
           Token caseKeyword = getAndAdvance();
           Expression caseExpression = parseExpression2();
           Token colon = _expect(TokenType.COLON);
@@ -7782,7 +8243,7 @@
                 ParserErrorCode.SWITCH_HAS_CASE_AFTER_DEFAULT_CASE,
                 caseKeyword);
           }
-        } else if (_matchesKeyword(Keyword.DEFAULT)) {
+        } else if (keyword == Keyword.DEFAULT) {
           if (defaultKeyword != null) {
             _reportErrorForToken(
                 ParserErrorCode.SWITCH_HAS_MULTIPLE_DEFAULT_CASES, _peek());
@@ -7795,13 +8256,20 @@
           // We need to advance, otherwise we could end up in an infinite loop,
           // but this could be a lot smarter about recovering from the error.
           _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_CASE_OR_DEFAULT);
-          while (!_matches(TokenType.EOF) &&
-              !_matches(TokenType.CLOSE_CURLY_BRACKET) &&
-              !_matchesKeyword(Keyword.CASE) &&
-              !_matchesKeyword(Keyword.DEFAULT)) {
+          bool atEndOrNextMember() {
+            TokenType type = _currentToken.type;
+            if (type == TokenType.EOF ||
+                type == TokenType.CLOSE_CURLY_BRACKET) {
+              return true;
+            }
+            Keyword keyword = _currentToken.keyword;
+            return keyword == Keyword.CASE || keyword == Keyword.DEFAULT;
+          }
+          while (!atEndOrNextMember()) {
             _advance();
           }
         }
+        type = _currentToken.type;
       }
       Token rightBracket = _expect(TokenType.CLOSE_CURLY_BRACKET);
       return new SwitchStatement(keyword, leftParenthesis, expression,
@@ -7814,16 +8282,17 @@
   /**
    * Parse a symbol literal. Return the symbol literal that was parsed.
    *
+   * This method assumes that the current token matches [TokenType.HASH].
+   *
    *     symbolLiteral ::=
    *         '#' identifier ('.' identifier)*
    */
   SymbolLiteral _parseSymbolLiteral() {
     Token poundSign = getAndAdvance();
-    List<Token> components = new List<Token>();
+    List<Token> components = <Token>[];
     if (_matchesIdentifier()) {
       components.add(getAndAdvance());
-      while (_matches(TokenType.PERIOD)) {
-        _advance();
+      while (_optional(TokenType.PERIOD)) {
         if (_matchesIdentifier()) {
           components.add(getAndAdvance());
         } else {
@@ -7834,7 +8303,7 @@
       }
     } else if (_currentToken.isOperator) {
       components.add(getAndAdvance());
-    } else if (_tokenMatchesKeyword(_currentToken, Keyword.VOID)) {
+    } else if (_matchesKeyword(Keyword.VOID)) {
       components.add(getAndAdvance());
     } else {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
@@ -7846,12 +8315,15 @@
   /**
    * Parse a throw expression. Return the throw expression that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.THROW].
+   *
    *     throwExpression ::=
    *         'throw' expression
    */
   Expression _parseThrowExpression() {
-    Token keyword = _expectKeyword(Keyword.THROW);
-    if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) {
+    Token keyword = getAndAdvance();
+    TokenType type = _currentToken.type;
+    if (type == TokenType.SEMICOLON || type == TokenType.CLOSE_PAREN) {
       _reportErrorForToken(
           ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken);
       return new ThrowExpression(keyword, _createSyntheticIdentifier());
@@ -7863,12 +8335,15 @@
   /**
    * Parse a throw expression. Return the throw expression that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.THROW].
+   *
    *     throwExpressionWithoutCascade ::=
    *         'throw' expressionWithoutCascade
    */
   Expression _parseThrowExpressionWithoutCascade() {
-    Token keyword = _expectKeyword(Keyword.THROW);
-    if (_matches(TokenType.SEMICOLON) || _matches(TokenType.CLOSE_PAREN)) {
+    Token keyword = getAndAdvance();
+    TokenType type = _currentToken.type;
+    if (type == TokenType.SEMICOLON || type == TokenType.CLOSE_PAREN) {
       _reportErrorForToken(
           ParserErrorCode.MISSING_EXPRESSION_IN_THROW, _currentToken);
       return new ThrowExpression(keyword, _createSyntheticIdentifier());
@@ -7880,6 +8355,8 @@
   /**
    * Parse a try statement. Return the try statement that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.TRY].
+   *
    *     tryStatement ::=
    *         'try' block (onPart+ finallyPart? | finallyPart)
    *
@@ -7894,9 +8371,9 @@
    *         'finally' block
    */
   Statement _parseTryStatement() {
-    Token tryKeyword = _expectKeyword(Keyword.TRY);
-    Block body = parseBlock();
-    List<CatchClause> catchClauses = new List<CatchClause>();
+    Token tryKeyword = getAndAdvance();
+    Block body = _parseBlockChecked();
+    List<CatchClause> catchClauses = <CatchClause>[];
     Block finallyClause = null;
     while (_matchesString(_ON) || _matchesKeyword(Keyword.CATCH)) {
       Token onKeyword = null;
@@ -7921,7 +8398,7 @@
         }
         rightParenthesis = _expect(TokenType.CLOSE_PAREN);
       }
-      Block catchBody = parseBlock();
+      Block catchBody = _parseBlockChecked();
       catchClauses.add(new CatchClause(
           onKeyword,
           exceptionType,
@@ -7936,11 +8413,9 @@
     Token finallyKeyword = null;
     if (_matchesKeyword(Keyword.FINALLY)) {
       finallyKeyword = getAndAdvance();
-      finallyClause = parseBlock();
-    } else {
-      if (catchClauses.isEmpty) {
-        _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY);
-      }
+      finallyClause = _parseBlockChecked();
+    } else if (catchClauses.isEmpty) {
+      _reportErrorForCurrentToken(ParserErrorCode.MISSING_CATCH_OR_FINALLY);
     }
     return new TryStatement(
         tryKeyword, body, catchClauses, finallyKeyword, finallyClause);
@@ -7950,6 +8425,8 @@
    * Parse a type alias. The [commentAndMetadata] is the metadata to be
    * associated with the member. Return the type alias that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.TYPEDEF].
+   *
    *     typeAlias ::=
    *         'typedef' typeAliasBody
    *
@@ -7970,7 +8447,7 @@
    *         returnType? name
    */
   TypeAlias _parseTypeAlias(CommentAndMetadata commentAndMetadata) {
-    Token keyword = _expectKeyword(Keyword.TYPEDEF);
+    Token keyword = getAndAdvance();
     if (_matchesIdentifier()) {
       Token next = _peek();
       if (_tokenMatches(next, TokenType.LT)) {
@@ -7995,11 +8472,11 @@
 
   TypeName _parseTypeName() {
     Identifier typeName;
-    if (_matchesKeyword(Keyword.VAR)) {
+    if (_matchesIdentifier()) {
+      typeName = _parsePrefixedIdentifierUnchecked();
+    } else if (_matchesKeyword(Keyword.VAR)) {
       _reportErrorForCurrentToken(ParserErrorCode.VAR_AS_TYPE_NAME);
       typeName = new SimpleIdentifier(getAndAdvance());
-    } else if (_matchesIdentifier()) {
-      typeName = parsePrefixedIdentifier();
     } else {
       typeName = _createSyntheticIdentifier();
       _reportErrorForCurrentToken(ParserErrorCode.EXPECTED_TYPE_NAME);
@@ -8009,6 +8486,25 @@
   }
 
   /**
+   * Parse a type name. Return the type name that was parsed.
+   *
+   * This method assumes that the current token is an identifier.
+   *
+   *     type ::=
+   *         qualified typeArguments?
+   */
+  TypeName _parseTypeNameAfterIdentifier() {
+    Identifier typeName = _parsePrefixedIdentifierUnchecked();
+    TypeArgumentList typeArguments = _parseOptionalTypeArguments();
+    // If this is followed by a generic method type comment, allow the comment
+    // type to replace the real type name.
+    // TODO(jmesserly): this feels like a big hammer. Can we restrict it to
+    // only work inside generic methods?
+    TypeName typeFromComment = _parseOptionalTypeNameComment();
+    return typeFromComment ?? new TypeName(typeName, typeArguments);
+  }
+
+  /**
    * Parse a unary expression. Return the unary expression that was parsed.
    *
    *     unaryExpression ::=
@@ -8020,13 +8516,15 @@
    *       | incrementOperator assignableExpression
    */
   Expression _parseUnaryExpression() {
-    if (_matches(TokenType.MINUS) ||
-        _matches(TokenType.BANG) ||
-        _matches(TokenType.TILDE)) {
+    TokenType type = _currentToken.type;
+    if (type == TokenType.MINUS ||
+        type == TokenType.BANG ||
+        type == TokenType.TILDE) {
       Token operator = getAndAdvance();
       if (_matchesKeyword(Keyword.SUPER)) {
-        if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) ||
-            _tokenMatches(_peek(), TokenType.PERIOD)) {
+        TokenType nextType = _peek().type;
+        if (nextType == TokenType.OPEN_SQUARE_BRACKET ||
+            nextType == TokenType.PERIOD) {
           //     "prefixOperator unaryExpression"
           // --> "prefixOperator postfixExpression"
           // --> "prefixOperator primary                    selector*"
@@ -8040,8 +8538,9 @@
     } else if (_currentToken.type.isIncrementOperator) {
       Token operator = getAndAdvance();
       if (_matchesKeyword(Keyword.SUPER)) {
-        if (_tokenMatches(_peek(), TokenType.OPEN_SQUARE_BRACKET) ||
-            _tokenMatches(_peek(), TokenType.PERIOD)) {
+        TokenType nextType = _peek().type;
+        if (nextType == TokenType.OPEN_SQUARE_BRACKET ||
+            nextType == TokenType.PERIOD) {
           // --> "prefixOperator 'super' assignableSelector selector*"
           return new PrefixExpression(operator, _parseUnaryExpression());
         }
@@ -8052,7 +8551,7 @@
         // we cannot do the same for "++super" because "+super" is also not
         // valid.
         //
-        if (operator.type == TokenType.MINUS_MINUS) {
+        if (type == TokenType.MINUS_MINUS) {
           Token firstOperator = _createToken(operator, TokenType.MINUS);
           Token secondOperator =
               new Token(TokenType.MINUS, operator.offset + 1);
@@ -8063,16 +8562,16 @@
               firstOperator,
               new PrefixExpression(
                   secondOperator, new SuperExpression(getAndAdvance())));
-        } else {
-          // Invalid operator before 'super'
-          _reportErrorForCurrentToken(
-              ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]);
-          return new PrefixExpression(
-              operator, new SuperExpression(getAndAdvance()));
         }
+        // Invalid operator before 'super'
+        _reportErrorForCurrentToken(
+            ParserErrorCode.INVALID_OPERATOR_FOR_SUPER, [operator.lexeme]);
+        return new PrefixExpression(
+            operator, new SuperExpression(getAndAdvance()));
       }
-      return new PrefixExpression(operator, _parseAssignableExpression(false));
-    } else if (_matches(TokenType.PLUS)) {
+      return new PrefixExpression(
+          operator, _parseAssignableExpressionNotStartingWithSuper(false));
+    } else if (type == TokenType.PLUS) {
       _reportErrorForCurrentToken(ParserErrorCode.MISSING_IDENTIFIER);
       return _createSyntheticIdentifier();
     } else if (_inAsync && _matchesString(_AWAIT)) {
@@ -8086,28 +8585,35 @@
    * was parsed.
    */
   StringLiteral _parseUri() {
-    bool iskeywordAfterUri(Token token) =>
+    // TODO(brianwilkerson) Should this function also return true for valid
+    // top-level keywords?
+    bool isKeywordAfterUri(Token token) =>
         token.lexeme == Keyword.AS.syntax ||
         token.lexeme == _HIDE ||
         token.lexeme == _SHOW;
-    if (!_matches(TokenType.STRING) &&
-        !_matches(TokenType.SEMICOLON) &&
-        !iskeywordAfterUri(_currentToken)) {
+    TokenType type = _currentToken.type;
+    if (type != TokenType.STRING &&
+        type != TokenType.SEMICOLON &&
+        !isKeywordAfterUri(_currentToken)) {
       // Attempt to recover in the case where the URI was not enclosed in
       // quotes.
       Token token = _currentToken;
-      while ((_tokenMatchesIdentifier(token) && !iskeywordAfterUri(token)) ||
-          _tokenMatches(token, TokenType.COLON) ||
-          _tokenMatches(token, TokenType.SLASH) ||
-          _tokenMatches(token, TokenType.PERIOD) ||
-          _tokenMatches(token, TokenType.PERIOD_PERIOD) ||
-          _tokenMatches(token, TokenType.PERIOD_PERIOD_PERIOD) ||
-          _tokenMatches(token, TokenType.INT) ||
-          _tokenMatches(token, TokenType.DOUBLE)) {
+      bool isValidInUri(Token token) {
+        TokenType type = token.type;
+        return type == TokenType.COLON ||
+            type == TokenType.SLASH ||
+            type == TokenType.PERIOD ||
+            type == TokenType.PERIOD_PERIOD ||
+            type == TokenType.PERIOD_PERIOD_PERIOD ||
+            type == TokenType.INT ||
+            type == TokenType.DOUBLE;
+      }
+      while ((_tokenMatchesIdentifier(token) && !isKeywordAfterUri(token)) ||
+          isValidInUri(token)) {
         token = token.next;
       }
       if (_tokenMatches(token, TokenType.SEMICOLON) ||
-          iskeywordAfterUri(token)) {
+          isKeywordAfterUri(token)) {
         Token endToken = token.previous;
         token = _currentToken;
         int endOffset = token.end;
@@ -8193,10 +8699,10 @@
         _tokenMatchesKeyword(keyword, Keyword.VAR)) {
       _reportErrorForToken(ParserErrorCode.VAR_AND_TYPE, keyword);
     }
-    List<VariableDeclaration> variables = new List<VariableDeclaration>();
-    variables.add(_parseVariableDeclaration());
-    while (_matches(TokenType.COMMA)) {
-      _advance();
+    List<VariableDeclaration> variables = <VariableDeclaration>[
+      _parseVariableDeclaration()
+    ];
+    while (_optional(TokenType.COMMA)) {
       variables.add(_parseVariableDeclaration());
     }
     return new VariableDeclarationList(commentAndMetadata?.comment,
@@ -8250,6 +8756,8 @@
   /**
    * Parse a while statement. Return the while statement that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.WHILE].
+   *
    *     whileStatement ::=
    *         'while' '(' expression ')' statement
    */
@@ -8257,7 +8765,7 @@
     bool wasInLoop = _inLoop;
     _inLoop = true;
     try {
-      Token keyword = _expectKeyword(Keyword.WHILE);
+      Token keyword = getAndAdvance();
       Token leftParenthesis = _expect(TokenType.OPEN_PAREN);
       Expression condition = parseExpression2();
       Token rightParenthesis = _expect(TokenType.CLOSE_PAREN);
@@ -8272,6 +8780,8 @@
   /**
    * Parse a yield statement. Return the yield statement that was parsed.
    *
+   * This method assumes that the current token matches [Keyword.YIELD].
+   *
    *     yieldStatement ::=
    *         'yield' '*'? expression ';'
    */
@@ -8415,8 +8925,8 @@
    *   | type
    */
   Token _skipFinalConstVarOrType(Token startToken) {
-    if (_tokenMatchesKeyword(startToken, Keyword.FINAL) ||
-        _tokenMatchesKeyword(startToken, Keyword.CONST)) {
+    Keyword keyword = startToken.keyword;
+    if (keyword == Keyword.FINAL || keyword == Keyword.CONST) {
       Token next = startToken.next;
       if (_tokenMatchesIdentifier(next)) {
         Token next2 = next.next;
@@ -8429,7 +8939,7 @@
         // "parameter"
         return next;
       }
-    } else if (_tokenMatchesKeyword(startToken, Keyword.VAR)) {
+    } else if (keyword == Keyword.VAR) {
       return startToken.next;
     } else if (_tokenMatchesIdentifier(startToken)) {
       Token next = startToken.next;
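Taken together, the parser changes above follow one pattern: the dispatching caller reads `_currentToken.type` or `_currentToken.keyword` once, and each specialized `_parse*` method documents that precondition and consumes the token with `getAndAdvance()` instead of re-validating it via `_expect`/`_expectKeyword`. A minimal, self-contained sketch of that pattern (simplified types, not the analyzer's real classes):

    class MiniParser {
      final List<String> _tokens;
      int _index = 0;

      MiniParser(this._tokens);

      String get _current => _tokens[_index];

      String getAndAdvance() => _tokens[_index++];

      String parseStatement() {
        // Read the token once and branch on its value, instead of calling a
        // matcher helper for every alternative.
        String keyword = _current;
        if (keyword == 'while') {
          return _parseWhileStatement(); // precondition: current token is 'while'
        }
        return 'unsupported statement';
      }

      // Assumes that the current token is 'while' (already checked by the
      // caller), so it can be consumed directly instead of re-validated.
      String _parseWhileStatement() {
        String keyword = getAndAdvance();
        return 'parsed a $keyword statement';
      }
    }

    void main() {
      print(new MiniParser(['while', '(', 'true', ')']).parseStatement());
    }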
diff --git a/pkg/analyzer/lib/src/generated/resolver.dart b/pkg/analyzer/lib/src/generated/resolver.dart
index cf2ced8..15c41d6 100644
--- a/pkg/analyzer/lib/src/generated/resolver.dart
+++ b/pkg/analyzer/lib/src/generated/resolver.dart
@@ -30,8 +30,6 @@
 import 'package:analyzer/src/generated/static_type_analyzer.dart';
 import 'package:analyzer/src/generated/type_system.dart';
 import 'package:analyzer/src/generated/utilities_dart.dart';
-import 'package:analyzer/src/task/strong/info.dart'
-    show InferredType, StaticInfo;
 
 export 'package:analyzer/src/dart/resolver/inheritance_manager.dart';
 export 'package:analyzer/src/dart/resolver/scope.dart';
@@ -93,6 +91,20 @@
         _typeSystem = typeSystem ?? new TypeSystemImpl();
 
   @override
+  Object visitAnnotation(Annotation node) {
+    if (node.elementAnnotation?.isFactory == true) {
+      AstNode parent = node.parent;
+      if (parent is MethodDeclaration) {
+        _checkForInvalidFactory(parent);
+      } else {
+        _errorReporter
+            .reportErrorForNode(HintCode.INVALID_FACTORY_ANNOTATION, node, []);
+      }
+    }
+    return super.visitAnnotation(node);
+  }
+
+  @override
   Object visitArgumentList(ArgumentList node) {
     for (Expression argument in node.arguments) {
       ParameterElement parameter = argument.bestParameterElement;
@@ -262,7 +274,6 @@
   @override
   Object visitMethodInvocation(MethodInvocation node) {
     _checkForCanBeNullAfterNullAware(node.realTarget, node.operator);
-    _checkForInvalidProtectedMethodCalls(node);
     DartType staticInvokeType = node.staticInvokeType;
     if (staticInvokeType is InterfaceType) {
       MethodElement methodElement = staticInvokeType.lookUpMethod(
@@ -300,7 +311,7 @@
   @override
   Object visitSimpleIdentifier(SimpleIdentifier node) {
     _checkForDeprecatedMemberUseAtIdentifier(node);
-    _checkForInvalidProtectedPropertyAccess(node);
+    _checkForInvalidProtectedMemberAccess(node);
     return super.visitSimpleIdentifier(node);
   }
 
@@ -677,62 +688,83 @@
     return false;
   }
 
-  /**
-   * Produces a hint if the given invocation is of a protected method outside
-   * a subclass instance method.
-   */
-  void _checkForInvalidProtectedMethodCalls(MethodInvocation node) {
-    Element element = node.methodName.bestElement;
-    if (element == null || !element.isProtected) {
+  void _checkForInvalidFactory(MethodDeclaration decl) {
+    // Check declaration.
+    // Note that null return types are expected to be flagged by other analyses.
+    DartType returnType = decl.returnType?.type;
+    if (returnType is VoidType) {
+      _errorReporter.reportErrorForNode(HintCode.INVALID_FACTORY_METHOD_DECL,
+          decl.name, [decl.name.toString()]);
       return;
     }
 
-    ClassElement definingClass = element.enclosingElement;
+    // Check implementation.
 
-    MethodDeclaration decl =
-        node.getAncestor((AstNode node) => node is MethodDeclaration);
-    if (decl == null) {
-      _errorReporter.reportErrorForNode(
-          HintCode.INVALID_USE_OF_PROTECTED_MEMBER,
-          node,
-          [node.methodName.toString(), definingClass.name]);
+    FunctionBody body = decl.body;
+    if (body is EmptyFunctionBody) {
+      // Abstract methods are OK.
       return;
     }
 
-    ClassElement invokingClass = decl.element?.enclosingElement;
-    if (invokingClass != null) {
-      if (!_hasSuperClassOrMixin(invokingClass, definingClass.type)) {
-        _errorReporter.reportErrorForNode(
-            HintCode.INVALID_USE_OF_PROTECTED_MEMBER,
-            node,
-            [node.methodName.toString(), definingClass.name]);
+    // `new Foo()` or `null`.
+    bool factoryExpression(Expression expression) =>
+        expression is InstanceCreationExpression || expression is NullLiteral;
+
+    if (body is ExpressionFunctionBody && factoryExpression(body.expression)) {
+      return;
+    } else if (body is BlockFunctionBody) {
+      NodeList<Statement> statements = body.block.statements;
+      if (statements.isNotEmpty) {
+        Statement last = statements.last;
+        if (last is ReturnStatement && factoryExpression(last.expression)) {
+          return;
+        }
       }
     }
+
+    _errorReporter.reportErrorForNode(HintCode.INVALID_FACTORY_METHOD_IMPL,
+        decl.name, [decl.name.toString()]);
   }
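The `_checkForInvalidFactory` check above backs the `@factory` annotation from `package:meta`. An illustrative sketch of the three hints it can produce; the class and member names are hypothetical, and a dependency on `package:meta` is assumed:

    import 'package:meta/meta.dart';

    class Widget {}

    class WidgetSource {
      final Widget _cached = new Widget();

      @factory
      Widget create() => new Widget(); // OK: returns a newly created instance

      @factory
      Widget reuse() => _cached; // HINT: INVALID_FACTORY_METHOD_IMPL

      @factory
      void log() {} // HINT: INVALID_FACTORY_METHOD_DECL (void return type)
    }

    @factory // HINT: INVALID_FACTORY_ANNOTATION (not on a method declaration)
    class NotAWidgetFactory {}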
 
   /**
-   * Produces a hint if the given identifier is a protected field or getter
-   * accessed outside a subclass.
+   * Produces a hint if the given identifier is a protected field, getter,
+   * setter, or method (referenced as a closure or invoked) accessed outside
+   * a subclass.
    */
-  void _checkForInvalidProtectedPropertyAccess(SimpleIdentifier identifier) {
+  void _checkForInvalidProtectedMemberAccess(SimpleIdentifier identifier) {
     if (identifier.inDeclarationContext()) {
       return;
     }
+
+    bool isProtected(Element element) {
+      if (element is PropertyAccessorElement &&
+          element.enclosingElement is ClassElement &&
+          (element.isProtected || element.variable.isProtected)) {
+        return true;
+      }
+      if (element is MethodElement &&
+          element.enclosingElement is ClassElement &&
+          element.isProtected) {
+        return true;
+      }
+      return false;
+    }
+
+    bool inCommentReference(SimpleIdentifier identifier) =>
+        identifier.getAncestor((AstNode node) => node is CommentReference) !=
+        null;
+
+    bool inCurrentLibrary(Element element) =>
+        element.library == _currentLibrary;
+
     Element element = identifier.bestElement;
-    if (element is PropertyAccessorElement &&
-        element.enclosingElement is ClassElement &&
-        (element.isProtected || element.variable.isProtected)) {
+    if (isProtected(element) &&
+        !inCurrentLibrary(element) &&
+        !inCommentReference(identifier)) {
       ClassElement definingClass = element.enclosingElement;
       ClassDeclaration accessingClass =
           identifier.getAncestor((AstNode node) => node is ClassDeclaration);
-
-      if (accessingClass == null) {
-        _errorReporter.reportErrorForNode(
-            HintCode.INVALID_USE_OF_PROTECTED_MEMBER,
-            identifier,
-            [identifier.name.toString(), definingClass.name]);
-      } else if (!_hasSuperClassOrMixin(
-          accessingClass.element, definingClass.type)) {
+      if (accessingClass == null ||
+          !_hasTypeOrSuperType(accessingClass.element, definingClass.type)) {
         _errorReporter.reportErrorForNode(
             HintCode.INVALID_USE_OF_PROTECTED_MEMBER,
             identifier,
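The rewritten protected-member check above also requires the access to come from outside the defining library (`inCurrentLibrary`). A small two-library illustration (file names and class names are hypothetical):

    // lib_a.dart (defines the protected member)
    import 'package:meta/meta.dart';

    class Base {
      @protected
      void update() {}
    }

    // lib_b.dart (a different library)
    import 'lib_a.dart';

    class Child extends Base {
      void refresh() => update(); // OK: accessed from a subclass
    }

    void main() {
      new Base().update(); // HINT: INVALID_USE_OF_PROTECTED_MEMBER
    }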
@@ -962,6 +994,25 @@
   }
 
   /**
+   * Check for situations where the result of a method or function is used, when
+   * it returns 'void'.
+   *
+   * See [HintCode.USE_OF_VOID_RESULT].
+   */
+  void _checkForUseOfVoidResult(Expression expression) {
+    // TODO(jwren) Many other situations of use could be covered. We currently
+    // cover the cases var x = m() and x = m(), but we could also cover cases
+    // such as m().x, m()[k], a + m(), f(m()), return m().
+    if (expression is MethodInvocation) {
+      if (identical(expression.staticType, VoidTypeImpl.instance)) {
+        SimpleIdentifier methodName = expression.methodName;
+        _errorReporter.reportErrorForNode(
+            HintCode.USE_OF_VOID_RESULT, methodName, [methodName.name]);
+      }
+    }
+  }
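As the TODO above notes, the check currently covers `var x = m()` and `x = m()`. A minimal program that triggers it (function names are illustrative):

    void log(String message) {
      print(message);
    }

    void main() {
      var result = log('saved'); // HINT: USE_OF_VOID_RESULT ('log' returns void)
      print(result);
    }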
+
+  /**
    * Check for the passed class declaration for the
    * [HintCode.OVERRIDE_EQUALS_BUT_NOT_HASH_CODE] hint code.
    *
@@ -990,41 +1041,14 @@
 //    return false;
 //  }
 
-  /**
-   * Check for situations where the result of a method or function is used, when
-   * it returns 'void'.
-   *
-   * See [HintCode.USE_OF_VOID_RESULT].
-   */
-  void _checkForUseOfVoidResult(Expression expression) {
-    // TODO(jwren) Many other situations of use could be covered. We currently
-    // cover the cases var x = m() and x = m(), but we could also cover cases
-    // such as m().x, m()[k], a + m(), f(m()), return m().
-    if (expression is MethodInvocation) {
-      if (identical(expression.staticType, VoidTypeImpl.instance)) {
-        SimpleIdentifier methodName = expression.methodName;
-        _errorReporter.reportErrorForNode(
-            HintCode.USE_OF_VOID_RESULT, methodName, [methodName.name]);
-      }
+  bool _hasTypeOrSuperType(ClassElement element, InterfaceType type) {
+    if (element == null) {
+      return false;
     }
-  }
-
-  bool _hasSuperClassOrMixin(ClassElement element, InterfaceType type) {
-    List<ClassElement> seenClasses = <ClassElement>[];
-    while (element != null && !seenClasses.contains(element)) {
-      if (element.type == type) {
-        return true;
-      }
-
-      if (element.mixins.any((InterfaceType t) => t == type)) {
-        return true;
-      }
-
-      seenClasses.add(element);
-      element = element.supertype?.element;
-    }
-
-    return false;
+    ClassElement typeElement = type.element;
+    return element == typeElement ||
+        element.allSupertypes
+            .any((InterfaceType t) => t.element == typeElement);
   }
 
   /**
@@ -1950,13 +1974,13 @@
 
   @override
   Object visitSwitchCase(SwitchCase node) {
-    _checkForDeadStatementsInNodeList(node.statements);
+    _checkForDeadStatementsInNodeList(node.statements, allowMandated: true);
     return super.visitSwitchCase(node);
   }
 
   @override
   Object visitSwitchDefault(SwitchDefault node) {
-    _checkForDeadStatementsInNodeList(node.statements);
+    _checkForDeadStatementsInNodeList(node.statements, allowMandated: true);
     return super.visitSwitchDefault(node);
   }
 
@@ -2079,24 +2103,38 @@
 
   /**
    * Given some [NodeList] of [Statement]s, from either a [Block] or
-   * [SwitchMember], this loops through the list in reverse order searching for statements
-   * after a return, unlabeled break or unlabeled continue statement to mark them as dead code.
+   * [SwitchMember], this loops through the list searching for dead statements.
    *
    * @param statements some ordered list of statements in a [Block] or [SwitchMember]
+   * @param allowMandated allow dead statements that are mandated by the
+   *            language spec, such as the final break, continue, return, or
+   *            throw statement required at the end of a switch case.
    */
-  void _checkForDeadStatementsInNodeList(NodeList<Statement> statements) {
+  void _checkForDeadStatementsInNodeList(NodeList<Statement> statements,
+      {bool allowMandated: false}) {
+    bool statementExits(Statement statement) {
+      if (statement is BreakStatement) {
+        return statement.label == null;
+      } else if (statement is ContinueStatement) {
+        return statement.label == null;
+      }
+      return ExitDetector.exits(statement);
+    }
+
     int size = statements.length;
     for (int i = 0; i < size; i++) {
       Statement currentStatement = statements[i];
       currentStatement?.accept(this);
-      bool returnOrBreakingStatement = currentStatement is ReturnStatement ||
-          (currentStatement is BreakStatement &&
-              currentStatement.label == null) ||
-          (currentStatement is ContinueStatement &&
-              currentStatement.label == null);
-      if (returnOrBreakingStatement && i != size - 1) {
+      if (statementExits(currentStatement) && i != size - 1) {
         Statement nextStatement = statements[i + 1];
         Statement lastStatement = statements[size - 1];
+        // If mandated statements are allowed, and only the last statement is
+        // dead, and it's a BreakStatement, then assume it is a statement
+        // mandated by the language spec, there to avoid a
+        // CASE_BLOCK_NOT_TERMINATED error.
+        if (allowMandated && i == size - 2 && nextStatement is BreakStatement) {
+          return;
+        }
         int offset = nextStatement.offset;
         int length = lastStatement.end - offset;
         _errorReporter.reportErrorForOffset(HintCode.DEAD_CODE, offset, length);
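The `allowMandated` escape hatch matters when a case's last real statement already exits but the case still needs a trailing `break` to avoid CASE_BLOCK_NOT_TERMINATED; that mandated `break` is no longer reported as dead code. An illustrative example:

    String describe(int value) {
      switch (value) {
        case 0:
          if (value.isEven) {
            return 'zero';
          } else {
            return 'unreachable';
          }
          break; // dead, but mandated to terminate the case: not reported
        default:
          return 'non-zero';
      }
    }

    void main() {
      print(describe(0));
    }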
@@ -3346,9 +3384,8 @@
     //
     // Finish building the enum.
     //
-    ClassElementImpl enumElement = node.name.staticElement as ClassElementImpl;
+    EnumElementImpl enumElement = node.name.staticElement as EnumElementImpl;
     InterfaceType enumType = enumElement.type;
-    enumElement.supertype = _typeProvider.objectType;
     //
     // Populate the fields.
     //
@@ -3407,19 +3444,10 @@
   }
 
   /**
-   * Create a getter that corresponds to the given field.
-   *
-   * @param field the field for which a getter is to be created
-   * @return the getter that was created
+   * Create a getter that corresponds to the given [field].
    */
   PropertyAccessorElement _createGetter(FieldElementImpl field) {
-    PropertyAccessorElementImpl getter =
-        new PropertyAccessorElementImpl.forVariable(field);
-    getter.getter = true;
-    getter.returnType = field.type;
-    getter.type = new FunctionTypeImpl(getter);
-    field.getter = getter;
-    return getter;
+    return new PropertyAccessorElementImpl_ImplicitGetter(field);
   }
 }
 
@@ -3471,6 +3499,12 @@
   bool _enclosingBlockContainsBreak = false;
 
   /**
+   * Set to `true` when a `continue` is encountered, and reset to `false` when a
+   * `do`, `while`, `for` or `switch` block is entered.
+   */
+  bool _enclosingBlockContainsContinue = false;
+
+  /**
    * Add node when a labelled `break` is encountered.
    */
   Set<AstNode> _enclosingBlockBreaksLabel = new Set<AstNode>();
@@ -3578,14 +3612,24 @@
   }
 
   @override
-  bool visitContinueStatement(ContinueStatement node) => false;
+  bool visitContinueStatement(ContinueStatement node) {
+    _enclosingBlockContainsContinue = true;
+    return false;
+  }
 
   @override
   bool visitDoStatement(DoStatement node) {
     bool outerBreakValue = _enclosingBlockContainsBreak;
+    bool outerContinueValue = _enclosingBlockContainsContinue;
     _enclosingBlockContainsBreak = false;
+    _enclosingBlockContainsContinue = false;
     try {
-      if (_nodeExits(node.body)) {
+      bool bodyExits = _nodeExits(node.body);
+      bool containsBreakOrContinue =
+          _enclosingBlockContainsBreak || _enclosingBlockContainsContinue;
+      // Even if we determine that the body "exits", there might be break or
+      // continue statements that actually mean it _doesn't_ always exit.
+      if (bodyExits && !containsBreakOrContinue) {
         return true;
       }
       Expression conditionExpression = node.condition;
@@ -3602,6 +3646,7 @@
       return false;
     } finally {
       _enclosingBlockContainsBreak = outerBreakValue;
+      _enclosingBlockContainsContinue = outerContinueValue;
     }
   }
 
@@ -3799,36 +3844,30 @@
     _enclosingBlockContainsBreak = false;
     try {
       bool hasDefault = false;
+      bool hasNonExitingCase = false;
       List<SwitchMember> members = node.members;
       for (int i = 0; i < members.length; i++) {
         SwitchMember switchMember = members[i];
         if (switchMember is SwitchDefault) {
           hasDefault = true;
-          // If this is the last member and there are no statements, return
-          // false
+          // If this is the last member and there are no statements, then it
+          // does not exit.
           if (switchMember.statements.isEmpty && i + 1 == members.length) {
-            return false;
+            hasNonExitingCase = true;
+            continue;
           }
         }
-        // For switch members with no statements, don't visit the children,
-        // otherwise, return false if no return is found in the children
-        // statements.
+        // For switch members with no statements, don't visit the children.
+        // Otherwise, if the member's statements don't exit, mark this as a
+        // non-exiting case.
         if (!switchMember.statements.isEmpty && !switchMember.accept(this)) {
-          return false;
+          hasNonExitingCase = true;
         }
       }
-      // All of the members exit, determine whether there are possible cases
-      // that are not caught by the members.
-      DartType type = node.expression?.bestType;
-      if (type is InterfaceType) {
-        ClassElement element = type.element;
-        if (element != null && element.isEnum) {
-          // If some of the enum values are not covered, then a warning will
-          // have already been generated, so there's no point in generating a
-          // hint.
-          return true;
-        }
+      if (hasNonExitingCase) {
+        return false;
       }
+      // All cases exit, so the switch exits only if it also has a `default`.
       return hasDefault;
     } finally {
       _enclosingBlockContainsBreak = outerBreakValue;
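With the `hasNonExitingCase` bookkeeping above, a switch statement is treated as exiting only when every member exits and a `default` clause is present. An illustrative example:

    String classify(int x) {
      switch (x) {
        case 0:
          return 'zero'; // exits
        default:
          throw new ArgumentError('unhandled: $x'); // exits
      }
      // Every member exits and there is a default clause, so the switch itself
      // exits; any statement placed after it would be reported as dead code.
    }

    void main() {
      print(classify(0));
    }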
@@ -3843,14 +3882,18 @@
 
   @override
   bool visitTryStatement(TryStatement node) {
-    if (_nodeExits(node.body)) {
+    if (_nodeExits(node.finallyBlock)) {
       return true;
     }
-    Block finallyBlock = node.finallyBlock;
-    if (_nodeExits(finallyBlock)) {
-      return true;
+    if (!_nodeExits(node.body)) {
+      return false;
     }
-    return false;
+    for (CatchClause c in node.catchClauses) {
+      if (!_nodeExits(c.body)) {
+        return false;
+      }
+    }
+    return true;
   }
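Under the revised `visitTryStatement`, a try statement exits when its finally block exits, or when the body and every catch clause exit. A small illustration:

    int neverFallsThrough() {
      try {
        throw new StateError('the body exits');
      } on StateError {
        return -1; // every catch clause exits as well
      }
      // The body and all catch clauses exit, so the try statement itself
      // exits; any statement placed after it would be dead code.
    }

    void main() {
      print(neverFallsThrough());
    }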
 
   @override
@@ -3889,13 +3932,21 @@
       if (conditionExpression.accept(this)) {
         return true;
       }
-      bool blockReturns = node.body.accept(this);
+      node.body.accept(this);
       // TODO(jwren) Do we want to take all constant expressions into account?
       if (conditionExpression is BooleanLiteral) {
-        // If while(true), and the body doesn't return or the body doesn't have
-        // a break, then return true.
-        if (conditionExpression.value &&
-            (blockReturns || !_enclosingBlockContainsBreak)) {
+        // If while(true), and the body doesn't have a break, then return true.
+        // The body might be found to exit, but any break statement makes that
+        // finding unreliable. In other words:
+        //
+        // * If the body exits, and does not contain a break statement, then
+        //   it exits.
+        // * If the body does not exit, and does not contain a break statement,
+        //   then it loops infinitely (also an exit).
+        //
+        // Because both cases require that no break statement is present, the
+        // logic boils down to checking [_enclosingBlockContainsBreak].
+        if (conditionExpression.value && !_enclosingBlockContainsBreak) {
           return true;
         }
       }
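The simplification above follows the comment's reasoning: a `while (true)` loop counts as an exit exactly when its body contains no `break`. For example:

    void spinForever() {
      while (true) {} // no break: the loop never completes, so it "exits"
    }

    int waitForItem(List<int> queue) {
      while (true) {
        if (queue.isNotEmpty) {
          break; // a break means the loop can complete normally
        }
      }
      return queue.first; // reachable, so it is not dead code
    }

    void main() {
      print(waitForItem(<int>[42]));
    }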
@@ -3905,6 +3956,9 @@
     }
   }
 
+  @override
+  bool visitYieldStatement(YieldStatement node) => _nodeExits(node.expression);
+
   /**
    * Return `true` if the given node exits.
    *
@@ -4692,7 +4746,7 @@
   /**
    * The error listener on which to record inference information.
    */
-  final AnalysisErrorListener _errorListener;
+  final ErrorReporter _errorReporter;
 
   /**
    * If true, emit hints when types are inferred
@@ -4725,7 +4779,7 @@
   // https://github.com/dart-lang/sdk/issues/25322
   final List<DartType> _returnStack = <DartType>[];
 
-  InferenceContext._(this._errorListener, TypeProvider typeProvider,
+  InferenceContext._(this._errorReporter, TypeProvider typeProvider,
       this._typeSystem, this._inferenceHints)
       : _typeProvider = typeProvider;
 
@@ -4804,12 +4858,22 @@
    * [type] has been inferred as the type of [node].
    */
   void recordInference(Expression node, DartType type) {
-    StaticInfo info = InferredType.create(_typeSystem, node, type);
-    if (!_inferenceHints || info == null) {
+    if (!_inferenceHints) {
       return;
     }
-    AnalysisError error = info.toAnalysisError();
-    _errorListener.onError(error);
+
+    ErrorCode error;
+    if (node is Literal) {
+      error = StrongModeCode.INFERRED_TYPE_LITERAL;
+    } else if (node is InstanceCreationExpression) {
+      error = StrongModeCode.INFERRED_TYPE_ALLOCATION;
+    } else if (node is FunctionExpression) {
+      error = StrongModeCode.INFERRED_TYPE_CLOSURE;
+    } else {
+      error = StrongModeCode.INFERRED_TYPE;
+    }
+
+    _errorReporter.reportErrorForNode(error, node, [node, type]);
   }
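A hedged illustration of code that could surface the new `StrongModeCode` hints when `strongModeHints` is enabled; the exact trigger sites depend on where `recordInference` is invoked:

    void main() {
      List<int> numbers = []; // list literal: <int> inferred (INFERRED_TYPE_LITERAL)
      List<int> cells = new List(); // allocation: <int> inferred (INFERRED_TYPE_ALLOCATION)
      var twice = (int x) => x * 2; // closure: return type inferred (INFERRED_TYPE_CLOSURE)
      print('$numbers $cells ${twice(21)}');
    }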
 
   List<DartType> _matchTypes(InterfaceType t1, InterfaceType t2) {
@@ -5623,7 +5687,7 @@
       strongModeHints = options.strongModeHints;
     }
     this.inferenceContext = new InferenceContext._(
-        errorListener, typeProvider, typeSystem, strongModeHints);
+        errorReporter, typeProvider, typeSystem, strongModeHints);
     this.typeAnalyzer = new StaticTypeAnalyzer(this);
   }
 
@@ -5976,7 +6040,6 @@
 
   @override
   Object visitAsExpression(AsExpression node) {
-    InferenceContext.setType(node.expression, node.type.type);
     super.visitAsExpression(node);
     // Since an as-statement doesn't actually change the type, we don't
     // let it affect the propagated type when it would result in a loss
@@ -9995,8 +10058,7 @@
   @override
   Object visitConstructorDeclaration(ConstructorDeclaration node) {
     super.visitConstructorDeclaration(node);
-    ExecutableElementImpl element = node.element as ExecutableElementImpl;
-    if (element == null) {
+    if (node.element == null) {
       ClassDeclaration classNode =
           node.getAncestor((node) => node is ClassDeclaration);
       StringBuffer buffer = new StringBuffer();
@@ -10013,10 +10075,6 @@
       buffer.write(" was not set while trying to resolve types.");
       AnalysisEngine.instance.logger.logError(buffer.toString(),
           new CaughtException(new AnalysisException(), null));
-    } else {
-      ClassElement definingClass = element.enclosingElement as ClassElement;
-      element.returnType = definingClass.type;
-      element.type = new FunctionTypeImpl(element);
     }
     return null;
   }
@@ -10047,18 +10105,15 @@
         TypeName typeName = node.type;
         if (typeName == null) {
           element.hasImplicitType = true;
-          type = _dynamicType;
           if (element is FieldFormalParameterElement) {
             FieldElement fieldElement =
                 (element as FieldFormalParameterElement).field;
-            if (fieldElement != null) {
-              type = fieldElement.type;
-            }
+            type = fieldElement?.type;
           }
         } else {
           type = _typeNameResolver._getType(typeName);
         }
-        element.type = type;
+        element.type = type ?? _dynamicType;
       } else {
         _setFunctionTypedParameterType(element, node.type, node.parameters);
       }
@@ -10216,26 +10271,8 @@
       declaredType = _typeNameResolver._getType(typeName);
     }
     Element element = node.name.staticElement;
-    if (element is VariableElement) {
-      (element as VariableElementImpl).type = declaredType;
-      if (element is PropertyInducingElement) {
-        PropertyAccessorElementImpl getter =
-            element.getter as PropertyAccessorElementImpl;
-        getter.returnType = declaredType;
-        getter.type = new FunctionTypeImpl(getter);
-        PropertyAccessorElementImpl setter =
-            element.setter as PropertyAccessorElementImpl;
-        if (setter != null) {
-          List<ParameterElement> parameters = setter.parameters;
-          if (parameters.length > 0) {
-            (parameters[0] as ParameterElementImpl).type = declaredType;
-          }
-          setter.returnType = VoidTypeImpl.instance;
-          setter.type = new FunctionTypeImpl(setter);
-        }
-      }
-    } else {
-      // TODO(brianwilkerson) Report the internal error.
+    if (element is VariableElementImpl) {
+      element.type = declaredType;
     }
     return null;
   }
diff --git a/pkg/analyzer/lib/src/generated/sdk.dart b/pkg/analyzer/lib/src/generated/sdk.dart
index 6f50b44..e46b644 100644
--- a/pkg/analyzer/lib/src/generated/sdk.dart
+++ b/pkg/analyzer/lib/src/generated/sdk.dart
@@ -9,8 +9,9 @@
 import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/ast/visitor.dart';
 import 'package:analyzer/src/generated/engine.dart'
-    show AnalysisContext, AnalysisOptions;
+    show AnalysisContext, AnalysisOptions, AnalysisOptionsImpl;
 import 'package:analyzer/src/generated/source.dart' show Source;
+import 'package:analyzer/src/generated/utilities_general.dart';
 
 /**
  * A function used to create a new DartSdk with the given [options]. If the
@@ -38,6 +39,11 @@
   static const String DART_HTML = "dart:html";
 
   /**
+   * The prefix shared by all dart library URIs.
+   */
+  static const String DART_LIBRARY_PREFIX = "dart:";
+
+  /**
    * The version number that is returned when the real version number could not
    * be determined.
    */
@@ -91,20 +97,32 @@
  */
 class DartSdkManager {
   /**
+   * The absolute path to the directory containing the default SDK.
+   */
+  final String defaultSdkDirectory;
+
+  /**
+   * A flag indicating whether it is acceptable to use summaries when they are
+   * available.
+   */
+  final bool canUseSummaries;
+
+  /**
    * The function used to create new SDK's.
    */
   final SdkCreator sdkCreator;
 
   /**
-   * A table mapping (an encoding of) analysis options to the SDK that has been
-   * configured with those options.
+   * A table mapping (an encoding of) analysis options and SDK locations to the
+   * DartSdk from that location that has been configured with those options.
    */
-  Map<int, DartSdk> sdkMap = new HashMap<int, DartSdk>();
+  Map<SdkDescription, DartSdk> sdkMap = new HashMap<SdkDescription, DartSdk>();
 
   /**
    * Initialize a newly created manager.
    */
-  DartSdkManager(this.sdkCreator);
+  DartSdkManager(
+      this.defaultSdkDirectory, this.canUseSummaries, this.sdkCreator);
 
   /**
    * Return any SDK that has been created, or `null` if no SDKs have been
@@ -118,18 +136,30 @@
   }
 
   /**
+   * Return a list of the descriptors of the SDKs that are currently being
+   * managed.
+   */
+  List<SdkDescription> get sdkDescriptors => sdkMap.keys.toList();
+
+  /**
+   * Return the Dart SDK that matches the given [description]. If such an SDK
+   * has not yet been created, then [ifAbsent] will be invoked to create it.
+   */
+  DartSdk getSdk(SdkDescription description, DartSdk ifAbsent()) {
+    return sdkMap.putIfAbsent(description, ifAbsent);
+  }
+
+  /**
    * Return the Dart SDK that is appropriate for the given analysis [options].
    * If such an SDK has not yet been created, then the [sdkCreator] will be
    * invoked to create it.
    */
   DartSdk getSdkForOptions(AnalysisOptions options) {
-    int encoding = options.encodeCrossContextOptions();
-    DartSdk sdk = sdkMap[encoding];
-    if (sdk == null) {
-      sdk = sdkCreator(options);
-      sdkMap[encoding] = sdk;
-    }
-    return sdk;
+    // TODO(brianwilkerson) Remove this method and the field sdkCreator.
+    SdkDescription description =
+        new SdkDescription(<String>[defaultSdkDirectory], options);
+    return getSdk(description, () => sdkCreator(options));
   }
 }
 
@@ -173,6 +203,82 @@
   int size() => _libraryMap.length;
 }
 
+/**
+ * A description of a [DartSdk].
+ */
+class SdkDescription {
+  /**
+   * The paths to the files or directories that define the SDK.
+   */
+  final List<String> paths;
+
+  /**
+   * The analysis options that will be used by the SDK's context.
+   */
+  final AnalysisOptions options;
+
+  /**
+   * Initialize a newly created SDK description to describe an SDK based on the
+   * files or directories at the given [paths] that is analyzed using the given
+   * [options].
+   */
+  SdkDescription(this.paths, this.options);
+
+  @override
+  int get hashCode {
+    int hashCode = options.encodeCrossContextOptions();
+    for (String path in paths) {
+      hashCode = JenkinsSmiHash.combine(hashCode, path.hashCode);
+    }
+    return JenkinsSmiHash.finish(hashCode);
+  }
+
+  @override
+  bool operator ==(Object other) {
+    if (other is SdkDescription) {
+      if (options.encodeCrossContextOptions() !=
+          other.options.encodeCrossContextOptions()) {
+        return false;
+      }
+      int length = paths.length;
+      if (other.paths.length != length) {
+        return false;
+      }
+      for (int i = 0; i < length; i++) {
+        if (other.paths[i] != paths[i]) {
+          return false;
+        }
+      }
+      return true;
+    }
+    return false;
+  }
+
+  @override
+  String toString() {
+    StringBuffer buffer = new StringBuffer();
+    bool needsSeparator = false;
+    void add(String optionName) {
+      if (needsSeparator) {
+        buffer.write(', ');
+      }
+      buffer.write(optionName);
+      needsSeparator = true;
+    }
+    for (String path in paths) {
+      add(path);
+    }
+    if (needsSeparator) {
+      buffer.write(' ');
+    }
+    buffer.write('(');
+    buffer.write(AnalysisOptionsImpl
+        .decodeCrossContextOptions(options.encodeCrossContextOptions()));
+    buffer.write(')');
+    return buffer.toString();
+  }
+}
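A hedged usage sketch of the new description-keyed SDK cache, built only from constructors and members visible in this diff; the SDK path `/opt/dart-sdk` and the `createSdk` helper are assumptions for illustration:

    import 'package:analyzer/src/generated/engine.dart';
    import 'package:analyzer/src/generated/java_io.dart';
    import 'package:analyzer/src/generated/sdk.dart';
    import 'package:analyzer/src/generated/sdk_io.dart';

    DartSdk createSdk(AnalysisOptions options) {
      DirectoryBasedDartSdk sdk =
          new DirectoryBasedDartSdk(new JavaFile('/opt/dart-sdk'));
      sdk.analysisOptions = options;
      return sdk;
    }

    void main() {
      // The manager now records the default SDK directory and whether summaries
      // may be used, in addition to the creator callback.
      DartSdkManager manager =
          new DartSdkManager('/opt/dart-sdk', true, createSdk);

      AnalysisOptions options = new AnalysisOptionsImpl()..strongMode = true;
      SdkDescription description =
          new SdkDescription(<String>['/opt/dart-sdk'], options);

      // Descriptions that compare equal (same paths, same encoded options)
      // share a single cached DartSdk instance.
      DartSdk sdk = manager.getSdk(description, () => createSdk(options));
      print(identical(sdk, manager.getSdk(description, () => createSdk(options))));
    }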
+
 class SdkLibrariesReader_LibraryBuilder extends RecursiveAstVisitor<Object> {
   /**
    * The prefix added to the name of a library to form the URI used in code to
diff --git a/pkg/analyzer/lib/src/generated/sdk_io.dart b/pkg/analyzer/lib/src/generated/sdk_io.dart
index 0d053f6b..be7c27a 100644
--- a/pkg/analyzer/lib/src/generated/sdk_io.dart
+++ b/pkg/analyzer/lib/src/generated/sdk_io.dart
@@ -25,6 +25,205 @@
 import 'package:path/path.dart' as pathos;
 
 /**
+ * An abstract implementation of a Dart SDK in which the available libraries are
+ * stored in a library map. Subclasses are responsible for populating the
+ * library map.
+ */
+abstract class AbstractDartSdk implements DartSdk {
+  /**
+   * A mapping from Dart library URI's to the library represented by that URI.
+   */
+  LibraryMap libraryMap = new LibraryMap();
+
+  /**
+   * The [AnalysisOptions] to use to create the [context].
+   */
+  AnalysisOptions _analysisOptions;
+
+  /**
+   * The flag that specifies whether an SDK summary should be used. This is a
+   * temporary flag until summaries are enabled by default.
+   */
+  bool _useSummary = false;
+
+  /**
+   * The [AnalysisContext] which is used for all of the sources in this SDK.
+   */
+  InternalAnalysisContext _analysisContext;
+
+  /**
+   * The mapping from Dart URI's to the corresponding sources.
+   */
+  Map<String, Source> _uriToSourceMap = new HashMap<String, Source>();
+
+  /**
+   * Set the [options] for this SDK analysis context.  Throw [StateError] if the
+   * context has been already created.
+   */
+  void set analysisOptions(AnalysisOptions options) {
+    if (_analysisContext != null) {
+      throw new StateError(
+          'Analysis options cannot be changed after context creation.');
+    }
+    _analysisOptions = options;
+  }
+
+  @override
+  AnalysisContext get context {
+    if (_analysisContext == null) {
+      _analysisContext = new SdkAnalysisContext(_analysisOptions);
+      SourceFactory factory = new SourceFactory([new DartUriResolver(this)]);
+      _analysisContext.sourceFactory = factory;
+      if (_useSummary) {
+        bool strongMode = _analysisOptions?.strongMode ?? false;
+        PackageBundle sdkBundle = getSummarySdkBundle(strongMode);
+        if (sdkBundle != null) {
+          _analysisContext.resultProvider = new SdkSummaryResultProvider(
+              _analysisContext, sdkBundle, strongMode);
+        }
+      }
+    }
+    return _analysisContext;
+  }
+
+  @override
+  List<SdkLibrary> get sdkLibraries => libraryMap.sdkLibraries;
+
+  @override
+  List<String> get uris => libraryMap.uris;
+
+  /**
+   * Return `true` if the SDK summary will be used when available.
+   */
+  bool get useSummary => _useSummary;
+
+  /**
+   * Specify whether SDK summary should be used.
+   */
+  void set useSummary(bool use) {
+    if (_analysisContext != null) {
+      throw new StateError(
+          'The "useSummary" flag cannot be changed after context creation.');
+    }
+    _useSummary = use;
+  }
+
+  @override
+  Source fromFileUri(Uri uri) {
+    JavaFile file = new JavaFile.fromUri(uri);
+
+    String path = _getPath(file);
+    if (path == null) {
+      return null;
+    }
+    try {
+      return new FileBasedSource(file, parseUriWithException(path));
+    } on URISyntaxException catch (exception, stackTrace) {
+      AnalysisEngine.instance.logger.logInformation(
+          "Failed to create URI: $path",
+          new CaughtException(exception, stackTrace));
+    }
+    return null;
+  }
+
+  String getRelativePathFromFile(JavaFile file);
+
+  @override
+  SdkLibrary getSdkLibrary(String dartUri) => libraryMap.getLibrary(dartUri);
+
+  /**
+   * Return the [PackageBundle] for this SDK, if it exists, or `null` otherwise.
+   * This method should not be used outside of `analyzer` and `analyzer_cli`
+   * packages.
+   */
+  PackageBundle getSummarySdkBundle(bool strongMode);
+
+  FileBasedSource internalMapDartUri(String dartUri) {
+    // TODO(brianwilkerson) Figure out how to unify the implementations in the
+    // two subclasses.
+    String libraryName;
+    String relativePath;
+    int index = dartUri.indexOf('/');
+    if (index >= 0) {
+      libraryName = dartUri.substring(0, index);
+      relativePath = dartUri.substring(index + 1);
+    } else {
+      libraryName = dartUri;
+      relativePath = "";
+    }
+    SdkLibrary library = getSdkLibrary(libraryName);
+    if (library == null) {
+      return null;
+    }
+    String srcPath;
+    if (relativePath.isEmpty) {
+      srcPath = library.path;
+    } else {
+      String libraryPath = library.path;
+      int index = libraryPath.lastIndexOf(JavaFile.separator);
+      if (index == -1) {
+        index = libraryPath.lastIndexOf('/');
+        if (index == -1) {
+          return null;
+        }
+      }
+      String prefix = libraryPath.substring(0, index + 1);
+      srcPath = '$prefix$relativePath';
+    }
+    String filePath = srcPath.replaceAll('/', JavaFile.separator);
+    try {
+      JavaFile file = new JavaFile(filePath);
+      return new FileBasedSource(file, parseUriWithException(dartUri));
+    } on URISyntaxException {
+      return null;
+    }
+  }
+
+  @override
+  Source mapDartUri(String dartUri) {
+    Source source = _uriToSourceMap[dartUri];
+    if (source == null) {
+      source = internalMapDartUri(dartUri);
+      _uriToSourceMap[dartUri] = source;
+    }
+    return source;
+  }
+
+  String _getPath(JavaFile file) {
+    List<SdkLibrary> libraries = libraryMap.sdkLibraries;
+    int length = libraries.length;
+    List<String> paths = new List(length);
+    String filePath = getRelativePathFromFile(file);
+    if (filePath == null) {
+      return null;
+    }
+    for (int i = 0; i < length; i++) {
+      SdkLibrary library = libraries[i];
+      String libraryPath = library.path.replaceAll('/', JavaFile.separator);
+      if (filePath == libraryPath) {
+        return library.shortName;
+      }
+      paths[i] = libraryPath;
+    }
+    for (int i = 0; i < length; i++) {
+      SdkLibrary library = libraries[i];
+      String libraryPath = paths[i];
+      int index = libraryPath.lastIndexOf(JavaFile.separator);
+      if (index >= 0) {
+        String prefix = libraryPath.substring(0, index + 1);
+        if (filePath.startsWith(prefix)) {
+          String relPath = filePath
+              .substring(prefix.length)
+              .replaceAll(JavaFile.separator, '/');
+          return '${library.shortName}/$relPath';
+        }
+      }
+    }
+    return null;
+  }
+}
+
+/**
  * A Dart SDK installed in a specified directory. Typical Dart SDK layout is
  * something like...
  *
@@ -40,7 +239,7 @@
  *           ... Dart utilities ...
  *     Chromium/   <-- Dartium typically exists in a sibling directory
  */
-class DirectoryBasedDartSdk implements DartSdk {
+class DirectoryBasedDartSdk extends AbstractDartSdk {
   /**
    * The default SDK, or `null` if the default SDK either has not yet been
    * created or cannot be created for some reason.
@@ -193,11 +392,6 @@
   }
 
   /**
-   * The [AnalysisContext] which is used for all of the sources in this sdk.
-   */
-  InternalAnalysisContext _analysisContext;
-
-  /**
    * The directory containing the SDK.
    */
   JavaFile _sdkDirectory;
@@ -208,11 +402,6 @@
   JavaFile _libraryDirectory;
 
   /**
-   * The flag that specifies whether SDK summary should be used.
-   */
-  bool _useSummary = false;
-
-  /**
    * The revision number of this SDK, or `"0"` if the revision number cannot be
    * discovered.
    */
@@ -239,58 +428,13 @@
   JavaFile _vmExecutable;
 
   /**
-   * A mapping from Dart library URI's to the library represented by that URI.
-   */
-  LibraryMap _libraryMap;
-
-  /**
-   * The mapping from Dart URI's to the corresponding sources.
-   */
-  Map<String, Source> _uriToSourceMap = new HashMap<String, Source>();
-
-  /**
-   * The [AnalysisOptions] to use to create the [context].
-   */
-  AnalysisOptions _analysisOptions;
-
-  /**
    * Initialize a newly created SDK to represent the Dart SDK installed in the
    * [sdkDirectory]. The flag [useDart2jsPaths] is `true` if the dart2js path
    * should be used when it is available
    */
   DirectoryBasedDartSdk(JavaFile sdkDirectory, [bool useDart2jsPaths = false]) {
     this._sdkDirectory = sdkDirectory.getAbsoluteFile();
-    _libraryMap = initialLibraryMap(useDart2jsPaths);
-  }
-
-  /**
-   * Set the [options] for this SDK analysis context.  Throw [StateError] if the
-   * context has been already created.
-   */
-  void set analysisOptions(AnalysisOptions options) {
-    if (_analysisContext != null) {
-      throw new StateError(
-          'Analysis options cannot be changed after context creation.');
-    }
-    _analysisOptions = options;
-  }
-
-  @override
-  AnalysisContext get context {
-    if (_analysisContext == null) {
-      _analysisContext = new SdkAnalysisContext(_analysisOptions);
-      SourceFactory factory = new SourceFactory([new DartUriResolver(this)]);
-      _analysisContext.sourceFactory = factory;
-      if (_useSummary) {
-        PackageBundle sdkBundle = getSummarySdkBundle();
-        if (sdkBundle != null) {
-          bool strongMode = _analysisOptions?.strongMode ?? false;
-          _analysisContext.resultProvider = new SdkSummaryResultProvider(
-              _analysisContext, sdkBundle, strongMode);
-        }
-      }
-    }
-    return _analysisContext;
+    libraryMap = initialLibraryMap(useDart2jsPaths);
   }
 
   /**
@@ -386,9 +530,6 @@
     return _pubExecutable;
   }
 
-  @override
-  List<SdkLibrary> get sdkLibraries => _libraryMap.sdkLibraries;
-
   /**
    * Return the revision number of this SDK, or `"0"` if the revision number
    * cannot be discovered.
@@ -411,25 +552,6 @@
     return _sdkVersion;
   }
 
-  @override
-  List<String> get uris => _libraryMap.uris;
-
-  /**
-   * Whether an SDK summary should be used.
-   */
-  bool get useSummary => _useSummary;
-
-  /**
-   * Specify whether SDK summary should be used.
-   */
-  void set useSummary(bool use) {
-    if (_analysisContext != null) {
-      throw new StateError(
-          'The "useSummary" flag cannot be changed after context creation.');
-    }
-    _useSummary = use;
-  }
-
   /**
    * Return the name of the file containing the VM executable.
    */
@@ -470,45 +592,6 @@
         _LIBRARIES_FILE);
   }
 
-  @override
-  Source fromFileUri(Uri uri) {
-    JavaFile file = new JavaFile.fromUri(uri);
-    String filePath = file.getAbsolutePath();
-    String libPath = libraryDirectory.getAbsolutePath();
-    if (!filePath.startsWith("$libPath${JavaFile.separator}")) {
-      return null;
-    }
-    filePath = filePath.substring(libPath.length + 1);
-    for (SdkLibrary library in _libraryMap.sdkLibraries) {
-      String libraryPath = library.path;
-      if (filePath.replaceAll('\\', '/') == libraryPath) {
-        String path = library.shortName;
-        try {
-          return new FileBasedSource(file, parseUriWithException(path));
-        } on URISyntaxException catch (exception, stackTrace) {
-          AnalysisEngine.instance.logger.logInformation(
-              "Failed to create URI: $path",
-              new CaughtException(exception, stackTrace));
-          return null;
-        }
-      }
-      libraryPath = new JavaFile(library.path).getParent();
-      if (filePath.startsWith("$libraryPath${JavaFile.separator}")) {
-        String path =
-            "${library.shortName}/${filePath.substring(libraryPath.length + 1)}";
-        try {
-          return new FileBasedSource(file, parseUriWithException(path));
-        } on URISyntaxException catch (exception, stackTrace) {
-          AnalysisEngine.instance.logger.logInformation(
-              "Failed to create URI: $path",
-              new CaughtException(exception, stackTrace));
-          return null;
-        }
-      }
-    }
-    return null;
-  }
-
   /**
    * Return the directory where dartium can be found (the directory that will be
    * the working directory if Dartium is invoked without changing the default),
@@ -536,16 +619,22 @@
   }
 
   @override
-  SdkLibrary getSdkLibrary(String dartUri) => _libraryMap.getLibrary(dartUri);
+  String getRelativePathFromFile(JavaFile file) {
+    String filePath = file.getAbsolutePath();
+    String libPath = libraryDirectory.getAbsolutePath();
+    if (!filePath.startsWith("$libPath${JavaFile.separator}")) {
+      return null;
+    }
+    return filePath.substring(libPath.length + 1);
+  }
 
   /**
    * Return the [PackageBundle] for this SDK, if it exists, or `null` otherwise.
    * This method should not be used outside of `analyzer` and `analyzer_cli`
    * packages.
    */
-  PackageBundle getSummarySdkBundle() {
+  PackageBundle getSummarySdkBundle(bool strongMode) {
     String rootPath = directory.getAbsolutePath();
-    bool strongMode = _analysisOptions?.strongMode ?? false;
     String name = strongMode ? 'strong.sum' : 'spec.sum';
     String path = pathos.join(rootPath, 'lib', '_internal', name);
     try {
@@ -589,16 +678,7 @@
   }
 
   @override
-  Source mapDartUri(String dartUri) {
-    Source source = _uriToSourceMap[dartUri];
-    if (source == null) {
-      source = _mapDartUri(dartUri);
-      _uriToSourceMap[dartUri] = source;
-    }
-    return source;
-  }
-
-  FileBasedSource _mapDartUri(String dartUri) {
+  FileBasedSource internalMapDartUri(String dartUri) {
     String libraryName;
     String relativePath;
     int index = dartUri.indexOf('/');
diff --git a/pkg/analyzer/lib/src/generated/source.dart b/pkg/analyzer/lib/src/generated/source.dart
index 4ba4c51..6412de3 100644
--- a/pkg/analyzer/lib/src/generated/source.dart
+++ b/pkg/analyzer/lib/src/generated/source.dart
@@ -15,7 +15,7 @@
 import 'package:analyzer/src/generated/java_io.dart' show JavaFile;
 import 'package:analyzer/src/generated/sdk.dart' show DartSdk;
 import 'package:analyzer/src/generated/source_io.dart'
-    show FileBasedSource, FileUriResolver, PackageUriResolver;
+    show FileBasedSource, PackageUriResolver;
 import 'package:analyzer/task/model.dart';
 import 'package:package_config/packages.dart';
 import 'package:path/path.dart' as pathos;
@@ -484,6 +484,9 @@
    */
   bool get isInSystemLibrary;
 
+  @override
+  Source get librarySource => null;
+
   /**
    * Return the modification stamp for this source, or a negative value if the
    * source does not exist. A modification stamp is a non-negative integer with
@@ -905,7 +908,7 @@
       return UriKind.PACKAGE_URI;
     } else if (scheme == DartUriResolver.DART_SCHEME) {
       return UriKind.DART_URI;
-    } else if (scheme == FileUriResolver.FILE_SCHEME) {
+    } else if (scheme == ResourceUriResolver.FILE_SCHEME) {
       return UriKind.FILE_URI;
     }
     return UriKind.FILE_URI;
diff --git a/pkg/analyzer/lib/src/generated/source_io.dart b/pkg/analyzer/lib/src/generated/source_io.dart
index 491e3eb..f7b47ef 100644
--- a/pkg/analyzer/lib/src/generated/source_io.dart
+++ b/pkg/analyzer/lib/src/generated/source_io.dart
@@ -239,8 +239,14 @@
   }
 
   @override
-  bool operator ==(Object object) =>
-      object is FileBasedSource && id == object.id;
+  bool operator ==(Object object) {
+    if (object is FileBasedSource) {
+      return id == object.id;
+    } else if (object is Source) {
+      return uri == object.uri;
+    }
+    return false;
+  }
 
   @override
   bool exists() => file.isFile();
@@ -256,7 +262,11 @@
 
 /**
  * Instances of the class `FileUriResolver` resolve `file` URI's.
+ *
+ * This class is now deprecated; 'new FileUriResolver()' is equivalent to
+ * 'new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)'.
  */
+@deprecated
 class FileUriResolver extends UriResolver {
   /**
    * The name of the `file` scheme.
@@ -510,7 +520,12 @@
 
 /**
  * Instances of the class `RelativeFileUriResolver` resolve `file` URI's.
+ *
+ * This class is now deprecated; file URI resolution should be done with
+ * ResourceUriResolver, i.e.
+ * 'new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)'.
  */
+@deprecated
 class RelativeFileUriResolver extends UriResolver {
   /**
    * The name of the `file` scheme.
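To make the two deprecation notes above concrete, here is a minimal migration sketch. The replacement expression is taken verbatim from the comments; the two import paths are my assumption about where ResourceUriResolver and PhysicalResourceProvider are declared and are not part of this diff.

    import 'package:analyzer/file_system/file_system.dart';
    import 'package:analyzer/file_system/physical_file_system.dart';

    // Before (deprecated): new FileUriResolver()
    // After, per the deprecation notes: a resolver backed by the physical
    // file system's resource provider.
    final fileUriResolver =
        new ResourceUriResolver(PhysicalResourceProvider.INSTANCE);
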
diff --git a/pkg/analyzer/lib/src/generated/static_type_analyzer.dart b/pkg/analyzer/lib/src/generated/static_type_analyzer.dart
index fefd3eb..5c57bb8 100644
--- a/pkg/analyzer/lib/src/generated/static_type_analyzer.dart
+++ b/pkg/analyzer/lib/src/generated/static_type_analyzer.dart
@@ -662,8 +662,12 @@
       // elements
       if (node.elements.isNotEmpty) {
         // Infer the list type from the arguments.
-        DartType staticType =
-            node.elements.map((e) => e.staticType).reduce(_leastUpperBound);
+        Iterable<DartType> types =
+            node.elements.map((e) => e.staticType).where((t) => t != null);
+        if (types.isEmpty) {
+          return null;
+        }
+        DartType staticType = types.reduce(_leastUpperBound);
         if (staticType.isBottom) {
           staticType = _dynamicType;
         }
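The guard added above matters because some elements of a list literal may have no static type yet (their staticType is null); only the non-null element types feed the least-upper-bound computation. As plain Dart input, not analyzer code, a sketch of the inference being performed:

    // The element types int and double combine via their least upper bound,
    // so `xs` is inferred as List<num>.
    var xs = [1, 2.5];
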
diff --git a/pkg/analyzer/lib/src/generated/testing/element_factory.dart b/pkg/analyzer/lib/src/generated/testing/element_factory.dart
index 9fd53dd..106977c 100644
--- a/pkg/analyzer/lib/src/generated/testing/element_factory.dart
+++ b/pkg/analyzer/lib/src/generated/testing/element_factory.dart
@@ -47,16 +47,8 @@
     ClassElementImpl element = new ClassElementImpl(typeName, 0);
     element.constructors = const <ConstructorElement>[];
     element.supertype = superclassType;
-    InterfaceTypeImpl type = new InterfaceTypeImpl(element);
-    element.type = type;
     if (parameterNames != null) {
-      int count = parameterNames.length;
-      if (count > 0) {
-        element.typeParameters = typeParameters(parameterNames);
-        type.typeArguments = new List<DartType>.from(
-            element.typeParameters.map((p) => p.type),
-            growable: false);
-      }
+      element.typeParameters = typeParameters(parameterNames);
     }
     return element;
   }
@@ -96,7 +88,6 @@
   static ConstructorElementImpl constructorElement(
       ClassElement definingClass, String name, bool isConst,
       [List<DartType> argumentTypes]) {
-    DartType type = definingClass.type;
     ConstructorElementImpl constructor = name == null
         ? new ConstructorElementImpl("", -1)
         : new ConstructorElementImpl(name, 0);
@@ -123,9 +114,7 @@
     } else {
       constructor.parameters = <ParameterElement>[];
     }
-    constructor.returnType = type;
     constructor.enclosingElement = definingClass;
-    constructor.type = new FunctionTypeImpl(constructor);
     if (!constructor.isSynthetic) {
       constructor.constantInitializers = <ConstructorInitializer>[];
     }
@@ -137,17 +126,13 @@
           [List<DartType> argumentTypes]) =>
       constructorElement(definingClass, name, false, argumentTypes);
 
-  static ClassElementImpl enumElement(
-      TypeProvider typeProvider, String enumName,
+  static EnumElementImpl enumElement(TypeProvider typeProvider, String enumName,
       [List<String> constantNames]) {
     //
     // Build the enum.
     //
-    ClassElementImpl enumElement = new ClassElementImpl(enumName, -1);
-    InterfaceTypeImpl enumType = new InterfaceTypeImpl(enumElement);
-    enumElement.type = enumType;
-    enumElement.supertype = objectType;
-    enumElement.enum2 = true;
+    EnumElementImpl enumElement = new EnumElementImpl(enumName, -1);
+    InterfaceTypeImpl enumType = enumElement.type;
     //
     // Populate the fields.
     //
@@ -222,27 +207,9 @@
     if (isConst) {
       (field as ConstFieldElementImpl).constantInitializer = initializer;
     }
-    PropertyAccessorElementImpl getter =
-        new PropertyAccessorElementImpl.forVariable(field);
-    getter.getter = true;
-    getter.synthetic = true;
-    getter.variable = field;
-    getter.returnType = type;
-    field.getter = getter;
-    FunctionTypeImpl getterType = new FunctionTypeImpl(getter);
-    getter.type = getterType;
+    new PropertyAccessorElementImpl_ImplicitGetter(field);
     if (!isConst && !isFinal) {
-      PropertyAccessorElementImpl setter =
-          new PropertyAccessorElementImpl.forVariable(field);
-      setter.setter = true;
-      setter.synthetic = true;
-      setter.variable = field;
-      setter.parameters = <ParameterElement>[
-        requiredParameter2("_$name", type)
-      ];
-      setter.returnType = VoidTypeImpl.instance;
-      setter.type = new FunctionTypeImpl(setter);
-      field.setter = setter;
+      new PropertyAccessorElementImpl_ImplicitSetter(field);
     }
     return field;
   }
@@ -564,7 +531,7 @@
     field.synthetic = true;
     field.type = type;
     PropertyAccessorElementImpl getter =
-        new PropertyAccessorElementImpl.forVariable(field);
+        new PropertyAccessorElementImpl(name, -1);
     getter.getter = true;
     getter.variable = field;
     getter.returnType = type;
@@ -573,7 +540,7 @@
     getter.type = getterType;
     ParameterElementImpl parameter = requiredParameter2("a", type);
     PropertyAccessorElementImpl setter =
-        new PropertyAccessorElementImpl.forVariable(field);
+        new PropertyAccessorElementImpl(name, -1);
     setter.setter = true;
     setter.synthetic = true;
     setter.variable = field;
@@ -614,28 +581,9 @@
     variable.final2 = isFinal;
     variable.synthetic = false;
     variable.type = type;
-    PropertyAccessorElementImpl getter =
-        new PropertyAccessorElementImpl.forVariable(variable);
-    getter.getter = true;
-    getter.synthetic = true;
-    getter.variable = variable;
-    getter.returnType = type;
-    variable.getter = getter;
-    FunctionTypeImpl getterType = new FunctionTypeImpl(getter);
-    getter.type = getterType;
+    new PropertyAccessorElementImpl_ImplicitGetter(variable);
     if (!isConst && !isFinal) {
-      PropertyAccessorElementImpl setter =
-          new PropertyAccessorElementImpl.forVariable(variable);
-      setter.setter = true;
-      setter.static = true;
-      setter.synthetic = true;
-      setter.variable = variable;
-      setter.parameters = <ParameterElement>[
-        requiredParameter2("_$name", type)
-      ];
-      setter.returnType = VoidTypeImpl.instance;
-      setter.type = new FunctionTypeImpl(setter);
-      variable.setter = setter;
+      new PropertyAccessorElementImpl_ImplicitSetter(variable);
     }
     return variable;
   }
diff --git a/pkg/analyzer/lib/src/generated/testing/test_type_provider.dart b/pkg/analyzer/lib/src/generated/testing/test_type_provider.dart
index 64dadf2..0a524fa 100644
--- a/pkg/analyzer/lib/src/generated/testing/test_type_provider.dart
+++ b/pkg/analyzer/lib/src/generated/testing/test_type_provider.dart
@@ -149,7 +149,7 @@
   InterfaceType _typeType;
 
   /**
-   * The type representing typenames that can't be resolved.
+   * The type representing type names that can't be resolved.
    */
   DartType _undefinedType;
 
@@ -684,10 +684,6 @@
     for (MethodElement method in classElement.methods) {
       (method as ExecutableElementImpl).type = new FunctionTypeImpl(method);
     }
-    for (ConstructorElement constructor in classElement.constructors) {
-      (constructor as ExecutableElementImpl).type =
-          new FunctionTypeImpl(constructor);
-    }
   }
 
   /**
diff --git a/pkg/analyzer/lib/src/generated/type_system.dart b/pkg/analyzer/lib/src/generated/type_system.dart
index 19b5be5..74e3fd2 100644
--- a/pkg/analyzer/lib/src/generated/type_system.dart
+++ b/pkg/analyzer/lib/src/generated/type_system.dart
@@ -12,7 +12,8 @@
 import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/src/dart/element/element.dart';
 import 'package:analyzer/src/dart/element/type.dart';
-import 'package:analyzer/src/generated/engine.dart' show AnalysisContext;
+import 'package:analyzer/src/generated/engine.dart'
+    show AnalysisContext, AnalysisOptionsImpl;
 import 'package:analyzer/src/generated/resolver.dart' show TypeProvider;
 import 'package:analyzer/src/generated/utilities_dart.dart';
 
@@ -23,6 +24,15 @@
  * https://github.com/dart-lang/dev_compiler/blob/master/STRONG_MODE.md
  */
 class StrongTypeSystemImpl extends TypeSystem {
+  /**
+   * True if implicit casts should be allowed, otherwise false.
+   *
+   * This affects the behavior of [isAssignableTo].
+   */
+  final bool implicitCasts;
+
+  StrongTypeSystemImpl({this.implicitCasts: true});
+
   bool anyParameterType(FunctionType ft, bool predicate(DartType t)) {
     return ft.parameters.any((p) => predicate(p.type));
   }
@@ -73,7 +83,6 @@
   }
 
   /// Computes the greatest lower bound of [type1] and [type2].
-  @override
   DartType getGreatestLowerBound(
       TypeProvider provider, DartType type1, DartType type2) {
     // The greatest lower bound relation is reflexive.
@@ -203,6 +212,23 @@
     var inferringTypeSystem =
         new _StrongInferenceTypeSystem(typeProvider, fnType.typeFormals);
 
+    // Special case inference for Future.then.
+    //
+    // We don't have union types, so Future<T>.then<S> is typed to take a
+    // callback `T -> S`. However, the lambda might actually return a
+    // Future<S>. So we handle that special case here.
+    if (argumentTypes.isNotEmpty && argumentTypes[0] is FunctionType) {
+      Element element = fnType?.element;
+      bool isFutureThen = element is MethodElement &&
+          element.name == 'then' &&
+          element.enclosingElement.type.isDartAsyncFuture;
+      if (isFutureThen) {
+        // Ignore return context. We'll let the onValue function's return type
+        // drive inference.
+        returnContextType = null;
+      }
+    }
+
     if (returnContextType != null) {
       inferringTypeSystem.isSubtypeOf(fnType.returnType, returnContextType);
     }
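As plain Dart input, not analyzer code, a sketch of the case the comment above describes: the callback handed to then returns a Future rather than a bare value, so the call-site return context is dropped and the callback's own return type drives the inference of S.

    import 'dart:async';

    Future<String> fetchName(int id) async => 'user$id';

    Future<String> example() {
      // The lambda returns Future<String>, not String; without the special
      // case the Future<String> context from `example` would conflict with
      // the Future-returning lambda during inference.
      return new Future.value(42).then((id) => fetchName(id));
    }
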
@@ -274,6 +300,10 @@
       return true;
     }
 
+    if (!implicitCasts) {
+      return false;
+    }
+
     // Don't allow implicit downcasts between function types
     // and call method objects, as these will almost always fail.
     if ((fromType is FunctionType && getCallMethodType(toType) != null) ||
@@ -293,16 +323,13 @@
       return false;
     }
 
-    // If the subtype relation goes the other way, allow the implicit downcast.
-    // TODO(leafp): Emit warnings and hints for these in some way.
-    // TODO(leafp): Consider adding a flag to disable these?  Or just rely on
-    //   --warnings-as-errors?
+    // If the subtype relation goes the other way, allow the implicit
+    // downcast.
     if (isSubtypeOf(toType, fromType) || toType.isAssignableTo(fromType)) {
-      // TODO(leafp): error if type is known to be exact (literal,
-      //  instance creation).
-      // TODO(leafp): Warn on composite downcast.
-      // TODO(leafp): hint on object/dynamic downcast.
-      // TODO(leafp): Consider allowing assignment casts.
+      // TODO(leafp,jmesserly): we emit warnings/hints for these in
+      // src/task/strong/checker.dart, which is a bit inconsistent. That
+      // code should be moved into the places that use isAssignableTo, such as
+      // ErrorVerifier.
       return true;
     }
 
@@ -1164,8 +1191,9 @@
    * Create either a strong mode or regular type system based on context.
    */
   static TypeSystem create(AnalysisContext context) {
-    return (context.analysisOptions.strongMode)
-        ? new StrongTypeSystemImpl()
+    var options = context.analysisOptions as AnalysisOptionsImpl;
+    return options.strongMode
+        ? new StrongTypeSystemImpl(implicitCasts: options.implicitCasts)
         : new TypeSystemImpl();
   }
 }
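A minimal sketch of the new implicitCasts knob, using only names visible in this file's diff (the import is the file being patched): constructing the strong-mode type system with implicit downcasts disabled, which is what TypeSystem.create now does when AnalysisOptionsImpl.implicitCasts is false.

    import 'package:analyzer/src/generated/type_system.dart';

    // implicitCasts defaults to true; passing false makes isAssignableTo
    // reject implicit downcasts (the early `return false` added above).
    final strictStrongMode = new StrongTypeSystemImpl(implicitCasts: false);
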
diff --git a/pkg/analyzer/lib/src/summary/format.dart b/pkg/analyzer/lib/src/summary/format.dart
index 701b10e..1034bc5 100644
--- a/pkg/analyzer/lib/src/summary/format.dart
+++ b/pkg/analyzer/lib/src/summary/format.dart
@@ -141,9 +141,190 @@
   }
 }
 
-class CacheSourceContentBuilder extends Object with _CacheSourceContentMixin implements idl.CacheSourceContent {
-  bool _finished = false;
+class CacheAnalysisErrorBuilder extends Object with _CacheAnalysisErrorMixin implements idl.CacheAnalysisError {
+  String _correction;
+  String _errorCodeUniqueName;
+  int _length;
+  String _message;
+  int _offset;
 
+  @override
+  String get correction => _correction ??= '';
+
+  /**
+   * The correction to be displayed for this error, or `null` if there is no
+   * correction information for this error. The correction should indicate how
+   * the user can fix the error.
+   */
+  void set correction(String _value) {
+    _correction = _value;
+  }
+
+  @override
+  String get errorCodeUniqueName => _errorCodeUniqueName ??= '';
+
+  /**
+   * The unique name of the error code.
+   */
+  void set errorCodeUniqueName(String _value) {
+    _errorCodeUniqueName = _value;
+  }
+
+  @override
+  int get length => _length ??= 0;
+
+  /**
+   * Length of the error range.
+   */
+  void set length(int _value) {
+    assert(_value == null || _value >= 0);
+    _length = _value;
+  }
+
+  @override
+  String get message => _message ??= '';
+
+  /**
+   * The message to be displayed for this error. The message should indicate
+   * what is wrong and why it is wrong.
+   */
+  void set message(String _value) {
+    _message = _value;
+  }
+
+  @override
+  int get offset => _offset ??= 0;
+
+  /**
+   * Offset of the error range relative to the beginning of the file.
+   */
+  void set offset(int _value) {
+    assert(_value == null || _value >= 0);
+    _offset = _value;
+  }
+
+  CacheAnalysisErrorBuilder({String correction, String errorCodeUniqueName, int length, String message, int offset})
+    : _correction = correction,
+      _errorCodeUniqueName = errorCodeUniqueName,
+      _length = length,
+      _message = message,
+      _offset = offset;
+
+  /**
+   * Flush [informative] data recursively.
+   */
+  void flushInformative() {
+  }
+
+  fb.Offset finish(fb.Builder fbBuilder) {
+    fb.Offset offset_correction;
+    fb.Offset offset_errorCodeUniqueName;
+    fb.Offset offset_message;
+    if (_correction != null) {
+      offset_correction = fbBuilder.writeString(_correction);
+    }
+    if (_errorCodeUniqueName != null) {
+      offset_errorCodeUniqueName = fbBuilder.writeString(_errorCodeUniqueName);
+    }
+    if (_message != null) {
+      offset_message = fbBuilder.writeString(_message);
+    }
+    fbBuilder.startTable();
+    if (offset_correction != null) {
+      fbBuilder.addOffset(4, offset_correction);
+    }
+    if (offset_errorCodeUniqueName != null) {
+      fbBuilder.addOffset(0, offset_errorCodeUniqueName);
+    }
+    if (_length != null && _length != 0) {
+      fbBuilder.addUint32(2, _length);
+    }
+    if (offset_message != null) {
+      fbBuilder.addOffset(3, offset_message);
+    }
+    if (_offset != null && _offset != 0) {
+      fbBuilder.addUint32(1, _offset);
+    }
+    return fbBuilder.endTable();
+  }
+}
+
+class _CacheAnalysisErrorReader extends fb.TableReader<_CacheAnalysisErrorImpl> {
+  const _CacheAnalysisErrorReader();
+
+  @override
+  _CacheAnalysisErrorImpl createObject(fb.BufferContext bc, int offset) => new _CacheAnalysisErrorImpl(bc, offset);
+}
+
+class _CacheAnalysisErrorImpl extends Object with _CacheAnalysisErrorMixin implements idl.CacheAnalysisError {
+  final fb.BufferContext _bc;
+  final int _bcOffset;
+
+  _CacheAnalysisErrorImpl(this._bc, this._bcOffset);
+
+  String _correction;
+  String _errorCodeUniqueName;
+  int _length;
+  String _message;
+  int _offset;
+
+  @override
+  String get correction {
+    _correction ??= const fb.StringReader().vTableGet(_bc, _bcOffset, 4, '');
+    return _correction;
+  }
+
+  @override
+  String get errorCodeUniqueName {
+    _errorCodeUniqueName ??= const fb.StringReader().vTableGet(_bc, _bcOffset, 0, '');
+    return _errorCodeUniqueName;
+  }
+
+  @override
+  int get length {
+    _length ??= const fb.Uint32Reader().vTableGet(_bc, _bcOffset, 2, 0);
+    return _length;
+  }
+
+  @override
+  String get message {
+    _message ??= const fb.StringReader().vTableGet(_bc, _bcOffset, 3, '');
+    return _message;
+  }
+
+  @override
+  int get offset {
+    _offset ??= const fb.Uint32Reader().vTableGet(_bc, _bcOffset, 1, 0);
+    return _offset;
+  }
+}
+
+abstract class _CacheAnalysisErrorMixin implements idl.CacheAnalysisError {
+  @override
+  Map<String, Object> toJson() {
+    Map<String, Object> _result = <String, Object>{};
+    if (correction != '') _result["correction"] = correction;
+    if (errorCodeUniqueName != '') _result["errorCodeUniqueName"] = errorCodeUniqueName;
+    if (length != 0) _result["length"] = length;
+    if (message != '') _result["message"] = message;
+    if (offset != 0) _result["offset"] = offset;
+    return _result;
+  }
+
+  @override
+  Map<String, Object> toMap() => {
+    "correction": correction,
+    "errorCodeUniqueName": errorCodeUniqueName,
+    "length": length,
+    "message": message,
+    "offset": offset,
+  };
+
+  @override
+  String toString() => convert.JSON.encode(toJson());
+}
+
+class CacheSourceContentBuilder extends Object with _CacheSourceContentMixin implements idl.CacheSourceContent {
   List<String> _exportedUris;
   List<String> _importedUris;
   idl.CacheSourceKind _kind;
@@ -157,7 +338,6 @@
    * or `package:foo/bar.dart`.  Empty if [kind] is [CacheSourceKind.part].
    */
   void set exportedUris(List<String> _value) {
-    assert(!_finished);
     _exportedUris = _value;
   }
 
@@ -169,7 +349,6 @@
    * or `package:foo/bar.dart`.  Empty if [kind] is [CacheSourceKind.part].
    */
   void set importedUris(List<String> _value) {
-    assert(!_finished);
     _importedUris = _value;
   }
 
@@ -180,7 +359,6 @@
    * The kind of the source.
    */
   void set kind(idl.CacheSourceKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -192,7 +370,6 @@
    * [CacheSourceKind.part].
    */
   void set partUris(List<String> _value) {
-    assert(!_finished);
     _partUris = _value;
   }
 
@@ -214,8 +391,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_exportedUris;
     fb.Offset offset_importedUris;
     fb.Offset offset_partUris;
@@ -316,9 +491,92 @@
   String toString() => convert.JSON.encode(toJson());
 }
 
-class CodeRangeBuilder extends Object with _CodeRangeMixin implements idl.CodeRange {
-  bool _finished = false;
+class CacheSourceErrorsInLibraryBuilder extends Object with _CacheSourceErrorsInLibraryMixin implements idl.CacheSourceErrorsInLibrary {
+  List<CacheAnalysisErrorBuilder> _errors;
 
+  @override
+  List<CacheAnalysisErrorBuilder> get errors => _errors ??= <CacheAnalysisErrorBuilder>[];
+
+  /**
+   * The list of errors in the source in the library.
+   */
+  void set errors(List<CacheAnalysisErrorBuilder> _value) {
+    _errors = _value;
+  }
+
+  CacheSourceErrorsInLibraryBuilder({List<CacheAnalysisErrorBuilder> errors})
+    : _errors = errors;
+
+  /**
+   * Flush [informative] data recursively.
+   */
+  void flushInformative() {
+    _errors?.forEach((b) => b.flushInformative());
+  }
+
+  List<int> toBuffer() {
+    fb.Builder fbBuilder = new fb.Builder();
+    return fbBuilder.finish(finish(fbBuilder), "CSEL");
+  }
+
+  fb.Offset finish(fb.Builder fbBuilder) {
+    fb.Offset offset_errors;
+    if (!(_errors == null || _errors.isEmpty)) {
+      offset_errors = fbBuilder.writeList(_errors.map((b) => b.finish(fbBuilder)).toList());
+    }
+    fbBuilder.startTable();
+    if (offset_errors != null) {
+      fbBuilder.addOffset(0, offset_errors);
+    }
+    return fbBuilder.endTable();
+  }
+}
+
+idl.CacheSourceErrorsInLibrary readCacheSourceErrorsInLibrary(List<int> buffer) {
+  fb.BufferContext rootRef = new fb.BufferContext.fromBytes(buffer);
+  return const _CacheSourceErrorsInLibraryReader().read(rootRef, 0);
+}
+
+class _CacheSourceErrorsInLibraryReader extends fb.TableReader<_CacheSourceErrorsInLibraryImpl> {
+  const _CacheSourceErrorsInLibraryReader();
+
+  @override
+  _CacheSourceErrorsInLibraryImpl createObject(fb.BufferContext bc, int offset) => new _CacheSourceErrorsInLibraryImpl(bc, offset);
+}
+
+class _CacheSourceErrorsInLibraryImpl extends Object with _CacheSourceErrorsInLibraryMixin implements idl.CacheSourceErrorsInLibrary {
+  final fb.BufferContext _bc;
+  final int _bcOffset;
+
+  _CacheSourceErrorsInLibraryImpl(this._bc, this._bcOffset);
+
+  List<idl.CacheAnalysisError> _errors;
+
+  @override
+  List<idl.CacheAnalysisError> get errors {
+    _errors ??= const fb.ListReader<idl.CacheAnalysisError>(const _CacheAnalysisErrorReader()).vTableGet(_bc, _bcOffset, 0, const <idl.CacheAnalysisError>[]);
+    return _errors;
+  }
+}
+
+abstract class _CacheSourceErrorsInLibraryMixin implements idl.CacheSourceErrorsInLibrary {
+  @override
+  Map<String, Object> toJson() {
+    Map<String, Object> _result = <String, Object>{};
+    if (errors.isNotEmpty) _result["errors"] = errors.map((_value) => _value.toJson()).toList();
+    return _result;
+  }
+
+  @override
+  Map<String, Object> toMap() => {
+    "errors": errors,
+  };
+
+  @override
+  String toString() => convert.JSON.encode(toJson());
+}
+
+class CodeRangeBuilder extends Object with _CodeRangeMixin implements idl.CodeRange {
   int _length;
   int _offset;
 
@@ -329,7 +587,6 @@
    * Length of the element code.
    */
   void set length(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _length = _value;
   }
@@ -341,7 +598,6 @@
    * Offset of the element code relative to the beginning of the file.
    */
   void set offset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _offset = _value;
   }
@@ -357,8 +613,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fbBuilder.startTable();
     if (_length != null && _length != 0) {
       fbBuilder.addUint32(1, _length);
@@ -419,8 +673,6 @@
 }
 
 class EntityRefBuilder extends Object with _EntityRefMixin implements idl.EntityRef {
-  bool _finished = false;
-
   List<int> _implicitFunctionTypeIndices;
   int _paramReference;
   int _reference;
@@ -455,7 +707,6 @@
    * first to the class and then to the method.
    */
   void set implicitFunctionTypeIndices(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _implicitFunctionTypeIndices = _value;
   }
@@ -483,7 +734,6 @@
    * zero.
    */
   void set paramReference(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _paramReference = _value;
   }
@@ -496,7 +746,6 @@
    * zero if this is a reference to a type parameter.
    */
   void set reference(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _reference = _value;
   }
@@ -512,7 +761,6 @@
    * Otherwise zero.
    */
   void set slot(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _slot = _value;
   }
@@ -527,7 +775,6 @@
    * empty.
    */
   void set syntheticParams(List<UnlinkedParamBuilder> _value) {
-    assert(!_finished);
     _syntheticParams = _value;
   }
 
@@ -541,7 +788,6 @@
    * Otherwise `null`.
    */
   void set syntheticReturnType(EntityRefBuilder _value) {
-    assert(!_finished);
     _syntheticReturnType = _value;
   }
 
@@ -553,7 +799,6 @@
    * type arguments used to instantiate it (if any).
    */
   void set typeArguments(List<EntityRefBuilder> _value) {
-    assert(!_finished);
     _typeArguments = _value;
   }
 
@@ -576,8 +821,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_implicitFunctionTypeIndices;
     fb.Offset offset_syntheticParams;
     fb.Offset offset_syntheticReturnType;
@@ -714,8 +957,6 @@
 }
 
 class LinkedDependencyBuilder extends Object with _LinkedDependencyMixin implements idl.LinkedDependency {
-  bool _finished = false;
-
   List<String> _parts;
   String _uri;
 
@@ -727,7 +968,6 @@
    * These URIs are relative to the importing library.
    */
   void set parts(List<String> _value) {
-    assert(!_finished);
     _parts = _value;
   }
 
@@ -742,7 +982,6 @@
    * `b/d/e.dart`.
    */
   void set uri(String _value) {
-    assert(!_finished);
     _uri = _value;
   }
 
@@ -757,8 +996,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_parts;
     fb.Offset offset_uri;
     if (!(_parts == null || _parts.isEmpty)) {
@@ -827,8 +1064,6 @@
 }
 
 class LinkedExportNameBuilder extends Object with _LinkedExportNameMixin implements idl.LinkedExportName {
-  bool _finished = false;
-
   int _dependency;
   idl.ReferenceKind _kind;
   String _name;
@@ -842,7 +1077,6 @@
    * entity is defined.
    */
   void set dependency(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _dependency = _value;
   }
@@ -854,7 +1088,6 @@
    * The kind of the entity being referred to.
    */
   void set kind(idl.ReferenceKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -866,7 +1099,6 @@
    * the trailing '='.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -880,7 +1112,6 @@
    * represent parts in the order of the corresponding `part` declarations.
    */
   void set unit(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _unit = _value;
   }
@@ -898,8 +1129,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_name;
     if (_name != null) {
       offset_name = fbBuilder.writeString(_name);
@@ -988,8 +1217,6 @@
 }
 
 class LinkedLibraryBuilder extends Object with _LinkedLibraryMixin implements idl.LinkedLibrary {
-  bool _finished = false;
-
   List<LinkedDependencyBuilder> _dependencies;
   List<int> _exportDependencies;
   List<LinkedExportNameBuilder> _exportNames;
@@ -1018,7 +1245,6 @@
    * depends on the lack of a certain declaration in the library).
    */
   void set dependencies(List<LinkedDependencyBuilder> _value) {
-    assert(!_finished);
     _dependencies = _value;
   }
 
@@ -1030,7 +1256,6 @@
    * of the library being exported.
    */
   void set exportDependencies(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _exportDependencies = _value;
   }
@@ -1046,7 +1271,6 @@
    * Sorted by name.
    */
   void set exportNames(List<LinkedExportNameBuilder> _value) {
-    assert(!_finished);
     _exportNames = _value;
   }
 
@@ -1058,7 +1282,6 @@
    * true, all other fields in the data structure have their default values.
    */
   void set fallbackMode(bool _value) {
-    assert(!_finished);
     _fallbackMode = _value;
   }
 
@@ -1070,7 +1293,6 @@
    * of the library being imported.
    */
   void set importDependencies(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _importDependencies = _value;
   }
@@ -1084,7 +1306,6 @@
    * the transitive closure of exports, plus the library itself).
    */
   void set numPrelinkedDependencies(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _numPrelinkedDependencies = _value;
   }
@@ -1099,7 +1320,6 @@
    * declarations in the defining compilation unit.
    */
   void set units(List<LinkedUnitBuilder> _value) {
-    assert(!_finished);
     _units = _value;
   }
 
@@ -1127,8 +1347,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_dependencies;
     fb.Offset offset_exportDependencies;
     fb.Offset offset_exportNames;
@@ -1274,8 +1492,6 @@
 }
 
 class LinkedReferenceBuilder extends Object with _LinkedReferenceMixin implements idl.LinkedReference {
-  bool _finished = false;
-
   int _containingReference;
   int _dependency;
   idl.ReferenceKind _kind;
@@ -1299,7 +1515,6 @@
    * LinkedUnit.references[i].containingReference < i.
    */
   void set containingReference(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _containingReference = _value;
   }
@@ -1315,7 +1530,6 @@
    * member), or if [kind] is [ReferenceKind.prefix].
    */
   void set dependency(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _dependency = _value;
   }
@@ -1328,7 +1542,6 @@
    * and `void`, the kind is [ReferenceKind.classOrEnum].
    */
   void set kind(idl.ReferenceKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -1343,7 +1556,6 @@
    * [UnlinkedExecutable.localVariables].  Otherwise zero.
    */
   void set localIndex(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _localIndex = _value;
   }
@@ -1357,7 +1569,6 @@
    * string is "dynamic".  For the pseudo-type `void`, the string is "void".
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -1370,7 +1581,6 @@
    * Otherwise zero.
    */
   void set numTypeParameters(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _numTypeParameters = _value;
   }
@@ -1388,7 +1598,6 @@
    * member).
    */
   void set unit(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _unit = _value;
   }
@@ -1409,8 +1618,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_name;
     if (_name != null) {
       offset_name = fbBuilder.writeString(_name);
@@ -1535,8 +1742,6 @@
 }
 
 class LinkedUnitBuilder extends Object with _LinkedUnitMixin implements idl.LinkedUnit {
-  bool _finished = false;
-
   List<int> _constCycles;
   List<LinkedReferenceBuilder> _references;
   List<EntityRefBuilder> _types;
@@ -1549,7 +1754,6 @@
    * corresponding to const constructors that are part of cycles.
    */
   void set constCycles(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _constCycles = _value;
   }
@@ -1566,7 +1770,6 @@
    * (e.g. elements involved in inferred or propagated types).
    */
   void set references(List<LinkedReferenceBuilder> _value) {
-    assert(!_finished);
     _references = _value;
   }
 
@@ -1578,7 +1781,6 @@
    * compilation unit with propagated and inferred types.
    */
   void set types(List<EntityRefBuilder> _value) {
-    assert(!_finished);
     _types = _value;
   }
 
@@ -1596,8 +1798,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_constCycles;
     fb.Offset offset_references;
     fb.Offset offset_types;
@@ -1682,8 +1882,6 @@
 }
 
 class PackageBundleBuilder extends Object with _PackageBundleMixin implements idl.PackageBundle {
-  bool _finished = false;
-
   List<LinkedLibraryBuilder> _linkedLibraries;
   List<String> _linkedLibraryUris;
   int _majorVersion;
@@ -1699,7 +1897,6 @@
    * Linked libraries.
    */
   void set linkedLibraries(List<LinkedLibraryBuilder> _value) {
-    assert(!_finished);
     _linkedLibraries = _value;
   }
 
@@ -1711,7 +1908,6 @@
    * `package:foo/bar.dart`.
    */
   void set linkedLibraryUris(List<String> _value) {
-    assert(!_finished);
     _linkedLibraryUris = _value;
   }
 
@@ -1723,7 +1919,6 @@
    * [PackageBundleAssembler.currentMajorVersion].
    */
   void set majorVersion(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _majorVersion = _value;
   }
@@ -1736,7 +1931,6 @@
    * [PackageBundleAssembler.currentMinorVersion].
    */
   void set minorVersion(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _minorVersion = _value;
   }
@@ -1749,7 +1943,6 @@
    * is encoded as a hexadecimal string using lower case letters.
    */
   void set unlinkedUnitHashes(List<String> _value) {
-    assert(!_finished);
     _unlinkedUnitHashes = _value;
   }
 
@@ -1760,7 +1953,6 @@
    * Unlinked information for the compilation units constituting the package.
    */
   void set unlinkedUnits(List<UnlinkedUnitBuilder> _value) {
-    assert(!_finished);
     _unlinkedUnits = _value;
   }
 
@@ -1771,7 +1963,6 @@
    * The list of URIs of items in [unlinkedUnits], e.g. `dart:core/bool.dart`.
    */
   void set unlinkedUnitUris(List<String> _value) {
-    assert(!_finished);
     _unlinkedUnitUris = _value;
   }
 
@@ -1799,8 +1990,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_linkedLibraries;
     fb.Offset offset_linkedLibraryUris;
     fb.Offset offset_unlinkedUnitHashes;
@@ -1946,8 +2135,6 @@
 }
 
 class PackageIndexBuilder extends Object with _PackageIndexMixin implements idl.PackageIndex {
-  bool _finished = false;
-
   List<idl.IndexSyntheticElementKind> _elementKinds;
   List<int> _elementOffsets;
   List<int> _elementUnits;
@@ -1964,7 +2151,6 @@
    * the kind of the synthetic element.
    */
   void set elementKinds(List<idl.IndexSyntheticElementKind> _value) {
-    assert(!_finished);
     _elementKinds = _value;
   }
 
@@ -1978,7 +2164,6 @@
    * whether an element is referenced in this [PackageIndex].
    */
   void set elementOffsets(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _elementOffsets = _value;
   }
@@ -1992,7 +2177,6 @@
    * specific unit where the element is declared.
    */
   void set elementUnits(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _elementUnits = _value;
   }
@@ -2006,7 +2190,6 @@
    * presence of a string in this [PackageIndex].
    */
   void set strings(List<String> _value) {
-    assert(!_finished);
     _strings = _value;
   }
 
@@ -2019,7 +2202,6 @@
    * [strings] list.
    */
   void set unitLibraryUris(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _unitLibraryUris = _value;
   }
@@ -2031,7 +2213,6 @@
    * List of indexes of each unit in this [PackageIndex].
    */
   void set units(List<UnitIndexBuilder> _value) {
-    assert(!_finished);
     _units = _value;
   }
 
@@ -2044,7 +2225,6 @@
    * [strings] list.
    */
   void set unitUnitUris(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _unitUnitUris = _value;
   }
@@ -2071,8 +2251,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_elementKinds;
     fb.Offset offset_elementOffsets;
     fb.Offset offset_elementUnits;
@@ -2226,8 +2404,6 @@
 }
 
 class UnitIndexBuilder extends Object with _UnitIndexMixin implements idl.UnitIndex {
-  bool _finished = false;
-
   List<idl.IndexNameKind> _definedNameKinds;
   List<int> _definedNameOffsets;
   List<int> _definedNames;
@@ -2249,7 +2425,6 @@
    * Each item of this list is the kind of an element defined in this unit.
    */
   void set definedNameKinds(List<idl.IndexNameKind> _value) {
-    assert(!_finished);
     _definedNameKinds = _value;
   }
 
@@ -2261,7 +2436,6 @@
    * unit relative to the beginning of the file.
    */
   void set definedNameOffsets(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _definedNameOffsets = _value;
   }
@@ -2276,7 +2450,6 @@
    * this [UnitIndex].
    */
   void set definedNames(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _definedNames = _value;
   }
@@ -2289,7 +2462,6 @@
    * for the library specific unit that corresponds to this [UnitIndex].
    */
   void set unit(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _unit = _value;
   }
@@ -2302,7 +2474,6 @@
    * is qualified with some prefix.
    */
   void set usedElementIsQualifiedFlags(List<bool> _value) {
-    assert(!_finished);
     _usedElementIsQualifiedFlags = _value;
   }
 
@@ -2313,7 +2484,6 @@
    * Each item of this list is the kind of the element usage.
    */
   void set usedElementKinds(List<idl.IndexRelationKind> _value) {
-    assert(!_finished);
     _usedElementKinds = _value;
   }
 
@@ -2324,7 +2494,6 @@
    * Each item of this list is the length of the element usage.
    */
   void set usedElementLengths(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _usedElementLengths = _value;
   }
@@ -2337,7 +2506,6 @@
    * beginning of the file.
    */
   void set usedElementOffsets(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _usedElementOffsets = _value;
   }
@@ -2351,7 +2519,6 @@
    * that the client can quickly find element references in this [UnitIndex].
    */
   void set usedElements(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _usedElements = _value;
   }
@@ -2364,7 +2531,6 @@
    * is qualified with some prefix.
    */
   void set usedNameIsQualifiedFlags(List<bool> _value) {
-    assert(!_finished);
     _usedNameIsQualifiedFlags = _value;
   }
 
@@ -2375,7 +2541,6 @@
    * Each item of this list is the kind of the name usage.
    */
   void set usedNameKinds(List<idl.IndexRelationKind> _value) {
-    assert(!_finished);
     _usedNameKinds = _value;
   }
 
@@ -2387,7 +2552,6 @@
    * beginning of the file.
    */
   void set usedNameOffsets(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _usedNameOffsets = _value;
   }
@@ -2401,7 +2565,6 @@
    * quickly find name uses in this [UnitIndex].
    */
   void set usedNames(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _usedNames = _value;
   }
@@ -2428,8 +2591,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_definedNameKinds;
     fb.Offset offset_definedNameOffsets;
     fb.Offset offset_definedNames;
@@ -2670,8 +2831,6 @@
 }
 
 class UnlinkedClassBuilder extends Object with _UnlinkedClassMixin implements idl.UnlinkedClass {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   CodeRangeBuilder _codeRange;
   UnlinkedDocumentationCommentBuilder _documentationComment;
@@ -2694,7 +2853,6 @@
    * Annotations for this class.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -2705,7 +2863,6 @@
    * Code range of the class.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -2717,7 +2874,6 @@
    * documentation comment.
    */
   void set documentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _documentationComment = _value;
   }
 
@@ -2728,7 +2884,6 @@
    * Executable objects (methods, getters, and setters) contained in the class.
    */
   void set executables(List<UnlinkedExecutableBuilder> _value) {
-    assert(!_finished);
     _executables = _value;
   }
 
@@ -2739,7 +2894,6 @@
    * Field declarations contained in the class.
    */
   void set fields(List<UnlinkedVariableBuilder> _value) {
-    assert(!_finished);
     _fields = _value;
   }
 
@@ -2751,7 +2905,6 @@
    * supertype)
    */
   void set hasNoSupertype(bool _value) {
-    assert(!_finished);
     _hasNoSupertype = _value;
   }
 
@@ -2762,7 +2915,6 @@
    * Interfaces appearing in an `implements` clause, if any.
    */
   void set interfaces(List<EntityRefBuilder> _value) {
-    assert(!_finished);
     _interfaces = _value;
   }
 
@@ -2773,7 +2925,6 @@
    * Indicates whether the class is declared with the `abstract` keyword.
    */
   void set isAbstract(bool _value) {
-    assert(!_finished);
     _isAbstract = _value;
   }
 
@@ -2784,7 +2935,6 @@
    * Indicates whether the class is declared using mixin application syntax.
    */
   void set isMixinApplication(bool _value) {
-    assert(!_finished);
     _isMixinApplication = _value;
   }
 
@@ -2795,7 +2945,6 @@
    * Mixins appearing in a `with` clause, if any.
    */
   void set mixins(List<EntityRefBuilder> _value) {
-    assert(!_finished);
     _mixins = _value;
   }
 
@@ -2806,7 +2955,6 @@
    * Name of the class.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -2817,7 +2965,6 @@
    * Offset of the class name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -2831,7 +2978,6 @@
    * the class *is* `Object` (and hence has no supertype).
    */
   void set supertype(EntityRefBuilder _value) {
-    assert(!_finished);
     _supertype = _value;
   }
 
@@ -2842,7 +2988,6 @@
    * Type parameters of the class, if any.
    */
   void set typeParameters(List<UnlinkedTypeParamBuilder> _value) {
-    assert(!_finished);
     _typeParameters = _value;
   }
 
@@ -2879,8 +3024,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_codeRange;
     fb.Offset offset_documentationComment;
@@ -3125,8 +3268,6 @@
 }
 
 class UnlinkedCombinatorBuilder extends Object with _UnlinkedCombinatorMixin implements idl.UnlinkedCombinator {
-  bool _finished = false;
-
   int _end;
   List<String> _hides;
   int _offset;
@@ -3140,7 +3281,6 @@
    * names.  Otherwise zero.
    */
   void set end(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _end = _value;
   }
@@ -3152,7 +3292,6 @@
    * List of names which are hidden.  Empty if this is a `show` combinator.
    */
   void set hides(List<String> _value) {
-    assert(!_finished);
     _hides = _value;
   }
 
@@ -3164,7 +3303,6 @@
    * zero.
    */
   void set offset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _offset = _value;
   }
@@ -3176,7 +3314,6 @@
    * List of names which are shown.  Empty if this is a `hide` combinator.
    */
   void set shows(List<String> _value) {
-    assert(!_finished);
     _shows = _value;
   }
 
@@ -3195,8 +3332,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_hides;
     fb.Offset offset_shows;
     if (!(_hides == null || _hides.isEmpty)) {
@@ -3289,8 +3424,6 @@
 }
 
 class UnlinkedConstBuilder extends Object with _UnlinkedConstMixin implements idl.UnlinkedConst {
-  bool _finished = false;
-
   List<idl.UnlinkedExprAssignOperator> _assignmentOperators;
   List<double> _doubles;
   List<int> _ints;
@@ -3306,7 +3439,6 @@
    * Sequence of operators used by assignment operations.
    */
   void set assignmentOperators(List<idl.UnlinkedExprAssignOperator> _value) {
-    assert(!_finished);
     _assignmentOperators = _value;
   }
 
@@ -3317,7 +3449,6 @@
    * Sequence of 64-bit doubles consumed by the operation `pushDouble`.
    */
   void set doubles(List<double> _value) {
-    assert(!_finished);
     _doubles = _value;
   }
 
@@ -3330,7 +3461,6 @@
    * `makeList`, and `makeMap`.
    */
   void set ints(List<int> _value) {
-    assert(!_finished);
     assert(_value == null || _value.every((e) => e >= 0));
     _ints = _value;
   }
@@ -3343,7 +3473,6 @@
    * expression.
    */
   void set isValidConst(bool _value) {
-    assert(!_finished);
     _isValidConst = _value;
   }
 
@@ -3355,7 +3484,6 @@
    * the constant value.
    */
   void set operations(List<idl.UnlinkedConstOperation> _value) {
-    assert(!_finished);
     _operations = _value;
   }
 
@@ -3369,7 +3497,6 @@
    * actual entity being referred to may be something other than a type.
    */
   void set references(List<EntityRefBuilder> _value) {
-    assert(!_finished);
     _references = _value;
   }
 
@@ -3381,7 +3508,6 @@
    * `invokeConstructor`.
    */
   void set strings(List<String> _value) {
-    assert(!_finished);
     _strings = _value;
   }
 
@@ -3402,8 +3528,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_assignmentOperators;
     fb.Offset offset_doubles;
     fb.Offset offset_ints;
@@ -3548,8 +3672,6 @@
 }
 
 class UnlinkedConstructorInitializerBuilder extends Object with _UnlinkedConstructorInitializerMixin implements idl.UnlinkedConstructorInitializer {
-  bool _finished = false;
-
   List<String> _argumentNames;
   List<UnlinkedConstBuilder> _arguments;
   UnlinkedConstBuilder _expression;
@@ -3565,7 +3687,6 @@
    * with the name at `n + i - m`.
    */
   void set argumentNames(List<String> _value) {
-    assert(!_finished);
     _argumentNames = _value;
   }
 
@@ -3577,7 +3698,6 @@
    * invocation.  Otherwise empty.
    */
   void set arguments(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _arguments = _value;
   }
 
@@ -3589,7 +3709,6 @@
    * Otherwise `null`.
    */
   void set expression(UnlinkedConstBuilder _value) {
-    assert(!_finished);
     _expression = _value;
   }
 
@@ -3600,7 +3719,6 @@
    * The kind of the constructor initializer (field, redirect, super).
    */
   void set kind(idl.UnlinkedConstructorInitializerKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -3614,7 +3732,6 @@
    * constructor, declared in the superclass, to invoke.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -3634,8 +3751,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_argumentNames;
     fb.Offset offset_arguments;
     fb.Offset offset_expression;
@@ -3748,8 +3863,6 @@
 }
 
 class UnlinkedDocumentationCommentBuilder extends Object with _UnlinkedDocumentationCommentMixin implements idl.UnlinkedDocumentationComment {
-  bool _finished = false;
-
   int _length;
   int _offset;
   String _text;
@@ -3761,7 +3874,6 @@
    * Length of the documentation comment (prior to replacing '\r\n' with '\n').
    */
   void set length(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _length = _value;
   }
@@ -3774,7 +3886,6 @@
    * beginning of the file.
    */
   void set offset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _offset = _value;
   }
@@ -3789,7 +3900,6 @@
    * specially encoded.
    */
   void set text(String _value) {
-    assert(!_finished);
     _text = _value;
   }
 
@@ -3805,8 +3915,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_text;
     if (_text != null) {
       offset_text = fbBuilder.writeString(_text);
@@ -3883,8 +3991,6 @@
 }
 
 class UnlinkedEnumBuilder extends Object with _UnlinkedEnumMixin implements idl.UnlinkedEnum {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   CodeRangeBuilder _codeRange;
   UnlinkedDocumentationCommentBuilder _documentationComment;
@@ -3899,7 +4005,6 @@
    * Annotations for this enum.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -3910,7 +4015,6 @@
    * Code range of the enum.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -3922,7 +4026,6 @@
    * comment.
    */
   void set documentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _documentationComment = _value;
   }
 
@@ -3933,7 +4036,6 @@
    * Name of the enum type.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -3944,7 +4046,6 @@
    * Offset of the enum name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -3956,7 +4057,6 @@
    * Values listed in the enum declaration, in declaration order.
    */
   void set values(List<UnlinkedEnumValueBuilder> _value) {
-    assert(!_finished);
     _values = _value;
   }
 
@@ -3980,8 +4080,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_codeRange;
     fb.Offset offset_documentationComment;
@@ -4110,8 +4208,6 @@
 }
 
 class UnlinkedEnumValueBuilder extends Object with _UnlinkedEnumValueMixin implements idl.UnlinkedEnumValue {
-  bool _finished = false;
-
   UnlinkedDocumentationCommentBuilder _documentationComment;
   String _name;
   int _nameOffset;
@@ -4124,7 +4220,6 @@
    * documentation comment.
    */
   void set documentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _documentationComment = _value;
   }
 
@@ -4135,7 +4230,6 @@
    * Name of the enumerated value.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -4146,7 +4240,6 @@
    * Offset of the enum value name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -4165,8 +4258,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_documentationComment;
     fb.Offset offset_name;
     if (_documentationComment != null) {
@@ -4247,8 +4338,6 @@
 }
 
 class UnlinkedExecutableBuilder extends Object with _UnlinkedExecutableMixin implements idl.UnlinkedExecutable {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   UnlinkedConstBuilder _bodyExpr;
   CodeRangeBuilder _codeRange;
@@ -4287,7 +4376,6 @@
    * Annotations for this executable.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -4300,7 +4388,6 @@
    * constant evaluation depends on the function body.
    */
   void set bodyExpr(UnlinkedConstBuilder _value) {
-    assert(!_finished);
     _bodyExpr = _value;
   }
 
@@ -4311,7 +4398,6 @@
    * Code range of the executable.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -4323,7 +4409,6 @@
    * initializers.  Otherwise empty.
    */
   void set constantInitializers(List<UnlinkedConstructorInitializerBuilder> _value) {
-    assert(!_finished);
     _constantInitializers = _value;
   }
 
@@ -4339,7 +4424,6 @@
    * Otherwise, zero.
    */
   void set constCycleSlot(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _constCycleSlot = _value;
   }
@@ -4352,7 +4436,6 @@
    * documentation comment.
    */
   void set documentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _documentationComment = _value;
   }
 
@@ -4367,7 +4450,6 @@
    * `dynamic`.
    */
   void set inferredReturnTypeSlot(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _inferredReturnTypeSlot = _value;
   }
@@ -4379,7 +4461,6 @@
    * Indicates whether the executable is declared using the `abstract` keyword.
    */
   void set isAbstract(bool _value) {
-    assert(!_finished);
     _isAbstract = _value;
   }
 
@@ -4390,7 +4471,6 @@
    * Indicates whether the executable has body marked as being asynchronous.
    */
   void set isAsynchronous(bool _value) {
-    assert(!_finished);
     _isAsynchronous = _value;
   }
 
@@ -4401,7 +4481,6 @@
    * Indicates whether the executable is declared using the `const` keyword.
    */
   void set isConst(bool _value) {
-    assert(!_finished);
     _isConst = _value;
   }
 
@@ -4412,7 +4491,6 @@
    * Indicates whether the executable is declared using the `external` keyword.
    */
   void set isExternal(bool _value) {
-    assert(!_finished);
     _isExternal = _value;
   }
 
@@ -4423,7 +4501,6 @@
    * Indicates whether the executable is declared using the `factory` keyword.
    */
   void set isFactory(bool _value) {
-    assert(!_finished);
     _isFactory = _value;
   }
 
@@ -4434,7 +4511,6 @@
    * Indicates whether the executable has body marked as being a generator.
    */
   void set isGenerator(bool _value) {
-    assert(!_finished);
     _isGenerator = _value;
   }
 
@@ -4445,7 +4521,6 @@
    * Indicates whether the executable is a redirected constructor.
    */
   void set isRedirectedConstructor(bool _value) {
-    assert(!_finished);
     _isRedirectedConstructor = _value;
   }
 
@@ -4460,7 +4535,6 @@
    * static for semantic purposes).
    */
   void set isStatic(bool _value) {
-    assert(!_finished);
     _isStatic = _value;
   }
 
@@ -4472,7 +4546,6 @@
    * constructor).
    */
   void set kind(idl.UnlinkedExecutableKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -4483,7 +4556,6 @@
    * The list of local functions.
    */
   void set localFunctions(List<UnlinkedExecutableBuilder> _value) {
-    assert(!_finished);
     _localFunctions = _value;
   }
 
@@ -4494,7 +4566,6 @@
    * The list of local labels.
    */
   void set localLabels(List<UnlinkedLabelBuilder> _value) {
-    assert(!_finished);
     _localLabels = _value;
   }
 
@@ -4505,7 +4576,6 @@
    * The list of local variables.
    */
   void set localVariables(List<UnlinkedVariableBuilder> _value) {
-    assert(!_finished);
     _localVariables = _value;
   }
 
@@ -4518,7 +4588,6 @@
    * For unnamed constructors, this is the empty string.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -4530,7 +4599,6 @@
    * the offset of the end of the constructor name.  Otherwise zero.
    */
   void set nameEnd(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameEnd = _value;
   }
@@ -4545,7 +4613,6 @@
    * offset of the second "C" in "class C { C(); }").
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -4559,7 +4626,6 @@
    * parameter.
    */
   void set parameters(List<UnlinkedParamBuilder> _value) {
-    assert(!_finished);
     _parameters = _value;
   }
 
@@ -4571,7 +4637,6 @@
    * the offset of the period before the constructor name.  Otherwise zero.
    */
   void set periodOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _periodOffset = _value;
   }
@@ -4584,7 +4649,6 @@
    * constructor to which this constructor redirects; otherwise empty.
    */
   void set redirectedConstructor(EntityRefBuilder _value) {
-    assert(!_finished);
     _redirectedConstructor = _value;
   }
 
@@ -4597,7 +4661,6 @@
    * empty.
    */
   void set redirectedConstructorName(String _value) {
-    assert(!_finished);
     _redirectedConstructorName = _value;
   }
 
@@ -4612,7 +4675,6 @@
    * imports.
    */
   void set returnType(EntityRefBuilder _value) {
-    assert(!_finished);
     _returnType = _value;
   }
 
@@ -4624,7 +4686,6 @@
    * method syntax is disabled.
    */
   void set typeParameters(List<UnlinkedTypeParamBuilder> _value) {
-    assert(!_finished);
     _typeParameters = _value;
   }
 
@@ -4635,7 +4696,6 @@
    * If a local function, the length of the visible range; zero otherwise.
    */
   void set visibleLength(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _visibleLength = _value;
   }
@@ -4647,7 +4707,6 @@
    * If a local function, the beginning of the visible range; zero otherwise.
    */
   void set visibleOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _visibleOffset = _value;
   }
@@ -4708,8 +4767,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_bodyExpr;
     fb.Offset offset_codeRange;
@@ -5162,8 +5219,6 @@
 }
 
 class UnlinkedExportNonPublicBuilder extends Object with _UnlinkedExportNonPublicMixin implements idl.UnlinkedExportNonPublic {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   int _offset;
   int _uriEnd;
@@ -5176,7 +5231,6 @@
    * Annotations for this export directive.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -5187,7 +5241,6 @@
    * Offset of the "export" keyword.
    */
   void set offset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _offset = _value;
   }
@@ -5200,7 +5253,6 @@
    * file.
    */
   void set uriEnd(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _uriEnd = _value;
   }
@@ -5213,7 +5265,6 @@
    * the file.
    */
   void set uriOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _uriOffset = _value;
   }
@@ -5235,8 +5286,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     if (!(_annotations == null || _annotations.isEmpty)) {
       offset_annotations = fbBuilder.writeList(_annotations.map((b) => b.finish(fbBuilder)).toList());
@@ -5325,8 +5374,6 @@
 }
 
 class UnlinkedExportPublicBuilder extends Object with _UnlinkedExportPublicMixin implements idl.UnlinkedExportPublic {
-  bool _finished = false;
-
   List<UnlinkedCombinatorBuilder> _combinators;
   String _uri;
 
@@ -5337,7 +5384,6 @@
    * Combinators contained in this import declaration.
    */
   void set combinators(List<UnlinkedCombinatorBuilder> _value) {
-    assert(!_finished);
     _combinators = _value;
   }
 
@@ -5348,7 +5394,6 @@
    * URI used in the source code to reference the exported library.
    */
   void set uri(String _value) {
-    assert(!_finished);
     _uri = _value;
   }
 
@@ -5364,8 +5409,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_combinators;
     fb.Offset offset_uri;
     if (!(_combinators == null || _combinators.isEmpty)) {
@@ -5434,8 +5477,6 @@
 }
 
 class UnlinkedImportBuilder extends Object with _UnlinkedImportMixin implements idl.UnlinkedImport {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   List<UnlinkedCombinatorBuilder> _combinators;
   bool _isDeferred;
@@ -5454,7 +5495,6 @@
    * Annotations for this import declaration.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -5465,7 +5505,6 @@
    * Combinators contained in this import declaration.
    */
   void set combinators(List<UnlinkedCombinatorBuilder> _value) {
-    assert(!_finished);
     _combinators = _value;
   }
 
@@ -5476,7 +5515,6 @@
    * Indicates whether the import declaration uses the `deferred` keyword.
    */
   void set isDeferred(bool _value) {
-    assert(!_finished);
     _isDeferred = _value;
   }
 
@@ -5487,7 +5525,6 @@
    * Indicates whether the import declaration is implicit.
    */
   void set isImplicit(bool _value) {
-    assert(!_finished);
     _isImplicit = _value;
   }
 
@@ -5499,7 +5536,6 @@
    * is true, zero.
    */
   void set offset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _offset = _value;
   }
@@ -5512,7 +5548,6 @@
    * if there is no prefix.
    */
   void set prefixOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _prefixOffset = _value;
   }
@@ -5527,7 +5562,6 @@
    * Note that multiple imports can declare the same prefix.
    */
   void set prefixReference(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _prefixReference = _value;
   }
@@ -5539,7 +5573,6 @@
    * URI used in the source code to reference the imported library.
    */
   void set uri(String _value) {
-    assert(!_finished);
     _uri = _value;
   }
 
@@ -5551,7 +5584,6 @@
    * file.  If [isImplicit] is true, zero.
    */
   void set uriEnd(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _uriEnd = _value;
   }
@@ -5564,7 +5596,6 @@
    * the file.  If [isImplicit] is true, zero.
    */
   void set uriOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _uriOffset = _value;
   }
@@ -5594,8 +5625,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_combinators;
     fb.Offset offset_uri;
@@ -5764,8 +5793,6 @@
 }
 
 class UnlinkedLabelBuilder extends Object with _UnlinkedLabelMixin implements idl.UnlinkedLabel {
-  bool _finished = false;
-
   bool _isOnSwitchMember;
   bool _isOnSwitchStatement;
   String _name;
@@ -5779,7 +5806,6 @@
    * `default`).
    */
   void set isOnSwitchMember(bool _value) {
-    assert(!_finished);
     _isOnSwitchMember = _value;
   }
 
@@ -5790,7 +5816,6 @@
    * Return `true` if this label is associated with a `switch` statement.
    */
   void set isOnSwitchStatement(bool _value) {
-    assert(!_finished);
     _isOnSwitchStatement = _value;
   }
 
@@ -5801,7 +5826,6 @@
    * Name of the label.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -5812,7 +5836,6 @@
    * Offset of the label relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -5831,8 +5854,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_name;
     if (_name != null) {
       offset_name = fbBuilder.writeString(_name);
@@ -5921,8 +5942,6 @@
 }
 
 class UnlinkedParamBuilder extends Object with _UnlinkedParamMixin implements idl.UnlinkedParam {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   CodeRangeBuilder _codeRange;
   String _defaultValueCode;
@@ -5945,7 +5964,6 @@
    * Annotations for this parameter.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -5956,7 +5974,6 @@
    * Code range of the parameter.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -5968,7 +5985,6 @@
    * expression in the default value.  Otherwise the empty string.
    */
   void set defaultValueCode(String _value) {
-    assert(!_finished);
     _defaultValueCode = _value;
   }
 
@@ -5987,7 +6003,6 @@
    * field.
    */
   void set inferredTypeSlot(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _inferredTypeSlot = _value;
   }
@@ -6000,7 +6015,6 @@
    * does not have an initializer.
    */
   void set initializer(UnlinkedExecutableBuilder _value) {
-    assert(!_finished);
     _initializer = _value;
   }
 
@@ -6011,7 +6025,6 @@
    * Indicates whether this is a function-typed parameter.
    */
   void set isFunctionTyped(bool _value) {
-    assert(!_finished);
     _isFunctionTyped = _value;
   }
 
@@ -6023,7 +6036,6 @@
    * declared using `this.` syntax).
    */
   void set isInitializingFormal(bool _value) {
-    assert(!_finished);
     _isInitializingFormal = _value;
   }
 
@@ -6034,7 +6046,6 @@
    * Kind of the parameter.
    */
   void set kind(idl.UnlinkedParamKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -6045,7 +6056,6 @@
    * Name of the parameter.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -6056,7 +6066,6 @@
    * Offset of the parameter name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -6068,7 +6077,6 @@
    * If [isFunctionTyped] is `true`, the parameters of the function type.
    */
   void set parameters(List<UnlinkedParamBuilder> _value) {
-    assert(!_finished);
     _parameters = _value;
   }
 
@@ -6081,7 +6089,6 @@
    * implicit.
    */
   void set type(EntityRefBuilder _value) {
-    assert(!_finished);
     _type = _value;
   }
 
@@ -6092,7 +6099,6 @@
    * The length of the visible range.
    */
   void set visibleLength(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _visibleLength = _value;
   }
@@ -6104,7 +6110,6 @@
    * The beginning of the visible range.
    */
   void set visibleOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _visibleOffset = _value;
   }
@@ -6139,8 +6144,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_codeRange;
     fb.Offset offset_defaultValueCode;
@@ -6373,8 +6376,6 @@
 }
 
 class UnlinkedPartBuilder extends Object with _UnlinkedPartMixin implements idl.UnlinkedPart {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   int _uriEnd;
   int _uriOffset;
@@ -6386,7 +6387,6 @@
    * Annotations for this part declaration.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -6398,7 +6398,6 @@
    * file.
    */
   void set uriEnd(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _uriEnd = _value;
   }
@@ -6411,7 +6410,6 @@
    * the file.
    */
   void set uriOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _uriOffset = _value;
   }
@@ -6431,8 +6429,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     if (!(_annotations == null || _annotations.isEmpty)) {
       offset_annotations = fbBuilder.writeList(_annotations.map((b) => b.finish(fbBuilder)).toList());
@@ -6509,8 +6505,6 @@
 }
 
 class UnlinkedPublicNameBuilder extends Object with _UnlinkedPublicNameMixin implements idl.UnlinkedPublicName {
-  bool _finished = false;
-
   idl.ReferenceKind _kind;
   List<UnlinkedPublicNameBuilder> _members;
   String _name;
@@ -6523,7 +6517,6 @@
    * The kind of object referred to by the name.
    */
   void set kind(idl.ReferenceKind _value) {
-    assert(!_finished);
     _kind = _value;
   }
 
@@ -6539,7 +6532,6 @@
    * separate name added to any namespace.
    */
   void set members(List<UnlinkedPublicNameBuilder> _value) {
-    assert(!_finished);
     _members = _value;
   }
 
@@ -6550,7 +6542,6 @@
    * The name itself.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -6562,7 +6553,6 @@
    * it accepts.  Otherwise zero.
    */
   void set numTypeParameters(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _numTypeParameters = _value;
   }
@@ -6581,8 +6571,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_members;
     fb.Offset offset_name;
     if (!(_members == null || _members.isEmpty)) {
@@ -6675,8 +6663,6 @@
 }
 
 class UnlinkedPublicNamespaceBuilder extends Object with _UnlinkedPublicNamespaceMixin implements idl.UnlinkedPublicNamespace {
-  bool _finished = false;
-
   List<UnlinkedExportPublicBuilder> _exports;
   List<UnlinkedPublicNameBuilder> _names;
   List<String> _parts;
@@ -6688,7 +6674,6 @@
    * Export declarations in the compilation unit.
    */
   void set exports(List<UnlinkedExportPublicBuilder> _value) {
-    assert(!_finished);
     _exports = _value;
   }
 
@@ -6702,7 +6687,6 @@
    * relinking.
    */
   void set names(List<UnlinkedPublicNameBuilder> _value) {
-    assert(!_finished);
     _names = _value;
   }
 
@@ -6713,7 +6697,6 @@
    * URIs referenced by part declarations in the compilation unit.
    */
   void set parts(List<String> _value) {
-    assert(!_finished);
     _parts = _value;
   }
 
@@ -6736,8 +6719,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_exports;
     fb.Offset offset_names;
     fb.Offset offset_parts;
@@ -6827,8 +6808,6 @@
 }
 
 class UnlinkedReferenceBuilder extends Object with _UnlinkedReferenceMixin implements idl.UnlinkedReference {
-  bool _finished = false;
-
   String _name;
   int _prefixReference;
 
@@ -6841,7 +6820,6 @@
    * For the pseudo-type `bottom`, the string is "*bottom*".
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -6857,7 +6835,6 @@
    * UnlinkedUnit.references[i].prefixReference < i.
    */
   void set prefixReference(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _prefixReference = _value;
   }
@@ -6873,8 +6850,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_name;
     if (_name != null) {
       offset_name = fbBuilder.writeString(_name);
@@ -6939,8 +6914,6 @@
 }
 
 class UnlinkedTypedefBuilder extends Object with _UnlinkedTypedefMixin implements idl.UnlinkedTypedef {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   CodeRangeBuilder _codeRange;
   UnlinkedDocumentationCommentBuilder _documentationComment;
@@ -6957,7 +6930,6 @@
    * Annotations for this typedef.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -6968,7 +6940,6 @@
    * Code range of the typedef.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -6980,7 +6951,6 @@
    * documentation comment.
    */
   void set documentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _documentationComment = _value;
   }
 
@@ -6991,7 +6961,6 @@
    * Name of the typedef.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -7002,7 +6971,6 @@
    * Offset of the typedef name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -7014,7 +6982,6 @@
    * Parameters of the executable, if any.
    */
   void set parameters(List<UnlinkedParamBuilder> _value) {
-    assert(!_finished);
     _parameters = _value;
   }
 
@@ -7025,7 +6992,6 @@
    * Return type of the typedef.
    */
   void set returnType(EntityRefBuilder _value) {
-    assert(!_finished);
     _returnType = _value;
   }
 
@@ -7036,7 +7002,6 @@
    * Type parameters of the typedef, if any.
    */
   void set typeParameters(List<UnlinkedTypeParamBuilder> _value) {
-    assert(!_finished);
     _typeParameters = _value;
   }
 
@@ -7064,8 +7029,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_codeRange;
     fb.Offset offset_documentationComment;
@@ -7226,8 +7189,6 @@
 }
 
 class UnlinkedTypeParamBuilder extends Object with _UnlinkedTypeParamMixin implements idl.UnlinkedTypeParam {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   EntityRefBuilder _bound;
   CodeRangeBuilder _codeRange;
@@ -7241,7 +7202,6 @@
    * Annotations for this type parameter.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -7253,7 +7213,6 @@
    * null.
    */
   void set bound(EntityRefBuilder _value) {
-    assert(!_finished);
     _bound = _value;
   }
 
@@ -7264,7 +7223,6 @@
    * Code range of the type parameter.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -7275,7 +7233,6 @@
    * Name of the type parameter.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -7286,7 +7243,6 @@
    * Offset of the type parameter name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -7309,8 +7265,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_bound;
     fb.Offset offset_codeRange;
@@ -7423,8 +7377,6 @@
 }
 
 class UnlinkedUnitBuilder extends Object with _UnlinkedUnitMixin implements idl.UnlinkedUnit {
-  bool _finished = false;
-
   List<UnlinkedClassBuilder> _classes;
   CodeRangeBuilder _codeRange;
   List<UnlinkedEnumBuilder> _enums;
@@ -7450,7 +7402,6 @@
    * Classes declared in the compilation unit.
    */
   void set classes(List<UnlinkedClassBuilder> _value) {
-    assert(!_finished);
     _classes = _value;
   }
 
@@ -7461,7 +7412,6 @@
    * Code range of the unit.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -7472,7 +7422,6 @@
    * Enums declared in the compilation unit.
    */
   void set enums(List<UnlinkedEnumBuilder> _value) {
-    assert(!_finished);
     _enums = _value;
   }
 
@@ -7484,7 +7433,6 @@
    * the compilation unit.
    */
   void set executables(List<UnlinkedExecutableBuilder> _value) {
-    assert(!_finished);
     _executables = _value;
   }
 
@@ -7495,7 +7443,6 @@
    * Export declarations in the compilation unit.
    */
   void set exports(List<UnlinkedExportNonPublicBuilder> _value) {
-    assert(!_finished);
     _exports = _value;
   }
 
@@ -7510,7 +7457,6 @@
    * their default values.
    */
   void set fallbackModePath(String _value) {
-    assert(!_finished);
     _fallbackModePath = _value;
   }
 
@@ -7521,7 +7467,6 @@
    * Import declarations in the compilation unit.
    */
   void set imports(List<UnlinkedImportBuilder> _value) {
-    assert(!_finished);
     _imports = _value;
   }
 
@@ -7533,7 +7478,6 @@
    * library declaration.
    */
   void set libraryAnnotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _libraryAnnotations = _value;
   }
 
@@ -7545,7 +7489,6 @@
    * documentation comment.
    */
   void set libraryDocumentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _libraryDocumentationComment = _value;
   }
 
@@ -7556,7 +7499,6 @@
    * Name of the library (from a "library" declaration, if present).
    */
   void set libraryName(String _value) {
-    assert(!_finished);
     _libraryName = _value;
   }
 
@@ -7568,7 +7510,6 @@
    * library has no name).
    */
   void set libraryNameLength(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _libraryNameLength = _value;
   }
@@ -7581,7 +7522,6 @@
    * the library has no name).
    */
   void set libraryNameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _libraryNameOffset = _value;
   }
@@ -7593,7 +7533,6 @@
    * Part declarations in the compilation unit.
    */
   void set parts(List<UnlinkedPartBuilder> _value) {
-    assert(!_finished);
     _parts = _value;
   }
 
@@ -7604,7 +7543,6 @@
    * Unlinked public namespace of this compilation unit.
    */
   void set publicNamespace(UnlinkedPublicNamespaceBuilder _value) {
-    assert(!_finished);
     _publicNamespace = _value;
   }
 
@@ -7619,7 +7557,6 @@
    * UnlinkedImport.prefixReference]).
    */
   void set references(List<UnlinkedReferenceBuilder> _value) {
-    assert(!_finished);
     _references = _value;
   }
 
@@ -7630,7 +7567,6 @@
    * Typedefs declared in the compilation unit.
    */
   void set typedefs(List<UnlinkedTypedefBuilder> _value) {
-    assert(!_finished);
     _typedefs = _value;
   }
 
@@ -7641,7 +7577,6 @@
    * Top level variables declared in the compilation unit.
    */
   void set variables(List<UnlinkedVariableBuilder> _value) {
-    assert(!_finished);
     _variables = _value;
   }
 
@@ -7691,8 +7626,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_classes;
     fb.Offset offset_codeRange;
     fb.Offset offset_enums;
@@ -7998,8 +7931,6 @@
 }
 
 class UnlinkedVariableBuilder extends Object with _UnlinkedVariableMixin implements idl.UnlinkedVariable {
-  bool _finished = false;
-
   List<UnlinkedConstBuilder> _annotations;
   CodeRangeBuilder _codeRange;
   UnlinkedDocumentationCommentBuilder _documentationComment;
@@ -8022,7 +7953,6 @@
    * Annotations for this variable.
    */
   void set annotations(List<UnlinkedConstBuilder> _value) {
-    assert(!_finished);
     _annotations = _value;
   }
 
@@ -8033,7 +7963,6 @@
    * Code range of the variable.
    */
   void set codeRange(CodeRangeBuilder _value) {
-    assert(!_finished);
     _codeRange = _value;
   }
 
@@ -8045,7 +7974,6 @@
    * documentation comment.
    */
   void set documentationComment(UnlinkedDocumentationCommentBuilder _value) {
-    assert(!_finished);
     _documentationComment = _value;
   }
 
@@ -8059,7 +7987,6 @@
    * inferred for this variable, so its static type is `dynamic`.
    */
   void set inferredTypeSlot(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _inferredTypeSlot = _value;
   }
@@ -8072,7 +7999,6 @@
    * does not have an initializer.
    */
   void set initializer(UnlinkedExecutableBuilder _value) {
-    assert(!_finished);
     _initializer = _value;
   }
 
@@ -8083,7 +8009,6 @@
    * Indicates whether the variable is declared using the `const` keyword.
    */
   void set isConst(bool _value) {
-    assert(!_finished);
     _isConst = _value;
   }
 
@@ -8094,7 +8019,6 @@
    * Indicates whether the variable is declared using the `final` keyword.
    */
   void set isFinal(bool _value) {
-    assert(!_finished);
     _isFinal = _value;
   }
 
@@ -8109,7 +8033,6 @@
    * static for semantic purposes).
    */
   void set isStatic(bool _value) {
-    assert(!_finished);
     _isStatic = _value;
   }
 
@@ -8120,7 +8043,6 @@
    * Name of the variable.
    */
   void set name(String _value) {
-    assert(!_finished);
     _name = _value;
   }
 
@@ -8131,7 +8053,6 @@
    * Offset of the variable name relative to the beginning of the file.
    */
   void set nameOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _nameOffset = _value;
   }
@@ -8148,7 +8069,6 @@
    * Non-propagable variables have a [propagatedTypeSlot] of zero.
    */
   void set propagatedTypeSlot(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _propagatedTypeSlot = _value;
   }
@@ -8160,7 +8080,6 @@
    * Declared type of the variable.  Absent if the type is implicit.
    */
   void set type(EntityRefBuilder _value) {
-    assert(!_finished);
     _type = _value;
   }
 
@@ -8171,7 +8090,6 @@
    * If a local variable, the length of the visible range; zero otherwise.
    */
   void set visibleLength(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _visibleLength = _value;
   }
@@ -8183,7 +8101,6 @@
    * If a local variable, the beginning of the visible range; zero otherwise.
    */
   void set visibleOffset(int _value) {
-    assert(!_finished);
     assert(_value == null || _value >= 0);
     _visibleOffset = _value;
   }
@@ -8217,8 +8134,6 @@
   }
 
   fb.Offset finish(fb.Builder fbBuilder) {
-    assert(!_finished);
-    _finished = true;
     fb.Offset offset_annotations;
     fb.Offset offset_codeRange;
     fb.Offset offset_documentationComment;
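
The hunks above strip the `_finished` bookkeeping from every generated summary builder: setters become plain field assignments, the value-range asserts stay, and `finish` no longer enforces single use. A minimal sketch of the resulting behavior, assuming the generated builders' optional named constructors and the `package:analyzer/src/summary/format.dart` import used elsewhere in this patch; it is an illustration, not part of the change itself.

import 'package:analyzer/src/summary/format.dart';

void main() {
  UnlinkedReferenceBuilder ref = new UnlinkedReferenceBuilder();
  ref.name = 'foo';
  ref.name = 'bar';        // re-assignment; no assert(!_finished) guard remains
  ref.prefixReference = 0; // the `>= 0` value asserts are kept
}
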
diff --git a/pkg/analyzer/lib/src/summary/format.fbs b/pkg/analyzer/lib/src/summary/format.fbs
index 26c6256..d08851c 100644
--- a/pkg/analyzer/lib/src/summary/format.fbs
+++ b/pkg/analyzer/lib/src/summary/format.fbs
@@ -561,8 +561,11 @@
    * stack (where `m` is obtained from [UnlinkedConst.ints]) into a list (filled
    * from the end) and use them as positional arguments.  Use the lists of
    * positional and names arguments to invoke a method (or a function) with
-   * the reference from [UnlinkedConst.references].  Push the result of
-   * invocation value into the stack.
+   * the reference from [UnlinkedConst.references].  If `k` is nonzero (where
+   * `k` is obtained from [UnlinkedConst.ints]), obtain `k` type arguments from
+   * [UnlinkedConst.references] and use them as generic type arguments for the
+   * aforementioned method or function.  Push the result of the invocation onto
+   * the stack.
    *
    * In general `a.b` cannot not be distinguished between: `a` is a prefix and
    * `b` is a top-level function; or `a` is an object and `b` is the name of a
@@ -579,11 +582,14 @@
    * stack (where `m` is obtained from [UnlinkedConst.ints]) into a list (filled
    * from the end) and use them as positional arguments.  Use the lists of
    * positional and names arguments to invoke the method with the name from
-   * [UnlinkedConst.strings] of the target popped from the stack, and push the
-   * resulting value into the stack.
+   * [UnlinkedConst.strings] of the target popped from the stack.  If `k` is
+   * nonzero (where `k` is obtained from [UnlinkedConst.ints]), obtain `k` type
+   * arguments from [UnlinkedConst.references] and use them as generic type
+   * arguments for the aforementioned method.  Push the result of the
+   * invocation onto the stack.
    *
    * This operation should be used for invocation of a method invocation
-   * where `target` is know to be an object instance.
+   * where `target` is known to be an object instance.
    */
   invokeMethod,
 
@@ -785,6 +791,39 @@
 }
 
 /**
+ * Information about an analysis error in a source.
+ */
+table CacheAnalysisError {
+  /**
+   * The correction to be displayed for this error, or `null` if there is no
+   * correction information for this error. The correction should indicate how
+   * the user can fix the error.
+   */
+  correction:string (id: 4);
+
+  /**
+   * The unique name of the error code.
+   */
+  errorCodeUniqueName:string (id: 0);
+
+  /**
+   * Length of the error range.
+   */
+  length:uint (id: 2);
+
+  /**
+   * The message to be displayed for this error. The message should indicate
+   * what is wrong and why it is wrong.
+   */
+  message:string (id: 3);
+
+  /**
+   * Offset of the error range relative to the beginning of the file.
+   */
+  offset:uint (id: 1);
+}
+
+/**
  * Information about a source that depends only on its content.
  */
 table CacheSourceContent {
@@ -813,6 +852,17 @@
 }
 
 /**
+ * Errors of a source in a library, which depends on the import/export closure
+ * of the containing library and the source.
+ */
+table CacheSourceErrorsInLibrary {
+  /**
+   * The list of errors in the source in the library.
+   */
+  errors:[CacheAnalysisError] (id: 0);
+}
+
+/**
  * Information about an element code range.
  */
 table CodeRange {
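
A short round-trip sketch for the two new tables, using the builder and `fromBuffer` APIs that appear later in this patch (idl.dart and incremental_cache.dart); the error-code name, offsets, and message below are placeholder values only.

import 'package:analyzer/src/summary/format.dart';
import 'package:analyzer/src/summary/idl.dart';

void main() {
  // One cached error; the unique name is only a placeholder.
  CacheAnalysisErrorBuilder error = new CacheAnalysisErrorBuilder(
      errorCodeUniqueName: 'CompileTimeErrorCode.UNDEFINED_CLASS',
      offset: 10,
      length: 5,
      message: 'Undefined class',
      correction: null);
  CacheSourceErrorsInLibraryBuilder builder =
      new CacheSourceErrorsInLibraryBuilder(
          errors: <CacheAnalysisErrorBuilder>[error]);
  List<int> bytes = builder.toBuffer();
  // Read the errors back through the top-level 'CSEL' table.
  CacheSourceErrorsInLibrary restored =
      new CacheSourceErrorsInLibrary.fromBuffer(bytes);
  assert(restored.errors.single.errorCodeUniqueName ==
      'CompileTimeErrorCode.UNDEFINED_CLASS');
}
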
diff --git a/pkg/analyzer/lib/src/summary/idl.dart b/pkg/analyzer/lib/src/summary/idl.dart
index 7f2fe39..505ade8 100644
--- a/pkg/analyzer/lib/src/summary/idl.dart
+++ b/pkg/analyzer/lib/src/summary/idl.dart
@@ -58,6 +58,44 @@
 const informative = null;
 
 /**
+ * Information about an analysis error in a source.
+ */
+abstract class CacheAnalysisError extends base.SummaryClass {
+  /**
+   * The correction to be displayed for this error, or `null` if there is no
+   * correction information for this error. The correction should indicate how
+   * the user can fix the error.
+   */
+  @Id(4)
+  String get correction;
+
+  /**
+   * The unique name of the error code.
+   */
+  @Id(0)
+  String get errorCodeUniqueName;
+
+  /**
+   * Length of the error range.
+   */
+  @Id(2)
+  int get length;
+
+  /**
+   * The message to be displayed for this error. The message should indicate
+   * what is wrong and why it is wrong.
+   */
+  @Id(3)
+  String get message;
+
+  /**
+   * Offset of the error range relative to the beginning of the file.
+   */
+  @Id(1)
+  int get offset;
+}
+
+/**
  * Information about a source that depends only on its content.
  */
 @TopLevel('CaSS')
@@ -94,6 +132,21 @@
 }
 
 /**
+ * Errors of a source in a library, which depends on the import/export closure
+ * of the containing library and the source.
+ */
+@TopLevel('CSEL')
+abstract class CacheSourceErrorsInLibrary extends base.SummaryClass {
+  factory CacheSourceErrorsInLibrary.fromBuffer(List<int> buffer) =>
+      generated.readCacheSourceErrorsInLibrary(buffer);
+  /**
+   * The list of errors in the source in the library.
+   */
+  @Id(0)
+  List<CacheAnalysisError> get errors;
+}
+
+/**
  * Kind of a source in the cache.
  */
 enum CacheSourceKind { library, part }
@@ -1401,8 +1454,11 @@
    * stack (where `m` is obtained from [UnlinkedConst.ints]) into a list (filled
    * from the end) and use them as positional arguments.  Use the lists of
    * positional and names arguments to invoke a method (or a function) with
-   * the reference from [UnlinkedConst.references].  Push the result of
-   * invocation value into the stack.
+   * the reference from [UnlinkedConst.references].  If `k` is nonzero (where
+   * `k` is obtained from [UnlinkedConst.ints]), obtain `k` type arguments from
+   * [UnlinkedConst.references] and use them as generic type arguments for the
+   * aforementioned method or function.  Push the result of the invocation onto
+   * the stack.
    *
    * In general `a.b` cannot not be distinguished between: `a` is a prefix and
    * `b` is a top-level function; or `a` is an object and `b` is the name of a
@@ -1419,11 +1475,14 @@
    * stack (where `m` is obtained from [UnlinkedConst.ints]) into a list (filled
    * from the end) and use them as positional arguments.  Use the lists of
    * positional and names arguments to invoke the method with the name from
-   * [UnlinkedConst.strings] of the target popped from the stack, and push the
-   * resulting value into the stack.
+   * [UnlinkedConst.strings] of the target popped from the stack.  If `k` is
+   * nonzero (where `k` is obtained from [UnlinkedConst.ints]), obtain `k` type
+   * arguments from [UnlinkedConst.references] and use them as generic type
+   * arguments for the aforementioned method.  Push the result of the
+   * invocation onto the stack.
    *
    * This operation should be used for invocation of a method invocation
-   * where `target` is know to be an object instance.
+   * where `target` is known to be an object instance.
    */
   invokeMethod,
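
The `k` type-argument extension documented above changes how the parallel `ints` and `references` lists are consumed for `invokeMethod` and `invokeMethodRef`. A hedged illustration of the bookkeeping follows; the helper name is invented, and the real consumer is `ExprTypeComputer` in link.dart, patched below.

import 'package:analyzer/src/summary/idl.dart';

/// Illustrative helper only: how many entries of [UnlinkedConst.references]
/// a single `invokeMethodRef` consumes when its int operands start at
/// [intPtr]: one reference for the invoked entity plus `k` type-argument
/// references, where `k` is the third int (after the named and positional
/// argument counts).
int referencesUsedByInvokeMethodRef(UnlinkedConst expr, int intPtr) {
  int k = expr.ints[intPtr + 2];
  return 1 + k;
}
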
 
diff --git a/pkg/analyzer/lib/src/summary/incremental_cache.dart b/pkg/analyzer/lib/src/summary/incremental_cache.dart
index 1d913be..6370234 100644
--- a/pkg/analyzer/lib/src/summary/incremental_cache.dart
+++ b/pkg/analyzer/lib/src/summary/incremental_cache.dart
@@ -2,12 +2,14 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-import 'dart:convert' show ChunkedConversionSink, UTF8;
+import 'dart:collection';
+import 'dart:convert';
 import 'dart:core' hide Resource;
 
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/error.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/summary/format.dart';
 import 'package:analyzer/src/summary/idl.dart';
@@ -16,10 +18,52 @@
 import 'package:crypto/crypto.dart';
 
 /**
+ * The version of the incremental cache.  It should be incremented every time
+ * any cache data structure is changed.
+ */
+const int _VERSION = 1;
+
+/**
+ * Compare the given file paths [a] and [b].  Because paths usually share a
+ * long common prefix, the comparison is not done as a generic [String]
+ * comparison.  Instead it starts from the ends of the strings.
+ *
+ * Return `-1` if [a] is ordered before [b], `1` if [a] is ordered after [b],
+ * and zero if [a] and [b] are ordered together.
+ */
+int comparePaths(String a, String b) {
+  int thisLength = a.length;
+  int otherLength = b.length;
+  int len = (thisLength < otherLength) ? thisLength : otherLength;
+  for (int i = 0; i < len; i++) {
+    int thisCodeUnit = a.codeUnitAt(thisLength - 1 - i);
+    int otherCodeUnit = b.codeUnitAt(otherLength - 1 - i);
+    if (thisCodeUnit < otherCodeUnit) {
+      return -1;
+    }
+    if (thisCodeUnit > otherCodeUnit) {
+      return 1;
+    }
+  }
+  if (thisLength < otherLength) {
+    return -1;
+  }
+  if (thisLength > otherLength) {
+    return 1;
+  }
+  return 0;
+}
+
+/**
  * Storage for cache data.
  */
 abstract class CacheStorage {
   /**
+   * Compact the storage, e.g. remove unused entries.
+   */
+  void compact();
+
+  /**
    * Return bytes for the given [key], `null` if [key] is not in the storage.
    */
   List<int> get(String key);
@@ -42,6 +86,11 @@
  */
 class FolderCacheStorage implements CacheStorage {
   /**
+   * The maximum number of entries to keep in the cache.
+   */
+  static const MAX_ENTRIES = 20000;
+
+  /**
    * The folder to read and write files.
    */
   final Folder folder;
@@ -53,14 +102,46 @@
    */
   final String tempFileName;
 
-  FolderCacheStorage(this.folder, this.tempFileName);
+  /**
+   * The set of recently used entries, with the most recently used entries
+   * at the end.
+   */
+  final LinkedHashSet<String> _recentEntries = new LinkedHashSet<String>();
+
+  FolderCacheStorage(this.folder, this.tempFileName) {
+    try {
+      File file = folder.getChildAssumingFile('.entries');
+      if (file.exists) {
+        String entriesString = file.readAsStringSync();
+        List<String> entriesLists = entriesString.split('\n');
+        _recentEntries.addAll(entriesLists);
+      }
+    } catch (_) {}
+  }
+
+  @override
+  void compact() {
+    while (_recentEntries.length > MAX_ENTRIES) {
+      String key = _recentEntries.first;
+      _recentEntries.remove(key);
+      try {
+        folder.getChildAssumingFile(key).delete();
+      } catch (_) {}
+    }
+    try {
+      List<int> bytes = UTF8.encode(_recentEntries.join('\n'));
+      folder.getChildAssumingFile('.entries').writeAsBytesSync(bytes);
+    } catch (_) {}
+  }
 
   @override
   List<int> get(String key) {
     Resource file = folder.getChild(key);
     if (file is File) {
       try {
-        return file.readAsBytesSync();
+        List<int> bytes = file.readAsBytesSync();
+        _accessedKey(key);
+        return bytes;
       } on FileSystemException {}
     }
     return null;
@@ -73,8 +154,17 @@
     tempFile.writeAsBytesSync(bytes);
     try {
       tempFile.renameSync(absPath);
+      _accessedKey(key);
     } catch (e) {}
   }
+
+  /**
+   * The given [key] was accessed; update the recently used entries.
+   */
+  void _accessedKey(String key) {
+    _recentEntries.remove(key);
+    _recentEntries.add(key);
+  }
 }
 
 /**
@@ -139,9 +229,6 @@
       List<Source> closureSources = _getLibraryClosure(librarySource);
       List<LibraryBundleWithId> closureBundles = <LibraryBundleWithId>[];
       for (Source source in closureSources) {
-        if (source.isInSystemLibrary) {
-          continue;
-        }
         if (getSourceKind(source) == SourceKind.PART) {
           continue;
         }
@@ -159,6 +246,48 @@
   }
 
   /**
+   * Return the parts of the given [librarySource], or `null` if unknown.
+   */
+  List<Source> getLibraryParts(Source librarySource) {
+    try {
+      CacheSourceContent contentSource = _getCacheSourceContent(librarySource);
+      if (contentSource != null) {
+        return contentSource.partUris.map((String partUri) {
+          Source partSource = _resolveUri(librarySource, partUri);
+          if (partSource == null) {
+            throw new StateError(
+                'Unable to resolve $partUri in $librarySource');
+          }
+          return partSource;
+        }).toList();
+      }
+    } catch (e) {}
+    return null;
+  }
+
+  /**
+   * Return cached errors in the given [source] in the context of the given
+   * [librarySource], or `null` if the cache does not have this information.
+   */
+  List<AnalysisError> getSourceErrorsInLibrary(
+      Source librarySource, Source source) {
+    try {
+      String key = _getSourceErrorsKey(librarySource, source);
+      List<int> bytes = storage.get(key);
+      if (bytes == null) {
+        return null;
+      }
+      CacheSourceErrorsInLibrary errorsObject =
+          new CacheSourceErrorsInLibrary.fromBuffer(bytes);
+      return errorsObject.errors
+          .map((e) => _convertErrorFromCached(source, e))
+          .toList();
+    } catch (e) {
+      return null;
+    }
+  }
+
+  /**
    * Return the kind of the given [source], or `null` if unknown.
    */
   SourceKind getSourceKind(Source source) {
@@ -189,11 +318,27 @@
   }
 
   /**
+   * Associate the given [errors] with the [source] in the [librarySource].
+   */
+  void putSourceErrorsInLibrary(
+      Source librarySource, Source source, List<AnalysisError> errors) {
+    CacheSourceErrorsInLibraryBuilder builder =
+        new CacheSourceErrorsInLibraryBuilder(
+            errors: errors.map(_convertErrorToCached).toList());
+    String key = _getSourceErrorsKey(librarySource, source);
+    List<int> bytes = builder.toBuffer();
+    storage.put(key, bytes);
+  }
+
+  /**
    * Fill the whole source closure of the library with the given
    * [librarySource]. It includes defining units and parts of the library and
    * all its directly or indirectly imported or exported libraries.
    */
   void _appendLibraryClosure(Set<Source> closure, Source librarySource) {
+    if (librarySource.isInSystemLibrary) {
+      return;
+    }
     if (closure.add(librarySource)) {
       CacheSourceContent contentSource = _getCacheSourceContent(librarySource);
       if (contentSource == null) {
@@ -213,13 +358,66 @@
         if (refSource == null) {
           throw new StateError('Unable to resolve $refUri in $librarySource');
         }
-        _appendLibraryClosure(closure, refSource);
+        // If we have already the closure for the 'refSource', use it.
+        // Otherwise, continue computing recursively.
+        // It's not the most efficient algorithm, but in practice we might
+        // visit each library multiple times only for the first top-level
+        // bundle requested in `getLibraryClosureBundles`.
+        List<Source> refClosure = _libraryClosureMap[refSource];
+        if (refClosure != null) {
+          closure.addAll(refClosure);
+        } else {
+          _appendLibraryClosure(closure, refSource);
+        }
       }
       contentSource.importedUris.forEach(appendLibrarySources);
       contentSource.exportedUris.forEach(appendLibrarySources);
     }
   }
 
+  List<int> _computeSaltedMD5OfBytes(addData(ByteConversionSink byteSink)) {
+    Digest digest;
+    ChunkedConversionSink<Digest> digestSink =
+        new ChunkedConversionSink<Digest>.withCallback((List<Digest> digests) {
+      digest = digests.single;
+    });
+    ByteConversionSink byteSink = md5.startChunkedConversion(digestSink);
+    // Add data.
+    addData(byteSink);
+    byteSink.add(const <int>[_VERSION]);
+    byteSink.add(configSalt);
+    // Done.
+    byteSink.close();
+    return digest.bytes;
+  }
+
+  /**
+   * Return the [AnalysisError] for the given [cachedError].
+   */
+  AnalysisError _convertErrorFromCached(
+      Source source, CacheAnalysisError cachedError) {
+    ErrorCode errorCode = _getErrorCode(cachedError);
+    return new AnalysisError.forValues(
+        source,
+        cachedError.offset,
+        cachedError.length,
+        errorCode,
+        cachedError.message,
+        cachedError.correction);
+  }
+
+  /**
+   * Return the [CacheAnalysisError] for the given [error].
+   */
+  CacheAnalysisError _convertErrorToCached(AnalysisError error) {
+    return new CacheAnalysisErrorBuilder(
+        errorCodeUniqueName: error.errorCode.uniqueName,
+        offset: error.offset,
+        length: error.length,
+        message: error.message,
+        correction: error.correction);
+  }
+
   /**
    * Get the content based information about the given [source], maybe `null`
    * if the information is not in the cache.
@@ -248,6 +446,18 @@
   }
 
   /**
+   * Return the [ErrorCode] of the given [error]; throws if not found.
+   */
+  ErrorCode _getErrorCode(CacheAnalysisError error) {
+    String uniqueName = error.errorCodeUniqueName;
+    ErrorCode errorCode = ErrorCode.byUniqueName(uniqueName);
+    if (errorCode != null) {
+      return errorCode;
+    }
+    throw new StateError('Unable to find ErrorCode: $uniqueName');
+  }
+
+  /**
    * Get the bundle for the given key.
    */
   PackageBundle _getLibraryBundle(String key) {
@@ -258,6 +468,10 @@
         return null;
       }
       bundle = new PackageBundle.fromBuffer(bytes);
+      if (bundle.majorVersion != PackageBundleAssembler.currentMajorVersion ||
+          bundle.minorVersion != PackageBundleAssembler.currentMinorVersion) {
+        return null;
+      }
       _bundleMap[key] = bundle;
     }
     return bundle;
@@ -279,9 +493,11 @@
    */
   List<Source> _getLibraryClosure(Source librarySource) {
     return _libraryClosureMap.putIfAbsent(librarySource, () {
-      Set<Source> closure = new Set<Source>();
-      _appendLibraryClosure(closure, librarySource);
-      return closure.toList();
+      Set<Source> closureSet = new Set<Source>();
+      _appendLibraryClosure(closureSet, librarySource);
+      List<Source> closureList = closureSet.toList();
+      closureList.sort((a, b) => comparePaths(a.fullName, b.fullName));
+      return closureList;
     });
   }
 
@@ -292,31 +508,21 @@
   List<int> _getLibraryClosureHash(Source librarySource) {
     return _libraryClosureHashMap.putIfAbsent(librarySource, () {
       List<Source> closure = _getLibraryClosure(librarySource);
-
-      Digest digest;
-
-      var digestSink = new ChunkedConversionSink<Digest>.withCallback(
-          (List<Digest> digests) {
-        digest = digests.single;
+      return _computeSaltedMD5OfBytes((ByteConversionSink byteSink) {
+        for (Source source in closure) {
+          List<int> sourceHash = _getSourceContentHash(source);
+          byteSink.add(sourceHash);
+        }
+        // When we sort closure sources for two libraries (A, B) we get exactly
+        // the same list of sources for both A and B. So, their hash is exactly
+        // the same. But we use it to store separate summary bundles for
+        // separate libraries. Ideally it would be nice to group these libraries
+        // into a single summary bundle. But this would require delaying
+        // saving bundles until we know all of them.
+        // So, for now we make hashes for separate libraries unique by mixing
+        // in the library source again.
+        byteSink.add(_getSourceContentHash(librarySource));
       });
-
-      var byteSink = md5.startChunkedConversion(digestSink);
-
-      for (Source source in closure) {
-        List<int> sourceHash = _getSourceContentHash(source);
-        byteSink.add(sourceHash);
-      }
-      byteSink.add(configSalt);
-
-      byteSink.close();
-      // TODO(paulberry): this call to `close` should not be needed.
-      // Can be removed once
-      //   https://github.com/dart-lang/crypto/issues/33
-      // is fixed – ensure the min version constraint on crypto is updated, tho.
-      // Does not cause any problems in the mean time.
-      digestSink.close();
-
-      return digest.bytes;
     });
   }
 
@@ -332,6 +538,18 @@
   }
 
   /**
+   * Return the key for errors in the [source] in the [librarySource].
+   */
+  String _getSourceErrorsKey(Source librarySource, Source source) {
+    List<int> hash = _computeSaltedMD5OfBytes((ByteConversionSink byteSink) {
+      byteSink.add(_getLibraryClosureHash(librarySource));
+      byteSink.add(_getSourceContentHash(source));
+    });
+    String hashStr = hex.encode(hash);
+    return '$hashStr.errorsInLibrary';
+  }
+
+  /**
    * Return a source representing the URI that results from resolving the given
    * (possibly relative) [containedUri] against the URI associated with the
    * [containingSource], whether or not the resulting source exists, or `null`
@@ -354,11 +572,13 @@
    * Write the content based information about the given [source].
    */
   void _writeCacheSourceContent(Source source, CacheSourceContentBuilder b) {
-    String key = _getCacheSourceContentKey(source);
-    List<int> bytes = b.toBuffer();
-    storage.put(key, bytes);
-    // Put into the cache to avoid reading it later.
-    _sourceContentMap[source] = new CacheSourceContent.fromBuffer(bytes);
+    if (!_sourceContentMap.containsKey(source)) {
+      String key = _getCacheSourceContentKey(source);
+      List<int> bytes = b.toBuffer();
+      storage.put(key, bytes);
+      // Put into the cache to avoid reading it later.
+      _sourceContentMap[source] = new CacheSourceContent.fromBuffer(bytes);
+    }
   }
 
   /**
@@ -368,10 +588,6 @@
   void _writeCacheSourceContents(LibraryElement library,
       [Set<LibraryElement> writtenLibraries]) {
     Source librarySource = library.source;
-    // Do nothing if already cached.
-    if (_sourceContentMap.containsKey(librarySource)) {
-      return;
-    }
     // Stop recursion cycle.
     writtenLibraries ??= new Set<LibraryElement>();
     if (!writtenLibraries.add(library)) {
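
The suffix-first `comparePaths` added above orders paths by comparing from their ends, where file paths typically differ first; `_getLibraryClosure` uses it to sort closure sources deterministically. A small usage sketch with made-up paths, not part of the patch:

import 'package:analyzer/src/summary/incremental_cache.dart';

void main() {
  // Comparison starts at the ends of the strings, so the shared
  // '/lib/util.dart' suffix is walked first and the tie is broken at 'a'/'b'.
  List<String> paths = ['/pkg/b/lib/util.dart', '/pkg/a/lib/util.dart'];
  paths.sort(comparePaths);
  assert(paths.first == '/pkg/a/lib/util.dart');

  // When one string is a suffix of the other, the shorter one orders first.
  assert(comparePaths('b.dart', 'lib/b.dart') < 0);
}
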
diff --git a/pkg/analyzer/lib/src/summary/link.dart b/pkg/analyzer/lib/src/summary/link.dart
index 77c86f2..57dccf1 100644
--- a/pkg/analyzer/lib/src/summary/link.dart
+++ b/pkg/analyzer/lib/src/summary/link.dart
@@ -242,6 +242,13 @@
   throw new UnimplementedError('${type.runtimeType}');
 }
 
+DartType _dynamicIfNull(DartType type) {
+  if (type == null || type.isBottom || type.isVoid) {
+    return DynamicTypeImpl.instance;
+  }
+  return type;
+}
+
 /**
  * Create an [UnlinkedParam] representing the given [parameter], which should be
  * a parameter of a synthetic function type (e.g. one produced during type
@@ -334,7 +341,7 @@
  */
 abstract class ClassElementForLink extends Object
     with ReferenceableElementForLink
-    implements ClassElementImpl {
+    implements AbstractClassElementImpl {
   Map<String, ReferenceableElementForLink> _containedNames;
 
   @override
@@ -381,9 +388,6 @@
   String get name;
 
   @override
-  ResynthesizerContext get resynthesizerContext => enclosingElement;
-
-  @override
   ConstructorElementForLink get unnamedConstructor;
 
   @override
@@ -420,7 +424,8 @@
  * linking.
  */
 class ClassElementForLink_Class extends ClassElementForLink
-    with TypeParameterizedElementMixin {
+    with TypeParameterizedElementMixin
+    implements ClassElementImpl {
   /**
    * The unlinked representation of the class in the summary.
    */
@@ -668,7 +673,8 @@
  * Element representing an enum resynthesized from a summary during
  * linking.
  */
-class ClassElementForLink_Enum extends ClassElementForLink {
+class ClassElementForLink_Enum extends ClassElementForLink
+    implements EnumElementImpl {
   /**
    * The unlinked representation of the enum in the summary.
    */
@@ -1491,10 +1497,20 @@
       return;
     }
     int refPtr = 0;
+    int intPtr = 0;
     for (UnlinkedConstOperation operation in unlinkedConst.operations) {
       switch (operation) {
+        case UnlinkedConstOperation.pushInt:
+          intPtr++;
+          break;
+        case UnlinkedConstOperation.pushLongInt:
+          int numInts = unlinkedConst.ints[intPtr++];
+          intPtr += numInts;
+          break;
+        case UnlinkedConstOperation.concatenate:
+          intPtr++;
+          break;
         case UnlinkedConstOperation.pushReference:
-        case UnlinkedConstOperation.invokeMethodRef:
           EntityRef ref = unlinkedConst.references[refPtr++];
           ConstVariableNode variable =
               compilationUnit.resolveRef(ref.reference).asConstVariable;
@@ -1502,11 +1518,36 @@
             dependencies.add(variable);
           }
           break;
+        case UnlinkedConstOperation.makeUntypedList:
+        case UnlinkedConstOperation.makeUntypedMap:
+          intPtr++;
+          break;
+        case UnlinkedConstOperation.assignToRef:
+          refPtr++;
+          break;
+        case UnlinkedConstOperation.invokeMethodRef:
+          EntityRef ref = unlinkedConst.references[refPtr++];
+          ConstVariableNode variable =
+              compilationUnit.resolveRef(ref.reference).asConstVariable;
+          if (variable != null) {
+            dependencies.add(variable);
+          }
+          intPtr += 2;
+          int numTypeArguments = unlinkedConst.ints[intPtr++];
+          refPtr += numTypeArguments;
+          break;
+        case UnlinkedConstOperation.invokeMethod:
+          intPtr += 2;
+          int numTypeArguments = unlinkedConst.ints[intPtr++];
+          refPtr += numTypeArguments;
+          break;
         case UnlinkedConstOperation.makeTypedList:
           refPtr++;
+          intPtr++;
           break;
         case UnlinkedConstOperation.makeTypedMap:
           refPtr += 2;
+          intPtr++;
           break;
         case UnlinkedConstOperation.invokeConstructor:
           EntityRef ref = unlinkedConst.references[refPtr++];
@@ -1515,12 +1556,21 @@
           if (element?._constNode != null) {
             dependencies.add(element._constNode);
           }
+          intPtr += 2;
+          break;
+        case UnlinkedConstOperation.typeCast:
+        case UnlinkedConstOperation.typeCheck:
+          refPtr++;
+          break;
+        case UnlinkedConstOperation.pushLocalFunctionReference:
+          intPtr += 2;
           break;
         default:
           break;
       }
     }
     assert(refPtr == unlinkedConst.references.length);
+    assert(intPtr == unlinkedConst.ints.length);
   }
 }
 
@@ -1972,13 +2022,12 @@
 }
 
 class ExprTypeComputer {
-  VariableElementForLink variable;
-  FunctionElementForLink_Initializer initializer;
-  CompilationUnitElementForLink unit;
-  LibraryElementForLink library;
-  Linker linker;
-  TypeProvider typeProvider;
-  UnlinkedConst unlinkedConst;
+  final FunctionElementForLink_Local function;
+  final CompilationUnitElementForLink unit;
+  final LibraryElementForLink library;
+  final Linker linker;
+  final TypeProvider typeProvider;
+  final UnlinkedConst unlinkedConst;
 
   final List<DartType> stack = <DartType>[];
   int intPtr = 0;
@@ -1986,17 +2035,25 @@
   int strPtr = 0;
   int assignmentOperatorPtr = 0;
 
-  ExprTypeComputer(VariableElementForLink variableElement) {
-    this.variable = variableElement;
-    initializer = variableElement.initializer;
-    unit = variableElement.compilationUnit;
-    library = unit.enclosingElement;
-    linker = library._linker;
-    typeProvider = linker.typeProvider;
-    unlinkedConst = variableElement.unlinkedVariable.initializer?.bodyExpr;
+  factory ExprTypeComputer(FunctionElementForLink_Local functionElement) {
+    CompilationUnitElementForLink unit = functionElement.compilationUnit;
+    LibraryElementForLink library = unit.enclosingElement;
+    Linker linker = library._linker;
+    TypeProvider typeProvider = linker.typeProvider;
+    UnlinkedConst unlinkedConst = functionElement._unlinkedExecutable.bodyExpr;
+    return new ExprTypeComputer._(
+        functionElement, unit, library, linker, typeProvider, unlinkedConst);
   }
 
+  ExprTypeComputer._(this.function, this.unit, this.library, this.linker,
+      this.typeProvider, this.unlinkedConst);
+
   DartType compute() {
+    if (unlinkedConst == null) {
+      // No function body was stored for this function, so we can't infer its
+      // return type.  Assume `dynamic`.
+      return DynamicTypeImpl.instance;
+    }
     // Perform RPN evaluation of the constant, using a stack of inferred types.
     for (UnlinkedConstOperation operation in unlinkedConst.operations) {
       switch (operation) {
@@ -2158,7 +2215,10 @@
         case UnlinkedConstOperation.pushLocalFunctionReference:
           int popCount = _getNextInt();
           assert(popCount == 0); // TODO(paulberry): handle the nonzero case.
-          stack.add(initializer.functions[_getNextInt()].type);
+          stack.add(function.functions[_getNextInt()].type);
+          break;
+        case UnlinkedConstOperation.pushParameter:
+          stack.add(_findParameterType(_getNextString()));
           break;
         default:
           // TODO(paulberry): implement.
@@ -2170,7 +2230,7 @@
     assert(strPtr == unlinkedConst.strings.length);
     assert(assignmentOperatorPtr == unlinkedConst.assignmentOperators.length);
     assert(stack.length == 1);
-    return _dynamicIfNull(stack[0]);
+    return stack[0];
   }
 
   void _computeBinaryExpressionType(TokenType operator) {
@@ -2315,7 +2375,7 @@
         // Type argument explicitly specified.
         if (i < ref.typeArguments.length) {
           return unit.resolveTypeRef(
-              ref.typeArguments[i], variable._typeParameterContext);
+              ref.typeArguments[i], function.typeParameterContext);
         } else {
           return null;
         }
@@ -2334,14 +2394,21 @@
     // TODO(scheglov) if we pushed target and method name first, we might be
     // able to move work with arguments in _inferExecutableType()
     String methodName = _getNextString();
+    List<DartType> typeArguments = _getTypeArguments();
     DartType target = stack.removeLast();
     stack.add(() {
       if (target is InterfaceType) {
         MethodElement method =
             target.lookUpInheritedMethod(methodName, library: library);
         FunctionType rawType = method?.type;
-        FunctionType inferredType = _inferExecutableType(rawType, numNamed,
-            numPositional, namedArgNames, namedArgTypeList, positionalArgTypes);
+        FunctionType inferredType = _inferExecutableType(
+            rawType,
+            numNamed,
+            numPositional,
+            namedArgNames,
+            namedArgTypeList,
+            positionalArgTypes,
+            typeArguments);
         if (inferredType != null) {
           return inferredType.returnType;
         }
@@ -2358,11 +2425,18 @@
     List<DartType> positionalArgTypes = _popList(numPositional);
     EntityRef ref = _getNextRef();
     ReferenceableElementForLink element = unit.resolveRef(ref.reference);
+    List<DartType> typeArguments = _getTypeArguments();
     stack.add(() {
       DartType rawType = element.asStaticType;
       if (rawType is FunctionType) {
-        FunctionType inferredType = _inferExecutableType(rawType, numNamed,
-            numPositional, namedArgNames, namedArgTypeList, positionalArgTypes);
+        FunctionType inferredType = _inferExecutableType(
+            rawType,
+            numNamed,
+            numPositional,
+            namedArgNames,
+            namedArgTypeList,
+            positionalArgTypes,
+            typeArguments);
         if (inferredType != null) {
           return inferredType.returnType;
         }
@@ -2428,6 +2502,29 @@
     }
   }
 
+  /**
+   * Find the parameter in scope called [parameterName] and return its type.
+   */
+  DartType _findParameterType(String parameterName) {
+    FunctionElementForLink_Local f = this.function;
+    while (true) {
+      for (ParameterElement parameter in f.parameters) {
+        if (parameter.name == parameterName) {
+          return parameter.type;
+        }
+      }
+      Element parent = f.enclosingElement;
+      if (parent is FunctionElementForLink_Local) {
+        f = parent;
+      } else {
+        // Parameter not found.  This should never happen in a well-formed
+        // summary.
+        assert(false);
+        return DynamicTypeImpl.instance;
+      }
+    }
+  }
+
   int _getNextInt() {
     return unlinkedConst.ints[intPtr++];
   }
@@ -2448,7 +2545,7 @@
 
   DartType _getNextTypeRef() {
     EntityRef ref = _getNextRef();
-    return unit.resolveTypeRef(ref, variable._typeParameterContext);
+    return unit.resolveTypeRef(ref, function.typeParameterContext);
   }
 
   /**
@@ -2464,16 +2561,33 @@
         : DynamicTypeImpl.instance;
   }
 
+  List<DartType> _getTypeArguments() {
+    int numTypeArguments = _getNextInt();
+    List<DartType> typeArguments = new List<DartType>(numTypeArguments);
+    for (int i = 0; i < numTypeArguments; i++) {
+      typeArguments[i] = _getNextTypeRef();
+    }
+    return typeArguments;
+  }
+
   FunctionType _inferExecutableType(
       FunctionType rawMethodType,
       int numNamed,
       int numPositional,
       List<String> namedArgNames,
       List<DartType> namedArgTypeList,
-      List<DartType> positionalArgTypes) {
+      List<DartType> positionalArgTypes,
+      List<DartType> typeArguments) {
     TypeSystem ts = linker.typeSystem;
     if (rawMethodType != null) {
-      if (rawMethodType.typeFormals.isNotEmpty && ts is StrongTypeSystemImpl) {
+      if (rawMethodType.typeFormals.isNotEmpty && typeArguments.isNotEmpty) {
+        Element methodElement = rawMethodType.element;
+        if (methodElement is TypeParameterizedElement &&
+            methodElement.typeParameters.length == typeArguments.length) {
+          return rawMethodType.instantiate(typeArguments);
+        }
+      } else if (rawMethodType.typeFormals.isNotEmpty &&
+          ts is StrongTypeSystemImpl) {
         List<DartType> paramTypes = <DartType>[];
         List<DartType> argTypes = <DartType>[];
         // Add positional parameter and argument types.
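
With the extra `typeArguments` parameter, `_inferExecutableType` now has two paths for a generic callee: when the call site supplied explicit type arguments whose count matches the callee's type parameters, the raw type is instantiated directly; only otherwise (in strong mode) are the type arguments inferred from the argument types. A rough, purely illustrative sketch of that decision, with strings standing in for `FunctionType`/`DartType`:

    // Hypothetical sketch; the real method returns an instantiated
    // FunctionType rather than a descriptive label.
    String instantiateOrInfer(
        int typeFormalCount, List<String> explicitTypeArguments, bool strongMode) {
      bool hasExplicitArgs =
          explicitTypeArguments != null && explicitTypeArguments.isNotEmpty;
      if (typeFormalCount > 0 && hasExplicitArgs) {
        if (explicitTypeArguments.length == typeFormalCount) {
          return 'instantiate with <${explicitTypeArguments.join(', ')}>';
        }
        return 'arity mismatch: keep the raw, uninstantiated type';
      }
      if (typeFormalCount > 0 && strongMode) {
        return 'infer type arguments from parameter and argument types';
      }
      return 'not generic: use the raw type as-is';
    }
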
@@ -2586,13 +2700,7 @@
       case UnlinkedExprAssignOperator.postfixDecrement:
         return TokenType.MINUS_MINUS;
     }
-  }
-
-  static DartType _dynamicIfNull(DartType type) {
-    if (type == null || type.isBottom || type.isVoid) {
-      return DynamicTypeImpl.instance;
-    }
-    return type;
+    return null;
   }
 }
 
@@ -2650,6 +2758,7 @@
           unlinkedVariable.inferredTypeSlot,
           isStatic ? inferredType : _inferredInstanceType,
           _typeParameterContext);
+      initializer?.link(compilationUnit);
     }
   }
 
@@ -2758,18 +2867,33 @@
  * Element representing the initializer expression of a variable.
  */
 class FunctionElementForLink_Initializer extends Object
-    with ReferenceableElementForLink
+    with ReferenceableElementForLink, TypeParameterizedElementMixin
     implements FunctionElementForLink_Local {
   /**
    * The variable for which this element is the initializer.
    */
   final VariableElementForLink _variable;
 
+  /**
+   * The type inference node for this function, or `null` if it hasn't been
+   * computed yet.
+   */
+  TypeInferenceNode _typeInferenceNode;
+
   List<FunctionElementForLink_Local_NonSynthetic> _functions;
+  DartType _inferredReturnType;
 
   FunctionElementForLink_Initializer(this._variable);
 
   @override
+  TypeInferenceNode get asTypeInferenceNode =>
+      _typeInferenceNode ??= new TypeInferenceNode(this);
+
+  @override
+  CompilationUnitElementForLink get compilationUnit =>
+      _variable.compilationUnit;
+
+  @override
   VariableElementForLink get enclosingElement => _variable;
 
   TypeParameterizedElementMixin get enclosingTypeParameterContext =>
@@ -2778,6 +2902,9 @@
           : null;
 
   @override
+  CompilationUnitElementForLink get enclosingUnit => _variable.compilationUnit;
+
+  @override
   List<FunctionElementForLink_Local_NonSynthetic> get functions =>
       _functions ??= _variable.unlinkedVariable.initializer.localFunctions
           .map((UnlinkedExecutable ex) =>
@@ -2807,10 +2934,17 @@
   }
 
   @override
-  int get typeParameterNestingLevel =>
-      enclosingTypeParameterContext?.typeParameterNestingLevel ?? 0;
+  TypeParameterizedElementMixin get typeParameterContext => this;
 
-  List<TypeParameterElement> get typeParameters => const [];
+  @override
+  List<UnlinkedTypeParam> get unlinkedTypeParams => const [];
+
+  @override
+  bool get _hasTypeBeenInferred => _inferredReturnType != null;
+
+  @override
+  UnlinkedExecutable get _unlinkedExecutable =>
+      _variable.unlinkedVariable.initializer;
 
   @override
   FunctionElementForLink_Local getLocalFunction(int index) {
@@ -2818,8 +2952,27 @@
     return index < functions.length ? functions[index] : null;
   }
 
+  /**
+   * Store the results of type inference for this initializer in
+   * [compilationUnit].
+   */
+  void link(CompilationUnitElementInBuildUnit compilationUnit) {
+    compilationUnit._storeLinkedType(_unlinkedExecutable.inferredReturnTypeSlot,
+        _inferredReturnType, typeParameterContext);
+    for (FunctionElementForLink_Local_NonSynthetic function in functions) {
+      function.link(compilationUnit);
+    }
+  }
+
   @override
   noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
+
+  @override
+  void _setInferredType(DartType type) {
+    assert(!_hasTypeBeenInferred);
+    _inferredReturnType = type;
+    _variable._inferredType = _dynamicIfNull(type);
+  }
 }
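
`_setInferredType` stores the raw inferred type on the initializer but runs it through `_dynamicIfNull` before handing it to the variable, so a missing, bottom, or `void` result never becomes the variable's inferred type. A small sketch of that convention, with strings standing in for `DartType`:

    // Strings stand in for DartType; the analyzer's helper performs the same
    // collapse using isBottom/isVoid checks on real types.
    String dynamicIfNull(String type) {
      if (type == null || type == 'bottom' || type == 'void') {
        return 'dynamic';
      }
      return type;
    }

    // Example: an initializer whose body yields no usable type still gives
    // the enclosing variable an inferred type of `dynamic`.
    String inferredVariableType(String initializerReturnType) =>
        dynamicIfNull(initializerReturnType);
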
 
 /**
@@ -2829,7 +2982,18 @@
     implements
         ExecutableElementForLink,
         FunctionElementImpl,
-        ReferenceableElementForLink {}
+        ReferenceableElementForLink {
+  /**
+   * Indicates whether type inference has completed for this function.
+   */
+  bool get _hasTypeBeenInferred;
+
+  /**
+   * Stores the given [type] as the inferred return type for this function.
+   * Should only be called if [_hasTypeBeenInferred] is `false`.
+   */
+  void _setInferredType(DartType type);
+}
 
 /**
  * Element representing a local function (possibly a closure) inside another
@@ -2841,6 +3005,14 @@
   @override
   final ExecutableElementForLink enclosingElement;
 
+  List<FunctionElementForLink_Local_NonSynthetic> _functions;
+
+  /**
+   * The type inference node for this function, or `null` if it hasn't been
+   * computed yet.
+   */
+  TypeInferenceNode _typeInferenceNode;
+
   FunctionElementForLink_Local_NonSynthetic(
       CompilationUnitElementForLink compilationUnit,
       this.enclosingElement,
@@ -2848,10 +3020,25 @@
       : super(compilationUnit, unlinkedExecutable);
 
   @override
+  TypeInferenceNode get asTypeInferenceNode =>
+      _typeInferenceNode ??= new TypeInferenceNode(this);
+
+  @override
   TypeParameterizedElementMixin get enclosingTypeParameterContext =>
       enclosingElement;
 
   @override
+  List<FunctionElementForLink_Local_NonSynthetic> get functions =>
+      _functions ??= _unlinkedExecutable.localFunctions
+          .map((UnlinkedExecutable ex) =>
+              new FunctionElementForLink_Local_NonSynthetic(
+                  compilationUnit, this, ex))
+          .toList();
+
+  @override
+  bool get _hasTypeBeenInferred => _inferredReturnType != null;
+
+  @override
   DartType buildType(
       DartType getTypeArgument(int i), List<int> implicitFunctionTypeIndices) {
     assert(implicitFunctionTypeIndices.isEmpty);
@@ -2860,12 +3047,30 @@
 
   @override
   FunctionElementForLink_Local getLocalFunction(int index) {
-    // TODO(paulberry): implement.
-    throw new UnimplementedError();
+    List<FunctionElementForLink_Local_NonSynthetic> functions = this.functions;
+    return index < functions.length ? functions[index] : null;
+  }
+
+  /**
+   * Store the results of type inference for this function in [compilationUnit].
+   */
+  void link(CompilationUnitElementInBuildUnit compilationUnit) {
+    compilationUnit._storeLinkedType(
+        _unlinkedExecutable.inferredReturnTypeSlot, inferredReturnType, this);
+    for (FunctionElementForLink_Local_NonSynthetic function in functions) {
+      function.link(compilationUnit);
+    }
   }
 
   @override
   noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
+
+  @override
+  void _setInferredType(DartType type) {
+    // TODO(paulberry): store the inferred return type in the summary.
+    assert(!_hasTypeBeenInferred);
+    _inferredReturnType = _dynamicIfNull(type);
+  }
 }
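
Both `link` implementations follow the same shape: store this function's inferred return type in the compilation unit, then recurse into the local functions so that every nested closure is linked as well. A compact sketch of that walk, with a hypothetical `Fn` class instead of the analyzer's element model:

    class Fn {
      final String name;
      final String inferredReturnType;
      final List<Fn> localFunctions;
      Fn(this.name, this.inferredReturnType, this.localFunctions);

      // Record this function's result, then descend into nested functions.
      void link(Map<String, String> linkedTypes) {
        linkedTypes[name] = inferredReturnType;
        for (Fn f in localFunctions) {
          f.link(linkedTypes);
        }
      }
    }

    void main() {
      Fn inner = new Fn('inner', 'int', <Fn>[]);
      Fn outer = new Fn('outer', 'void', <Fn>[inner]);
      Map<String, String> linked = <String, String>{};
      outer.link(linked); // {outer: void, inner: int}
    }
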
 
 /**
@@ -3713,6 +3918,7 @@
       case UnlinkedParamKind.named:
         return ParameterKind.NAMED;
     }
+    return null;
   }
 
   @override
@@ -4290,6 +4496,7 @@
         compilationUnit._storeLinkedType(
             unlinkedVariable.inferredTypeSlot, inferredType, null);
       }
+      initializer?.link(compilationUnit);
     }
   }
 }
@@ -4319,32 +4526,43 @@
  */
 class TypeInferenceNode extends Node<TypeInferenceNode> {
   /**
-   * The [FieldElement] or [TopLevelVariableElement] to which this
-   * node refers.
+   * The [FunctionElementForLink_Local] to which this node refers.
    */
-  final VariableElementForLink variableElement;
+  final FunctionElementForLink_Local functionElement;
 
-  TypeInferenceNode(this.variableElement);
+  TypeInferenceNode(this.functionElement);
 
   @override
-  bool get isEvaluated => variableElement._inferredType != null;
+  bool get isEvaluated => functionElement._hasTypeBeenInferred;
 
   /**
-   * Collect the type inference dependencies in [unlinkedConst] (which should be
-   * interpreted relative to [compilationUnit]) and store them in
+   * Collect the type inference dependencies in [unlinkedExecutable] (which
+   * should be interpreted relative to [compilationUnit]) and store them in
    * [dependencies].
    */
   void collectDependencies(
       List<TypeInferenceNode> dependencies,
-      UnlinkedConst unlinkedConst,
+      UnlinkedExecutable unlinkedExecutable,
       CompilationUnitElementForLink compilationUnit) {
+    UnlinkedConst unlinkedConst = unlinkedExecutable?.bodyExpr;
     if (unlinkedConst == null) {
       return;
     }
     int refPtr = 0;
+    int intPtr = 0;
 
     for (UnlinkedConstOperation operation in unlinkedConst.operations) {
       switch (operation) {
+        case UnlinkedConstOperation.pushInt:
+          intPtr++;
+          break;
+        case UnlinkedConstOperation.pushLongInt:
+          int numInts = unlinkedConst.ints[intPtr++];
+          intPtr += numInts;
+          break;
+        case UnlinkedConstOperation.concatenate:
+          intPtr++;
+          break;
         case UnlinkedConstOperation.pushReference:
           EntityRef ref = unlinkedConst.references[refPtr++];
           // TODO(paulberry): cache these resolved references for
@@ -4355,12 +4573,21 @@
             dependencies.add(dependency);
           }
           break;
-        case UnlinkedConstOperation.makeTypedList:
         case UnlinkedConstOperation.invokeConstructor:
           refPtr++;
+          intPtr += 2;
+          break;
+        case UnlinkedConstOperation.makeUntypedList:
+        case UnlinkedConstOperation.makeUntypedMap:
+          intPtr++;
+          break;
+        case UnlinkedConstOperation.makeTypedList:
+          refPtr++;
+          intPtr++;
           break;
         case UnlinkedConstOperation.makeTypedMap:
           refPtr += 2;
+          intPtr++;
           break;
         case UnlinkedConstOperation.assignToRef:
           // TODO(paulberry): if this reference refers to a variable, should it
@@ -4371,39 +4598,53 @@
           // TODO(paulberry): if this reference refers to a variable, should it
           // be considered a type inference dependency?
           refPtr++;
+          intPtr += 2;
+          int numTypeArguments = unlinkedConst.ints[intPtr++];
+          refPtr += numTypeArguments;
+          break;
+        case UnlinkedConstOperation.invokeMethod:
+          intPtr += 2;
+          int numTypeArguments = unlinkedConst.ints[intPtr++];
+          refPtr += numTypeArguments;
           break;
         case UnlinkedConstOperation.typeCast:
         case UnlinkedConstOperation.typeCheck:
           refPtr++;
           break;
+        case UnlinkedConstOperation.pushLocalFunctionReference:
+          int popCount = unlinkedConst.ints[intPtr++];
+          assert(popCount == 0); // TODO(paulberry): handle the nonzero case.
+          dependencies.add(functionElement
+              .getLocalFunction(unlinkedConst.ints[intPtr++])
+              .asTypeInferenceNode);
+          break;
         default:
           break;
       }
     }
     assert(refPtr == unlinkedConst.references.length);
+    assert(intPtr == unlinkedConst.ints.length);
   }
 
   @override
   List<TypeInferenceNode> computeDependencies() {
     List<TypeInferenceNode> dependencies = <TypeInferenceNode>[];
-    collectDependencies(
-        dependencies,
-        variableElement.unlinkedVariable.initializer?.bodyExpr,
-        variableElement.compilationUnit);
+    collectDependencies(dependencies, functionElement._unlinkedExecutable,
+        functionElement.compilationUnit);
     return dependencies;
   }
 
   void evaluate(bool inCycle) {
     if (inCycle) {
-      variableElement._inferredType = DynamicTypeImpl.instance;
+      functionElement._setInferredType(DynamicTypeImpl.instance);
     } else {
-      variableElement._inferredType =
-          new ExprTypeComputer(variableElement).compute();
+      functionElement
+          ._setInferredType(new ExprTypeComputer(functionElement).compute());
     }
   }
 
   @override
-  String toString() => 'TypeInferenceNode($variableElement)';
+  String toString() => 'TypeInferenceNode($functionElement)';
 }
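
The `collectDependencies` walk above has to keep its `refPtr` and `intPtr` cursors in lockstep with the operation stream: every opcode that carries int or reference operands must advance the matching pointer by exactly the amount it consumes, which is what the trailing asserts verify. An illustrative miniature of that bookkeeping (the opcodes and operand layout here are invented, not the real `UnlinkedConstOperation` encoding):

    enum Op { pushInt, pushReference, makeList }

    int countReferenceOperands(List<Op> ops, List<int> ints, List<int> refs) {
      int intPtr = 0;
      int refPtr = 0;
      for (Op op in ops) {
        switch (op) {
          case Op.pushInt:
            intPtr++; // one int operand
            break;
          case Op.pushReference:
            refPtr++; // one reference operand
            break;
          case Op.makeList:
            intPtr++; // the element count
            break;
        }
      }
      // If any case forgot to advance a pointer, these would fire.
      assert(intPtr == ints.length);
      assert(refPtr == refs.length);
      return refPtr;
    }
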
 
 class TypeProviderForLink implements TypeProvider {
@@ -4606,7 +4847,7 @@
         unlinkedVariable.initializer?.bodyExpr != null) {
       _constNode = new ConstVariableNode(this);
       if (unlinkedVariable.type == null) {
-        _typeInferenceNode = new TypeInferenceNode(this);
+        _typeInferenceNode = initializer.asTypeInferenceNode;
       }
     }
   }
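
With this change a variable no longer creates its own `TypeInferenceNode`; it asks the initializer for one, and `asTypeInferenceNode` memoizes the node with `??=` so the variable and any other client share a single instance in the dependency graph. A tiny sketch of that caching:

    class TypeInferenceNode {}

    class Initializer {
      TypeInferenceNode _node;

      TypeInferenceNode get asTypeInferenceNode =>
          _node ??= new TypeInferenceNode();
    }

    void main() {
      Initializer initializer = new Initializer();
      // Repeated requests yield the same node instance.
      assert(identical(
          initializer.asTypeInferenceNode, initializer.asTypeInferenceNode));
    }
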
diff --git a/pkg/analyzer/lib/src/summary/package_bundle_reader.dart b/pkg/analyzer/lib/src/summary/package_bundle_reader.dart
index 7c6ee32..0055866 100644
--- a/pkg/analyzer/lib/src/summary/package_bundle_reader.dart
+++ b/pkg/analyzer/lib/src/summary/package_bundle_reader.dart
@@ -11,7 +11,6 @@
 import 'package:analyzer/src/generated/source_io.dart';
 import 'package:analyzer/src/summary/idl.dart';
 import 'package:analyzer/src/summary/resynthesize.dart';
-import 'package:analyzer/src/summary/summary_sdk.dart';
 import 'package:analyzer/src/task/dart.dart';
 import 'package:analyzer/task/dart.dart';
 import 'package:analyzer/task/model.dart';
@@ -20,114 +19,18 @@
 /**
  * The [ResultProvider] that provides results from input package summaries.
  */
-class InputPackagesResultProvider extends ResultProvider {
-  final InternalAnalysisContext _context;
-
-  _FileBasedSummaryResynthesizer _resynthesizer;
-  SummaryResultProvider _sdkProvider;
-
-  InputPackagesResultProvider(this._context, SummaryDataStore dataStore) {
-    InternalAnalysisContext sdkContext = _context.sourceFactory.dartSdk.context;
-    _sdkProvider = sdkContext.resultProvider;
-    // Set the type provider to prevent the context from computing it.
-    _context.typeProvider = sdkContext.typeProvider;
-    // Create a chained resynthesizer.
-    _resynthesizer = new _FileBasedSummaryResynthesizer(
-        _sdkProvider.resynthesizer,
-        _context,
-        _context.typeProvider,
-        _context.sourceFactory,
-        _context.analysisOptions.strongMode,
-        dataStore);
+class InputPackagesResultProvider extends ResynthesizerResultProvider {
+  InputPackagesResultProvider(
+      InternalAnalysisContext context, SummaryDataStore dataStore)
+      : super(context, dataStore) {
+    AnalysisContext sdkContext = context.sourceFactory.dartSdk.context;
+    createResynthesizer(sdkContext, sdkContext.typeProvider);
   }
 
   @override
-  bool compute(CacheEntry entry, ResultDescriptor result) {
-    if (_sdkProvider.compute(entry, result)) {
-      return true;
-    }
-    AnalysisTarget target = entry.target;
-    // Only library results are supported for now.
-    if (target is Source) {
-      Uri uri = target.uri;
-      // We know how to server results to input packages.
-      String uriString = uri.toString();
-      if (!_resynthesizer.hasLibrarySummary(uriString)) {
-        return false;
-      }
-      // Provide known results.
-      if (result == LIBRARY_ELEMENT1 ||
-          result == LIBRARY_ELEMENT2 ||
-          result == LIBRARY_ELEMENT3 ||
-          result == LIBRARY_ELEMENT4 ||
-          result == LIBRARY_ELEMENT5 ||
-          result == LIBRARY_ELEMENT6 ||
-          result == LIBRARY_ELEMENT7 ||
-          result == LIBRARY_ELEMENT8 ||
-          result == LIBRARY_ELEMENT9 ||
-          result == LIBRARY_ELEMENT ||
-          false) {
-        LibraryElement libraryElement =
-            _resynthesizer.getLibraryElement(uriString);
-        entry.setValue(result, libraryElement, TargetedResult.EMPTY_LIST);
-        return true;
-      } else if (result == READY_LIBRARY_ELEMENT2 ||
-          result == READY_LIBRARY_ELEMENT6 ||
-          result == READY_LIBRARY_ELEMENT7) {
-        entry.setValue(result, true, TargetedResult.EMPTY_LIST);
-        return true;
-      } else if (result == SOURCE_KIND) {
-        if (_resynthesizer._dataStore.linkedMap.containsKey(uriString)) {
-          entry.setValue(result, SourceKind.LIBRARY, TargetedResult.EMPTY_LIST);
-          return true;
-        }
-        if (_resynthesizer._dataStore.unlinkedMap.containsKey(uriString)) {
-          entry.setValue(result, SourceKind.PART, TargetedResult.EMPTY_LIST);
-          return true;
-        }
-        return false;
-      }
-    } else if (target is LibrarySpecificUnit) {
-      String uriString = target.library.uri.toString();
-      if (!_resynthesizer.hasLibrarySummary(uriString)) {
-        return false;
-      }
-      if (result == CREATED_RESOLVED_UNIT1 ||
-          result == CREATED_RESOLVED_UNIT2 ||
-          result == CREATED_RESOLVED_UNIT3 ||
-          result == CREATED_RESOLVED_UNIT4 ||
-          result == CREATED_RESOLVED_UNIT5 ||
-          result == CREATED_RESOLVED_UNIT6 ||
-          result == CREATED_RESOLVED_UNIT7 ||
-          result == CREATED_RESOLVED_UNIT8 ||
-          result == CREATED_RESOLVED_UNIT9 ||
-          result == CREATED_RESOLVED_UNIT10 ||
-          result == CREATED_RESOLVED_UNIT11 ||
-          result == CREATED_RESOLVED_UNIT12) {
-        entry.setValue(result, true, TargetedResult.EMPTY_LIST);
-        return true;
-      }
-      if (result == COMPILATION_UNIT_ELEMENT) {
-        String libraryUri = target.library.uri.toString();
-        String unitUri = target.unit.uri.toString();
-        CompilationUnitElement unit = _resynthesizer.getElement(
-            new ElementLocationImpl.con3(<String>[libraryUri, unitUri]));
-        if (unit != null) {
-          entry.setValue(result, unit, TargetedResult.EMPTY_LIST);
-          return true;
-        }
-      }
-    } else if (target is VariableElement) {
-      if (!_resynthesizer
-          .hasLibrarySummary(target.library.source.uri.toString())) {
-        return false;
-      }
-      if (result == PROPAGATED_VARIABLE || result == INFERRED_STATIC_VARIABLE) {
-        entry.setValue(result, target, TargetedResult.EMPTY_LIST);
-        return true;
-      }
-    }
-    return false;
+  bool hasResultsForSource(Source source) {
+    String uriString = source.uri.toString();
+    return resynthesizer.hasLibrarySummary(uriString);
   }
 }
 
@@ -200,8 +103,7 @@
   UriKind get uriKind => UriKind.PACKAGE_URI;
 
   @override
-  bool operator ==(Object object) =>
-      object is InSummarySource && object.uri == uri;
+  bool operator ==(Object object) => object is Source && object.uri == uri;
 
   @override
   bool exists() => true;
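
The `==` override now treats any `Source` with the same URI as equal to an `InSummarySource`, rather than requiring the other object to also be an `InSummarySource`. A sketch of URI-keyed source equality (with a hypothetical `UriSource` class), including the `hashCode` that has to stay consistent with it:

    class UriSource {
      final Uri uri;
      UriSource(this.uri);

      @override
      bool operator ==(Object other) => other is UriSource && other.uri == uri;

      // Equal sources must share a hash code, so derive it from the URI too.
      @override
      int get hashCode => uri.hashCode;
    }
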
@@ -211,6 +113,141 @@
 }
 
 /**
+ * The [ResultProvider] that provides results using summary resynthesizer.
+ */
+abstract class ResynthesizerResultProvider extends ResultProvider {
+  final InternalAnalysisContext context;
+  final SummaryDataStore _dataStore;
+
+  _FileBasedSummaryResynthesizer _resynthesizer;
+  ResynthesizerResultProvider _sdkProvider;
+
+  ResynthesizerResultProvider(this.context, this._dataStore);
+
+  SummaryResynthesizer get resynthesizer => _resynthesizer;
+
+  /**
+   * Add a new [bundle] to the resynthesizer.
+   */
+  void addBundle(String path, PackageBundle bundle) {
+    _dataStore.addBundle(path, bundle);
+  }
+
+  @override
+  bool compute(CacheEntry entry, ResultDescriptor result) {
+    if (_sdkProvider != null && _sdkProvider.compute(entry, result)) {
+      return true;
+    }
+    AnalysisTarget target = entry.target;
+    // Check whether there are results for the source.
+    if (!hasResultsForSource(target.librarySource ?? target.source)) {
+      return false;
+    }
+    // Constant expressions are always resolved in summaries.
+    if (result == CONSTANT_EXPRESSION_RESOLVED &&
+        target is ConstantEvaluationTarget) {
+      entry.setValue(result, true, TargetedResult.EMPTY_LIST);
+      return true;
+    }
+    // Provide results for Source.
+    if (target is Source) {
+      String uriString = target.uri.toString();
+      // Provide known results.
+      if (result == LIBRARY_ELEMENT1 ||
+          result == LIBRARY_ELEMENT2 ||
+          result == LIBRARY_ELEMENT3 ||
+          result == LIBRARY_ELEMENT4 ||
+          result == LIBRARY_ELEMENT5 ||
+          result == LIBRARY_ELEMENT6 ||
+          result == LIBRARY_ELEMENT7 ||
+          result == LIBRARY_ELEMENT8 ||
+          result == LIBRARY_ELEMENT9 ||
+          result == LIBRARY_ELEMENT) {
+        LibraryElement libraryElement =
+            resynthesizer.getLibraryElement(uriString);
+        entry.setValue(result, libraryElement, TargetedResult.EMPTY_LIST);
+        return true;
+      } else if (result == READY_LIBRARY_ELEMENT2 ||
+          result == READY_LIBRARY_ELEMENT6 ||
+          result == READY_LIBRARY_ELEMENT7) {
+        entry.setValue(result, true, TargetedResult.EMPTY_LIST);
+        return true;
+      } else if (result == SOURCE_KIND) {
+        if (_dataStore.linkedMap.containsKey(uriString)) {
+          entry.setValue(result, SourceKind.LIBRARY, TargetedResult.EMPTY_LIST);
+          return true;
+        }
+        if (_dataStore.unlinkedMap.containsKey(uriString)) {
+          entry.setValue(result, SourceKind.PART, TargetedResult.EMPTY_LIST);
+          return true;
+        }
+        return false;
+      }
+    } else if (target is LibrarySpecificUnit) {
+      if (result == CREATED_RESOLVED_UNIT1 ||
+          result == CREATED_RESOLVED_UNIT2 ||
+          result == CREATED_RESOLVED_UNIT3 ||
+          result == CREATED_RESOLVED_UNIT4 ||
+          result == CREATED_RESOLVED_UNIT5 ||
+          result == CREATED_RESOLVED_UNIT6 ||
+          result == CREATED_RESOLVED_UNIT7 ||
+          result == CREATED_RESOLVED_UNIT8 ||
+          result == CREATED_RESOLVED_UNIT9 ||
+          result == CREATED_RESOLVED_UNIT10 ||
+          result == CREATED_RESOLVED_UNIT11 ||
+          result == CREATED_RESOLVED_UNIT12) {
+        entry.setValue(result, true, TargetedResult.EMPTY_LIST);
+        return true;
+      }
+      if (result == COMPILATION_UNIT_ELEMENT) {
+        String libraryUri = target.library.uri.toString();
+        String unitUri = target.unit.uri.toString();
+        CompilationUnitElement unit = resynthesizer.getElement(
+            new ElementLocationImpl.con3(<String>[libraryUri, unitUri]));
+        if (unit != null) {
+          entry.setValue(result, unit, TargetedResult.EMPTY_LIST);
+          return true;
+        }
+      }
+    } else if (target is VariableElement) {
+      if (result == PROPAGATED_VARIABLE || result == INFERRED_STATIC_VARIABLE) {
+        entry.setValue(result, target, TargetedResult.EMPTY_LIST);
+        return true;
+      }
+    }
+    // Unknown target.
+    return false;
+  }
+
+  /**
+   * Create the [resynthesizer] instance.
+   *
+   * Subclasses must call this method in their constructors.
+   */
+  void createResynthesizer(
+      InternalAnalysisContext sdkContext, TypeProvider typeProvider) {
+    // Set the type provider to prevent the context from computing it.
+    context.typeProvider = typeProvider;
+    // Create a chained resynthesizer.
+    _sdkProvider = sdkContext?.resultProvider;
+    _resynthesizer = new _FileBasedSummaryResynthesizer(
+        _sdkProvider?.resynthesizer,
+        context,
+        typeProvider,
+        context.sourceFactory,
+        context.analysisOptions.strongMode,
+        _dataStore);
+  }
+
+  /**
+   * Return `true` if this result provider can provide a result for the
+   * given [source].  Implementations must ensure that [addBundle] has been
+   * invoked for every bundle required to provide results for the [source].
+   */
+  bool hasResultsForSource(Source source);
+}
+
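
`ResynthesizerResultProvider` is a template-method style base class: the shared `compute` logic lives in the base, and a concrete provider only decides which sources it covers via `hasResultsForSource`, after making sure the relevant bundles were added. A stripped-down sketch of that shape, with invented class names and a string URI in place of `Source`:

    abstract class SummaryBackedProvider {
      final Set<String> _bundledLibraryUris = new Set<String>();

      void addBundle(String libraryUri) {
        _bundledLibraryUris.add(libraryUri);
      }

      // Shared logic: bail out early unless the subclass claims this source.
      bool compute(String sourceUri) {
        if (!hasResultsForSource(sourceUri)) {
          return false;
        }
        // ...serve cached results here...
        return true;
      }

      bool hasResultsForSource(String sourceUri);
    }

    class PackageSummaryProvider extends SummaryBackedProvider {
      @override
      bool hasResultsForSource(String sourceUri) =>
          _bundledLibraryUris.contains(sourceUri);
    }
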
+/**
  * A [SummaryDataStore] is a container for the data extracted from a set of
  * summary package bundles.  It contains maps which can be used to find linked
  * and unlinked summaries by URI.
diff --git a/pkg/analyzer/lib/src/summary/resynthesize.dart b/pkg/analyzer/lib/src/summary/resynthesize.dart
index 298c20d..e929b67 100644
--- a/pkg/analyzer/lib/src/summary/resynthesize.dart
+++ b/pkg/analyzer/lib/src/summary/resynthesize.dart
@@ -14,13 +14,11 @@
 import 'package:analyzer/src/dart/element/handle.dart';
 import 'package:analyzer/src/dart/element/member.dart';
 import 'package:analyzer/src/dart/element/type.dart';
-import 'package:analyzer/src/generated/constant.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/resolver.dart';
 import 'package:analyzer/src/generated/source_io.dart';
 import 'package:analyzer/src/generated/testing/ast_factory.dart';
 import 'package:analyzer/src/generated/testing/token_factory.dart';
-import 'package:analyzer/src/generated/utilities_dart.dart';
 import 'package:analyzer/src/summary/idl.dart';
 
 /**
@@ -310,6 +308,19 @@
 
   _ConstExprBuilder(this.resynthesizer, this.context, this.uc);
 
+  /**
+   * Return the [ConstructorElement] enclosing [context].
+   */
+  ConstructorElement get _enclosingConstructor {
+    for (Element e = context; e != null; e = e.enclosingElement) {
+      if (e is ConstructorElement) {
+        return e;
+      }
+    }
+    throw new StateError(
+        'Unable to find the enclosing constructor of $context');
+  }
+
   Expression build() {
     if (!uc.isValidConst) {
       return AstFactory.identifier3(r'$$invalidConstExpr$$');
@@ -471,7 +482,7 @@
         case UnlinkedConstOperation.pushParameter:
           String name = uc.strings[stringPtr++];
           SimpleIdentifier identifier = AstFactory.identifier3(name);
-          identifier.staticElement = resynthesizer.currentConstructor.parameters
+          identifier.staticElement = _enclosingConstructor.parameters
               .firstWhere((parameter) => parameter.name == name,
                   orElse: () => throw new StateError(
                       'Unable to resolve constructor parameter: $name'));
@@ -575,8 +586,8 @@
    */
   TypeName _newTypeName() {
     EntityRef typeRef = uc.references[refPtr++];
-    DartType type = resynthesizer.buildType(
-        typeRef, resynthesizer._currentTypeParameterizedElement);
+    DartType type =
+        resynthesizer.buildType(typeRef, context?.typeParameterContext);
     return _buildTypeAst(type);
   }
 
@@ -622,7 +633,7 @@
       InterfaceType definingType = resynthesizer._createConstructorDefiningType(
           context?.typeParameterContext, info, ref.typeArguments);
       constructorElement =
-          resynthesizer._createConstructorElement(definingType, info);
+          resynthesizer._getConstructorForInfo(definingType, info);
       typeNode = _buildTypeAst(definingType);
     } else {
       if (info.enclosing != null) {
@@ -675,12 +686,21 @@
     EntityRef ref = uc.references[refPtr++];
     _ReferenceInfo info = resynthesizer.getReferenceInfo(ref.reference);
     Expression node = _buildIdentifierSequence(info);
+    TypeArgumentList typeArguments;
+    int numTypeArguments = uc.ints[intPtr++];
+    if (numTypeArguments > 0) {
+      List<TypeName> typeNames = new List<TypeName>(numTypeArguments);
+      for (int i = 0; i < numTypeArguments; i++) {
+        typeNames[i] = _newTypeName();
+      }
+      typeArguments = AstFactory.typeArgumentList(typeNames);
+    }
     if (node is SimpleIdentifier) {
       _push(new MethodInvocation(
           null,
           TokenFactory.tokenFromType(TokenType.PERIOD),
           node,
-          null,
+          typeArguments,
           AstFactory.argumentList(arguments)));
     } else {
       throw new UnimplementedError('For ${node?.runtimeType}: $node');
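
The branch above rebuilds an invocation AST that now carries explicit type arguments when the summary recorded a nonzero count, and leaves `typeArguments` as `null` (omitting the node entirely) otherwise. Purely as an illustration of what the reconstructed node corresponds to in source form (this is not the resynthesizer's API):

    String formatInvocation(
        String name, List<String> typeArguments, List<String> arguments) {
      String typePart = typeArguments == null || typeArguments.isEmpty
          ? ''
          : '<${typeArguments.join(', ')}>';
      return '$name$typePart(${arguments.join(', ')})';
    }

    void main() {
      print(formatInvocation('max', null, ['a', 'b']));    // max(a, b)
      print(formatInvocation('max', ['int'], ['a', 'b'])); // max<int>(a, b)
    }
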
@@ -729,184 +749,6 @@
 }
 
 /**
- * Temporary [TypeParameterizedElementMixin] implementation.
- *
- * TODO(scheglov) remove after moving resynthesize logic to Impl.
- */
-class _CurrentTypeParameterizedElement
-    implements TypeParameterizedElementMixin {
-  final _UnitResynthesizer unitResynthesizer;
-
-  _CurrentTypeParameterizedElement(this.unitResynthesizer);
-
-  @override
-  TypeParameterType getTypeParameterType(int index) {
-    return unitResynthesizer.getTypeParameterFromScope(index);
-  }
-
-  noSuchMethod(Invocation invocation) => super.noSuchMethod(invocation);
-}
-
-/**
- * A class element that has been resynthesized from a summary.  The actual
- * element won't be constructed until it is requested.  But properties
- * [context],  [displayName], [enclosingElement] and [name] can be used without
- * creating the actual element.  This allows to put these elements into
- * namespaces without creating actual elements until they are really needed.
- */
-class _DeferredClassElement extends ClassElementHandle {
-  final _UnitResynthesizer unitResynthesizer;
-  final CompilationUnitElement unitElement;
-  final UnlinkedClass serializedClass;
-
-  ClassElementImpl _actualElement;
-
-  /**
-   * We don't resynthesize executables of classes until they are requested.
-   * TODO(scheglov) Check whether we need separate flags for separate kinds.
-   */
-  bool _executablesResynthesized = false;
-
-  @override
-  final String name;
-
-  factory _DeferredClassElement(_UnitResynthesizer unitResynthesizer,
-      CompilationUnitElement unitElement, UnlinkedClass serializedClass) {
-    String name = serializedClass.name;
-    List<String> components =
-        unitResynthesizer.unit.location.components.toList();
-    components.add(name);
-    ElementLocationImpl location = new ElementLocationImpl.con3(components);
-    return new _DeferredClassElement._(
-        unitResynthesizer, unitElement, serializedClass, name, location);
-  }
-
-  _DeferredClassElement._(this.unitResynthesizer, this.unitElement,
-      this.serializedClass, this.name, ElementLocation location)
-      : super(null, location);
-
-  @override
-  List<PropertyAccessorElement> get accessors {
-    _ensureExecutables();
-    return actualElement.accessors;
-  }
-
-  @override
-  ClassElementImpl get actualElement {
-    if (_actualElement == null) {
-      _actualElement = unitResynthesizer.buildClassImpl(serializedClass, this);
-      _actualElement.enclosingElement = unitElement;
-    }
-    return _actualElement;
-  }
-
-  @override
-  List<ConstructorElement> get constructors {
-    _ensureExecutables();
-    return actualElement.constructors;
-  }
-
-  @override
-  AnalysisContext get context => unitElement.context;
-
-  @override
-  String get displayName => name;
-
-  @override
-  CompilationUnitElement get enclosingElement {
-    return unitElement;
-  }
-
-  @override
-  List<FieldElement> get fields {
-    _ensureExecutables();
-    return actualElement.fields;
-  }
-
-  @override
-  List<MethodElement> get methods {
-    _ensureExecutables();
-    return actualElement.methods;
-  }
-
-  @override
-  void ensureAccessorsReady() {
-    _ensureExecutables();
-  }
-
-  @override
-  void ensureActualElementComplete() {
-    _ensureExecutables();
-  }
-
-  @override
-  void ensureConstructorsReady() {
-    _ensureExecutables();
-  }
-
-  @override
-  void ensureMethodsReady() {
-    _ensureExecutables();
-  }
-
-  /**
-   * Ensure that we have [actualElement], and it has all executables.
-   */
-  void _ensureExecutables() {
-    if (!_executablesResynthesized) {
-      _executablesResynthesized = true;
-      unitResynthesizer.buildClassExecutables(actualElement, serializedClass);
-    }
-  }
-}
-
-/**
- * The constructor element that has been resynthesized from a summary.  The
- * actual element won't be constructed until it is requested.  But properties
- * [displayName], [enclosingElement] and [name] can be used without creating
- * the actual element.
- */
-class _DeferredConstructorElement extends ConstructorElementHandle {
-  /**
-   * The type defining this constructor element.  If [_isMember] is `false`,
-   * then the type parameters of [_definingType] are not guaranteed to be
-   * valid.
-   */
-  final InterfaceType _definingType;
-
-  /**
-   * The constructor name.
-   */
-  final String name;
-
-  factory _DeferredConstructorElement(InterfaceType definingType, String name) {
-    List<String> components = definingType.element.location.components.toList();
-    components.add(name);
-    ElementLocationImpl location = new ElementLocationImpl.con3(components);
-    return new _DeferredConstructorElement._(definingType, name, location);
-  }
-
-  _DeferredConstructorElement._(
-      this._definingType, this.name, ElementLocation location)
-      : super(null, location);
-
-  @override
-  ConstructorElement get actualElement =>
-      enclosingElement.getNamedConstructor(name);
-
-  @override
-  AnalysisContext get context => _definingType.element.context;
-
-  @override
-  String get displayName => name;
-
-  @override
-  ClassElement get enclosingElement {
-    return _definingType.element;
-  }
-}
-
-/**
  * Local function element representing the initializer for a variable that has
  * been resynthesized from a summary.  The actual element won't be constructed
  * until it is requested.  But properties [context] and [enclosingElement] can
@@ -1043,12 +885,6 @@
   LibraryElementImpl library;
 
   /**
-   * Classes which should have their supertype set to "object" once
-   * resynthesis is complete.  Only used if [isCoreLibrary] is `true`.
-   */
-  List<ClassElementImpl> delayedObjectSubclasses = <ClassElementImpl>[];
-
-  /**
    * Map of compilation unit elements that have been resynthesized so far.  The
    * key is the URI of the compilation unit.
    */
@@ -1114,6 +950,7 @@
         // and they always refer to defined top-level entities.
         throw new StateError('Unexpected export name kind: ${exportName.kind}');
     }
+    return null;
   }
 
   /**
@@ -1170,17 +1007,9 @@
     }
     library.parts = partResynthesizers.map((r) => r.unit).toList();
     // Populate units.
-    populateUnit(definingUnitResynthesizer);
+    rememberUriToUnit(definingUnitResynthesizer);
     for (_UnitResynthesizer partResynthesizer in partResynthesizers) {
-      populateUnit(partResynthesizer);
-    }
-    // Update delayed Object class references.
-    if (isCoreLibrary) {
-      ClassElement objectElement = library.getType('Object');
-      assert(objectElement != null);
-      for (ClassElementImpl classElement in delayedObjectSubclasses) {
-        classElement.supertype = objectElement.type;
-      }
+      rememberUriToUnit(partResynthesizer);
     }
     // Create the synthetic element for `loadLibrary`.
     // Until the client received dart:core and dart:async, we cannot do this,
@@ -1261,13 +1090,12 @@
   }
 
   /**
-   * Populate a [CompilationUnitElement] by deserializing all the elements
-   * contained in it.
+   * Remember the mapping from the unit's absolute URI to the resynthesized unit.
    */
-  void populateUnit(_UnitResynthesizer unitResynthesized) {
-    unitResynthesized.populateUnit();
-    String absoluteUri = unitResynthesized.unit.source.uri.toString();
-    resynthesizedUnits[absoluteUri] = unitResynthesized.unit;
+  void rememberUriToUnit(_UnitResynthesizer unitResynthesized) {
+    CompilationUnitElementImpl unit = unitResynthesized.unit;
+    String absoluteUri = unit.source.uri.toString();
+    resynthesizedUnits[absoluteUri] = unit;
   }
 }
 
@@ -1536,13 +1364,20 @@
   }
 
   @override
-  List<FunctionElementImpl> buildTopLevelFunctions() {
-    return _unitResynthesizer.buildTopLevelFunctions();
+  UnitExplicitTopLevelVariables buildTopLevelVariables() {
+    return _unitResynthesizer.buildUnitExplicitTopLevelVariables();
   }
 
   @override
-  UnitExplicitTopLevelVariables buildTopLevelVariables() {
-    return _unitResynthesizer.buildUnitExplicitTopLevelVariables();
+  bool isInConstCycle(int slot) {
+    return _unitResynthesizer.constCycles.contains(slot);
+  }
+
+  @override
+  ConstructorElement resolveConstructorRef(
+      TypeParameterizedElementMixin typeParameterContext, EntityRef entry) {
+    return _unitResynthesizer._getConstructorForEntry(
+        typeParameterContext, entry);
   }
 
   @override
@@ -1588,13 +1423,6 @@
   CompilationUnitElementImpl unit;
 
   /**
-   * [ElementHolder] into which resynthesized elements should be placed.  This
-   * object is recreated afresh for each unit in the library, and is used to
-   * populate the [CompilationUnitElement].
-   */
-  final ElementHolder unitHolder = new ElementHolder();
-
-  /**
    * Map from slot id to the corresponding [EntityRef] object for linked types
    * (i.e. propagated and inferred types).
    */
@@ -1606,39 +1434,6 @@
    */
   Set<int> constCycles;
 
-  /**
-   * The [ConstructorElementImpl] for the constructor currently being
-   * resynthesized.
-   */
-  ConstructorElementImpl currentConstructor;
-
-  /**
-   * Type parameters for the generic class, typedef, or executable currently
-   * being resynthesized, if any.  This is a list of lists; if multiple
-   * entities with type parameters are nested (e.g. a generic executable inside
-   * a generic class), then the zeroth element of [currentTypeParameters]
-   * contains the type parameters for the outermost nested entity, and further
-   * elements contain the type parameters for entities that are more deeply
-   * nested.  If we are not currently resynthesizing a class, typedef, or
-   * executable, then this is an empty list.
-   */
-  final List<List<TypeParameterElement>> currentTypeParameters =
-      <List<TypeParameterElement>>[];
-
-  /**
-   * If a class is currently being resynthesized, map from field name to the
-   * corresponding field element.  This is used when resynthesizing
-   * initializing formal parameters.
-   */
-  Map<String, FieldElementImpl> fields;
-
-  /**
-   * If a class is currently being resynthesized, map from constructor name to
-   * the corresponding constructor element.  This is used when resynthesizing
-   * constructor initializers.
-   */
-  Map<String, ConstructorElementImpl> constructors;
-
   int numLinkedReferences;
   int numUnlinkedReferences;
 
@@ -1654,11 +1449,6 @@
    */
   ResynthesizerContext _resynthesizerContext;
 
-  /**
-   * TODO(scheglov) clean up after moving resynthesize logic to Impl.
-   */
-  TypeParameterizedElementMixin _currentTypeParameterizedElement;
-
   _UnitResynthesizer(this.libraryResynthesizer, this.unlinkedUnit,
       this.linkedUnit, Source unitSource, UnlinkedPart unlinkedPart) {
     _resynthesizerContext = new _ResynthesizerContext(this);
@@ -1675,8 +1465,6 @@
     numLinkedReferences = linkedUnit.references.length;
     numUnlinkedReferences = unlinkedUnit.references.length;
     referenceInfos = new List<_ReferenceInfo>(numLinkedReferences);
-    _currentTypeParameterizedElement =
-        new _CurrentTypeParameterizedElement(this);
   }
 
   SummaryResynthesizer get summaryResynthesizer =>
@@ -1713,499 +1501,14 @@
   }
 
   /**
-   * Build the annotations for the given [element].
-   */
-  void buildAnnotations(
-      ElementImpl element, List<UnlinkedConst> serializedAnnotations) {
-    if (serializedAnnotations.isNotEmpty) {
-      element.metadata = serializedAnnotations
-          .map((a) => buildAnnotation(element, a))
-          .toList();
-    }
-  }
-
-  /**
-   * Resynthesize a [ClassElement] and place it in [unitHolder].
-   */
-  void buildClass(UnlinkedClass serializedClass) {
-    ClassElement classElement;
-    if (libraryResynthesizer.isCoreLibrary &&
-        serializedClass.supertype == null) {
-      classElement = buildClassImpl(serializedClass, null);
-      if (!serializedClass.hasNoSupertype) {
-        libraryResynthesizer.delayedObjectSubclasses.add(classElement);
-      }
-    } else {
-      classElement = new _DeferredClassElement(this, unit, serializedClass);
-    }
-    unitHolder.addType(classElement);
-  }
-
-  /**
-   * Fill the given [ClassElementImpl] with executable elements and fields.
-   */
-  void buildClassExecutables(
-      ClassElementImpl classElement, UnlinkedClass serializedClass) {
-    currentTypeParameters.add(classElement.typeParameters);
-    ElementHolder memberHolder = new ElementHolder();
-    fields = <String, FieldElementImpl>{};
-    for (UnlinkedVariable serializedVariable in serializedClass.fields) {
-      buildVariable(classElement, serializedVariable, memberHolder);
-    }
-    bool constructorFound = false;
-    constructors = <String, ConstructorElementImpl>{};
-    for (UnlinkedExecutable serializedExecutable
-        in serializedClass.executables) {
-      switch (serializedExecutable.kind) {
-        case UnlinkedExecutableKind.constructor:
-          constructorFound = true;
-          buildConstructor(serializedExecutable, classElement, memberHolder);
-          break;
-        case UnlinkedExecutableKind.functionOrMethod:
-        case UnlinkedExecutableKind.getter:
-        case UnlinkedExecutableKind.setter:
-          if (serializedExecutable.isStatic) {
-            currentTypeParameters.removeLast();
-          }
-          buildExecutable(serializedExecutable, classElement, memberHolder);
-          if (serializedExecutable.isStatic) {
-            currentTypeParameters.add(classElement.typeParameters);
-          }
-          break;
-      }
-    }
-    if (!serializedClass.isMixinApplication) {
-      if (!constructorFound) {
-        // Synthesize implicit constructors.
-        ConstructorElementImpl constructor = new ConstructorElementImpl('', -1);
-        constructor.synthetic = true;
-        constructor.returnType = classElement.type;
-        constructor.type = new FunctionTypeImpl.elementWithNameAndArgs(
-            constructor, null, getCurrentTypeArguments(), false);
-        memberHolder.addConstructor(constructor);
-      }
-      classElement.constructors = memberHolder.constructors;
-    }
-    classElement.accessors = memberHolder.accessors;
-    classElement.fields = memberHolder.fields;
-    classElement.methods = memberHolder.methods;
-    resolveConstructorInitializers(classElement);
-    currentTypeParameters.removeLast();
-    assert(currentTypeParameters.isEmpty);
-  }
-
-  /**
-   * Resynthesize a [ClassElementImpl].  If [handle] is not `null`, then
-   * executables are not resynthesized, and [InterfaceTypeImpl] is created
-   * around the [handle], so that executables are resynthesized lazily.
-   */
-  ClassElementImpl buildClassImpl(
-      UnlinkedClass serializedClass, ClassElementHandle handle) {
-    ClassElementImpl classElement =
-        new ClassElementImpl.forSerialized(serializedClass, unit);
-    classElement.hasBeenInferred = summaryResynthesizer.strongMode;
-    InterfaceTypeImpl correspondingType =
-        new InterfaceTypeImpl(handle ?? classElement);
-    if (serializedClass.supertype != null) {
-      classElement.supertype =
-          buildType(serializedClass.supertype, classElement);
-    } else if (!libraryResynthesizer.isCoreLibrary) {
-      classElement.supertype = typeProvider.objectType;
-    }
-    classElement.interfaces = serializedClass.interfaces
-        .map((EntityRef t) => buildType(t, classElement))
-        .toList();
-    classElement.mixins = serializedClass.mixins
-        .map((EntityRef t) => buildType(t, classElement))
-        .toList();
-    // TODO(scheglov) move to ClassElementImpl
-    correspondingType.typeArguments = classElement.typeParameterTypes;
-    classElement.type = correspondingType;
-    assert(currentTypeParameters.isEmpty);
-    // TODO(scheglov) Somehow Observatory shows too much time spent here
-    // during DDC run on the large codebase. I would expect only Object here.
-    if (handle == null) {
-      buildClassExecutables(classElement, serializedClass);
-    }
-    fields = null;
-    constructors = null;
-    return classElement;
-  }
-
-  void buildCodeRange(ElementImpl element, CodeRange codeRange) {
-    if (codeRange != null) {
-      element.setCodeRange(codeRange.offset, codeRange.length);
-    }
-  }
-
-  /**
-   * Resynthesize a [NamespaceCombinator].
-   */
-  NamespaceCombinator buildCombinator(UnlinkedCombinator serializedCombinator) {
-    if (serializedCombinator.shows.isNotEmpty) {
-      ShowElementCombinatorImpl combinator = new ShowElementCombinatorImpl();
-      // Note: we call toList() so that we don't retain a reference to the
-      // deserialized data structure.
-      combinator.shownNames = serializedCombinator.shows.toList();
-      combinator.offset = serializedCombinator.offset;
-      combinator.end = serializedCombinator.end;
-      return combinator;
-    } else {
-      HideElementCombinatorImpl combinator = new HideElementCombinatorImpl();
-      // Note: we call toList() so that we don't retain a reference to the
-      // deserialized data structure.
-      combinator.hiddenNames = serializedCombinator.hides.toList();
-      return combinator;
-    }
-  }
-
-  /**
-   * Resynthesize a [ConstructorElement] and place it in the given [holder].
-   * [classElement] is the element of the class for which this element is a
-   * constructor.
-   */
-  void buildConstructor(UnlinkedExecutable serializedExecutable,
-      ClassElementImpl classElement, ElementHolder holder) {
-    assert(serializedExecutable.kind == UnlinkedExecutableKind.constructor);
-    currentConstructor = new ConstructorElementImpl.forSerialized(
-        serializedExecutable, classElement);
-    currentConstructor.isCycleFree = serializedExecutable.isConst &&
-        !constCycles.contains(serializedExecutable.constCycleSlot);
-    if (serializedExecutable.name.isEmpty) {
-      currentConstructor.nameEnd =
-          serializedExecutable.nameOffset + classElement.name.length;
-    } else {
-      currentConstructor.nameEnd = serializedExecutable.nameEnd;
-      currentConstructor.periodOffset = serializedExecutable.periodOffset;
-    }
-    constructors[serializedExecutable.name] = currentConstructor;
-    buildExecutableCommonParts(currentConstructor, serializedExecutable);
-    currentConstructor.constantInitializers = serializedExecutable
-        .constantInitializers
-        .map((i) => buildConstructorInitializer(currentConstructor, i))
-        .toList();
-    if (serializedExecutable.isRedirectedConstructor) {
-      if (serializedExecutable.isFactory) {
-        EntityRef redirectedConstructor =
-            serializedExecutable.redirectedConstructor;
-        _ReferenceInfo info = getReferenceInfo(redirectedConstructor.reference);
-        List<EntityRef> typeArguments = redirectedConstructor.typeArguments;
-        currentConstructor.redirectedConstructor = _createConstructorElement(
-            _createConstructorDefiningType(classElement, info, typeArguments),
-            info);
-      } else {
-        List<String> locationComponents = unit.location.components.toList();
-        locationComponents.add(classElement.name);
-        locationComponents.add(serializedExecutable.redirectedConstructorName);
-        currentConstructor.redirectedConstructor =
-            new _DeferredConstructorElement._(
-                classElement.type,
-                serializedExecutable.redirectedConstructorName,
-                new ElementLocationImpl.con3(locationComponents));
-      }
-    }
-    holder.addConstructor(currentConstructor);
-    currentConstructor = null;
-  }
-
-  /**
-   * Resynthesize the [ConstructorInitializer] in context of
-   * [currentConstructor], which is used to resolve constructor parameter names.
-   */
-  ConstructorInitializer buildConstructorInitializer(
-      ConstructorElementImpl enclosingConstructor,
-      UnlinkedConstructorInitializer serialized) {
-    UnlinkedConstructorInitializerKind kind = serialized.kind;
-    String name = serialized.name;
-    List<Expression> arguments = <Expression>[];
-    {
-      int numArguments = serialized.arguments.length;
-      int numNames = serialized.argumentNames.length;
-      for (int i = 0; i < numArguments; i++) {
-        Expression expression = _buildConstExpression(
-            enclosingConstructor, serialized.arguments[i]);
-        int nameIndex = numNames + i - numArguments;
-        if (nameIndex >= 0) {
-          expression = AstFactory.namedExpression2(
-              serialized.argumentNames[nameIndex], expression);
-        }
-        arguments.add(expression);
-      }
-    }
-    switch (kind) {
-      case UnlinkedConstructorInitializerKind.field:
-        return AstFactory.constructorFieldInitializer(false, name,
-            _buildConstExpression(enclosingConstructor, serialized.expression));
-      case UnlinkedConstructorInitializerKind.superInvocation:
-        return AstFactory.superConstructorInvocation2(
-            name.isNotEmpty ? name : null, arguments);
-      case UnlinkedConstructorInitializerKind.thisInvocation:
-        return AstFactory.redirectingConstructorInvocation2(
-            name.isNotEmpty ? name : null, arguments);
-    }
-  }
-
-  /**
-   * Build the documentation for the given [element].  Does nothing if
-   * [serializedDocumentationComment] is `null`.
-   */
-  void buildDocumentation(ElementImpl element,
-      UnlinkedDocumentationComment serializedDocumentationComment) {
-    if (serializedDocumentationComment != null) {
-      element.documentationComment = serializedDocumentationComment.text;
-      element.setDocRange(serializedDocumentationComment.offset,
-          serializedDocumentationComment.length);
-    }
-  }
-
-  /**
-   * Resynthesize the [ClassElement] corresponding to an enum, along with the
-   * associated fields and implicit accessors.
-   */
-  void buildEnum(UnlinkedEnum serializedEnum) {
-    assert(!libraryResynthesizer.isCoreLibrary);
-    ClassElementImpl classElement =
-        new ClassElementImpl(serializedEnum.name, serializedEnum.nameOffset);
-    classElement.enum2 = true;
-    InterfaceType enumType = new InterfaceTypeImpl(classElement);
-    classElement.type = enumType;
-    classElement.supertype = typeProvider.objectType;
-    buildDocumentation(classElement, serializedEnum.documentationComment);
-    buildAnnotations(classElement, serializedEnum.annotations);
-    buildCodeRange(classElement, serializedEnum.codeRange);
-    ElementHolder memberHolder = new ElementHolder();
-    // Build the 'index' field.
-    FieldElementImpl indexField = new FieldElementImpl('index', -1);
-    indexField.final2 = true;
-    indexField.synthetic = true;
-    indexField.type = typeProvider.intType;
-    memberHolder.addField(indexField);
-    buildImplicitAccessors(indexField, memberHolder);
-    // Build the 'values' field.
-    FieldElementImpl valuesField = new ConstFieldElementImpl('values', -1);
-    valuesField.synthetic = true;
-    valuesField.const3 = true;
-    valuesField.static = true;
-    valuesField.type = typeProvider.listType.instantiate(<DartType>[enumType]);
-    memberHolder.addField(valuesField);
-    buildImplicitAccessors(valuesField, memberHolder);
-    // Build fields for all enum constants.
-    List<DartObjectImpl> constantValues = <DartObjectImpl>[];
-    for (int i = 0; i < serializedEnum.values.length; i++) {
-      UnlinkedEnumValue serializedEnumValue = serializedEnum.values[i];
-      String fieldName = serializedEnumValue.name;
-      ConstFieldElementImpl field =
-          new ConstFieldElementImpl(fieldName, serializedEnumValue.nameOffset);
-      buildDocumentation(field, serializedEnumValue.documentationComment);
-      field.const3 = true;
-      field.static = true;
-      field.type = enumType;
-      // Create a value for the constant.
-      Map<String, DartObjectImpl> fieldMap = <String, DartObjectImpl>{
-        fieldName: new DartObjectImpl(typeProvider.intType, new IntState(i))
-      };
-      DartObjectImpl value =
-          new DartObjectImpl(enumType, new GenericState(fieldMap));
-      constantValues.add(value);
-      field.evaluationResult = new EvaluationResultImpl(value);
-      // Add the field.
-      memberHolder.addField(field);
-      buildImplicitAccessors(field, memberHolder);
-    }
-    // Build the value of the 'values' field.
-    valuesField.evaluationResult = new EvaluationResultImpl(
-        new DartObjectImpl(valuesField.type, new ListState(constantValues)));
-    // done
-    classElement.fields = memberHolder.fields;
-    classElement.accessors = memberHolder.accessors;
-    classElement.constructors = <ConstructorElement>[];
-    unitHolder.addEnum(classElement);
-  }
-
-  /**
-   * Resynthesize an [ExecutableElement] and place it in the given [holder].
-   */
-  void buildExecutable(
-      UnlinkedExecutable serializedExecutable, ElementImpl enclosingElement,
-      [ElementHolder holder]) {
-    bool isTopLevel = holder == null;
-    if (holder == null) {
-      holder = unitHolder;
-    }
-    UnlinkedExecutableKind kind = serializedExecutable.kind;
-    String name = serializedExecutable.name;
-    if (kind == UnlinkedExecutableKind.setter) {
-      assert(name.endsWith('='));
-      name = name.substring(0, name.length - 1);
-    }
-    switch (kind) {
-      case UnlinkedExecutableKind.functionOrMethod:
-        if (isTopLevel) {
-          // Created lazily.
-        } else {
-          MethodElementImpl executableElement =
-              new MethodElementImpl.forSerialized(
-                  serializedExecutable, enclosingElement);
-          buildExecutableCommonParts(executableElement, serializedExecutable);
-          holder.addMethod(executableElement);
-        }
-        break;
-      case UnlinkedExecutableKind.getter:
-      case UnlinkedExecutableKind.setter:
-        // Top-level accessors are created lazily.
-        if (isTopLevel) {
-          break;
-        }
-        // Class member accessors.
-        PropertyAccessorElementImpl executableElement =
-            new PropertyAccessorElementImpl.forSerialized(
-                serializedExecutable, enclosingElement);
-        buildExecutableCommonParts(executableElement, serializedExecutable);
-        DartType type;
-        if (kind == UnlinkedExecutableKind.getter) {
-          type = executableElement.returnType;
-        } else {
-          type = executableElement.parameters[0].type;
-        }
-        holder.addAccessor(executableElement);
-        FieldElementImpl field = buildImplicitField(name, type, kind, holder);
-        field.static = serializedExecutable.isStatic;
-        executableElement.variable = field;
-        if (kind == UnlinkedExecutableKind.getter) {
-          field.getter = executableElement;
-        } else {
-          field.setter = executableElement;
-        }
-        break;
-      default:
-        // The only other executable type is a constructor, and that is handled
-        // separately (in [buildConstructor].  So this code should be
-        // unreachable.
-        assert(false);
-    }
-  }
-
-  /**
-   * Handle the parts of an executable element that are common to constructors,
-   * functions, methods, getters, and setters.
-   */
-  void buildExecutableCommonParts(ExecutableElementImpl executableElement,
-      UnlinkedExecutable serializedExecutable) {
-    executableElement.typeParameters =
-        buildTypeParameters(serializedExecutable.typeParameters);
-    {
-      List<UnlinkedParam> unlinkedParameters = serializedExecutable.parameters;
-      int length = unlinkedParameters.length;
-      if (length != 0) {
-        List<ParameterElementImpl> parameters =
-            new List<ParameterElementImpl>(length);
-        for (int i = 0; i < length; i++) {
-          parameters[i] =
-              buildParameter(unlinkedParameters[i], executableElement);
-        }
-        executableElement.parameters = parameters;
-      }
-    }
-    executableElement.type = new FunctionTypeImpl.elementWithNameAndArgs(
-        executableElement, null, getCurrentTypeArguments(skipLevels: 1), false);
-    {
-      List<UnlinkedExecutable> unlinkedFunctions =
-          serializedExecutable.localFunctions;
-      int length = unlinkedFunctions.length;
-      if (length != 0) {
-        List<FunctionElementImpl> localFunctions =
-            new List<FunctionElementImpl>(length);
-        for (int i = 0; i < length; i++) {
-          localFunctions[i] =
-              buildLocalFunction(unlinkedFunctions[i], executableElement);
-        }
-        executableElement.functions = localFunctions;
-      }
-    }
-    {
-      List<UnlinkedLabel> unlinkedLabels = serializedExecutable.localLabels;
-      int length = unlinkedLabels.length;
-      if (length != 0) {
-        List<LabelElementImpl> localLabels = new List<LabelElementImpl>(length);
-        for (int i = 0; i < length; i++) {
-          localLabels[i] = buildLocalLabel(unlinkedLabels[i]);
-        }
-        executableElement.labels = localLabels;
-      }
-    }
-    {
-      List<UnlinkedVariable> unlinkedVariables =
-          serializedExecutable.localVariables;
-      int length = unlinkedVariables.length;
-      if (length != 0) {
-        List<LocalVariableElementImpl> localVariables =
-            new List<LocalVariableElementImpl>(length);
-        for (int i = 0; i < length; i++) {
-          localVariables[i] =
-              buildLocalVariable(unlinkedVariables[i], executableElement);
-        }
-        executableElement.localVariables = localVariables;
-      }
-    }
-    currentTypeParameters.removeLast();
-  }
-
-  /**
-   * Build the implicit getter and setter associated with [element], and place
-   * them in [holder].
-   */
-  void buildImplicitAccessors(
-      PropertyInducingElementImpl element, ElementHolder holder) {
-    String name = element.name;
-    DartType type = element.type;
-    PropertyAccessorElementImpl getter =
-        buildImplicitGetter(element, name, type);
-    holder?.addAccessor(getter);
-    if (!(element.isConst || element.isFinal)) {
-      PropertyAccessorElementImpl setter =
-          buildImplicitSetter(element, name, type);
-      holder?.addAccessor(setter);
-    }
-  }
-
-  /**
-   * Build the implicit field associated with a getter or setter, and place it
-   * in [holder].
-   */
-  FieldElementImpl buildImplicitField(String name, DartType type,
-      UnlinkedExecutableKind kind, ElementHolder holder) {
-    FieldElementImpl field = holder.getField(name);
-    if (field == null) {
-      field = new FieldElementImpl(name, -1);
-      field.synthetic = true;
-      field.final2 = kind == UnlinkedExecutableKind.getter;
-      field.type = type;
-      holder.addField(field);
-      return field;
-    } else {
-      // TODO(paulberry): what if the getter and setter have a type mismatch?
-      field.final2 = false;
-      return field;
-    }
-  }
-
-  /**
    * Build an implicit getter for the given [property] and bind it to the
    * [property] and to its enclosing element.
    */
   PropertyAccessorElementImpl buildImplicitGetter(
-      PropertyInducingElementImpl property, String name, DartType type) {
-    PropertyAccessorElementImpl getter =
-        new PropertyAccessorElementImpl(name, property.nameOffset);
+      PropertyInducingElementImpl property) {
+    PropertyAccessorElementImpl_ImplicitGetter getter =
+        new PropertyAccessorElementImpl_ImplicitGetter(property);
     getter.enclosingElement = property.enclosingElement;
-    getter.getter = true;
-    getter.static = property.isStatic;
-    getter.synthetic = true;
-    getter.returnType = type;
-    getter.type = new FunctionTypeImpl(getter);
-    getter.variable = property;
-    getter.hasImplicitReturnType = property.hasImplicitType;
-    property.getter = getter;
     return getter;
   }
 
@@ -2214,23 +1517,10 @@
    * [property] and to its enclosing element.
    */
   PropertyAccessorElementImpl buildImplicitSetter(
-      PropertyInducingElementImpl property, String name, DartType type) {
-    PropertyAccessorElementImpl setter =
-        new PropertyAccessorElementImpl(name, property.nameOffset);
+      PropertyInducingElementImpl property) {
+    PropertyAccessorElementImpl_ImplicitSetter setter =
+        new PropertyAccessorElementImpl_ImplicitSetter(property);
     setter.enclosingElement = property.enclosingElement;
-    setter.setter = true;
-    setter.static = property.isStatic;
-    setter.synthetic = true;
-    setter.parameters = <ParameterElement>[
-      new ParameterElementImpl('_$name', property.nameOffset)
-        ..synthetic = true
-        ..type = type
-        ..parameterKind = ParameterKind.REQUIRED
-    ];
-    setter.returnType = VoidTypeImpl.instance;
-    setter.type = new FunctionTypeImpl(setter);
-    setter.variable = property;
-    property.setter = setter;
     return setter;
   }
 
@@ -2254,137 +1544,6 @@
   }
 
   /**
-   * Resynthesize a local [FunctionElement].
-   */
-  FunctionElementImpl buildLocalFunction(
-      UnlinkedExecutable serializedExecutable, ElementImpl enclosingElement) {
-    FunctionElementImpl element = new FunctionElementImpl.forSerialized(
-        serializedExecutable, enclosingElement);
-    if (serializedExecutable.visibleOffset != 0) {
-      element.setVisibleRange(serializedExecutable.visibleOffset,
-          serializedExecutable.visibleLength);
-    }
-    buildExecutableCommonParts(element, serializedExecutable);
-    return element;
-  }
-
-  /**
-   * Resynthesize a [LabelElement].
-   */
-  LabelElement buildLocalLabel(UnlinkedLabel serializedLabel) {
-    return new LabelElementImpl(
-        serializedLabel.name,
-        serializedLabel.nameOffset,
-        serializedLabel.isOnSwitchStatement,
-        serializedLabel.isOnSwitchMember);
-  }
-
-  /**
-   * Resynthesize a [LocalVariableElement].
-   */
-  LocalVariableElement buildLocalVariable(UnlinkedVariable serializedVariable,
-      ExecutableElementImpl enclosingExecutable) {
-    LocalVariableElementImpl element;
-    if (serializedVariable.initializer?.bodyExpr != null &&
-        serializedVariable.isConst) {
-      ConstLocalVariableElementImpl constElement =
-          new ConstLocalVariableElementImpl.forSerialized(
-              serializedVariable, enclosingExecutable);
-      element = constElement;
-      constElement.constantInitializer = _buildConstExpression(
-          enclosingExecutable, serializedVariable.initializer.bodyExpr);
-    } else {
-      element = new LocalVariableElementImpl.forSerialized(
-          serializedVariable, enclosingExecutable);
-    }
-    if (serializedVariable.visibleOffset != 0) {
-      element.setVisibleRange(
-          serializedVariable.visibleOffset, serializedVariable.visibleLength);
-    }
-    buildVariableCommonParts(element, serializedVariable);
-    return element;
-  }
-
-  /**
-   * Resynthesize a [ParameterElement].
-   */
-  ParameterElement buildParameter(
-      UnlinkedParam serializedParameter, ElementImpl enclosingElement,
-      {bool synthetic: false}) {
-    ParameterElementImpl parameterElement;
-    if (serializedParameter.isInitializingFormal) {
-      if (serializedParameter.kind == UnlinkedParamKind.required) {
-        parameterElement = new FieldFormalParameterElementImpl.forSerialized(
-            serializedParameter, enclosingElement);
-      } else {
-        parameterElement =
-            new DefaultFieldFormalParameterElementImpl.forSerialized(
-                serializedParameter, enclosingElement);
-      }
-    } else {
-      if (serializedParameter.kind == UnlinkedParamKind.required) {
-        parameterElement = new ParameterElementImpl.forSerialized(
-            serializedParameter, enclosingElement);
-      } else {
-        parameterElement = new DefaultParameterElementImpl.forSerialized(
-            serializedParameter, enclosingElement);
-      }
-    }
-    parameterElement.synthetic = synthetic;
-    if (serializedParameter.isFunctionTyped) {
-      FunctionElementImpl parameterTypeElement =
-          new FunctionElementImpl_forFunctionTypedParameter(
-              unit, parameterElement);
-      if (!synthetic) {
-        parameterTypeElement.enclosingElement = parameterElement;
-      }
-      List<ParameterElement> subParameters = serializedParameter.parameters
-          .map((UnlinkedParam p) =>
-              buildParameter(p, parameterTypeElement, synthetic: synthetic))
-          .toList();
-      if (synthetic) {
-        parameterTypeElement.parameters = subParameters;
-      } else {
-        parameterElement.parameters = subParameters;
-        parameterTypeElement.shareParameters(subParameters);
-      }
-      parameterTypeElement.returnType =
-          buildType(serializedParameter.type, _currentTypeParameterizedElement);
-      parameterElement.type = new FunctionTypeImpl.elementWithNameAndArgs(
-          parameterTypeElement, null, getCurrentTypeArguments(), false);
-      parameterTypeElement.type = parameterElement.type;
-    }
-    buildVariableInitializer(parameterElement, serializedParameter.initializer);
-    return parameterElement;
-  }
-
-  /**
-   * Handle the parts that are common to top level variables and fields.
-   */
-  void buildPropertyIntroducingElementCommonParts(
-      PropertyInducingElementImpl element,
-      UnlinkedVariable serializedVariable) {
-    buildVariableCommonParts(element, serializedVariable);
-    element.propagatedType = buildLinkedType(
-        serializedVariable.propagatedTypeSlot,
-        _currentTypeParameterizedElement);
-  }
-
-  List<FunctionElementImpl> buildTopLevelFunctions() {
-    List<FunctionElementImpl> functions = <FunctionElementImpl>[];
-    List<UnlinkedExecutable> executables = unlinkedUnit.executables;
-    for (UnlinkedExecutable unlinkedExecutable in executables) {
-      if (unlinkedExecutable.kind == UnlinkedExecutableKind.functionOrMethod) {
-        FunctionElementImpl function =
-            new FunctionElementImpl.forSerialized(unlinkedExecutable, unit);
-        buildExecutableCommonParts(function, unlinkedExecutable);
-        functions.add(function);
-      }
-    }
-    return functions;
-  }
-
-  /**
    * Build a [DartType] object based on an [EntityRef].  This [DartType]
    * may refer to elements in other libraries than the library being
    * deserialized, so handles are used to avoid having to deserialize other
@@ -2404,17 +1563,8 @@
       return typeParameterContext.getTypeParameterType(type.paramReference);
     } else if (type.syntheticReturnType != null) {
       FunctionElementImpl element =
-          new FunctionElementImpl_forLUB(unit, typeParameterContext);
-      element.parameters = type.syntheticParams
-          .map((UnlinkedParam param) =>
-              buildParameter(param, element, synthetic: true))
-          .toList();
-      element.returnType =
-          buildType(type.syntheticReturnType, typeParameterContext);
-      FunctionTypeImpl result = new FunctionTypeImpl.elementWithNameAndArgs(
-          element, null, null, false);
-      element.type = result;
-      return result;
+          new FunctionElementImpl_forLUB(unit, typeParameterContext, type);
+      return element.type;
     } else {
       DartType getTypeArgument(int i) {
         if (i < type.typeArguments.length) {
@@ -2432,72 +1582,6 @@
     }
   }
 
-  /**
-   * Resynthesize a [FunctionTypeAliasElement] and place it in the
-   * [unitHolder].
-   */
-  void buildTypedef(UnlinkedTypedef serializedTypedef) {
-    FunctionTypeAliasElementImpl functionTypeAliasElement =
-        new FunctionTypeAliasElementImpl.forSerialized(serializedTypedef, unit);
-    // TODO(scheglov) remove this after delaying parameters and their types
-    currentTypeParameters.add(functionTypeAliasElement.typeParameters);
-    functionTypeAliasElement.parameters = serializedTypedef.parameters
-        .map((p) => buildParameter(p, functionTypeAliasElement))
-        .toList();
-    functionTypeAliasElement.type =
-        new FunctionTypeImpl.forTypedef(functionTypeAliasElement);
-    unitHolder.addTypeAlias(functionTypeAliasElement);
-    // TODO(scheglov) remove this after delaying parameters and their types
-    currentTypeParameters.removeLast();
-    assert(currentTypeParameters.isEmpty);
-  }
-
-  /**
-   * Resynthesize a [TypeParameterElement], handling all parts of it except
-   * its bound.
-   *
-   * The bound is deferred until later since it may refer to other type
-   * parameters that have not been resynthesized yet.  To handle the bound,
-   * call [finishTypeParameter].
-   */
-  TypeParameterElement buildTypeParameter(
-      UnlinkedTypeParam serializedTypeParameter) {
-    TypeParameterElementImpl typeParameterElement =
-        new TypeParameterElementImpl(
-            serializedTypeParameter.name, serializedTypeParameter.nameOffset);
-    typeParameterElement.type = new TypeParameterTypeImpl(typeParameterElement);
-    buildAnnotations(typeParameterElement, serializedTypeParameter.annotations);
-    buildCodeRange(typeParameterElement, serializedTypeParameter.codeRange);
-    return typeParameterElement;
-  }
-
-  /**
-   * Build [TypeParameterElement]s corresponding to the type parameters in
-   * [serializedTypeParameters] and store them in [currentTypeParameters].
-   * Also return them.
-   */
-  List<TypeParameterElement> buildTypeParameters(
-      List<UnlinkedTypeParam> serializedTypeParameters) {
-    int length = serializedTypeParameters.length;
-    if (length != 0) {
-      List<TypeParameterElement> typeParameters =
-          new List<TypeParameterElement>(length);
-      for (int i = 0; i < length; i++) {
-        typeParameters[i] = buildTypeParameter(serializedTypeParameters[i]);
-      }
-      currentTypeParameters.add(typeParameters);
-      for (int i = 0; i < length; i++) {
-        finishTypeParameter(serializedTypeParameters[i], typeParameters[i]);
-      }
-      return typeParameters;
-    } else {
-      List<TypeParameterElement> typeParameters =
-          const <TypeParameterElement>[];
-      currentTypeParameters.add(typeParameters);
-      return typeParameters;
-    }
-  }
-
   UnitExplicitTopLevelAccessors buildUnitExplicitTopLevelAccessors() {
     HashMap<String, TopLevelVariableElementImpl> implicitVariables =
         new HashMap<String, TopLevelVariableElementImpl>();
@@ -2518,7 +1602,6 @@
             new PropertyAccessorElementImpl.forSerialized(
                 unlinkedExecutable, unit);
         accessorsData.accessors.add(accessor);
-        buildExecutableCommonParts(accessor, unlinkedExecutable);
         // implicit variable
         TopLevelVariableElementImpl variable = implicitVariables[name];
         if (variable == null) {
@@ -2553,120 +1636,23 @@
       TopLevelVariableElementImpl element;
       if (unlinkedVariable.initializer?.bodyExpr != null &&
           unlinkedVariable.isConst) {
-        ConstTopLevelVariableElementImpl constElement =
-            new ConstTopLevelVariableElementImpl.forSerialized(
-                unlinkedVariable, unit);
-        element = constElement;
-        constElement.constantInitializer =
-            _buildConstExpression(null, unlinkedVariable.initializer.bodyExpr);
+        element = new ConstTopLevelVariableElementImpl.forSerialized(
+            unlinkedVariable, unit);
       } else {
         element = new TopLevelVariableElementImpl.forSerialized(
             unlinkedVariable, unit);
       }
-      buildPropertyIntroducingElementCommonParts(element, unlinkedVariable);
       variablesData.variables[i] = element;
       // implicit accessors
-      String name = element.name;
-      DartType type = element.type;
-      variablesData.implicitAccessors
-          .add(buildImplicitGetter(element, name, type));
+      variablesData.implicitAccessors.add(buildImplicitGetter(element));
       if (!(element.isConst || element.isFinal)) {
-        variablesData.implicitAccessors
-            .add(buildImplicitSetter(element, name, type));
+        variablesData.implicitAccessors.add(buildImplicitSetter(element));
       }
     }
     return variablesData;
   }
 
   /**
-   * Resynthesize a [TopLevelVariableElement] or [FieldElement].
-   */
-  void buildVariable(
-      ClassElementImpl enclosingClass, UnlinkedVariable serializedVariable,
-      [ElementHolder holder]) {
-    if (holder == null) {
-      throw new UnimplementedError('Must be lazy');
-    } else {
-      FieldElementImpl element;
-      if (serializedVariable.initializer?.bodyExpr != null &&
-          (serializedVariable.isConst ||
-              serializedVariable.isFinal && !serializedVariable.isStatic)) {
-        ConstFieldElementImpl constElement =
-            new ConstFieldElementImpl.forSerialized(
-                serializedVariable, enclosingClass);
-        element = constElement;
-        constElement.constantInitializer = _buildConstExpression(
-            enclosingClass, serializedVariable.initializer.bodyExpr);
-      } else {
-        element = new FieldElementImpl.forSerialized(
-            serializedVariable, enclosingClass);
-      }
-      buildPropertyIntroducingElementCommonParts(element, serializedVariable);
-      element.static = serializedVariable.isStatic;
-      holder.addField(element);
-      buildImplicitAccessors(element, holder);
-      fields[element.name] = element;
-    }
-  }
-
-  /**
-   * Handle the parts that are common to variables.
-   */
-  void buildVariableCommonParts(
-      VariableElementImpl element, UnlinkedVariable serializedVariable) {
-    element.type = buildLinkedType(serializedVariable.inferredTypeSlot,
-            element.typeParameterContext) ??
-        buildType(serializedVariable.type, element.typeParameterContext);
-    buildVariableInitializer(element, serializedVariable.initializer);
-  }
-
-  /**
-   * If the given [serializedInitializer] is not `null`, create the
-   * corresponding [FunctionElementImpl] and set it for the [variable].
-   */
-  void buildVariableInitializer(
-      VariableElementImpl variable, UnlinkedExecutable serializedInitializer) {
-    if (serializedInitializer == null) {
-      return null;
-    }
-    FunctionElementImpl initializerElement =
-        buildLocalFunction(serializedInitializer, variable);
-    initializerElement.synthetic = true;
-    variable.initializer = initializerElement;
-  }
-
-  /**
-   * Finish creating a [TypeParameterElement] by deserializing its bound.
-   */
-  void finishTypeParameter(UnlinkedTypeParam serializedTypeParameter,
-      TypeParameterElementImpl typeParameterElement) {
-    if (serializedTypeParameter.bound != null) {
-      typeParameterElement.bound = buildType(
-          serializedTypeParameter.bound, _currentTypeParameterizedElement,
-          instantiateToBoundsAllowed: false);
-    }
-  }
-
-  /**
-   * Return a list of type arguments corresponding to [currentTypeParameters],
-   * skipping the innermost [skipLevels] nesting levels.
-   *
-   * Type parameters are listed in nesting order from innermost to outermost,
-   * and then in declaration order.  So for instance if we are resynthesizing a
-   * method declared as `class C<T, U> { void m<V, W>() { ... } }`, then the
-   * type parameters will be returned in the order `[V, W, T, U]`.
-   */
-  List<DartType> getCurrentTypeArguments({int skipLevels: 0}) {
-    assert(currentTypeParameters.length >= skipLevels);
-    List<DartType> result = <DartType>[];
-    for (int i = currentTypeParameters.length - 1 - skipLevels; i >= 0; i--) {
-      result.addAll(currentTypeParameters[i]
-          .map((TypeParameterElement param) => param.type));
-    }
-    return result;
-  }
-
-  /**
    * Return [_ReferenceInfo] with the given [index], lazily resolving it.
    */
   _ReferenceInfo getReferenceInfo(int index) {
@@ -2782,72 +1768,6 @@
     return result;
   }
 
-  /**
-   * Get the type parameter from the surrounding scope whose De Bruijn index is
-   * [index].
-   */
-  DartType getTypeParameterFromScope(int index) {
-    for (int i = currentTypeParameters.length - 1; i >= 0; i--) {
-      List<TypeParameterElement> paramsAtThisNestingLevel =
-          currentTypeParameters[i];
-      int numParamsAtThisNestingLevel = paramsAtThisNestingLevel.length;
-      if (index <= numParamsAtThisNestingLevel) {
-        return paramsAtThisNestingLevel[numParamsAtThisNestingLevel - index]
-            .type;
-      }
-      index -= numParamsAtThisNestingLevel;
-    }
-    throw new StateError('Type parameter not found');
-  }
-
-  /**
-   * Populate a [CompilationUnitElement] by deserializing all the elements
-   * contained in it.
-   */
-  void populateUnit() {
-    unlinkedUnit.classes.forEach(buildClass);
-    unlinkedUnit.enums.forEach(buildEnum);
-    unlinkedUnit.typedefs.forEach(buildTypedef);
-    unit.enums = unitHolder.enums;
-    List<FunctionTypeAliasElement> typeAliases = unitHolder.typeAliases;
-    for (FunctionTypeAliasElementImpl typeAlias in typeAliases) {
-      if (typeAlias.isSynthetic) {
-        typeAlias.enclosingElement = unit;
-      }
-    }
-    unit.typeAliases = typeAliases.where((e) => !e.isSynthetic).toList();
-    unit.types = unitHolder.types;
-    assert(currentTypeParameters.isEmpty);
-  }
-
-  /**
-   * Constructor initializers can reference fields and other constructors of
-   * the same class, including forward references. So, we need to delay
-   * resolution until after class elements are built.
-   */
-  void resolveConstructorInitializers(ClassElementImpl classElement) {
-    for (ConstructorElementImpl constructor in constructors.values) {
-      for (ConstructorInitializer initializer
-          in constructor.constantInitializers) {
-        if (initializer is ConstructorFieldInitializer) {
-          SimpleIdentifier nameNode = initializer.fieldName;
-          nameNode.staticElement = fields[nameNode.name];
-        } else if (initializer is SuperConstructorInvocation) {
-          SimpleIdentifier nameNode = initializer.constructorName;
-          ConstructorElement element = new _DeferredConstructorElement(
-              classElement.supertype, nameNode?.name ?? '');
-          initializer.staticElement = element;
-          nameNode?.staticElement = element;
-        } else if (initializer is RedirectingConstructorInvocation) {
-          SimpleIdentifier nameNode = initializer.constructorName;
-          ConstructorElement element = constructors[nameNode?.name ?? ''];
-          initializer.staticElement = element;
-          nameNode?.staticElement = element;
-        }
-      }
-    }
-  }
-
   Expression _buildConstExpression(ElementImpl context, UnlinkedConst uc) {
     return new _ConstExprBuilder(this, context, uc).build();
   }
@@ -2856,7 +1776,7 @@
    * Return the defining type for a [ConstructorElement] by applying
    * [typeArgumentRefs] to the given linked [info].
    */
-  InterfaceType _createConstructorDefiningType(
+  DartType _createConstructorDefiningType(
       TypeParameterizedElementMixin typeParameterContext,
       _ReferenceInfo info,
       List<EntityRef> typeArgumentRefs) {
@@ -2875,22 +1795,38 @@
   }
 
   /**
+   * Return the [ConstructorElement] corresponding to the given [entry].
+   */
+  ConstructorElement _getConstructorForEntry(
+      TypeParameterizedElementMixin typeParameterContext, EntityRef entry) {
+    _ReferenceInfo info = getReferenceInfo(entry.reference);
+    DartType type = _createConstructorDefiningType(
+        typeParameterContext, info, entry.typeArguments);
+    if (type is InterfaceType) {
+      return _getConstructorForInfo(type, info);
+    }
+    return null;
+  }
+
+  /**
    * Return the [ConstructorElement] corresponding to the given linked [info],
    * using the [classType] which has already been computed (e.g. by
    * [_createConstructorDefiningType]).  Both cases when [info] is a
    * [ClassElement] and [ConstructorElement] are supported.
    */
-  ConstructorElement _createConstructorElement(
+  ConstructorElement _getConstructorForInfo(
       InterfaceType classType, _ReferenceInfo info) {
-    bool isClass = info.element is ClassElement;
-    String name = isClass ? '' : info.name;
-    _DeferredConstructorElement element =
-        new _DeferredConstructorElement(classType, name);
-    if (info.numTypeParameters != 0) {
-      return new ConstructorMember(element, classType);
-    } else {
-      return element;
+    ConstructorElement element;
+    Element infoElement = info.element;
+    if (infoElement is ConstructorElement) {
+      element = infoElement;
+    } else if (infoElement is ClassElement) {
+      element = infoElement.unnamedConstructor;
     }
+    if (element != null && info.numTypeParameters != 0) {
+      return new ConstructorMember(element, classType);
+    }
+    return element;
   }
 
   /**
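
The deleted bodies of `buildImplicitGetter` and `buildImplicitSetter` above spell out exactly which properties a synthetic accessor used to copy from its backing variable (static-ness, the synthetic flag, the return type, and the setter's single parameter). The replacement classes `PropertyAccessorElementImpl_ImplicitGetter` and `PropertyAccessorElementImpl_ImplicitSetter` take only the property element, so those properties are presumably derived from the wrapped variable on demand. A minimal standalone sketch of that idea, using hypothetical classes rather than the analyzer's real element model:

```dart
// Standalone sketch only: Field, ImplicitGetter, and ImplicitSetter are
// hypothetical stand-ins, not the analyzer's real element classes.
class Field {
  final String name;
  final String type;
  final bool isStatic;
  final bool isFinal;
  Field(this.name, this.type, {this.isStatic: false, this.isFinal: false});
}

/**
 * An implicit getter that derives every property from the wrapped [field]
 * instead of having each property assigned eagerly at construction time.
 */
class ImplicitGetter {
  final Field field;
  ImplicitGetter(this.field);

  String get name => field.name;
  bool get isStatic => field.isStatic;
  bool get isSynthetic => true;
  String get returnType => field.type;
}

/**
 * The implicit setter mirrors the getter, but takes one parameter of the
 * field's type and returns void; callers create it only for non-final fields.
 */
class ImplicitSetter {
  final Field field;
  ImplicitSetter(this.field);

  String get name => '${field.name}=';
  bool get isStatic => field.isStatic;
  bool get isSynthetic => true;
  String get parameterType => field.type;
  String get returnType => 'void';
}

void main() {
  Field field = new Field('count', 'int');
  ImplicitGetter getter = new ImplicitGetter(field);
  print('${getter.name}: ${getter.returnType}'); // count: int
  if (!field.isFinal) {
    ImplicitSetter setter = new ImplicitSetter(field);
    print('${setter.name}(${setter.parameterType})'); // count=(int)
  }
}
```

Deriving everything from the wrapped field keeps the accessor and the field from drifting apart and removes the long block of manual assignments that the old `buildImplicitGetter`/`buildImplicitSetter` bodies needed.
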
diff --git a/pkg/analyzer/lib/src/summary/summarize_const_expr.dart b/pkg/analyzer/lib/src/summary/summarize_const_expr.dart
index 8be5f21..18db39f 100644
--- a/pkg/analyzer/lib/src/summary/summarize_const_expr.dart
+++ b/pkg/analyzer/lib/src/summary/summarize_const_expr.dart
@@ -137,11 +137,6 @@
       TypeArgumentList typeArguments, SimpleIdentifier name);
 
   /**
-   * Return [EntityRefBuilder] that corresponds to the given [identifier].
-   */
-  EntityRefBuilder serializeIdentifier(Identifier identifier);
-
-  /**
    * Return a pair of ints showing how the given [functionExpression] is nested
    * within the constant currently being serialized.  The first int indicates
    * how many levels of function nesting must be popped in order to reach the
@@ -154,6 +149,11 @@
   List<int> serializeFunctionExpression(FunctionExpression functionExpression);
 
   /**
+   * Return [EntityRefBuilder] that corresponds to the given [identifier].
+   */
+  EntityRefBuilder serializeIdentifier(Identifier identifier);
+
+  /**
    * Return [EntityRefBuilder] that corresponds to the given [expr], which
    * must be a sequence of identifiers.
    */
@@ -198,6 +198,34 @@
   }
 
   /**
+   * Return `true` if the given [expr] is a sequence of identifiers.
+   */
+  bool _isIdentifierSequence(Expression expr) {
+    while (expr != null) {
+      if (expr is SimpleIdentifier) {
+        AstNode parent = expr.parent;
+        if (parent is MethodInvocation && parent.methodName == expr) {
+          if (parent.isCascaded) {
+            return false;
+          }
+          return parent.target == null || _isIdentifierSequence(parent.target);
+        }
+        if (isParameterName(expr.name)) {
+          return false;
+        }
+        return true;
+      } else if (expr is PrefixedIdentifier) {
+        expr = (expr as PrefixedIdentifier).prefix;
+      } else if (expr is PropertyAccess) {
+        expr = (expr as PropertyAccess).target;
+      } else {
+        return false;
+      }
+    }
+    return false;
+  }
+
+  /**
    * Push the operation for the given assignable [expr].
    */
   void _pushAssignable(Expression expr) {
@@ -217,6 +245,11 @@
       }
       _serialize(expr.index);
       operations.add(UnlinkedConstOperation.assignToIndex);
+    } else if (expr is PrefixedIdentifier) {
+      strings.add(expr.prefix.name);
+      operations.add(UnlinkedConstOperation.pushParameter);
+      strings.add(expr.identifier.name);
+      operations.add(UnlinkedConstOperation.assignToProperty);
     } else {
       throw new StateError('Unsupported assignable: $expr');
     }
@@ -269,6 +302,12 @@
       if (expr is SimpleIdentifier && isParameterName(expr.name)) {
         strings.add(expr.name);
         operations.add(UnlinkedConstOperation.pushParameter);
+      } else if (expr is PrefixedIdentifier &&
+          isParameterName(expr.prefix.name)) {
+        strings.add(expr.prefix.name);
+        operations.add(UnlinkedConstOperation.pushParameter);
+        strings.add(expr.identifier.name);
+        operations.add(UnlinkedConstOperation.extractProperty);
       } else {
         references.add(serializeIdentifier(expr));
         operations.add(UnlinkedConstOperation.pushReference);
@@ -501,6 +540,7 @@
       EntityRefBuilder ref = serializeIdentifierSequence(methodName);
       _serializeArguments(argumentList);
       references.add(ref);
+      _serializeTypeArguments(invocation.typeArguments);
       operations.add(UnlinkedConstOperation.invokeMethodRef);
     } else {
       if (!invocation.isCascaded) {
@@ -508,6 +548,7 @@
       }
       _serializeArguments(argumentList);
       strings.add(methodName.name);
+      _serializeTypeArguments(invocation.typeArguments);
       operations.add(UnlinkedConstOperation.invokeMethod);
     }
   }
@@ -596,28 +637,14 @@
     }
   }
 
-  /**
-   * Return `true` if the given [expr] is a sequence of identifiers.
-   */
-  static bool _isIdentifierSequence(Expression expr) {
-    while (expr != null) {
-      if (expr is SimpleIdentifier) {
-        AstNode parent = expr.parent;
-        if (parent is MethodInvocation && parent.methodName == expr) {
-          if (parent.isCascaded) {
-            return false;
-          }
-          return parent.target == null || _isIdentifierSequence(parent.target);
-        }
-        return true;
-      } else if (expr is PrefixedIdentifier) {
-        expr = (expr as PrefixedIdentifier).prefix;
-      } else if (expr is PropertyAccess) {
-        expr = (expr as PropertyAccess).target;
-      } else {
-        return false;
+  void _serializeTypeArguments(TypeArgumentList typeArguments) {
+    if (typeArguments == null) {
+      ints.add(0);
+    } else {
+      ints.add(typeArguments.arguments.length);
+      for (TypeName typeName in typeArguments.arguments) {
+        references.add(serializeTypeName(typeName));
       }
     }
-    return false;
   }
 }
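
The new `_serializeTypeArguments` above extends the flat constant encoding so that `invokeMethod` and `invokeMethodRef` carry explicit type arguments: the argument count is appended to `ints` (0 when there is no `TypeArgumentList`) and one entry per argument is appended to `references`. A small sketch of that encoding with plain lists standing in for the summarizer's buffers (the names below are hypothetical simplifications, not the real serializer state):

```dart
// Sketch only: these lists and helpers stand in for the summarizer's
// ints/references buffers and serializeTypeName; they are not the real API.
final List<int> ints = <int>[];
final List<String> references = <String>[];

String serializeTypeName(String typeName) => typeName;

void serializeTypeArguments(List<String> typeArguments) {
  if (typeArguments == null) {
    ints.add(0); // No explicit type arguments.
  } else {
    ints.add(typeArguments.length);
    for (String typeName in typeArguments) {
      references.add(serializeTypeName(typeName));
    }
  }
}

void main() {
  // f<int, String>(x)  ->  count 2, then references for `int` and `String`.
  serializeTypeArguments(<String>['int', 'String']);
  // f(x)  ->  count 0 and no references.
  serializeTypeArguments(null);
  print(ints);       // [2, 0]
  print(references); // [int, String]
}
```

A reader of the summary can then pop the count from `ints` and take that many entries from `references` before decoding the invocation itself.
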
diff --git a/pkg/analyzer/lib/src/summary/summary_file_builder.dart b/pkg/analyzer/lib/src/summary/summary_file_builder.dart
new file mode 100644
index 0000000..3cd21f1
--- /dev/null
+++ b/pkg/analyzer/lib/src/summary/summary_file_builder.dart
@@ -0,0 +1,222 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.src.summary.summary_file_builder;
+
+import 'dart:collection';
+import 'dart:io';
+
+import 'package:analyzer/dart/ast/ast.dart';
+import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/java_io.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/sdk_io.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/summary/flat_buffers.dart' as fb;
+import 'package:analyzer/src/summary/index_unit.dart';
+import 'package:analyzer/src/summary/summarize_elements.dart';
+import 'package:path/path.dart';
+
+const int FIELD_SPEC_INDEX = 1;
+const int FIELD_SPEC_SUM = 0;
+const int FIELD_STRONG_INDEX = 3;
+const int FIELD_STRONG_SUM = 2;
+
+class BuilderOutput {
+  final List<int> sum;
+  final List<int> index;
+
+  BuilderOutput(this.sum, this.index);
+
+  void writeMultiple(String outputDirectoryPath, String modeName) {
+    // Write summary.
+    {
+      String outputPath = join(outputDirectoryPath, '$modeName.sum');
+      File file = new File(outputPath);
+      file.writeAsBytesSync(sum, mode: FileMode.WRITE_ONLY);
+    }
+    // Write index.
+    {
+      String outputPath = join(outputDirectoryPath, '$modeName.index');
+      File file = new File(outputPath);
+      file.writeAsBytesSync(index, mode: FileMode.WRITE_ONLY);
+    }
+  }
+}
+
+/**
+ * Summary build configuration.
+ */
+class SummaryBuildConfig {
+
+  /**
+   * Whether to exclude informative data from created summaries.
+   */
+  final bool buildSummaryExcludeInformative;
+
+  /**
+   * Whether to output a summary in "fallback mode".
+   */
+  final bool buildSummaryFallback;
+
+  /**
+   * Whether to create summaries directly from ASTs, i.e. without creating a
+   * full element model.
+   */
+  final bool buildSummaryOnlyAst;
+
+  /**
+   * Path to the dart SDK summary file.
+   */
+  final String dartSdkSummaryPath;
+
+  /**
+   * Whether to use strong static checking.
+   */
+  final bool strongMode;
+
+  /**
+   * List of summary input file paths.
+   */
+  final Iterable<String> summaryInputs;
+
+  /**
+   * Create a build configuration with the given options.
+   */
+  SummaryBuildConfig(
+      {this.strongMode: false,
+      this.summaryInputs,
+      this.dartSdkSummaryPath,
+      this.buildSummaryExcludeInformative: false,
+      this.buildSummaryFallback: false,
+      this.buildSummaryOnlyAst: false});
+}
+
+class SummaryBuilder {
+  final AnalysisContext context;
+  final Iterable<Source> librarySources;
+  final SummaryBuildConfig config;
+
+  /**
+   * Create a summary builder for these [librarySources] and [context] using the
+   * given [config].
+   */
+  SummaryBuilder(this.librarySources, this.context, this.config);
+
+  /**
+   * Create an SDK summary builder for the Dart SDK at the given [sdkPath],
+   * using the given [config].
+   */
+  factory SummaryBuilder.forSdk(String sdkPath, SummaryBuildConfig config) {
+    bool strongMode = config.strongMode;
+
+    //
+    // Prepare SDK.
+    //
+    DirectoryBasedDartSdk sdk =
+        new DirectoryBasedDartSdk(new JavaFile(sdkPath), strongMode);
+    sdk.useSummary = false;
+    sdk.analysisOptions = new AnalysisOptionsImpl()..strongMode = strongMode;
+
+    //
+    // Prepare 'dart:' URIs to serialize.
+    //
+    Set<String> uriSet =
+        sdk.sdkLibraries.map((SdkLibrary library) => library.shortName).toSet();
+    if (!strongMode) {
+      uriSet.add('dart:html/nativewrappers.dart');
+    }
+    uriSet.add('dart:html_common/html_common_dart2js.dart');
+
+    Set<Source> librarySources = new HashSet<Source>();
+    for (String uri in uriSet) {
+      librarySources.add(sdk.mapDartUri(uri));
+    }
+
+    return new SummaryBuilder(librarySources, sdk.context, config);
+  }
+
+  BuilderOutput build() => new _Builder(context, librarySources).build();
+}
+
+/**
+ * Intermediary summary output result.
+ */
+class SummaryOutput {
+  final BuilderOutput spec;
+  final BuilderOutput strong;
+  SummaryOutput(this.spec, this.strong);
+
+  /**
+   * Write this summary output to the given [outputPath] and return the
+   * created file.
+   */
+  File write(String outputPath) {
+    fb.Builder builder = new fb.Builder();
+    fb.Offset specSumOffset = builder.writeListUint8(spec.sum);
+    fb.Offset specIndexOffset = builder.writeListUint8(spec.index);
+    fb.Offset strongSumOffset = builder.writeListUint8(strong.sum);
+    fb.Offset strongIndexOffset = builder.writeListUint8(strong.index);
+    builder.startTable();
+    builder.addOffset(FIELD_SPEC_SUM, specSumOffset);
+    builder.addOffset(FIELD_SPEC_INDEX, specIndexOffset);
+    builder.addOffset(FIELD_STRONG_SUM, strongSumOffset);
+    builder.addOffset(FIELD_STRONG_INDEX, strongIndexOffset);
+    fb.Offset offset = builder.endTable();
+    return new File(outputPath)
+      ..writeAsBytesSync(builder.finish(offset), mode: FileMode.WRITE_ONLY);
+  }
+}
+
+class _Builder {
+  final Set<Source> processedSources = new Set<Source>();
+
+  final PackageBundleAssembler bundleAssembler = new PackageBundleAssembler();
+  final PackageIndexAssembler indexAssembler = new PackageIndexAssembler();
+
+  final AnalysisContext context;
+  final Iterable<Source> librarySources;
+
+  _Builder(this.context, this.librarySources);
+
+  /**
+   * Build summary output.
+   */
+  BuilderOutput build() {
+    //
+    // Serialize each source.
+    //
+    for (Source source in librarySources) {
+      _serializeLibrary(source);
+    }
+    //
+    // Assemble the output.
+    //
+    List<int> sumBytes = bundleAssembler.assemble().toBuffer();
+    List<int> indexBytes = indexAssembler.assemble().toBuffer();
+    return new BuilderOutput(sumBytes, indexBytes);
+  }
+
+  /**
+   * Serialize the library with the given [source] and all its direct or
+   * indirect imports and exports.
+   */
+  void _serializeLibrary(Source source) {
+    if (!processedSources.add(source)) {
+      return;
+    }
+    LibraryElement element = context.computeLibraryElement(source);
+    bundleAssembler.serializeLibraryElement(element);
+    element.importedLibraries.forEach((e) => _serializeLibrary(e.source));
+    element.exportedLibraries.forEach((e) => _serializeLibrary(e.source));
+    // Index every unit of the library.
+    for (CompilationUnitElement unitElement in element.units) {
+      Source unitSource = unitElement.source;
+      CompilationUnit unit =
+          context.resolveCompilationUnit2(unitSource, source);
+      indexAssembler.indexUnit(unit);
+    }
+  }
+}
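
Putting the pieces of the new `summary_file_builder.dart` together, a typical caller would construct a `SummaryBuildConfig`, build an SDK bundle with `SummaryBuilder.forSdk`, and write the resulting `.sum`/`.index` pair with `BuilderOutput.writeMultiple`. A usage sketch based solely on the API added above; the SDK path, output directory, and mode name are placeholders:

```dart
// Usage sketch only: the SDK path, output directory, and mode name are
// placeholders chosen for illustration.
import 'package:analyzer/src/summary/summary_file_builder.dart';

void main() {
  SummaryBuildConfig config = new SummaryBuildConfig(strongMode: true);

  // Build the summary bundle and index for the SDK at the given path.
  SummaryBuilder builder =
      new SummaryBuilder.forSdk('/path/to/dart-sdk', config);
  BuilderOutput output = builder.build();

  // Writes strong.sum and strong.index into the output directory.
  output.writeMultiple('/path/to/output', 'strong');
}
```
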
diff --git a/pkg/analyzer/lib/src/summary/summary_sdk.dart b/pkg/analyzer/lib/src/summary/summary_sdk.dart
index ec9c988..987a187 100644
--- a/pkg/analyzer/lib/src/summary/summary_sdk.dart
+++ b/pkg/analyzer/lib/src/summary/summary_sdk.dart
@@ -8,33 +8,27 @@
 import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/src/context/cache.dart' show CacheEntry;
 import 'package:analyzer/src/context/context.dart';
-import 'package:analyzer/src/dart/element/element.dart';
 import 'package:analyzer/src/dart/element/type.dart';
 import 'package:analyzer/src/generated/constant.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/resolver.dart';
 import 'package:analyzer/src/generated/sdk.dart';
 import 'package:analyzer/src/generated/source.dart'
-    show DartUriResolver, Source, SourceFactory, SourceKind;
+    show DartUriResolver, Source, SourceFactory;
 import 'package:analyzer/src/summary/idl.dart';
 import 'package:analyzer/src/summary/package_bundle_reader.dart';
 import 'package:analyzer/src/summary/resynthesize.dart';
 import 'package:analyzer/src/task/dart.dart';
-import 'package:analyzer/task/dart.dart';
-import 'package:analyzer/task/model.dart'
-    show AnalysisTarget, ResultDescriptor, TargetedResult;
+import 'package:analyzer/task/model.dart' show ResultDescriptor, TargetedResult;
 
-class SdkSummaryResultProvider implements SummaryResultProvider {
-  final InternalAnalysisContext context;
-  final PackageBundle bundle;
+class SdkSummaryResultProvider extends ResynthesizerResultProvider {
   final SummaryTypeProvider typeProvider = new SummaryTypeProvider();
 
-  @override
-  SummaryResynthesizer resynthesizer;
-
-  SdkSummaryResultProvider(this.context, this.bundle, bool strongMode) {
-    resynthesizer = new SdkSummaryResynthesizer(
-        context, typeProvider, context.sourceFactory, bundle, strongMode);
+  SdkSummaryResultProvider(
+      InternalAnalysisContext context, PackageBundle bundle, bool strongMode)
+      : super(context, new SummaryDataStore(const <String>[])) {
+    addBundle(null, bundle);
+    createResynthesizer(null, typeProvider);
     _buildCoreLibrary();
     _buildAsyncLibrary();
     resynthesizer.finalizeCoreAsyncLibraries();
@@ -47,87 +41,12 @@
       entry.setValue(result, typeProvider, TargetedResult.EMPTY_LIST);
       return true;
     }
-    AnalysisTarget target = entry.target;
-    // Only SDK sources after this point.
-    if (target.source == null || !target.source.isInSystemLibrary) {
-      return false;
-    }
-    // Constant expressions are always resolved in summaries.
-    if (result == CONSTANT_EXPRESSION_RESOLVED &&
-        target is ConstantEvaluationTarget) {
-      entry.setValue(result, true, TargetedResult.EMPTY_LIST);
-      return true;
-    }
-    if (target is Source) {
-      if (result == LIBRARY_ELEMENT1 ||
-          result == LIBRARY_ELEMENT2 ||
-          result == LIBRARY_ELEMENT3 ||
-          result == LIBRARY_ELEMENT4 ||
-          result == LIBRARY_ELEMENT5 ||
-          result == LIBRARY_ELEMENT6 ||
-          result == LIBRARY_ELEMENT7 ||
-          result == LIBRARY_ELEMENT8 ||
-          result == LIBRARY_ELEMENT9 ||
-          result == LIBRARY_ELEMENT) {
-        // TODO(scheglov) try to find a way to avoid listing every result
-        // e.g. "result.whenComplete == LIBRARY_ELEMENT"
-        String uri = target.uri.toString();
-        LibraryElement libraryElement = resynthesizer.getLibraryElement(uri);
-        entry.setValue(result, libraryElement, TargetedResult.EMPTY_LIST);
-        return true;
-      } else if (result == READY_LIBRARY_ELEMENT2 ||
-          result == READY_LIBRARY_ELEMENT6 ||
-          result == READY_LIBRARY_ELEMENT7) {
-        entry.setValue(result, true, TargetedResult.EMPTY_LIST);
-        return true;
-      } else if (result == SOURCE_KIND) {
-        String uri = target.uri.toString();
-        SourceKind kind = _getSourceKind(uri);
-        if (kind != null) {
-          entry.setValue(result, kind, TargetedResult.EMPTY_LIST);
-          return true;
-        }
-        return false;
-      } else {
-//        throw new UnimplementedError('$result of $target');
-      }
-    }
-    if (target is LibrarySpecificUnit) {
-      if (target.library == null || !target.library.isInSystemLibrary) {
-        return false;
-      }
-      if (result == CREATED_RESOLVED_UNIT1 ||
-          result == CREATED_RESOLVED_UNIT2 ||
-          result == CREATED_RESOLVED_UNIT3 ||
-          result == CREATED_RESOLVED_UNIT4 ||
-          result == CREATED_RESOLVED_UNIT5 ||
-          result == CREATED_RESOLVED_UNIT6 ||
-          result == CREATED_RESOLVED_UNIT7 ||
-          result == CREATED_RESOLVED_UNIT8 ||
-          result == CREATED_RESOLVED_UNIT9 ||
-          result == CREATED_RESOLVED_UNIT10 ||
-          result == CREATED_RESOLVED_UNIT11 ||
-          result == CREATED_RESOLVED_UNIT12) {
-        entry.setValue(result, true, TargetedResult.EMPTY_LIST);
-        return true;
-      }
-      if (result == COMPILATION_UNIT_ELEMENT) {
-        String libraryUri = target.library.uri.toString();
-        String unitUri = target.unit.uri.toString();
-        CompilationUnitElement unit = resynthesizer.getElement(
-            new ElementLocationImpl.con3(<String>[libraryUri, unitUri]));
-        if (unit != null) {
-          entry.setValue(result, unit, TargetedResult.EMPTY_LIST);
-          return true;
-        }
-      }
-    } else if (target is VariableElement) {
-      if (result == PROPAGATED_VARIABLE || result == INFERRED_STATIC_VARIABLE) {
-        entry.setValue(result, target, TargetedResult.EMPTY_LIST);
-        return true;
-      }
-    }
-    return false;
+    return super.compute(entry, result);
+  }
+
+  @override
+  bool hasResultsForSource(Source source) {
+    return source.source != null && source.isInSystemLibrary;
   }
 
   void _buildAsyncLibrary() {
@@ -139,19 +58,6 @@
     LibraryElement library = resynthesizer.getLibraryElement('dart:core');
     typeProvider.initializeCore(library);
   }
-
-  /**
-   * Return the [SourceKind] of the given [uri] or `null` if it is unknown.
-   */
-  SourceKind _getSourceKind(String uri) {
-    if (bundle.linkedLibraryUris.contains(uri)) {
-      return SourceKind.LIBRARY;
-    }
-    if (bundle.unlinkedUnitUris.contains(uri)) {
-      return SourceKind.PART;
-    }
-    return null;
-  }
 }
 
 /**
@@ -266,22 +172,12 @@
 }
 
 /**
- * Provider for analysis results.
- */
-abstract class SummaryResultProvider extends ResultProvider {
-  /**
-   * The [SummaryResynthesizer] of this context, maybe `null`.
-   */
-  SummaryResynthesizer get resynthesizer;
-}
-
-/**
  * Implementation of [TypeProvider] which can be initialized separately with
  * `dart:core` and `dart:async` libraries.
  */
 class SummaryTypeProvider implements TypeProvider {
-  bool _isCoreInitialized = false;
-  bool _isAsyncInitialized = false;
+  LibraryElement _coreLibrary;
+  LibraryElement _asyncLibrary;
 
   InterfaceType _boolType;
   InterfaceType _deprecatedType;
@@ -308,7 +204,8 @@
 
   @override
   InterfaceType get boolType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _boolType ??= _getType(_coreLibrary, "bool");
     return _boolType;
   }
 
@@ -317,13 +214,15 @@
 
   @override
   InterfaceType get deprecatedType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _deprecatedType ??= _getType(_coreLibrary, "Deprecated");
     return _deprecatedType;
   }
 
   @override
   InterfaceType get doubleType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _doubleType ??= _getType(_coreLibrary, "double");
     return _doubleType;
   }
 
@@ -332,55 +231,64 @@
 
   @override
   InterfaceType get functionType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _functionType ??= _getType(_coreLibrary, "Function");
     return _functionType;
   }
 
   @override
   InterfaceType get futureDynamicType {
-    assert(_isAsyncInitialized);
+    assert(_asyncLibrary != null);
+    _futureDynamicType ??= futureType.instantiate(<DartType>[dynamicType]);
     return _futureDynamicType;
   }
 
   @override
   InterfaceType get futureNullType {
-    assert(_isAsyncInitialized);
+    assert(_asyncLibrary != null);
+    _futureNullType ??= futureType.instantiate(<DartType>[nullType]);
     return _futureNullType;
   }
 
   @override
   InterfaceType get futureType {
-    assert(_isAsyncInitialized);
+    assert(_asyncLibrary != null);
+    _futureType ??= _getType(_asyncLibrary, "Future");
     return _futureType;
   }
 
   @override
   InterfaceType get intType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _intType ??= _getType(_coreLibrary, "int");
     return _intType;
   }
 
   @override
   InterfaceType get iterableDynamicType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _iterableDynamicType ??= iterableType.instantiate(<DartType>[dynamicType]);
     return _iterableDynamicType;
   }
 
   @override
   InterfaceType get iterableType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _iterableType ??= _getType(_coreLibrary, "Iterable");
     return _iterableType;
   }
 
   @override
   InterfaceType get listType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _listType ??= _getType(_coreLibrary, "List");
     return _listType;
   }
 
   @override
   InterfaceType get mapType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _mapType ??= _getType(_coreLibrary, "Map");
     return _mapType;
   }
 
@@ -404,55 +312,64 @@
 
   @override
   InterfaceType get nullType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _nullType ??= _getType(_coreLibrary, "Null");
     return _nullType;
   }
 
   @override
   InterfaceType get numType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _numType ??= _getType(_coreLibrary, "num");
     return _numType;
   }
 
   @override
   InterfaceType get objectType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _objectType ??= _getType(_coreLibrary, "Object");
     return _objectType;
   }
 
   @override
   InterfaceType get stackTraceType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _stackTraceType ??= _getType(_coreLibrary, "StackTrace");
     return _stackTraceType;
   }
 
   @override
   InterfaceType get streamDynamicType {
-    assert(_isAsyncInitialized);
+    assert(_asyncLibrary != null);
+    _streamDynamicType ??= streamType.instantiate(<DartType>[dynamicType]);
     return _streamDynamicType;
   }
 
   @override
   InterfaceType get streamType {
-    assert(_isAsyncInitialized);
+    assert(_asyncLibrary != null);
+    _streamType ??= _getType(_asyncLibrary, "Stream");
     return _streamType;
   }
 
   @override
   InterfaceType get stringType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _stringType ??= _getType(_coreLibrary, "String");
     return _stringType;
   }
 
   @override
   InterfaceType get symbolType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _symbolType ??= _getType(_coreLibrary, "Symbol");
     return _symbolType;
   }
 
   @override
   InterfaceType get typeType {
-    assert(_isCoreInitialized);
+    assert(_coreLibrary != null);
+    _typeType ??= _getType(_coreLibrary, "Type");
     return _typeType;
   }
 
@@ -463,39 +380,18 @@
    * Initialize the `dart:async` types provided by this type provider.
    */
   void initializeAsync(LibraryElement library) {
-    assert(_isCoreInitialized);
-    assert(!_isAsyncInitialized);
-    _isAsyncInitialized = true;
-    _futureType = _getType(library, "Future");
-    _streamType = _getType(library, "Stream");
-    _futureDynamicType = _futureType.instantiate(<DartType>[dynamicType]);
-    _futureNullType = _futureType.instantiate(<DartType>[_nullType]);
-    _streamDynamicType = _streamType.instantiate(<DartType>[dynamicType]);
+    assert(_coreLibrary != null);
+    assert(_asyncLibrary == null);
+    _asyncLibrary = library;
   }
 
   /**
    * Initialize the `dart:core` types provided by this type provider.
    */
   void initializeCore(LibraryElement library) {
-    assert(!_isCoreInitialized);
-    assert(!_isAsyncInitialized);
-    _isCoreInitialized = true;
-    _boolType = _getType(library, "bool");
-    _deprecatedType = _getType(library, "Deprecated");
-    _doubleType = _getType(library, "double");
-    _functionType = _getType(library, "Function");
-    _intType = _getType(library, "int");
-    _iterableType = _getType(library, "Iterable");
-    _listType = _getType(library, "List");
-    _mapType = _getType(library, "Map");
-    _nullType = _getType(library, "Null");
-    _numType = _getType(library, "num");
-    _objectType = _getType(library, "Object");
-    _stackTraceType = _getType(library, "StackTrace");
-    _stringType = _getType(library, "String");
-    _symbolType = _getType(library, "Symbol");
-    _typeType = _getType(library, "Type");
-    _iterableDynamicType = _iterableType.instantiate(<DartType>[dynamicType]);
+    assert(_coreLibrary == null);
+    assert(_asyncLibrary == null);
+    _coreLibrary = library;
   }
 
   /**
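
The `SummaryTypeProvider` changes above replace eager initialization (computing every core and async type inside `initializeCore`/`initializeAsync`) with getters that resolve and cache each type on first use via `??=`, so only the library references need to be stored up front. A self-contained illustration of that lazy-cache pattern, with a hypothetical `Library` class standing in for the real `LibraryElement`:

```dart
// Standalone sketch of the lazy `??=` caching pattern; Library and _getType
// are hypothetical simplifications of LibraryElement and the real helper.
class Library {
  final Map<String, String> types;
  Library(this.types);
}

class LazyTypeProvider {
  Library _coreLibrary;
  String _boolType;
  String _intType;

  /**
   * Store only the library; individual types are resolved on demand.
   */
  void initializeCore(Library library) {
    assert(_coreLibrary == null);
    _coreLibrary = library;
  }

  String _getType(Library library, String name) => library.types[name];

  String get boolType {
    assert(_coreLibrary != null);
    _boolType ??= _getType(_coreLibrary, 'bool');
    return _boolType;
  }

  String get intType {
    assert(_coreLibrary != null);
    _intType ??= _getType(_coreLibrary, 'int');
    return _intType;
  }
}

void main() {
  LazyTypeProvider provider = new LazyTypeProvider();
  provider.initializeCore(new Library(<String, String>{
    'bool': 'dart:core#bool',
    'int': 'dart:core#int'
  }));
  print(provider.boolType); // Resolved and cached on first access.
  print(provider.boolType); // Served from the cached field afterwards.
}
```

Only the types that are actually requested get looked up, and repeated accesses return the cached value.
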
diff --git a/pkg/analyzer/lib/src/task/dart.dart b/pkg/analyzer/lib/src/task/dart.dart
index 3d9d552..25e74ec 100644
--- a/pkg/analyzer/lib/src/task/dart.dart
+++ b/pkg/analyzer/lib/src/task/dart.dart
@@ -36,6 +36,7 @@
 import 'package:analyzer/src/task/driver.dart';
 import 'package:analyzer/src/task/general.dart';
 import 'package:analyzer/src/task/html.dart';
+import 'package:analyzer/src/task/incremental_element_builder.dart';
 import 'package:analyzer/src/task/inputs.dart';
 import 'package:analyzer/src/task/model.dart';
 import 'package:analyzer/src/task/strong/checker.dart';
@@ -51,6 +52,17 @@
     const SimpleResultCachingPolicy(16384, 16384);
 
 /**
+ * The [ResultCachingPolicy] for ASTs that can be reused even when a library
+ * on which the source depends is changed.  It is worth keeping some number
+ * of these ASTs in memory in order to avoid re-parsing sources.  In contrast,
+ * none of the ASTs managed by [AST_CACHING_POLICY] can be reused after a
+ * change, so it is worth keeping them in memory while analysis is being
+ * performed, but once analysis is done, they can be flushed.
+ */
+const ResultCachingPolicy<CompilationUnit> AST_REUSABLE_CACHING_POLICY =
+    const SimpleResultCachingPolicy(1024, 1024);
+
+/**
  * The [ResultCachingPolicy] for lists of [ConstantEvaluationTarget]s.
  */
 const ResultCachingPolicy<List<ConstantEvaluationTarget>>
@@ -335,6 +347,12 @@
         'HINT_ERRORS', AnalysisError.NO_ERRORS);
 
 /**
+ * The ignore information for a [Source].
+ */
+final ResultDescriptor<IgnoreInfo> IGNORE_INFO =
+    new ResultDescriptor<IgnoreInfo>('IGNORE_INFO', null);
+
+/**
  * A list of the [VariableElement]s whose type should be inferred that another
  * inferable static variable (the target) depends on.
  *
@@ -375,7 +393,7 @@
  *
  * Only non-empty in strongMode.
  *
- * The result is only available for [LibrarySpecificUnit]s.
+ * The result is only available for [Source]s representing a library.
  */
 final ListResultDescriptor<LibraryElement> LIBRARY_CYCLE =
     new ListResultDescriptor<LibraryElement>('LIBRARY_CYCLE', null);
@@ -387,7 +405,7 @@
  *
  * Only non-empty in strongMode.
  *
- * The result is only available for [LibrarySpecificUnit]s.
+ * The result is only available for [Source]s representing a library.
  */
 final ListResultDescriptor<CompilationUnitElement> LIBRARY_CYCLE_DEPENDENCIES =
     new ListResultDescriptor<CompilationUnitElement>(
@@ -400,7 +418,7 @@
  *
  * Only non-empty in strongMode.
  *
- * The result is only available for [LibrarySpecificUnit]s.
+ * The result is only available for [Source]s representing a library.
  */
 final ListResultDescriptor<CompilationUnitElement> LIBRARY_CYCLE_UNITS =
     new ListResultDescriptor<CompilationUnitElement>(
@@ -421,7 +439,7 @@
 /**
  * The partial [LibraryElement] associated with a library.
  *
- * In addition to [LIBRARY_ELEMENT1] [LibraryElement.imports] and
+ * In addition to [LIBRARY_ELEMENT1], [LibraryElement.imports] and
  * [LibraryElement.exports] are set.
  *
  * The result is only available for [Source]s representing a library.
@@ -512,6 +530,23 @@
         cachingPolicy: ELEMENT_CACHING_POLICY);
 
 /**
+ * List of all `LIBRARY_ELEMENT` results.
+ */
+final List<ResultDescriptor<LibraryElement>> LIBRARY_ELEMENT_RESULTS =
+    <ResultDescriptor<LibraryElement>>[
+  LIBRARY_ELEMENT1,
+  LIBRARY_ELEMENT2,
+  LIBRARY_ELEMENT3,
+  LIBRARY_ELEMENT4,
+  LIBRARY_ELEMENT5,
+  LIBRARY_ELEMENT6,
+  LIBRARY_ELEMENT7,
+  LIBRARY_ELEMENT8,
+  LIBRARY_ELEMENT9,
+  LIBRARY_ELEMENT
+];
+
+/**
  * The flag specifying whether all analysis errors are computed in a specific
  * library.
  *
@@ -711,7 +746,7 @@
  */
 final ResultDescriptor<CompilationUnit> RESOLVED_UNIT1 =
     new ResultDescriptor<CompilationUnit>('RESOLVED_UNIT1', null,
-        cachingPolicy: AST_CACHING_POLICY);
+        cachingPolicy: AST_REUSABLE_CACHING_POLICY);
 
 /**
  * The partially resolved [CompilationUnit] associated with a compilation unit.
@@ -862,6 +897,27 @@
         cachingPolicy: AST_CACHING_POLICY);
 
 /**
+ * List of all `RESOLVED_UNITx` results.
+ */
+final List<ResultDescriptor<CompilationUnit>> RESOLVED_UNIT_RESULTS =
+    <ResultDescriptor<CompilationUnit>>[
+  RESOLVED_UNIT1,
+  RESOLVED_UNIT2,
+  RESOLVED_UNIT3,
+  RESOLVED_UNIT4,
+  RESOLVED_UNIT5,
+  RESOLVED_UNIT6,
+  RESOLVED_UNIT7,
+  RESOLVED_UNIT8,
+  RESOLVED_UNIT9,
+  RESOLVED_UNIT10,
+  RESOLVED_UNIT11,
+  RESOLVED_UNIT12,
+  RESOLVED_UNIT13,
+  RESOLVED_UNIT
+];
+
+/**
  * The errors produced while scanning a compilation unit.
  *
  * The list will be empty if there were no errors, but will not be `null`.
@@ -1329,6 +1385,7 @@
     //
     // Compute export namespace.
     //
+    library.exportNamespace = null;
     NamespaceBuilder builder = new NamespaceBuilder();
     Namespace namespace = builder.createExportNamespaceForLibrary(library);
     library.exportNamespace = namespace;
@@ -1393,7 +1450,8 @@
       'BuildLibraryElementTask', createTask, buildInputs, <ResultDescriptor>[
     BUILD_LIBRARY_ERRORS,
     LIBRARY_ELEMENT1,
-    IS_LAUNCHABLE
+    IS_LAUNCHABLE,
+    REFERENCED_NAMES
   ]);
 
   /**
@@ -1565,12 +1623,19 @@
       Directive directive = directivesToResolve[i];
       directive.element = libraryElement;
     }
+    // Compute referenced names.
+    ReferencedNames referencedNames = new ReferencedNames(librarySource);
+    new ReferencedNamesBuilder(referencedNames).build(definingCompilationUnit);
+    for (CompilationUnit partUnit in partUnits) {
+      new ReferencedNamesBuilder(referencedNames).build(partUnit);
+    }
     //
     // Record outputs.
     //
     outputs[BUILD_LIBRARY_ERRORS] = errors;
     outputs[LIBRARY_ELEMENT1] = libraryElement;
     outputs[IS_LAUNCHABLE] = entryPoint != null;
+    outputs[REFERENCED_NAMES] = referencedNames;
   }
 
   /**
@@ -1798,8 +1863,9 @@
     LibraryElement coreLibrary = getRequiredInput(CORE_INPUT);
     LibraryElement asyncLibrary = getOptionalInput(ASYNC_INPUT);
     if (asyncLibrary == null) {
-      asyncLibrary =
-          (context as AnalysisContextImpl).createMockAsyncLib(coreLibrary);
+      Source asyncSource = context.sourceFactory.forUri(DartSdk.DART_ASYNC);
+      asyncLibrary = (context as AnalysisContextImpl)
+          .createMockAsyncLib(coreLibrary, asyncSource);
     }
     Namespace coreNamespace = coreLibrary.publicNamespace;
     Namespace asyncNamespace = asyncLibrary.publicNamespace;
@@ -1808,7 +1874,8 @@
     //
     if (!context.analysisOptions.enableAsync) {
       AnalysisContextImpl contextImpl = context;
-      asyncLibrary = contextImpl.createMockAsyncLib(coreLibrary);
+      Source asyncSource = context.sourceFactory.forUri(DartSdk.DART_ASYNC);
+      asyncLibrary = contextImpl.createMockAsyncLib(coreLibrary, asyncSource);
       asyncNamespace = asyncLibrary.publicNamespace;
     }
     TypeProvider typeProvider =
@@ -2169,10 +2236,10 @@
    * given [target].
    */
   static Map<String, TaskInput> buildInputs(AnalysisTarget target) {
-    LibrarySpecificUnit unit = target;
+    Source librarySource = target;
     return <String, TaskInput>{
-      LIBRARY_ELEMENT_INPUT: LIBRARY_ELEMENT2.of(unit.library),
-      'resolveReachableLibraries': READY_LIBRARY_ELEMENT2.of(unit.library),
+      LIBRARY_ELEMENT_INPUT: LIBRARY_ELEMENT2.of(librarySource),
+      'resolveReachableLibraries': READY_LIBRARY_ELEMENT2.of(librarySource),
     };
   }
 
@@ -2419,36 +2486,162 @@
 class DartDelta extends Delta {
   bool hasDirectiveChange = false;
 
-  final Set<String> addedNames = new Set<String>();
   final Set<String> changedNames = new Set<String>();
-  final Set<String> removedNames = new Set<String>();
+  final Map<Source, Set<String>> changedPrivateNames = <Source, Set<String>>{};
 
-  final Set<Source> invalidatedSources = new Set<Source>();
+  final Map<String, ClassElementDelta> changedClasses =
+      <String, ClassElementDelta>{};
 
-  DartDelta(Source source) : super(source) {
-    invalidatedSources.add(source);
+  /**
+   * The cache of libraries in which all results are invalid.
+   */
+  final Set<Source> librariesWithInvalidResults = new Set<Source>();
+
+  /**
+   * The cache of libraries in which all results are valid.
+   */
+  final Set<Source> librariesWithValidResults = new Set<Source>();
+
+  DartDelta(Source source) : super(source);
+
+  /**
+   * Add names that are changed in the given [references].
+   */
+  void addChangedElements(ReferencedNames references) {
+    Source refLibrary = references.librarySource;
+    bool hasProgress = true;
+    while (hasProgress) {
+      hasProgress = false;
+      // Classes that extend changed classes are also changed.
+      // If there is a delta for a superclass, use it for the subclass.
+      // Otherwise mark the subclass as "general name change".
+      references.superToSubs.forEach((String superName, Set<String> subNames) {
+        ClassElementDelta superDelta = changedClasses[superName];
+        for (String subName in subNames) {
+          if (superDelta != null) {
+            ClassElementDelta subDelta = changedClasses.putIfAbsent(subName,
+                () => new ClassElementDelta(null, refLibrary, subName));
+            _log(() => '$subName in $refLibrary has delta because its '
+                'superclass $superName has delta');
+            if (subDelta.superDeltas.add(superDelta)) {
+              hasProgress = true;
+            }
+          } else if (isChanged(refLibrary, superName)) {
+            if (nameChanged(refLibrary, subName)) {
+              _log(() => '$subName in $refLibrary is changed because its '
+                  'superclass $superName is changed');
+              hasProgress = true;
+            }
+          }
+        }
+      });
+      // If a user element uses a changed top-level element, then the user is
+      // also changed. Note that if a changed class with delta is used, this
+      // does not make the user changed - classes with delta keep their
+      // original elements, so resolution of their names does not change.
+      references.userToDependsOn.forEach((user, dependencies) {
+        for (String dependency in dependencies) {
+          if (isChangedOrClassMember(refLibrary, dependency)) {
+            if (nameChanged(refLibrary, user)) {
+              _log(() => '$user in $refLibrary is changed because '
+                  'of $dependency in $dependencies');
+              hasProgress = true;
+            }
+          }
+        }
+      });
+    }
   }
 
-  void elementAdded(Element element) {
-    addedNames.add(element.name);
+  void classChanged(ClassElementDelta classDelta) {
+    changedClasses[classDelta.name] = classDelta;
   }
 
   void elementChanged(Element element) {
-    changedNames.add(element.name);
+    Source librarySource = element.library.source;
+    nameChanged(librarySource, element.name);
   }
 
-  void elementRemoved(Element element) {
-    removedNames.add(element.name);
+  bool hasAffectedReferences(ReferencedNames references) {
+    Source refLibrary = references.librarySource;
+    // Verify errors must be recomputed when a superclass changes.
+    for (String superName in references.superToSubs.keys) {
+      if (isChangedOrClass(refLibrary, superName)) {
+        _log(() => '$refLibrary is affected because '
+            '${references.superToSubs[superName]} subclasses $superName');
+        return true;
+      }
+    }
+    // Verify errors must be recomputed when an instantiated class changes.
+    for (String name in references.instantiatedNames) {
+      if (isChangedOrClass(refLibrary, name)) {
+        _log(() => '$refLibrary is affected because $name is instantiated');
+        return true;
+      }
+    }
+    // Resolution must be performed when a referenced element changes.
+    for (String name in references.names) {
+      if (isChangedOrClassMember(refLibrary, name)) {
+        _log(() => '$refLibrary is affected by $name');
+        return true;
+      }
+    }
+    return false;
   }
 
-  bool isNameAffected(String name) {
-    return addedNames.contains(name) ||
-        changedNames.contains(name) ||
-        removedNames.contains(name);
+  /**
+   * Return `true` if the given [name], used in a unit of the [librarySource],
+   * is affected by a changed top-level element, excluding classes.
+   */
+  bool isChanged(Source librarySource, String name) {
+    if (_isPrivateName(name)) {
+      if (changedPrivateNames[librarySource]?.contains(name) ?? false) {
+        return true;
+      }
+    }
+    return changedNames.contains(name);
   }
 
-  bool nameChanged(String name) {
-    return changedNames.add(name);
+  /**
+   * Return `true` if the given [name], used in a unit of the [librarySource],
+   * is affected by a changed top-level element or a class.
+   */
+  bool isChangedOrClass(Source librarySource, String name) {
+    if (isChanged(librarySource, name)) {
+      return true;
+    }
+    return changedClasses[name] != null;
+  }
+
+  /**
+   * Return `true` if the given [name], used in a unit of the [librarySource],
+   * is affected by a changed top-level element or a class member.
+   */
+  bool isChangedOrClassMember(Source librarySource, String name) {
+    if (isChanged(librarySource, name)) {
+      return true;
+    }
+    // TODO(scheglov) Optimize this.
+    for (ClassElementDelta classDelta in changedClasses.values) {
+      if (classDelta.hasChanges(librarySource, name)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Register the fact that the given [name], defined in the [librarySource],
+   * is changed.  Return `true` if the [name] is a new name, not yet registered.
+   */
+  bool nameChanged(Source librarySource, String name) {
+    if (_isPrivateName(name)) {
+      return changedPrivateNames
+          .putIfAbsent(librarySource, () => new Set<String>())
+          .add(name);
+    } else {
+      return changedNames.add(name);
+    }
   }
 
   @override
@@ -2458,61 +2651,82 @@
       return DeltaResult.INVALIDATE;
     }
     // Prepare target source.
-    Source targetSource = null;
+    Source targetSource = target.source;
+    Source librarySource = target.librarySource;
     if (target is Source) {
-      targetSource = target;
+      if (context.getKindOf(target) == SourceKind.LIBRARY) {
+        librarySource = target;
+      }
     }
-    if (target is LibrarySpecificUnit) {
-      targetSource = target.library;
+    // We don't know what to do with the given target, invalidate it.
+    if (targetSource == null) {
+      return DeltaResult.INVALIDATE;
     }
-    if (target is Element) {
-      targetSource = target.source;
+    // Keep results that don't change: any library.
+    if (_isTaskResult(BuildLibraryElementTask.DESCRIPTOR, descriptor) ||
+        _isTaskResult(BuildDirectiveElementsTask.DESCRIPTOR, descriptor) ||
+        _isTaskResult(ResolveDirectiveElementsTask.DESCRIPTOR, descriptor) ||
+        _isTaskResult(BuildEnumMemberElementsTask.DESCRIPTOR, descriptor) ||
+        _isTaskResult(BuildSourceExportClosureTask.DESCRIPTOR, descriptor) ||
+        _isTaskResult(ReadyLibraryElement2Task.DESCRIPTOR, descriptor) ||
+        _isTaskResult(ComputeLibraryCycleTask.DESCRIPTOR, descriptor)) {
+      return DeltaResult.KEEP_CONTINUE;
     }
-    // Keep results that are updated incrementally.
-    // If we want to analyze only some references to the source being changed,
-    // we need to keep the same instances of CompilationUnitElement and
-    // LibraryElement.
+    // Keep results that don't change: changed library.
     if (targetSource == source) {
-      if (ParseDartTask.DESCRIPTOR.results.contains(descriptor)) {
-        return DeltaResult.KEEP_CONTINUE;
-      }
-      if (BuildCompilationUnitElementTask.DESCRIPTOR.results
-          .contains(descriptor)) {
-        return DeltaResult.KEEP_CONTINUE;
-      }
-      if (BuildLibraryElementTask.DESCRIPTOR.results.contains(descriptor)) {
-        // Invalidate cached results.
-        if (value is LibraryElementImpl) {
-          value.exportNamespace = null;
-        }
+      if (_isTaskResult(ScanDartTask.DESCRIPTOR, descriptor) ||
+          _isTaskResult(ParseDartTask.DESCRIPTOR, descriptor) ||
+          _isTaskResult(
+              BuildCompilationUnitElementTask.DESCRIPTOR, descriptor) ||
+          _isTaskResult(BuildLibraryElementTask.DESCRIPTOR, descriptor)) {
         return DeltaResult.KEEP_CONTINUE;
       }
       return DeltaResult.INVALIDATE;
     }
-    // Use the target library dependency information to decide whether
-    // the delta affects the library.
-    if (targetSource != null) {
-      List<Source> librarySources =
-          context.getLibrariesContaining(targetSource);
-      int length = librarySources.length;
-      for (int i = 0; i < length; i++) {
-        Source librarySource = librarySources[i];
-        AnalysisCache cache = context.analysisCache;
-        ReferencedNames referencedNames =
-            cache.getValue(librarySource, REFERENCED_NAMES);
-        if (referencedNames == null) {
-          return DeltaResult.INVALIDATE;
-        }
-        referencedNames.addChangedElements(this);
-        if (referencedNames.isAffectedBy(this)) {
-          return DeltaResult.INVALIDATE;
-        }
+    // Keep results that don't change: dependent library.
+    if (targetSource != source) {
+      if (_isTaskResult(BuildPublicNamespaceTask.DESCRIPTOR, descriptor)) {
+        return DeltaResult.KEEP_CONTINUE;
       }
+    }
+    // Handle in-library results only for now.
+    if (librarySource != null) {
+      // Use cached library results.
+      if (librariesWithInvalidResults.contains(librarySource)) {
+        return DeltaResult.INVALIDATE;
+      }
+      if (librariesWithValidResults.contains(librarySource)) {
+        return DeltaResult.STOP;
+      }
+      // Compute the library result.
+      ReferencedNames referencedNames =
+          context.getResult(librarySource, REFERENCED_NAMES);
+      if (referencedNames == null) {
+        return DeltaResult.INVALIDATE_NO_DELTA;
+      }
+      addChangedElements(referencedNames);
+      if (hasAffectedReferences(referencedNames)) {
+        librariesWithInvalidResults.add(librarySource);
+        return DeltaResult.INVALIDATE;
+      }
+      librariesWithValidResults.add(librarySource);
       return DeltaResult.STOP;
     }
     // We don't know what to do with the given target, invalidate it.
     return DeltaResult.INVALIDATE;
   }
+
+  void _log(String getMessage()) {
+//    String message = getMessage();
+//    print(message);
+  }
+
+  static bool _isPrivateName(String name) => name.startsWith('_');
+
+  static bool _isTaskResult(
+      TaskDescriptor taskDescriptor, ResultDescriptor result) {
+    return taskDescriptor.results.contains(result);
+  }
 }
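
To make the public/private split in nameChanged() and isChanged() above concrete, here is a small self-contained model (deliberately not the analyzer's classes; library names are plain strings here): public names are tracked globally, while private names are tracked per defining library, so a change to `_helper` in one library cannot invalidate results in another.

    class NameDeltaModel {
      final Set<String> changedPublicNames = new Set<String>();
      final Map<String, Set<String>> changedPrivateNames =
          <String, Set<String>>{};

      void nameChanged(String library, String name) {
        if (name.startsWith('_')) {
          changedPrivateNames
              .putIfAbsent(library, () => new Set<String>())
              .add(name);
        } else {
          changedPublicNames.add(name);
        }
      }

      bool isChanged(String library, String name) {
        if (name.startsWith('_')) {
          return changedPrivateNames[library]?.contains(name) ?? false;
        }
        return changedPublicNames.contains(name);
      }
    }

    void main() {
      var delta = new NameDeltaModel();
      delta.nameChanged('lib_a.dart', '_helper');
      delta.nameChanged('lib_a.dart', 'PublicApi');
      print(delta.isChanged('lib_a.dart', '_helper')); // true
      print(delta.isChanged('lib_b.dart', '_helper')); // false: private name
      print(delta.isChanged('lib_b.dart', 'PublicApi')); // true: public name
    }
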
 
 /**
@@ -2520,8 +2734,6 @@
  * of errors.
  */
 class DartErrorsTask extends SourceBasedAnalysisTask {
-  static final RegExp spacesRegExp = new RegExp(r'\s+');
-
   /**
    * The task descriptor describing this kind of task.
    */
@@ -2529,18 +2741,15 @@
       createTask, buildInputs, <ResultDescriptor>[DART_ERRORS]);
 
   /**
+   * The name of the [IGNORE_INFO_INPUT] input.
+   */
+  static const String IGNORE_INFO_INPUT = 'IGNORE_INFO_INPUT';
+
+  /**
    * The name of the [LINE_INFO_INPUT] input.
    */
   static const String LINE_INFO_INPUT = 'LINE_INFO_INPUT';
 
-  /**
-   * The name of the [PARSED_UNIT_INPUT] input.
-   */
-  static const String PARSED_UNIT_INPUT = 'PARSED_UNIT_INPUT';
-
-  // Prefix for comments ignoring error codes.
-  static const String _normalizedIgnorePrefix = '//ignore:';
-
   DartErrorsTask(InternalAnalysisContext context, AnalysisTarget target)
       : super(context, target);
 
@@ -2584,69 +2793,19 @@
     outputs[DART_ERRORS] = errors;
   }
 
-  Token _advanceToLine(Token token, LineInfo lineInfo, int line) {
-    int offset = lineInfo.getOffsetOfLine(line - 1); // 0-based
-    while (token.offset < offset) {
-      token = token.next;
-    }
-    return token;
-  }
-
   List<AnalysisError> _filterIgnores(List<AnalysisError> errors) {
     if (errors.isEmpty) {
       return errors;
     }
 
-    // Sort errors.
-    errors.sort((AnalysisError e1, AnalysisError e2) => e1.offset - e2.offset);
+    IgnoreInfo ignoreInfo = getRequiredInput(IGNORE_INFO_INPUT);
+    if (!ignoreInfo.hasIgnores) {
+      return errors;
+    }
 
-    CompilationUnit cu = getRequiredInput(PARSED_UNIT_INPUT);
-    Token token = cu.beginToken;
     LineInfo lineInfo = getRequiredInput(LINE_INFO_INPUT);
 
-    bool isIgnored(AnalysisError error) {
-      int errorLine = lineInfo.getLocation(error.offset).lineNumber;
-      token = _advanceToLine(token, lineInfo, errorLine);
-
-      //Check for leading comment.
-      Token comments = token.precedingComments;
-      while (comments?.next != null) {
-        comments = comments.next;
-      }
-      if (_isIgnoredBy(error, comments)) {
-        return true;
-      }
-
-      //Check for trailing comment.
-      int lineNumber = errorLine + 1;
-      if (lineNumber <= lineInfo.lineCount) {
-        Token nextLine = _advanceToLine(token, lineInfo, lineNumber);
-        comments = nextLine.precedingComments;
-        if (comments != null && nextLine.previous.type != TokenType.EOF) {
-          int commentLine = lineInfo.getLocation(comments.offset).lineNumber;
-          if (commentLine == errorLine) {
-            return _isIgnoredBy(error, comments);
-          }
-        }
-      }
-
-      return false;
-    }
-
-    return errors.where((AnalysisError e) => !isIgnored(e)).toList();
-  }
-
-  bool _isIgnoredBy(AnalysisError error, Token comment) {
-    //Normalize first.
-    String contents =
-        comment?.lexeme?.toLowerCase()?.replaceAll(spacesRegExp, '');
-    if (contents == null || !contents.startsWith(_normalizedIgnorePrefix)) {
-      return false;
-    }
-    return contents
-        .substring(_normalizedIgnorePrefix.length)
-        .split(',')
-        .contains(error.errorCode.name.toLowerCase());
+    return filterIgnored(errors, ignoreInfo, lineInfo);
   }
 
   /**
@@ -2658,7 +2817,7 @@
     Source source = target;
     Map<String, TaskInput> inputs = <String, TaskInput>{};
     inputs[LINE_INFO_INPUT] = LINE_INFO.of(source);
-    inputs[PARSED_UNIT_INPUT] = PARSED_UNIT.of(source);
+    inputs[IGNORE_INFO_INPUT] = IGNORE_INFO.of(source);
     EnginePlugin enginePlugin = AnalysisEngine.instance.enginePlugin;
     // for Source
     List<ResultDescriptor> errorsForSource = enginePlugin.dartErrorsForSource;
@@ -2691,6 +2850,27 @@
       AnalysisContext context, AnalysisTarget target) {
     return new DartErrorsTask(context, target);
   }
+
+  /**
+   * Return a new list with items from [errors] which are not filtered out by
+   * the [ignoreInfo].
+   */
+  static List<AnalysisError> filterIgnored(
+      List<AnalysisError> errors, IgnoreInfo ignoreInfo, LineInfo lineInfo) {
+    if (errors.isEmpty || !ignoreInfo.hasIgnores) {
+      return errors;
+    }
+
+    bool isIgnored(AnalysisError error) {
+      int errorLine = lineInfo.getLocation(error.offset).lineNumber;
+      String errorCode = error.errorCode.name.toLowerCase();
+      // Ignores can be on the line or just preceding the error.
+      return ignoreInfo.ignoredAt(errorCode, errorLine) ||
+          ignoreInfo.ignoredAt(errorCode, errorLine - 1);
+    }
+
+    return errors.where((AnalysisError e) => !isIgnored(e)).toList();
+  }
 }
 
 /**
@@ -3100,6 +3280,80 @@
 }
 
 /**
+ * Information about analysis `//ignore:` comments within a source file.
+ */
+class IgnoreInfo {
+  /**
+   * Instance shared by all cases without matches.
+   */
+  static final IgnoreInfo _EMPTY_INFO = new IgnoreInfo();
+
+  /**
+   * A regular expression for matching 'ignore' comments.  Produces matches
+   * containing 2 groups.  For example:
+   *
+   *     * ['//ignore: error_code', 'error_code']
+   *
+   * Resulting codes may be in a list ('error_code_1,error_code_2').
+   */
+  static final RegExp _IGNORE_MATCHER =
+      new RegExp(r'//[ ]*ignore:(.*)$', multiLine: true);
+
+  final Map<int, List<String>> _ignoreMap = new HashMap<int, List<String>>();
+
+  /**
+   * Whether this info object defines any ignores.
+   */
+  bool get hasIgnores => ignores.isNotEmpty;
+
+  /**
+   * Map of line numbers to associated ignored error codes.
+   */
+  Map<int, Iterable<String>> get ignores => _ignoreMap;
+
+  /**
+   * Ignore this [errorCode] at [line].
+   */
+  void add(int line, String errorCode) {
+    _ignoreMap.putIfAbsent(line, () => new List<String>()).add(errorCode);
+  }
+
+  /**
+   * Ignore these [errorCodes] at [line].
+   */
+  void addAll(int line, Iterable<String> errorCodes) {
+    _ignoreMap.putIfAbsent(line, () => new List<String>()).addAll(errorCodes);
+  }
+
+  /**
+   * Test whether this [errorCode] is ignored at the given [line].
+   */
+  bool ignoredAt(String errorCode, int line) =>
+      _ignoreMap[line]?.contains(errorCode) == true;
+
+  /**
+   * Calculate ignores for the given [content] with line [info].
+   */
+  static IgnoreInfo calculateIgnores(String content, LineInfo info) {
+    Iterable<Match> matches = _IGNORE_MATCHER.allMatches(content);
+    if (matches.isEmpty) {
+      return _EMPTY_INFO;
+    }
+
+    IgnoreInfo ignoreInfo = new IgnoreInfo();
+    for (Match match in matches) {
+      // See _IGNORE_MATCHER for format --- note the possibility of error lists.
+      Iterable<String> codes = match
+          .group(1)
+          .split(',')
+          .map((String code) => code.trim().toLowerCase());
+      ignoreInfo.addAll(info.getLocation(match.start).lineNumber, codes);
+    }
+    return ignoreInfo;
+  }
+}
+
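
The expected input for IgnoreInfo is a plain `// ignore: some_error_code` comment, optionally with a comma-separated list of codes. The standalone sketch below mimics the matching and normalization above using plain Dart collections (it is not the analyzer's implementation, which maps match offsets through LineInfo instead of splitting the text itself). DartErrorsTask.filterIgnored then drops an error whose code is ignored either as a trailing comment on the error's own line or on the line directly above it.

    final RegExp ignoreMatcher =
        new RegExp(r'//[ ]*ignore:(.*)$', multiLine: true);

    Map<int, List<String>> calculateIgnores(String content) {
      Map<int, List<String>> ignores = <int, List<String>>{};
      List<String> lines = content.split('\n');
      for (int i = 0; i < lines.length; i++) {
        Match match = ignoreMatcher.firstMatch(lines[i]);
        if (match != null) {
          // Same normalization as above: split on ',', trim, lower-case.
          ignores[i + 1] = match
              .group(1)
              .split(',')
              .map((String code) => code.trim().toLowerCase())
              .toList();
        }
      }
      return ignores;
    }

    void main() {
      String source = 'int x = "s"; // ignore: invalid_assignment\n'
          '// ignore: unused_local_variable, dead_code\n'
          'var unused;\n';
      print(calculateIgnores(source));
      // {1: [invalid_assignment], 2: [unused_local_variable, dead_code]}
    }
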
+/**
  * A task that ensures that all of the inferable instance members in a
  * compilation unit have had their type inferred.
  */
@@ -3173,14 +3427,14 @@
 
       // Require that field re-resolution be complete for all units in the
       // current library cycle.
-      'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit).toList(
+      'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT10.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
                   unit.source))),
       // Require that full inference be complete for all dependencies of the
       // current library cycle.
-      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
+      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT11.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
@@ -3213,19 +3467,20 @@
    */
   VariableDeclaration getDeclaration(CompilationUnit unit) {
     VariableElement variable = target;
-    AstNode node = new NodeLocator2(variable.nameOffset).searchWithin(unit);
+    int offset = variable.nameOffset;
+    AstNode node = new NodeLocator2(offset).searchWithin(unit);
     if (node == null) {
       Source variableSource = variable.source;
       Source unitSource = unit.element.source;
       if (variableSource != unitSource) {
         throw new AnalysisException(
             "Failed to find the AST node for the variable "
-            "${variable.displayName} in $variableSource "
+            "${variable.displayName} at $offset in $variableSource "
             "because we were looking in $unitSource");
       }
       throw new AnalysisException(
           "Failed to find the AST node for the variable "
-          "${variable.displayName} in $variableSource");
+          "${variable.displayName} at $offset in $variableSource");
     }
     VariableDeclaration declaration =
         node.getAncestor((AstNode ancestor) => ancestor is VariableDeclaration);
@@ -3233,14 +3488,28 @@
       Source variableSource = variable.source;
       Source unitSource = unit.element.source;
       if (variableSource != unitSource) {
+        if (declaration == null) {
+          throw new AnalysisException(
+              "Failed to find the declaration of the variable "
+              "${variable.displayName} at $offset in $variableSource "
+              "because the node was not in a variable declaration "
+              "possibly because we were looking in $unitSource");
+        }
         throw new AnalysisException(
             "Failed to find the declaration of the variable "
-            "${variable.displayName} in $variableSource"
+            "${variable.displayName} at $offset in $variableSource "
             "because we were looking in $unitSource");
       }
+      if (declaration == null) {
+        throw new AnalysisException(
+            "Failed to find the declaration of the variable "
+            "${variable.displayName} at $offset in $variableSource "
+            "because the node was not in a variable declaration");
+      }
       throw new AnalysisException(
           "Failed to find the declaration of the variable "
-          "${variable.displayName} in $variableSource");
+          "${variable.displayName} at $offset in $variableSource "
+          "because the node was not the name in a variable declaration");
     }
     return declaration;
   }
@@ -3437,7 +3706,7 @@
 
       // Require that full inference be complete for all dependencies of the
       // current library cycle.
-      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
+      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT11.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
@@ -3703,6 +3972,7 @@
     parser.parseFunctionBodies = options.analyzeFunctionBodiesPredicate(source);
     parser.parseGenericMethods = options.enableGenericMethods;
     parser.parseGenericMethodComments = options.strongMode;
+    parser.parseTrailingCommas = options.enableTrailingCommas;
     CompilationUnit unit = parser.parseCompilationUnit(tokenStream);
     unit.lineInfo = lineInfo;
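
For context on the new parseTrailingCommas flag above: when the corresponding analysis option is enabled, the parser accepts a trailing comma after the last argument or parameter, which keeps diffs small when call sites are reformatted. A tiny example of the syntax in question (the function here is only an illustration):

    void log(String message, {int level}) {
      print('[$level] $message');
    }

    void main() {
      log(
        'starting up',
        level: 1, // trailing comma: accepted only when the option is enabled
      );
    }
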
 
@@ -3938,7 +4208,7 @@
 
       // Require that full inference be complete for all dependencies of the
       // current library cycle.
-      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
+      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT11.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
@@ -4418,50 +4688,58 @@
  * with their externally visible dependencies.
  */
 class ReferencedNames {
+  final Source librarySource;
+
+  /**
+   * The mapping from the name of a class to the set of names of other classes
+   * that extend, mix-in, or implement it.
+   *
+   * If the set of members of a class is changed, these changes might change
+   * the list of unimplemented inherited members in the class and classes that
+   * extend, mix-in, or implement it. So, we might need to report (or stop
+   * reporting) the corresponding warning.
+   */
+  final Map<String, Set<String>> superToSubs = <String, Set<String>>{};
+
+  /**
+   * The names of instantiated classes.
+   *
+   * If one of these classes changes its set of members, it might change
+   * its list of unimplemented inherited members. So, we might need to report
+   * (or stop reporting) the corresponding warning.
+   */
+  final Set<String> instantiatedNames = new Set<String>();
+
+  /**
+   * The set of names that are referenced by the library, both inside and
+   * outside of method bodies.
+   */
   final Set<String> names = new Set<String>();
+
+  /**
+   * The mapping from the name of a top-level element to the set of names that
+   * the element uses in a way that is visible outside of the element, e.g.
+   * the return type, or a parameter type.
+   */
   final Map<String, Set<String>> userToDependsOn = <String, Set<String>>{};
 
-  /**
-   * Updates [delta] by adding names that are changed in this library.
-   */
-  void addChangedElements(DartDelta delta) {
-    bool hasProgress = true;
-    while (hasProgress) {
-      hasProgress = false;
-      userToDependsOn.forEach((user, dependencies) {
-        for (String dependency in dependencies) {
-          if (delta.isNameAffected(dependency)) {
-            if (delta.nameChanged(user)) {
-              hasProgress = true;
-            }
-          }
-        }
-      });
-    }
-  }
+  ReferencedNames(this.librarySource);
 
-  /**
-   * Returns `true` if the library described by this object is affected by
-   * the given [delta].
-   */
-  bool isAffectedBy(DartDelta delta) {
-    for (String name in names) {
-      if (delta.isNameAffected(name)) {
-        return true;
-      }
-    }
-    return false;
+  void addSubclass(String subName, String superName) {
+    superToSubs.putIfAbsent(superName, () => new Set<String>()).add(subName);
   }
 }
 
 /**
  * A builder for creating [ReferencedNames].
- *
- * TODO(scheglov) Record dependencies for all other top-level declarations.
  */
-class ReferencedNamesBuilder extends RecursiveAstVisitor {
+class ReferencedNamesBuilder extends GeneralizingAstVisitor {
+  final Set<String> importPrefixNames = new Set<String>();
   final ReferencedNames names;
-  int bodyLevel = 0;
+
+  ReferencedNamesScope scope = new ReferencedNamesScope(null);
+
+  int localLevel = 0;
   Set<String> dependsOn;
 
   ReferencedNamesBuilder(this.names);
@@ -4472,41 +4750,308 @@
   }
 
   @override
-  visitBlockFunctionBody(BlockFunctionBody node) {
+  visitBlock(Block node) {
+    ReferencedNamesScope outerScope = scope;
     try {
-      bodyLevel++;
-      super.visitBlockFunctionBody(node);
+      scope = new ReferencedNamesScope.forBlock(scope, node);
+      super.visitBlock(node);
     } finally {
-      bodyLevel--;
+      scope = outerScope;
     }
   }
 
   @override
   visitClassDeclaration(ClassDeclaration node) {
-    dependsOn = new Set<String>();
-    super.visitClassDeclaration(node);
-    names.userToDependsOn[node.name.name] = dependsOn;
-    dependsOn = null;
+    ReferencedNamesScope outerScope = scope;
+    try {
+      scope = new ReferencedNamesScope.forClass(scope, node);
+      dependsOn = new Set<String>();
+      super.visitClassDeclaration(node);
+      String className = node.name.name;
+      names.userToDependsOn[className] = dependsOn;
+      _addSuperName(className, node.extendsClause?.superclass);
+      _addSuperNames(className, node.withClause?.mixinTypes);
+      _addSuperNames(className, node.implementsClause?.interfaces);
+    } finally {
+      dependsOn = null;
+      scope = outerScope;
+    }
   }
 
   @override
-  visitExpressionFunctionBody(ExpressionFunctionBody node) {
+  visitClassTypeAlias(ClassTypeAlias node) {
+    ReferencedNamesScope outerScope = scope;
     try {
-      bodyLevel++;
-      super.visitExpressionFunctionBody(node);
+      scope = new ReferencedNamesScope.forClassTypeAlias(scope, node);
+      dependsOn = new Set<String>();
+      super.visitClassTypeAlias(node);
+      String className = node.name.name;
+      names.userToDependsOn[className] = dependsOn;
+      _addSuperName(className, node.superclass);
+      _addSuperNames(className, node.withClause?.mixinTypes);
+      _addSuperNames(className, node.implementsClause?.interfaces);
     } finally {
-      bodyLevel--;
+      dependsOn = null;
+      scope = outerScope;
+    }
+  }
+
+  @override
+  visitComment(Comment node) {
+    try {
+      localLevel++;
+      super.visitComment(node);
+    } finally {
+      localLevel--;
+    }
+  }
+
+  @override
+  visitConstructorName(ConstructorName node) {
+    if (node.parent is! ConstructorDeclaration) {
+      super.visitConstructorName(node);
+    }
+  }
+
+  @override
+  visitFunctionBody(FunctionBody node) {
+    try {
+      localLevel++;
+      super.visitFunctionBody(node);
+    } finally {
+      localLevel--;
+    }
+  }
+
+  @override
+  visitFunctionDeclaration(FunctionDeclaration node) {
+    if (localLevel == 0) {
+      ReferencedNamesScope outerScope = scope;
+      try {
+        scope = new ReferencedNamesScope.forFunction(scope, node);
+        dependsOn = new Set<String>();
+        super.visitFunctionDeclaration(node);
+        names.userToDependsOn[node.name.name] = dependsOn;
+      } finally {
+        dependsOn = null;
+        scope = outerScope;
+      }
+    } else {
+      super.visitFunctionDeclaration(node);
+    }
+  }
+
+  @override
+  visitFunctionTypeAlias(FunctionTypeAlias node) {
+    if (localLevel == 0) {
+      ReferencedNamesScope outerScope = scope;
+      try {
+        scope = new ReferencedNamesScope.forFunctionTypeAlias(scope, node);
+        dependsOn = new Set<String>();
+        super.visitFunctionTypeAlias(node);
+        names.userToDependsOn[node.name.name] = dependsOn;
+      } finally {
+        dependsOn = null;
+        scope = outerScope;
+      }
+    } else {
+      super.visitFunctionTypeAlias(node);
+    }
+  }
+
+  @override
+  visitImportDirective(ImportDirective node) {
+    if (node.prefix != null) {
+      importPrefixNames.add(node.prefix.name);
+    }
+    super.visitImportDirective(node);
+  }
+
+  @override
+  visitInstanceCreationExpression(InstanceCreationExpression node) {
+    ConstructorName constructorName = node.constructorName;
+    Identifier typeName = constructorName.type.name;
+    if (typeName is SimpleIdentifier) {
+      names.instantiatedNames.add(typeName.name);
+    }
+    if (typeName is PrefixedIdentifier) {
+      String prefixName = typeName.prefix.name;
+      if (importPrefixNames.contains(prefixName)) {
+        names.instantiatedNames.add(typeName.identifier.name);
+      } else {
+        names.instantiatedNames.add(prefixName);
+      }
+    }
+    super.visitInstanceCreationExpression(node);
+  }
+
+  @override
+  visitMethodDeclaration(MethodDeclaration node) {
+    ReferencedNamesScope outerScope = scope;
+    try {
+      scope = new ReferencedNamesScope.forMethod(scope, node);
+      super.visitMethodDeclaration(node);
+    } finally {
+      scope = outerScope;
     }
   }
 
   @override
   visitSimpleIdentifier(SimpleIdentifier node) {
-    if (!node.inDeclarationContext()) {
-      String name = node.name;
-      names.names.add(name);
-      if (dependsOn != null && bodyLevel == 0) {
-        dependsOn.add(name);
+    // Ignore all declarations.
+    if (node.inDeclarationContext()) {
+      return;
+    }
+    // Ignore class name references from constructors.
+    AstNode parent = node.parent;
+    if (parent is ConstructorDeclaration && parent.returnType == node) {
+      return;
+    }
+    // Prepare name.
+    String name = node.name;
+    // Ignore unqualified names shadowed by local elements.
+    if (!node.isQualified) {
+      if (scope.contains(name)) {
+        return;
       }
+      if (importPrefixNames.contains(name)) {
+        return;
+      }
+    }
+    // Do add the dependency.
+    names.names.add(name);
+    if (dependsOn != null && localLevel == 0) {
+      dependsOn.add(name);
+    }
+  }
+
+  @override
+  visitTopLevelVariableDeclaration(TopLevelVariableDeclaration node) {
+    VariableDeclarationList variableList = node.variables;
+    // Prepare type dependencies.
+    Set<String> typeDependencies = new Set<String>();
+    dependsOn = typeDependencies;
+    variableList.type?.accept(this);
+    // Combine individual variable dependencies with the type dependencies.
+    for (VariableDeclaration variable in variableList.variables) {
+      dependsOn = new Set<String>();
+      variable.accept(this);
+      dependsOn.addAll(typeDependencies);
+      names.userToDependsOn[variable.name.name] = dependsOn;
+    }
+    dependsOn = null;
+  }
+
+  void _addSuperName(String className, TypeName type) {
+    if (type != null) {
+      Identifier typeName = type.name;
+      if (typeName is SimpleIdentifier) {
+        names.addSubclass(className, typeName.name);
+      }
+      if (typeName is PrefixedIdentifier) {
+        names.addSubclass(className, typeName.identifier.name);
+      }
+    }
+  }
+
+  void _addSuperNames(String className, List<TypeName> types) {
+    types?.forEach((type) => _addSuperName(className, type));
+  }
+}
+
+class ReferencedNamesScope {
+  final ReferencedNamesScope enclosing;
+  Set<String> names;
+
+  ReferencedNamesScope(this.enclosing);
+
+  factory ReferencedNamesScope.forBlock(
+      ReferencedNamesScope enclosing, Block node) {
+    ReferencedNamesScope scope = new ReferencedNamesScope(enclosing);
+    for (Statement statement in node.statements) {
+      if (statement is FunctionDeclarationStatement) {
+        scope.add(statement.functionDeclaration.name.name);
+      } else if (statement is VariableDeclarationStatement) {
+        for (VariableDeclaration variable in statement.variables.variables) {
+          scope.add(variable.name.name);
+        }
+      }
+    }
+    return scope;
+  }
+
+  factory ReferencedNamesScope.forClass(
+      ReferencedNamesScope enclosing, ClassDeclaration node) {
+    ReferencedNamesScope scope = new ReferencedNamesScope(enclosing);
+    scope._addTypeParameters(node.typeParameters);
+    for (ClassMember member in node.members) {
+      if (member is FieldDeclaration) {
+        for (VariableDeclaration variable in member.fields.variables) {
+          scope.add(variable.name.name);
+        }
+      } else if (member is MethodDeclaration) {
+        scope.add(member.name.name);
+      }
+    }
+    return scope;
+  }
+
+  factory ReferencedNamesScope.forClassTypeAlias(
+      ReferencedNamesScope enclosing, ClassTypeAlias node) {
+    ReferencedNamesScope scope = new ReferencedNamesScope(enclosing);
+    scope._addTypeParameters(node.typeParameters);
+    return scope;
+  }
+
+  factory ReferencedNamesScope.forFunction(
+      ReferencedNamesScope enclosing, FunctionDeclaration node) {
+    ReferencedNamesScope scope = new ReferencedNamesScope(enclosing);
+    scope._addTypeParameters(node.functionExpression.typeParameters);
+    scope._addFormalParameters(node.functionExpression.parameters);
+    return scope;
+  }
+
+  factory ReferencedNamesScope.forFunctionTypeAlias(
+      ReferencedNamesScope enclosing, FunctionTypeAlias node) {
+    ReferencedNamesScope scope = new ReferencedNamesScope(enclosing);
+    scope._addTypeParameters(node.typeParameters);
+    return scope;
+  }
+
+  factory ReferencedNamesScope.forMethod(
+      ReferencedNamesScope enclosing, MethodDeclaration node) {
+    ReferencedNamesScope scope = new ReferencedNamesScope(enclosing);
+    scope._addTypeParameters(node.typeParameters);
+    scope._addFormalParameters(node.parameters);
+    return scope;
+  }
+
+  void add(String name) {
+    names ??= new Set<String>();
+    names.add(name);
+  }
+
+  bool contains(String name) {
+    if (names != null && names.contains(name)) {
+      return true;
+    }
+    if (enclosing != null) {
+      return enclosing.contains(name);
+    }
+    return false;
+  }
+
+  void _addFormalParameters(FormalParameterList parameterList) {
+    if (parameterList != null) {
+      parameterList.parameters
+          .map((p) => p is NormalFormalParameter ? p.identifier.name : '')
+          .forEach(add);
+    }
+  }
+
+  void _addTypeParameters(TypeParameterList typeParameterList) {
+    if (typeParameterList != null) {
+      typeParameterList.typeParameters.map((p) => p.name.name).forEach(add);
     }
   }
 }
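
As a worked example of what ReferencedNamesBuilder records (hand-derived from the visitor logic above, so treat it as an approximation rather than authoritative output): for a library containing `class B extends A { B(); }` and `A topLevel = new A();`, the collected data should look roughly like the literals below.

    void main() {
      // superToSubs: B extends A, so a change to A's members can affect B.
      var superToSubs = <String, Set<String>>{
        'A': new Set<String>.from(['B'])
      };
      // instantiatedNames: `new A()` appears in an initializer.
      var instantiatedNames = new Set<String>.from(['A']);
      // userToDependsOn: both B and topLevel mention A outside of any body.
      var userToDependsOn = <String, Set<String>>{
        'B': new Set<String>.from(['A']),
        'topLevel': new Set<String>.from(['A'])
      };
      print('$superToSubs $instantiatedNames $userToDependsOn');
    }
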
@@ -4729,14 +5274,14 @@
 
       // Require that static variable inference  be complete for all units in
       // the current library cycle.
-      'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit).toList(
+      'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT9.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
                   unit.source))),
       // Require that full inference be complete for all dependencies of the
       // current library cycle.
-      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit).toList(
+      'orderLibraryCycles': LIBRARY_CYCLE_DEPENDENCIES.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT11.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
@@ -4765,18 +5310,13 @@
   static const String LIBRARY_INPUT = 'LIBRARY_INPUT';
 
   /**
-   * The name of the list of [RESOLVED_UNIT12] input.
-   */
-  static const String UNITS_INPUT = 'UNITS_INPUT';
-
-  /**
    * The task descriptor describing this kind of task.
    */
   static final TaskDescriptor DESCRIPTOR = new TaskDescriptor(
       'ResolveLibraryReferencesTask',
       createTask,
       buildInputs,
-      <ResultDescriptor>[LIBRARY_ELEMENT9, REFERENCED_NAMES]);
+      <ResultDescriptor>[LIBRARY_ELEMENT9]);
 
   ResolveLibraryReferencesTask(
       InternalAnalysisContext context, AnalysisTarget target)
@@ -4787,22 +5327,8 @@
 
   @override
   void internalPerform() {
-    //
-    // Prepare inputs.
-    //
     LibraryElement library = getRequiredInput(LIBRARY_INPUT);
-    List<CompilationUnit> units = getRequiredInput(UNITS_INPUT);
-    // Compute referenced names.
-    ReferencedNames referencedNames = new ReferencedNames();
-    int length = units.length;
-    for (int i = 0; i < length; i++) {
-      new ReferencedNamesBuilder(referencedNames).build(units[i]);
-    }
-    //
-    // Record outputs.
-    //
     outputs[LIBRARY_ELEMENT9] = library;
-    outputs[REFERENCED_NAMES] = referencedNames;
   }
 
   /**
@@ -4814,7 +5340,8 @@
     Source source = target;
     return <String, TaskInput>{
       LIBRARY_INPUT: LIBRARY_ELEMENT8.of(source),
-      UNITS_INPUT: LIBRARY_SPECIFIC_UNITS.of(source).toListOf(RESOLVED_UNIT12),
+      'resolvedUnits':
+          LIBRARY_SPECIFIC_UNITS.of(source).toListOf(RESOLVED_UNIT12),
     };
   }
 
@@ -5212,7 +5739,7 @@
 
       // Require that inference be complete for all units in the
       // current library cycle.
-      'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit).toList(
+      'orderLibraryCycleTasks': LIBRARY_CYCLE_UNITS.of(unit.library).toList(
           (CompilationUnitElement unit) => CREATED_RESOLVED_UNIT11.of(
               new LibrarySpecificUnit(
                   (unit as CompilationUnitElementImpl).librarySource,
@@ -5429,7 +5956,7 @@
       'ScanDartTask',
       createTask,
       buildInputs,
-      <ResultDescriptor>[LINE_INFO, SCAN_ERRORS, TOKEN_STREAM],
+      <ResultDescriptor>[IGNORE_INFO, LINE_INFO, SCAN_ERRORS, TOKEN_STREAM],
       suitabilityFor: suitabilityFor);
 
   /**
@@ -5482,8 +6009,12 @@
       scanner.preserveComments = context.analysisOptions.preserveComments;
       scanner.scanGenericMethodComments = context.analysisOptions.strongMode;
 
+      LineInfo lineInfo = new LineInfo(scanner.lineStarts);
+
       outputs[TOKEN_STREAM] = scanner.tokenize();
-      outputs[LINE_INFO] = new LineInfo(scanner.lineStarts);
+      outputs[LINE_INFO] = lineInfo;
+      outputs[IGNORE_INFO] =
+          IgnoreInfo.calculateIgnores(fragment.content, lineInfo);
       outputs[SCAN_ERRORS] = getUniqueErrors(errorListener.errors);
     } else if (target is Source) {
       String content = getRequiredInput(CONTENT_INPUT_NAME);
@@ -5493,8 +6024,11 @@
       scanner.preserveComments = context.analysisOptions.preserveComments;
       scanner.scanGenericMethodComments = context.analysisOptions.strongMode;
 
+      LineInfo lineInfo = new LineInfo(scanner.lineStarts);
+
       outputs[TOKEN_STREAM] = scanner.tokenize();
-      outputs[LINE_INFO] = new LineInfo(scanner.lineStarts);
+      outputs[LINE_INFO] = lineInfo;
+      outputs[IGNORE_INFO] = IgnoreInfo.calculateIgnores(content, lineInfo);
       outputs[SCAN_ERRORS] = getUniqueErrors(errorListener.errors);
     } else {
       throw new AnalysisException(
@@ -5510,7 +6044,7 @@
   static Map<String, TaskInput> buildInputs(AnalysisTarget target) {
     if (target is Source) {
       return <String, TaskInput>{
-        CONTENT_INPUT_NAME: CONTENT.of(target),
+        CONTENT_INPUT_NAME: CONTENT.of(target, flushOnAccess: true),
         MODIFICATION_TIME_INPUT: MODIFICATION_TIME.of(target)
       };
     } else if (target is DartScript) {
@@ -5593,9 +6127,12 @@
     CompilationUnit unit = getRequiredInput(UNIT_INPUT);
     AnalysisOptionsImpl options = context.analysisOptions;
     if (options.strongMode) {
-      unit.accept(new CodeChecker(
-          typeProvider, new StrongTypeSystemImpl(), errorListener,
-          hints: options.strongModeHints));
+      CodeChecker checker = new CodeChecker(
+          typeProvider,
+          new StrongTypeSystemImpl(implicitCasts: options.implicitCasts),
+          errorListener,
+          options);
+      checker.visitCompilationUnit(unit);
     }
     //
     // Record outputs.
diff --git a/pkg/analyzer/lib/src/task/driver.dart b/pkg/analyzer/lib/src/task/driver.dart
index 70ebe3b..07083d8 100644
--- a/pkg/analyzer/lib/src/task/driver.dart
+++ b/pkg/analyzer/lib/src/task/driver.dart
@@ -101,15 +101,18 @@
     try {
       isTaskRunning = true;
       AnalysisTask task;
-      WorkOrder workOrder = createWorkOrderForResult(target, result);
-      if (workOrder != null) {
-        while (workOrder.moveNext()) {
-//          AnalysisTask previousTask = task;
-//          String message = workOrder.current.toString();
-          task = performWorkItem(workOrder.current);
-//          if (task == null) {
-//            throw new AnalysisException(message, previousTask.caughtException);
-//          }
+      while (true) {
+        try {
+          WorkOrder workOrder = createWorkOrderForResult(target, result);
+          if (workOrder != null) {
+            while (workOrder.moveNext()) {
+              task = performWorkItem(workOrder.current);
+            }
+          }
+          break;
+        } on ModificationTimeMismatchError {
+          // Cache inconsistency was detected and fixed by invalidating
+          // corresponding results in cache. Computation must be restarted.
         }
       }
       return task;
@@ -248,7 +251,12 @@
       if (currentWorkOrder == null) {
         currentWorkOrder = createNextWorkOrder();
       } else if (currentWorkOrder.moveNext()) {
-        performWorkItem(currentWorkOrder.current);
+        try {
+          performWorkItem(currentWorkOrder.current);
+        } on ModificationTimeMismatchError {
+          reset();
+          return true;
+        }
       } else {
         currentWorkOrder = createNextWorkOrder();
       }
@@ -278,7 +286,10 @@
       AnalysisTarget target = task.target;
       CacheEntry entry = context.getCacheEntry(target);
       if (task.caughtException == null) {
-        List<TargetedResult> dependedOn = item.inputTargetedResults.toList();
+        List<TargetedResult> dependedOn =
+            context.analysisOptions.trackCacheDependencies
+                ? item.inputTargetedResults.toList()
+                : const <TargetedResult>[];
         Map<ResultDescriptor, dynamic> outputs = task.outputs;
         List<ResultDescriptor> results = task.descriptor.results;
         int resultLength = results.length;
@@ -761,7 +772,6 @@
             throw new AnalysisException(
                 'Cannot create work order to build $inputResult for $inputTarget',
                 this.exception);
-            return null;
           }
         }
       } else {
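
The driver change above wraps work-order execution in a retry loop: when a task observes that the freshly read modification time no longer matches the one recorded in the cache entry, it throws, the stale results are invalidated, and the whole computation restarts. A stripped-down sketch of the pattern (placeholder names, not the analyzer's API):

    class ModificationTimeMismatch implements Exception {}

    int runWithRestart(int staleAttempts) {
      int attempts = 0;
      while (true) {
        try {
          attempts++;
          if (attempts <= staleAttempts) {
            // Simulates a task observing content newer than the cached stamp.
            throw new ModificationTimeMismatch();
          }
          return attempts; // Completed against a consistent cache.
        } on ModificationTimeMismatch {
          // Stale results were already invalidated; recompute from scratch.
        }
      }
    }

    void main() {
      print(runWithRestart(2)); // 3: two restarts, then success
    }
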
diff --git a/pkg/analyzer/lib/src/task/general.dart b/pkg/analyzer/lib/src/task/general.dart
index 38d67df..064fd20 100644
--- a/pkg/analyzer/lib/src/task/general.dart
+++ b/pkg/analyzer/lib/src/task/general.dart
@@ -4,6 +4,7 @@
 
 library analyzer.src.task.general;
 
+import 'package:analyzer/src/context/cache.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/task/general.dart';
@@ -33,13 +34,36 @@
   @override
   internalPerform() {
     Source source = getRequiredSource();
+    String content;
+    int modificationTime;
     try {
       TimestampedData<String> data = context.getContents(source);
-      outputs[CONTENT] = data.data;
-      outputs[MODIFICATION_TIME] = data.modificationTime;
+      content = data.data;
+      modificationTime = data.modificationTime;
     } catch (exception) {
-      outputs[CONTENT] = '';
-      outputs[MODIFICATION_TIME] = -1;
+      content = '';
+      modificationTime = -1;
+    }
+    _validateModificationTime(source, modificationTime);
+    outputs[CONTENT] = content;
+    outputs[MODIFICATION_TIME] = modificationTime;
+  }
+
+  /**
+   * Validate that the [target] cache entry has the same modification time
+   * as the given [modificationTime].  Otherwise invalidate the content and
+   * modification time results of the [target] and throw a new
+   * [ModificationTimeMismatchError].
+   */
+  void _validateModificationTime(Source source, int modificationTime) {
+    AnalysisContext context = this.context;
+    if (context is InternalAnalysisContext) {
+      CacheEntry entry = context.getCacheEntry(target);
+      if (entry != null && entry.modificationTime != modificationTime) {
+        entry.modificationTime = modificationTime;
+        entry.setState(CONTENT, CacheState.INVALID);
+        entry.setState(MODIFICATION_TIME, CacheState.INVALID);
+        throw new ModificationTimeMismatchError(source);
+      }
     }
   }
 
diff --git a/pkg/analyzer/lib/src/task/html.dart b/pkg/analyzer/lib/src/task/html.dart
index 93e00ed..b56c4b4 100644
--- a/pkg/analyzer/lib/src/task/html.dart
+++ b/pkg/analyzer/lib/src/task/html.dart
@@ -74,13 +74,17 @@
   bool get isInSystemLibrary => source.isInSystemLibrary;
 
   @override
+  Source get librarySource => source;
+
+  @override
   int get modificationStamp => source.modificationStamp;
 
   @override
   String get shortName => source.shortName;
 
   @override
-  Uri get uri => throw new StateError('uri not supported for scripts');
+  Uri get uri => source.uri
+      .replace(queryParameters: {'offset': fragments[0].offset.toString()});
 
   @override
   UriKind get uriKind =>
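
The uri getter above distinguishes multiple scripts in one HTML file by appending the script's offset as a query parameter. Uri.replace from dart:core does the encoding; a quick illustration (the file path is made up):

    void main() {
      Uri htmlUri = Uri.parse('file:///web/index.html');
      Uri scriptUri = htmlUri.replace(queryParameters: {'offset': '120'});
      print(scriptUri); // file:///web/index.html?offset=120
    }
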
diff --git a/pkg/analyzer/lib/src/task/incremental_element_builder.dart b/pkg/analyzer/lib/src/task/incremental_element_builder.dart
index d7156e4..9bc30f8 100644
--- a/pkg/analyzer/lib/src/task/incremental_element_builder.dart
+++ b/pkg/analyzer/lib/src/task/incremental_element_builder.dart
@@ -18,6 +18,50 @@
 import 'package:analyzer/src/generated/source.dart';
 
 /**
+ * The change of a single [ClassElement].
+ */
+class ClassElementDelta {
+  final ClassElement _element;
+  final Source librarySource;
+  final String name;
+
+  final Set<ClassElementDelta> superDeltas = new Set<ClassElementDelta>();
+
+  final List<PropertyAccessorElement> addedAccessors =
+      <PropertyAccessorElement>[];
+  final List<PropertyAccessorElement> removedAccessors =
+      <PropertyAccessorElement>[];
+
+  final List<ConstructorElement> addedConstructors = <ConstructorElement>[];
+  final List<ConstructorElement> removedConstructors = <ConstructorElement>[];
+
+  final List<MethodElement> addedMethods = <MethodElement>[];
+  final List<MethodElement> removedMethods = <MethodElement>[];
+
+  ClassElementDelta(this._element, this.librarySource, this.name);
+
+  /**
+   * Return `true` if this delta has changes to the [name] visible in the
+   * given [librarySource].
+   */
+  bool hasChanges(Source librarySource, String name) {
+    if (Identifier.isPrivateName(name) && librarySource != this.librarySource) {
+      return false;
+    }
+    return _hasElementWithName(addedAccessors, name) ||
+        _hasElementWithName(removedAccessors, name) ||
+        _hasElementWithName(addedConstructors, name) ||
+        _hasElementWithName(removedConstructors, name) ||
+        _hasElementWithName(addedMethods, name) ||
+        _hasElementWithName(removedMethods, name);
+  }
+
+  static bool _hasElementWithName(List<Element> elements, String name) {
+    return elements.any((e) => e.displayName == name);
+  }
+}
+
+/**
  * The change of a single [CompilationUnitElement].
  */
 class CompilationUnitElementDelta {
@@ -35,6 +79,12 @@
    * The list of removed top-level elements.
    */
   final List<Element> removedDeclarations = <Element>[];
+
+  /**
+   * The map from names of changed classes to the change deltas.
+   */
+  final Map<String, ClassElementDelta> classDeltas =
+      <String, ClassElementDelta>{};
 }
 
 /**
@@ -47,7 +97,7 @@
   final CompilationUnit oldUnit;
   final CompilationUnitElementImpl unitElement;
   final CompilationUnit newUnit;
-  final ElementHolder holder = new ElementHolder();
+  final ElementHolder unitElementHolder = new ElementHolder();
 
   /**
    * The change between element models of [oldUnit] and [newUnit].
@@ -78,28 +128,224 @@
         .buildCompilationUnit(unitSource, newUnit, librarySource);
     _processDirectives();
     _processUnitMembers();
-    newUnit.element = unitElement;
     _replaceUnitContents(oldUnit, newUnit);
+    newUnit.element = unitElement;
+    unitElement.setCodeRange(0, newUnit.endToken.end);
   }
 
-  void _addElementToHolder(Element element) {
-    if (element is PropertyAccessorElement) {
-      holder.addAccessor(element);
-    } else if (element is ClassElement) {
+  void _addElementToUnitHolder(Element element) {
+    if (element is ClassElement) {
       if (element.isEnum) {
-        holder.addEnum(element);
+        unitElementHolder.addEnum(element);
       } else {
-        holder.addType(element);
+        unitElementHolder.addType(element);
       }
     } else if (element is FunctionElement) {
-      holder.addFunction(element);
+      unitElementHolder.addFunction(element);
     } else if (element is FunctionTypeAliasElement) {
-      holder.addTypeAlias(element);
+      unitElementHolder.addTypeAlias(element);
+    } else if (element is PropertyAccessorElement) {
+      unitElementHolder.addAccessor(element);
     } else if (element is TopLevelVariableElement) {
-      holder.addTopLevelVariable(element);
+      unitElementHolder.addTopLevelVariable(element);
     }
   }
 
+  ClassElementDelta _processClassMembers(
+      ClassDeclaration oldClass, ClassDeclaration newClass) {
+    // If the class hierarchy or type parameters are changed,
+    // then the class changed too much - don't compute the delta.
+    if (TokenUtils.getFullCode(newClass.typeParameters) !=
+            TokenUtils.getFullCode(oldClass.typeParameters) ||
+        TokenUtils.getFullCode(newClass.extendsClause) !=
+            TokenUtils.getFullCode(oldClass.extendsClause) ||
+        TokenUtils.getFullCode(newClass.withClause) !=
+            TokenUtils.getFullCode(oldClass.withClause) ||
+        TokenUtils.getFullCode(newClass.implementsClause) !=
+            TokenUtils.getFullCode(oldClass.implementsClause)) {
+      return null;
+    }
+    // Build the old class members map.
+    Map<String, ClassMember> oldNodeMap = new HashMap<String, ClassMember>();
+    for (ClassMember oldNode in oldClass.members) {
+      String code = TokenUtils.getFullCode(oldNode);
+      oldNodeMap[code] = oldNode;
+    }
+    // Prepare elements.
+    ClassElement newElement = newClass.element;
+    ClassElement oldElement = oldClass.element;
+    // Use the old element for the new node.
+    newClass.name.staticElement = oldElement;
+    if (newElement is ClassElementImpl && oldElement is ClassElementImpl) {
+      oldElement.nameOffset = newElement.nameOffset;
+      oldElement.setCodeRange(newElement.codeOffset, newElement.codeLength);
+      oldElement.typeParameters = newElement.typeParameters;
+    }
+    // Prepare delta.
+    ClassElementImpl classElement = oldClass.element;
+    ElementHolder classElementHolder = new ElementHolder();
+    ClassElementDelta classDelta =
+        new ClassElementDelta(classElement, librarySource, classElement.name);
+    // Prepare all old member elements.
+    var removedAccessors = new Set<PropertyAccessorElement>();
+    var removedConstructors = new Set<ConstructorElement>();
+    var removedMethods = new Set<MethodElement>();
+    removedAccessors.addAll(classElement.accessors);
+    removedConstructors.addAll(classElement.constructors);
+    removedMethods.addAll(classElement.methods);
+    // Utilities.
+    void processConstructorDeclaration(
+        ConstructorDeclaration node, bool isNew) {
+      ConstructorElement element = node.element;
+      if (element != null) {
+        classElementHolder.addConstructor(element);
+        if (isNew) {
+          classDelta.addedConstructors.add(element);
+        } else {
+          removedConstructors.remove(element);
+        }
+      }
+    }
+    void processFieldDeclaration(FieldDeclaration node, bool isNew) {
+      for (VariableDeclaration field in node.fields.variables) {
+        PropertyInducingElement element = field.element;
+        if (element != null) {
+          PropertyAccessorElement getter = element.getter;
+          PropertyAccessorElement setter = element.setter;
+          if (getter != null) {
+            classElementHolder.addAccessor(getter);
+            if (isNew) {
+              classDelta.addedAccessors.add(getter);
+            } else {
+              removedAccessors.remove(getter);
+            }
+          }
+          if (setter != null) {
+            classElementHolder.addAccessor(setter);
+            if (isNew) {
+              classDelta.addedAccessors.add(setter);
+            } else {
+              removedAccessors.remove(setter);
+            }
+          }
+        }
+      }
+    }
+    void processMethodDeclaration(MethodDeclaration node, bool isNew) {
+      Element element = node.element;
+      if (element is MethodElement) {
+        classElementHolder.addMethod(element);
+        if (isNew) {
+          classDelta.addedMethods.add(element);
+        } else {
+          removedMethods.remove(element);
+        }
+      } else if (element is PropertyAccessorElement) {
+        classElementHolder.addAccessor(element);
+        if (isNew) {
+          classDelta.addedAccessors.add(element);
+        } else {
+          removedAccessors.remove(element);
+        }
+      }
+    }
+    // Replace new nodes with the identical old nodes.
+    bool newHasConstructor = false;
+    for (ClassMember newNode in newClass.members) {
+      String code = TokenUtils.getFullCode(newNode);
+      ClassMember oldNode = oldNodeMap.remove(code);
+      // When we type a name before a constructor with a documentation
+      // comment, the comment disappears from the AST. So, even though the
+      // tokens are the same, the nodes are not the same.
+      if (oldNode != null) {
+        if (oldNode.documentationComment == null &&
+                newNode.documentationComment != null ||
+            oldNode.documentationComment != null &&
+                newNode.documentationComment == null) {
+          oldNode = null;
+        }
+      }
+      // Add the new element.
+      if (oldNode == null) {
+        if (newNode is ConstructorDeclaration) {
+          newHasConstructor = true;
+          processConstructorDeclaration(newNode, true);
+        }
+        if (newNode is FieldDeclaration) {
+          processFieldDeclaration(newNode, true);
+        }
+        if (newNode is MethodDeclaration) {
+          processMethodDeclaration(newNode, true);
+        }
+        continue;
+      }
+      // Do replacement.
+      _replaceNode(newNode, oldNode);
+      if (oldNode is ConstructorDeclaration) {
+        processConstructorDeclaration(oldNode, false);
+      }
+      if (oldNode is FieldDeclaration) {
+        processFieldDeclaration(oldNode, false);
+      }
+      if (oldNode is MethodDeclaration) {
+        processMethodDeclaration(oldNode, false);
+      }
+    }
+    // If the class had only a default synthetic constructor, and there are
+    // no explicit constructors in the new AST, keep the constructor.
+    if (!newHasConstructor) {
+      List<ConstructorElement> constructors = classElement.constructors;
+      if (constructors.length == 1) {
+        ConstructorElement constructor = constructors[0];
+        if (constructor.isSynthetic && constructor.isDefaultConstructor) {
+          classElementHolder.addConstructor(constructor);
+          removedConstructors.remove(constructor);
+        }
+      }
+    }
+    // Update the delta.
+    classDelta.removedAccessors.addAll(removedAccessors);
+    classDelta.removedConstructors.addAll(removedConstructors);
+    classDelta.removedMethods.addAll(removedMethods);
+    // Prepare fields.
+    List<PropertyAccessorElement> newAccessors = classElementHolder.accessors;
+    Map<String, FieldElement> newFields = <String, FieldElement>{};
+    for (PropertyAccessorElement accessor in newAccessors) {
+      newFields[accessor.displayName] = accessor.variable;
+    }
+    // Update references to fields from constructors.
+    for (ClassMember member in newClass.members) {
+      if (member is ConstructorDeclaration) {
+        for (FormalParameter parameter in member.parameters.parameters) {
+          FormalParameter normalParameter = parameter;
+          if (parameter is DefaultFormalParameter) {
+            normalParameter = parameter.parameter;
+          }
+          if (normalParameter is FieldFormalParameter) {
+            FieldFormalParameterElementImpl parameterElement =
+                normalParameter.element as FieldFormalParameterElementImpl;
+            parameterElement.field = newFields[parameterElement.name];
+          }
+        }
+      }
+    }
+    // Update ClassElement.
+    classElement.accessors = newAccessors;
+    classElement.constructors = classElementHolder.constructors;
+    classElement.fields = newFields.values.toList();
+    classElement.methods = classElementHolder.methods;
+    classElementHolder.validate();
+    // Ensure at least a default synthetic constructor.
+    if (classElement.constructors.isEmpty) {
+      ConstructorElementImpl constructor =
+          new ConstructorElementImpl.forNode(null);
+      constructor.synthetic = true;
+      classElement.constructors = <ConstructorElement>[constructor];
+    }
+    // OK
+    return classDelta;
+  }
+
   void _processDirectives() {
     Map<String, Directive> oldDirectiveMap = new HashMap<String, Directive>();
     for (Directive oldDirective in oldUnit.directives) {
@@ -136,9 +382,14 @@
   void _processUnitMembers() {
     Map<String, CompilationUnitMember> oldNodeMap =
         new HashMap<String, CompilationUnitMember>();
+    Map<String, ClassDeclaration> nameToOldClassMap =
+        new HashMap<String, ClassDeclaration>();
     for (CompilationUnitMember oldNode in oldUnit.declarations) {
       String code = TokenUtils.getFullCode(oldNode);
       oldNodeMap[code] = oldNode;
+      if (oldNode is ClassDeclaration) {
+        nameToOldClassMap[oldNode.name.name] = oldNode;
+      }
     }
     // Prepare all old top-level elements.
     Set<Element> removedElements = new Set<Element>();
@@ -151,29 +402,43 @@
     // Replace new nodes with the identical old nodes.
     for (CompilationUnitMember newNode in newUnit.declarations) {
       String code = TokenUtils.getFullCode(newNode);
-      // Prepare an old node.
       CompilationUnitMember oldNode = oldNodeMap[code];
+      // Add the new element.
       if (oldNode == null) {
+        // Compute a delta for the class.
+        if (newNode is ClassDeclaration) {
+          ClassDeclaration oldClass = nameToOldClassMap[newNode.name.name];
+          if (oldClass != null) {
+            ClassElementDelta delta = _processClassMembers(oldClass, newNode);
+            if (delta != null) {
+              unitDelta.classDeltas[delta._element.name] = delta;
+              _addElementToUnitHolder(delta._element);
+              removedElements.remove(delta._element);
+              continue;
+            }
+          }
+        }
+        // Add the new node elements.
         List<Element> elements = _getElements(newNode);
-        elements.forEach(_addElementToHolder);
+        elements.forEach(_addElementToUnitHolder);
         elements.forEach(unitDelta.addedDeclarations.add);
         continue;
       }
       // Do replacement.
       _replaceNode(newNode, oldNode);
       List<Element> elements = _getElements(oldNode);
-      elements.forEach(_addElementToHolder);
+      elements.forEach(_addElementToUnitHolder);
       elements.forEach(removedElements.remove);
     }
     unitDelta.removedDeclarations.addAll(removedElements);
     // Update CompilationUnitElement.
-    unitElement.accessors = holder.accessors;
-    unitElement.enums = holder.enums;
-    unitElement.functions = holder.functions;
-    unitElement.typeAliases = holder.typeAliases;
-    unitElement.types = holder.types;
-    unitElement.topLevelVariables = holder.topLevelVariables;
-    holder.validate();
+    unitElement.accessors = unitElementHolder.accessors;
+    unitElement.enums = unitElementHolder.enums;
+    unitElement.functions = unitElementHolder.functions;
+    unitElement.typeAliases = unitElementHolder.typeAliases;
+    unitElement.types = unitElementHolder.types;
+    unitElement.topLevelVariables = unitElementHolder.topLevelVariables;
+    unitElementHolder.validate();
   }
 
   /**
@@ -184,16 +449,14 @@
     // Replace node.
     NodeReplacer.replace(newNode, oldNode);
     // Replace tokens.
-    {
-      Token oldBeginToken = TokenUtils.getBeginTokenNotComment(newNode);
-      Token newBeginToken = TokenUtils.getBeginTokenNotComment(oldNode);
-      oldBeginToken.previous.setNext(newBeginToken);
-      oldNode.endToken.setNext(newNode.endToken.next);
-    }
+    Token oldBeginToken = TokenUtils.getBeginTokenNotComment(oldNode);
+    Token newBeginToken = TokenUtils.getBeginTokenNotComment(newNode);
+    newBeginToken.previous.setNext(oldBeginToken);
+    oldNode.endToken.setNext(newNode.endToken.next);
     // Change tokens offsets.
     Map<int, int> offsetMap = new HashMap<int, int>();
-    TokenUtils.copyTokenOffsets(offsetMap, oldNode.beginToken,
-        newNode.beginToken, oldNode.endToken, newNode.endToken, true);
+    TokenUtils.copyTokenOffsets(offsetMap, oldBeginToken, newBeginToken,
+        oldNode.endToken, newNode.endToken);
     // Change elements offsets.
     {
       var visitor = new _UpdateElementOffsetsVisitor(offsetMap);
@@ -213,23 +476,29 @@
    */
   static List<Element> _getElements(AstNode node) {
     List<Element> elements = <Element>[];
-    if (node is TopLevelVariableDeclaration) {
-      VariableDeclarationList variableList = node.variables;
+    void addPropertyAccessors(VariableDeclarationList variableList) {
       if (variableList != null) {
         for (VariableDeclaration variable in variableList.variables) {
-          TopLevelVariableElement element = variable.element;
-          elements.add(element);
-          if (element.getter != null) {
-            elements.add(element.getter);
-          }
-          if (element.setter != null) {
-            elements.add(element.setter);
+          PropertyInducingElement element = variable.element;
+          if (element != null) {
+            elements.add(element);
+            if (element.getter != null) {
+              elements.add(element.getter);
+            }
+            if (element.setter != null) {
+              elements.add(element.setter);
+            }
           }
         }
       }
-    } else if (node is PartDirective || node is PartOfDirective) {} else if (node
-        is Directive &&
-        node.element != null) {
+    }
+    if (node is FieldDeclaration) {
+      addPropertyAccessors(node.fields);
+    } else if (node is TopLevelVariableDeclaration) {
+      addPropertyAccessors(node.variables);
+    } else if (node is PartDirective || node is PartOfDirective) {
+      // Ignore.
+    } else if (node is Directive && node.element != null) {
       elements.add(node.element);
     } else if (node is Declaration && node.element != null) {
       Element element = node.element;
@@ -242,7 +511,7 @@
   }
 
   /**
-   * Replaces contents of the [to] unit with the contenxts of the [from] unit.
+   * Replaces contents of the [to] unit with the contents of the [from] unit.
    */
   static void _replaceUnitContents(CompilationUnit to, CompilationUnit from) {
     to.directives.clear();
@@ -253,6 +522,7 @@
     to.declarations.addAll(from.declarations);
     to.element = to.element;
     to.lineInfo = from.lineInfo;
+    to.endToken = from.endToken;
   }
 }
 
@@ -266,15 +536,23 @@
    * Copy offsets from [newToken]s to [oldToken]s.
    */
   static void copyTokenOffsets(Map<int, int> offsetMap, Token oldToken,
-      Token newToken, Token oldEndToken, Token newEndToken,
-      [bool goUpComment = false]) {
+      Token newToken, Token oldEndToken, Token newEndToken) {
     if (oldToken is CommentToken && newToken is CommentToken) {
-      if (goUpComment) {
-        copyTokenOffsets(offsetMap, (oldToken as CommentToken).parent,
-            (newToken as CommentToken).parent, oldEndToken, newEndToken);
+      // Update (otherwise unlinked) reference tokens in documentation.
+      if (oldToken is DocumentationCommentToken &&
+          newToken is DocumentationCommentToken) {
+        List<Token> oldReferences = oldToken.references;
+        List<Token> newReferences = newToken.references;
+        assert(oldReferences.length == newReferences.length);
+        for (int i = 0; i < oldReferences.length; i++) {
+          copyTokenOffsets(offsetMap, oldReferences[i], newReferences[i],
+              oldEndToken, newEndToken);
+        }
       }
+      // Update documentation tokens.
       while (oldToken != null) {
         offsetMap[oldToken.offset] = newToken.offset;
+        offsetMap[oldToken.end] = newToken.end;
         oldToken.offset = newToken.offset;
         oldToken = oldToken.next;
         newToken = newToken.next;
@@ -290,7 +568,12 @@
             newToken.precedingComments, oldEndToken, newEndToken);
       }
       offsetMap[oldToken.offset] = newToken.offset;
+      offsetMap[oldToken.end] = newToken.end;
       oldToken.offset = newToken.offset;
+      if (oldToken.type == TokenType.EOF) {
+        assert(newToken.type == TokenType.EOF);
+        break;
+      }
       if (oldToken == oldEndToken) {
         assert(newToken == newEndToken);
         break;
@@ -312,12 +595,15 @@
    * Return the token string of all the [node] tokens.
    */
   static String getFullCode(AstNode node) {
+    if (node == null) {
+      return '';
+    }
     List<Token> tokens = getTokens(node);
     return joinTokens(tokens);
   }
 
   /**
-   * Returns all tokends (including comments) of the given [node].
+   * Returns all tokens (including comments) of the given [node].
    */
   static List<Token> getTokens(AstNode node) {
     List<Token> tokens = <Token>[];
@@ -355,18 +641,62 @@
   _UpdateElementOffsetsVisitor(this.map);
 
   void visitElement(Element element) {
-    if (element is CompilationUnitElement) {
+    if (element is LibraryElement) {
       return;
     }
-    if (element.isSynthetic) {
+    if (element.isSynthetic && !_isVariableInitializer(element)) {
       return;
     }
-    int oldOffset = element.nameOffset;
-    int newOffset = map[oldOffset];
-    assert(newOffset != null);
-    (element as ElementImpl).nameOffset = newOffset;
-    if (element is! LibraryElement) {
-      super.visitElement(element);
+    if (element is ElementImpl) {
+      // name offset
+      {
+        int oldOffset = element.nameOffset;
+        int newOffset = map[oldOffset];
+        assert(newOffset != null);
+        element.nameOffset = newOffset;
+      }
+      // code range
+      {
+        int oldOffset = element.codeOffset;
+        if (oldOffset != null) {
+          int oldEnd = oldOffset + element.codeLength;
+          int newOffset = map[oldOffset];
+          int newEnd = map[oldEnd];
+          assert(newOffset != null);
+          assert(newEnd != null);
+          int newLength = newEnd - newOffset;
+          element.setCodeRange(newOffset, newLength);
+        }
+      }
+      // visible range
+      if (element is LocalElement) {
+        SourceRange oldVisibleRange = (element as LocalElement).visibleRange;
+        if (oldVisibleRange != null) {
+          int oldOffset = oldVisibleRange.offset;
+          int oldLength = oldVisibleRange.length;
+          int oldEnd = oldOffset + oldLength;
+          int newOffset = map[oldOffset];
+          int newEnd = map[oldEnd];
+          assert(newOffset != null);
+          assert(newEnd != null);
+          int newLength = newEnd - newOffset;
+          if (newOffset != oldOffset || newLength != oldLength) {
+            if (element is FunctionElementImpl) {
+              element.setVisibleRange(newOffset, newLength);
+            } else if (element is LocalVariableElementImpl) {
+              element.setVisibleRange(newOffset, newLength);
+            } else if (element is ParameterElementImpl) {
+              element.setVisibleRange(newOffset, newLength);
+            }
+          }
+        }
+      }
     }
+    super.visitElement(element);
+  }
+
+  static bool _isVariableInitializer(Element element) {
+    return element is FunctionElement &&
+        element.enclosingElement is VariableElement;
   }
 }
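The member-matching loop above keys old and new class members by their full token text (via `TokenUtils.getFullCode`): a new member that reproduces an old one verbatim is re-linked to the old node and elements, anything else is reported as added, and whatever is left unmatched in the old map ends up in the removed sets. A toy, self-contained sketch of that strategy, with plain strings standing in for `ClassMember` nodes and their token text:

```dart
// Toy illustration only: strings stand in for ClassMember nodes and for the
// full token text that TokenUtils.getFullCode would produce.
void diffMembers(List<String> oldMembers, List<String> newMembers) {
  // Index the old members by their exact code, as the resolver does.
  var oldByCode = <String, String>{};
  for (var code in oldMembers) {
    oldByCode[code] = code;
  }
  // A new member that finds an identical old one is kept; otherwise it is
  // treated as added.
  var added = <String>[];
  for (var code in newMembers) {
    if (oldByCode.remove(code) == null) {
      added.add(code);
    }
  }
  // Whatever was never matched is reported as removed.
  print('added:   $added');
  print('removed: ${oldByCode.keys.toList()}');
}

void main() {
  diffMembers(['A();', 'int f;'], ['A();', 'int g;']);
  // added:   [int g;]
  // removed: [int f;]
}
```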
diff --git a/pkg/analyzer/lib/src/task/options.dart b/pkg/analyzer/lib/src/task/options.dart
index fd1c224..a25d146 100644
--- a/pkg/analyzer/lib/src/task/options.dart
+++ b/pkg/analyzer/lib/src/task/options.dart
@@ -15,7 +15,6 @@
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/generated/utilities_general.dart';
 import 'package:analyzer/src/task/general.dart';
-import 'package:analyzer/src/task/strong/info.dart';
 import 'package:analyzer/task/general.dart';
 import 'package:analyzer/task/model.dart';
 import 'package:source_span/source_span.dart';
@@ -48,6 +47,7 @@
   static const String enableGenericMethods = 'enableGenericMethods';
   static const String enableStrictCallChecks = 'enableStrictCallChecks';
   static const String enableSuperMixins = 'enableSuperMixins';
+  static const String enableTrailingCommas = 'enableTrailingCommas';
 
   /// This option is deprecated.
   static const String enableConditionalDirectives =
@@ -161,8 +161,6 @@
       _errorCodes = new HashSet<String>();
       // Engine codes.
       _errorCodes.addAll(ErrorCode.values.map((ErrorCode code) => code.name));
-      // Strong-mode codes.
-      _errorCodes.addAll(StaticInfo.names);
     }
     return _errorCodes;
   }
@@ -491,6 +489,14 @@
         context.analysisOptions = options;
       }
     }
+    if (feature == AnalyzerOptions.enableTrailingCommas) {
+      if (isTrue(value)) {
+        AnalysisOptionsImpl options =
+            new AnalysisOptionsImpl.from(context.analysisOptions);
+        options.enableTrailingCommas = true;
+        context.analysisOptions = options;
+      }
+    }
     if (feature == AnalyzerOptions.enableGenericMethods) {
       if (isTrue(value)) {
         AnalysisOptionsImpl options =
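The new `enableTrailingCommas` flag is handled like the neighbouring experimental flags: when the option is turned on (presumably under the same `language:` section of the analysis options file as `enableSuperMixins` and friends), the task copies the context's options and flips the flag. A minimal sketch of that copy-then-assign pattern; obtaining the `AnalysisContext` is outside the scope of the sketch:

```dart
import 'package:analyzer/src/generated/engine.dart';

/// Minimal sketch of the copy-then-assign pattern used by the options task.
/// [context] is any already-configured AnalysisContext.
void turnOnTrailingCommas(AnalysisContext context) {
  AnalysisOptionsImpl options =
      new AnalysisOptionsImpl.from(context.analysisOptions);
  options.enableTrailingCommas = true;
  context.analysisOptions = options;
}
```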
diff --git a/pkg/analyzer/lib/src/task/strong/ast_properties.dart b/pkg/analyzer/lib/src/task/strong/ast_properties.dart
new file mode 100644
index 0000000..52f27f1
--- /dev/null
+++ b/pkg/analyzer/lib/src/task/strong/ast_properties.dart
@@ -0,0 +1,49 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+/// Properties that result from Strong Mode analysis on an AST.
+///
+/// These properties are not public, but are provided for use by back-ends
+/// such as the Dart Dev Compiler.
+
+import 'package:analyzer/analyzer.dart';
+import 'package:analyzer/dart/element/type.dart';
+
+const String _implicitCast = '_implicitCast';
+const String _hasImplicitCasts = '_hasImplicitCasts';
+const String _isDynamicInvoke = '_isDynamicInvoke';
+
+/// True if this compilation unit has any implicit casts, otherwise false.
+///
+/// See also [getImplicitCast].
+bool hasImplicitCasts(CompilationUnit node) {
+  return node.getProperty/*<bool>*/(_hasImplicitCasts) ?? false;
+}
+
+/// Sets the [hasImplicitCasts] property for this compilation unit.
+void setHasImplicitCasts(CompilationUnit node, bool value) {
+  node.setProperty(_hasImplicitCasts, value == true ? true : null);
+}
+
+/// If this expression has an implicit cast, returns the type it is coerced to,
+/// otherwise returns null.
+DartType getImplicitCast(Expression node) {
+  return node.getProperty/*<DartType>*/(_implicitCast);
+}
+
+/// Sets the result of [getImplicitCast] for this node.
+void setImplicitCast(Expression node, DartType type) {
+  node.setProperty(_implicitCast, type);
+}
+
+/// True if this node is a dynamic operation that requires dispatch and/or
+/// checking at runtime.
+bool isDynamicInvoke(Expression node) {
+  return node.getProperty/*<bool>*/(_isDynamicInvoke) ?? false;
+}
+
+/// Sets the [isDynamicInvoke] property for this expression.
+void setIsDynamicInvoke(Expression node, bool value) {
+  node.setProperty(_isDynamicInvoke, value == true ? true : null);
+}
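The getters and setters above are the entire surface of the new file; a back-end is expected to read them after `CodeChecker` has visited a unit. A hedged sketch of a consumer (the function name is invented for illustration, and producing a resolved, checked `unit` is not shown):

```dart
import 'package:analyzer/analyzer.dart';
import 'package:analyzer/dart/element/type.dart';
import 'package:analyzer/src/task/strong/ast_properties.dart';

/// Describes what strong mode recorded for a single [node] of a [unit] that
/// CodeChecker has already visited (obtaining such a unit is not shown here).
String describe(CompilationUnit unit, Expression node) {
  if (!hasImplicitCasts(unit)) {
    return 'unit has no implicit casts at all';
  }
  DartType castType = getImplicitCast(node);
  if (castType != null) {
    return 'implicitly cast to $castType';
  }
  if (isDynamicInvoke(node)) {
    return 'requires a dynamic dispatch';
  }
  return 'no strong-mode properties recorded';
}
```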
diff --git a/pkg/analyzer/lib/src/task/strong/checker.dart b/pkg/analyzer/lib/src/task/strong/checker.dart
index 4566e6a..783cdf0 100644
--- a/pkg/analyzer/lib/src/task/strong/checker.dart
+++ b/pkg/analyzer/lib/src/task/strong/checker.dart
@@ -13,10 +13,12 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
 import 'package:analyzer/src/dart/element/type.dart';
+import 'package:analyzer/src/generated/engine.dart' show AnalysisOptionsImpl;
+import 'package:analyzer/src/generated/error.dart' show StrongModeCode;
 import 'package:analyzer/src/generated/resolver.dart' show TypeProvider;
 import 'package:analyzer/src/generated/type_system.dart';
 
-import 'info.dart';
+import 'ast_properties.dart';
 
 DartType _elementType(Element e) {
   if (e == null) {
@@ -100,20 +102,21 @@
   final StrongTypeSystemImpl rules;
   final TypeProvider typeProvider;
   final AnalysisErrorListener reporter;
-  final _OverrideChecker _overrideChecker;
-  final bool _hints;
+  final AnalysisOptionsImpl _options;
+  _OverrideChecker _overrideChecker;
 
   bool _failure = false;
+  bool _hasImplicitCasts;
+
   CodeChecker(TypeProvider typeProvider, StrongTypeSystemImpl rules,
-      AnalysisErrorListener reporter,
-      {bool hints: false})
+      AnalysisErrorListener reporter, this._options)
       : typeProvider = typeProvider,
         rules = rules,
-        reporter = reporter,
-        _hints = hints,
-        _overrideChecker = new _OverrideChecker(typeProvider, rules, reporter);
+        reporter = reporter {
+    _overrideChecker = new _OverrideChecker(this);
+  }
 
-  bool get failure => _failure || _overrideChecker._failure;
+  bool get failure => _failure;
 
   void checkArgument(Expression arg, DartType expectedType) {
     // Preserve named argument structure, so their immediate parent is the
@@ -170,7 +173,13 @@
 
   void reset() {
     _failure = false;
-    _overrideChecker._failure = false;
+  }
+
+  @override
+  void visitCompilationUnit(CompilationUnit node) {
+    _hasImplicitCasts = false;
+    node.visitChildren(this);
+    setHasImplicitCasts(node, _hasImplicitCasts);
   }
 
   @override
@@ -264,7 +273,7 @@
     for (int i = 0, last = init.length - 1; i < last; i++) {
       final node = init[i];
       if (node is SuperConstructorInvocation) {
-        _recordMessage(new InvalidSuperInvocation(node));
+        _recordMessage(node, StrongModeCode.INVALID_SUPER_INVOCATION, [node]);
       }
     }
   }
@@ -315,9 +324,8 @@
           node.identifier.staticElement as FieldFormalParameterElement;
       var fieldType = _elementType(fieldElement.field);
       if (!rules.isSubtypeOf(type, fieldType)) {
-        var staticInfo =
-            new InvalidParameterDeclaration(rules, node, fieldType);
-        _recordMessage(staticInfo);
+        _recordMessage(node, StrongModeCode.INVALID_PARAMETER_DECLARATION,
+            [node, fieldType]);
       }
     }
     node.visitChildren(this);
@@ -345,8 +353,7 @@
             sequenceInterface.instantiate([DynamicTypeImpl.instance]);
 
         if (rules.isSubtypeOf(sequenceType, iterableType)) {
-          _recordMessage(DownCast.create(
-              rules, node.iterable, iterableType, sequenceType));
+          _recordImplicitCast(node.iterable, iterableType, sequenceType);
           elementType = DynamicTypeImpl.instance;
         }
       }
@@ -500,7 +507,7 @@
       //
       // The first case is handled here, the second case is handled below when
       // we call [checkFunctionApplication].
-      DynamicInvoke.set(node.methodName, true);
+      setIsDynamicInvoke(node.methodName, true);
     } else {
       checkFunctionApplication(node, node.methodName, node.argumentList);
     }
@@ -619,7 +626,6 @@
       assert(functionType.optionalParameterTypes.isEmpty);
 
       // Check the LHS type.
-      var staticInfo;
       var rhsType = _getStaticType(expr.rightHandSide);
       var lhsType = _getStaticType(expr.leftHandSide);
       var returnType = rules.refineBinaryExpressionType(
@@ -632,19 +638,17 @@
             rules.isSubtypeOf(lhsType, rhsType)) {
           // This is also slightly different from spec, but allows us to keep
           // compound operators in the int += num and num += dynamic cases.
-          staticInfo =
-              DownCast.create(rules, expr.rightHandSide, rhsType, lhsType);
-          rhsType = lhsType;
+          _recordImplicitCast(expr.rightHandSide, rhsType, lhsType);
         } else {
-          staticInfo = new StaticTypeError(rules, expr, lhsType);
+          _recordMessage(expr, StrongModeCode.STATIC_TYPE_ERROR,
+              [expr, returnType, lhsType]);
         }
-        _recordMessage(staticInfo);
-      }
-
-      // Check the rhs type
-      if (staticInfo is! CoercionInfo) {
-        var paramType = paramTypes.first;
-        _checkDowncast(expr.rightHandSide, paramType);
+      } else {
+        // Check the RHS type.
+        //
+        // This is only needed if we didn't already need a cast, and avoids
+        // emitting two messages for the same expression.
+        _checkDowncast(expr.rightHandSide, paramTypes.first);
       }
     }
   }
@@ -679,7 +683,7 @@
 
     // Downcast if toT <: fromT
     if (rules.isSubtypeOf(to, from)) {
-      _recordMessage(DownCast.create(rules, expr, from, to));
+      _recordImplicitCast(expr, from, to);
       return;
     }
 
@@ -694,7 +698,7 @@
     // Iterable<T> for some concrete T (e.g. Object).  These are unrelated
     // in the restricted system, but List<dynamic> <: Iterable<T> in dart.
     if (from.isAssignableTo(to)) {
-      _recordMessage(DownCast.create(rules, expr, from, to));
+      _recordImplicitCast(expr, from, to);
     }
   }
 
@@ -731,7 +735,7 @@
   void _checkRuntimeTypeCheck(AstNode node, TypeName typeName) {
     var type = getType(typeName);
     if (!rules.isGroundType(type)) {
-      _recordMessage(new NonGroundTypeCheckInfo(node, type));
+      _recordMessage(node, StrongModeCode.NON_GROUND_TYPE_CHECK_INFO, [type]);
     }
   }
 
@@ -749,6 +753,86 @@
     }
   }
 
+  /// Records an implicit cast for the [expression] from [fromType] to [toType].
+  ///
+  /// This will emit the appropriate error/warning/hint message as well as mark
+  /// the AST node.
+  void _recordImplicitCast(
+      Expression expression, DartType fromType, DartType toType) {
+    // toT <:_R fromT => to <: fromT
+    // NB: classes with call methods are subtypes of function
+    // types, but the function type is not assignable to the class
+    assert(toType.isSubtypeOf(fromType) || fromType.isAssignableTo(toType));
+
+    // Inference "casts":
+    if (expression is Literal || expression is FunctionExpression) {
+      // fromT should be an exact type - this will almost certainly fail at
+      // runtime.
+      _recordMessage(expression, StrongModeCode.STATIC_TYPE_ERROR,
+          [expression, fromType, toType]);
+      return;
+    }
+
+    if (expression is InstanceCreationExpression) {
+      ConstructorElement e = expression.staticElement;
+      if (e == null || !e.isFactory) {
+        // fromT should be an exact type - this will almost certainly fail at
+        // runtime.
+
+        _recordMessage(expression, StrongModeCode.STATIC_TYPE_ERROR,
+            [expression, fromType, toType]);
+        return;
+      }
+    }
+
+    if (isKnownFunction(expression)) {
+      _recordMessage(expression, StrongModeCode.STATIC_TYPE_ERROR,
+          [expression, fromType, toType]);
+      return;
+    }
+
+    // TODO(vsm): Change this to an assert when we have generic methods and
+    // fix TypeRules._coerceTo to disallow implicit sideways casts.
+    bool downCastComposite = false;
+    if (!rules.isSubtypeOf(toType, fromType)) {
+      assert(toType.isSubtypeOf(fromType) || fromType.isAssignableTo(toType));
+      downCastComposite = true;
+    }
+
+    // Composite cast: these are more likely to fail.
+    if (!rules.isGroundType(toType)) {
+      // This cast is (probably) due to our different treatment of dynamic.
+      // It may be more likely to fail at runtime.
+      if (fromType is InterfaceType) {
+        // For class types, we'd like to allow non-generic down casts, e.g.,
+        // Iterable<T> to List<T>.  The intuition here is that raw (generic)
+        // casts are problematic, and we should complain about those.
+        var typeArgs = fromType.typeArguments;
+        downCastComposite =
+            typeArgs.isEmpty || typeArgs.any((t) => t.isDynamic);
+      } else {
+        downCastComposite = true;
+      }
+    }
+
+    var parent = expression.parent;
+    ErrorCode errorCode;
+    if (downCastComposite) {
+      errorCode = StrongModeCode.DOWN_CAST_COMPOSITE;
+    } else if (fromType.isDynamic) {
+      errorCode = StrongModeCode.DYNAMIC_CAST;
+    } else if (parent is VariableDeclaration &&
+        parent.initializer == expression) {
+      errorCode = StrongModeCode.ASSIGNMENT_CAST;
+    } else {
+      errorCode = StrongModeCode.DOWN_CAST_IMPLICIT;
+    }
+
+    _recordMessage(expression, errorCode, [fromType, toType]);
+    setImplicitCast(expression, toType);
+    _hasImplicitCasts = true;
+  }
+
   // Produce a coercion which coerces something of type fromT
   // to something of type toT.
   // Returns the error coercion if the types cannot be coerced
@@ -811,7 +895,7 @@
     DartType t = expr.staticType ?? DynamicTypeImpl.instance;
 
     // Remove fuzzy arrow if possible.
-    if (t is FunctionType && StaticInfo.isKnownFunction(expr)) {
+    if (t is FunctionType && isKnownFunction(expr)) {
       t = rules.functionTypeToConcreteType(typeProvider, t);
     }
 
@@ -893,46 +977,57 @@
     return _isObjectGetter(target, id) || _isObjectMethod(target, id);
   }
 
-  void _recordDynamicInvoke(AstNode node, AstNode target) {
-    if (_hints) {
-      reporter.onError(new DynamicInvoke(rules, node).toAnalysisError());
-    }
+  void _recordDynamicInvoke(AstNode node, Expression target) {
+    _recordMessage(node, StrongModeCode.DYNAMIC_INVOKE, [node]);
     // TODO(jmesserly): we may eventually want to record if the whole operation
     // (node) was dynamic, rather than the target, but this is an easier fit
     // with what we used to do.
-    DynamicInvoke.set(target, true);
+    setIsDynamicInvoke(target, true);
   }
 
-  void _recordMessage(StaticInfo info) {
-    if (info == null) return;
-    var error = info.toAnalysisError();
-    var severity = error.errorCode.errorSeverity;
+  void _recordMessage(AstNode node, ErrorCode errorCode, List arguments) {
+    var severity = errorCode.errorSeverity;
     if (severity == ErrorSeverity.ERROR) _failure = true;
-    if (severity != ErrorSeverity.INFO || _hints) {
+    if (severity != ErrorSeverity.INFO || _options.strongModeHints) {
+      int begin = node is AnnotatedNode
+          ? node.firstTokenAfterCommentAndMetadata.offset
+          : node.offset;
+      int length = node.end - begin;
+      var source = (node.root as CompilationUnit).element.source;
+      var error =
+          new AnalysisError(source, begin, length, errorCode, arguments);
       reporter.onError(error);
     }
-
-    if (info is CoercionInfo) {
-      // TODO(jmesserly): if we're run again on the same AST, we'll produce the
-      // same annotations. This should be harmless. This might go away once
-      // CodeChecker is integrated better with analyzer, as it will know that
-      // checking has already been performed.
-      // assert(CoercionInfo.get(info.node) == null);
-      CoercionInfo.set(info.node, info);
-    }
   }
 }
 
+bool isKnownFunction(Expression expression) {
+  Element element = null;
+  if (expression is FunctionExpression) {
+    return true;
+  } else if (expression is PropertyAccess) {
+    element = expression.propertyName.staticElement;
+  } else if (expression is Identifier) {
+    element = expression.staticElement;
+  }
+  // First class functions and static methods, where we know the original
+  // declaration, will have an exact type, so we know a downcast will fail.
+  return element is FunctionElement ||
+      element is MethodElement && element.isStatic;
+}
+
 /// Checks for overriding declarations of fields and methods. This is used to
 /// check overrides between classes and superclasses, interfaces, and mixin
 /// applications.
 class _OverrideChecker {
-  bool _failure = false;
   final StrongTypeSystemImpl rules;
   final TypeProvider _typeProvider;
-  final AnalysisErrorListener _reporter;
+  final CodeChecker _checker;
 
-  _OverrideChecker(this._typeProvider, this.rules, this._reporter);
+  _OverrideChecker(CodeChecker checker)
+      : _checker = checker,
+        rules = checker.rules,
+        _typeProvider = checker.typeProvider;
 
   void check(ClassDeclaration node) {
     if (node.element.type.isObject) return;
@@ -1196,8 +1291,14 @@
       // as a field.  We effectively treat fields as final / non-virtual.
       PropertyInducingElement field = _getMemberField(type, element);
       if (field != null) {
-        _recordMessage(new InvalidFieldOverride(
-            errorLocation, element, type, subType, baseType));
+        _checker._recordMessage(
+            errorLocation, StrongModeCode.INVALID_FIELD_OVERRIDE, [
+          element.enclosingElement.name,
+          element.name,
+          subType,
+          type,
+          baseType
+        ]);
       }
     }
     FunctionType concreteSubType = subType;
@@ -1225,8 +1326,23 @@
       //     get foo => e; // no type specified.
       //     toString() { ... } // no return type specified.
       //   }
-      _recordMessage(new InvalidMethodOverride(
-          errorLocation, element, type, subType, baseType));
+
+      ErrorCode errorCode;
+      if (errorLocation is ExtendsClause) {
+        errorCode = StrongModeCode.INVALID_METHOD_OVERRIDE_FROM_BASE;
+      } else if (errorLocation.parent is WithClause) {
+        errorCode = StrongModeCode.INVALID_METHOD_OVERRIDE_FROM_MIXIN;
+      } else {
+        errorCode = StrongModeCode.INVALID_METHOD_OVERRIDE;
+      }
+
+      _checker._recordMessage(errorLocation, errorCode, [
+        element.enclosingElement.name,
+        element.name,
+        subType,
+        type,
+        baseType
+      ]);
     }
     return true;
   }
@@ -1265,11 +1381,4 @@
       current = current.superclass;
     } while (!current.isObject && !visited.contains(current));
   }
-
-  void _recordMessage(StaticInfo info) {
-    if (info == null) return;
-    var error = info.toAnalysisError();
-    if (error.errorCode.errorSeverity == ErrorSeverity.ERROR) _failure = true;
-    _reporter.onError(error);
-  }
 }
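The branch at the end of `_recordImplicitCast` decides which `StrongModeCode` an implicit cast is reported with. The plain Dart snippets below annotate the code each cast is expected to map to under that classification; whether a given code actually surfaces as a hint or a warning depends on the analysis options (for example, `strongModeHints`):

```dart
void main() {
  dynamic d = 42;
  int a = d; // source type is dynamic               -> DYNAMIC_CAST

  Object o = 'hello';
  String b = o; // initializer of a declaration      -> ASSIGNMENT_CAST

  num n = 3;
  int c;
  c = n; // ordinary assignment to a ground type     -> DOWN_CAST_IMPLICIT

  Iterable i = <int>[1, 2, 3];
  List<int> l = i; // raw/dynamic type arguments     -> DOWN_CAST_COMPOSITE

  print([a, b, c, l]);
}
```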
diff --git a/pkg/analyzer/lib/src/task/strong/info.dart b/pkg/analyzer/lib/src/task/strong/info.dart
deleted file mode 100644
index 408c05d..0000000
--- a/pkg/analyzer/lib/src/task/strong/info.dart
+++ /dev/null
@@ -1,547 +0,0 @@
-// Copyright (c) 2015, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Defines static information collected by the type checker and used later by
-/// emitters to generate code.
-// TODO(jmesserly): this was ported from package:dev_compiler, and needs to be
-// refactored to fit into analyzer.
-library analyzer.src.task.strong.info;
-
-import 'package:analyzer/dart/ast/ast.dart';
-import 'package:analyzer/dart/element/element.dart';
-import 'package:analyzer/dart/element/type.dart';
-import 'package:analyzer/src/dart/element/type.dart';
-import 'package:analyzer/src/generated/error.dart';
-import 'package:analyzer/src/generated/type_system.dart';
-
-/// A down cast due to a variable declaration to a ground type:
-///
-///     T x = expr;
-///
-/// where `T` is ground.  We exclude non-ground types as these behave
-/// differently compared to standard Dart.
-class AssignmentCast extends DownCast {
-  AssignmentCast(TypeSystem rules, Expression expression, DartType fromType,
-      DartType toType)
-      : super._internal(rules, expression, fromType, toType);
-
-  @override
-  String get name => 'STRONG_MODE_ASSIGNMENT_CAST';
-
-  toErrorCode() => new HintCode(name, message);
-}
-
-/// Implicitly injected expression conversion.
-abstract class CoercionInfo extends StaticInfo {
-  static const String _propertyName = 'dev_compiler.src.info.CoercionInfo';
-
-  final TypeSystem rules;
-
-  final Expression node;
-
-  CoercionInfo(this.rules, this.node);
-
-  DartType get baseType => node.staticType ?? DynamicTypeImpl.instance;
-  DartType get convertedType;
-
-  String get message;
-  DartType get staticType => convertedType;
-
-  toErrorCode() => new HintCode(name, message);
-
-  /// Gets the coercion info associated with this node.
-  static CoercionInfo get(AstNode node) => node.getProperty(_propertyName);
-
-  /// Sets the coercion info associated with this node.
-  static CoercionInfo set(AstNode node, CoercionInfo info) {
-    node.setProperty(_propertyName, info);
-    return info;
-  }
-}
-
-/// Base class for all casts from base type to sub type.
-abstract class DownCast extends CoercionInfo {
-  final DartType _fromType;
-  final DartType _toType;
-
-  DownCast._internal(
-      TypeSystem rules, Expression expression, this._fromType, this._toType)
-      : super(rules, expression);
-
-  @override
-  List<Object> get arguments => [baseType, convertedType];
-
-  /// The type being cast from.
-  ///
-  /// This is usually the static type of the associated expression, but may not
-  /// be if the cast is attached to a variable in a for-in loop.
-  @override
-  DartType get baseType => _fromType;
-
-  DartType get convertedType => _toType;
-
-  @override
-  String get message => 'Unsound implicit cast from {0} to {1}';
-
-  /// Factory to create correct DownCast variant.
-  static StaticInfo create(StrongTypeSystemImpl rules, Expression expression,
-      DartType fromType, DartType toType) {
-    // toT <:_R fromT => to <: fromT
-    // NB: classes with call methods are subtypes of function
-    // types, but the function type is not assignable to the class
-    assert(toType.isSubtypeOf(fromType) || fromType.isAssignableTo(toType));
-
-    // Handle null call specially.
-    if (expression is NullLiteral) {
-      // TODO(vsm): Create a NullCast for this once we revisit nonnullability.
-      return new DownCastImplicit(rules, expression, fromType, toType);
-    }
-
-    // Inference "casts":
-    if (expression is Literal || expression is FunctionExpression) {
-      // fromT should be an exact type - this will almost certainly fail at
-      // runtime.
-      return new StaticTypeError(rules, expression, toType);
-    }
-
-    if (expression is InstanceCreationExpression) {
-      ConstructorElement e = expression.staticElement;
-      if (e == null || !e.isFactory) {
-        // fromT should be an exact type - this will almost certainly fail at
-        // runtime.
-        return new StaticTypeError(rules, expression, toType);
-      }
-    }
-
-    if (StaticInfo.isKnownFunction(expression)) {
-      return new StaticTypeError(rules, expression, toType);
-    }
-
-    // TODO(vsm): Change this to an assert when we have generic methods and
-    // fix TypeRules._coerceTo to disallow implicit sideways casts.
-    if (!rules.isSubtypeOf(toType, fromType)) {
-      assert(toType.isSubtypeOf(fromType) || fromType.isAssignableTo(toType));
-      return new DownCastComposite(rules, expression, fromType, toType);
-    }
-
-    // Composite cast: these are more likely to fail.
-    if (!rules.isGroundType(toType)) {
-      // This cast is (probably) due to our different treatment of dynamic.
-      // It may be more likely to fail at runtime.
-      if (fromType is InterfaceType) {
-        // For class types, we'd like to allow non-generic down casts, e.g.,
-        // Iterable<T> to List<T>.  The intuition here is that raw (generic)
-        // casts are problematic, and we should complain about those.
-        var typeArgs = fromType.typeArguments;
-        if (typeArgs.isEmpty || typeArgs.any((t) => t.isDynamic)) {
-          return new DownCastComposite(rules, expression, fromType, toType);
-        }
-      } else {
-        return new DownCastComposite(rules, expression, fromType, toType);
-      }
-    }
-
-    // Dynamic cast
-    if (fromType.isDynamic) {
-      return new DynamicCast(rules, expression, fromType, toType);
-    }
-
-    // Assignment cast
-    var parent = expression.parent;
-    if (parent is VariableDeclaration && (parent.initializer == expression)) {
-      return new AssignmentCast(rules, expression, fromType, toType);
-    }
-
-    // Other casts
-    return new DownCastImplicit(rules, expression, fromType, toType);
-  }
-}
-
-/// Implicit down casts.  These are only injected by the compiler by flag.
-///
-/// A down cast to a non-ground type.  These behave differently from standard
-/// Dart and may be more likely to fail at runtime.
-class DownCastComposite extends DownCast {
-  DownCastComposite(TypeSystem rules, Expression expression, DartType fromType,
-      DartType toType)
-      : super._internal(rules, expression, fromType, toType);
-
-  @override
-  String get name => 'STRONG_MODE_DOWN_CAST_COMPOSITE';
-
-  toErrorCode() => new StaticWarningCode(name, message);
-}
-
-/// A down cast to a non-ground type.  These behave differently from standard
-/// Dart and may be more likely to fail at runtime.
-class DownCastImplicit extends DownCast {
-  DownCastImplicit(TypeSystem rules, Expression expression, DartType fromType,
-      DartType toType)
-      : super._internal(rules, expression, fromType, toType);
-
-  @override
-  String get name => 'STRONG_MODE_DOWN_CAST_IMPLICIT';
-
-  toErrorCode() => new HintCode(name, message);
-}
-
-/// A down cast from dynamic to T.
-class DynamicCast extends DownCast {
-  DynamicCast(TypeSystem rules, Expression expression, DartType fromType,
-      DartType toType)
-      : super._internal(rules, expression, fromType, toType);
-
-  @override
-  String get name => 'STRONG_MODE_DYNAMIC_CAST';
-
-  toErrorCode() => new HintCode(name, message);
-}
-
-class DynamicInvoke extends CoercionInfo {
-  static const String _propertyName = 'dev_compiler.src.info.DynamicInvoke';
-
-  DynamicInvoke(TypeSystem rules, Expression expression)
-      : super(rules, expression);
-  DartType get convertedType => DynamicTypeImpl.instance;
-  String get message => '{0} requires dynamic invoke';
-
-  @override
-  String get name => 'STRONG_MODE_DYNAMIC_INVOKE';
-
-  toErrorCode() => new HintCode(name, message);
-
-  /// Whether this [node] is the target of a dynamic operation.
-  static bool get(AstNode node) => node.getProperty(_propertyName) ?? false;
-
-  /// Sets whether this node is the target of a dynamic operation.
-  static bool set(AstNode node, bool value) {
-    // Free the storage for things that aren't dynamic.
-    if (value == false) value = null;
-    node.setProperty(_propertyName, value);
-    return value;
-  }
-}
-
-/// Standard / unspecialized inferred type.
-class InferredType extends InferredTypeBase {
-  InferredType(TypeSystem rules, Expression expression, DartType type)
-      : super._internal(rules, expression, type);
-
-  @override
-  String get name => 'STRONG_MODE_INFERRED_TYPE';
-
-  /// Factory to create correct InferredType variant.
-  static InferredTypeBase create(
-      TypeSystem rules, Expression expression, DartType type) {
-    // Specialized inference:
-    if (expression is Literal) {
-      return new InferredTypeLiteral(rules, expression, type);
-    }
-    if (expression is InstanceCreationExpression) {
-      return new InferredTypeAllocation(rules, expression, type);
-    }
-    if (expression is FunctionExpression) {
-      return new InferredTypeClosure(rules, expression, type);
-    }
-    return new InferredType(rules, expression, type);
-  }
-}
-
-/// An inferred type for a non-literal allocation site.
-class InferredTypeAllocation extends InferredTypeBase {
-  InferredTypeAllocation(TypeSystem rules, Expression expression, DartType type)
-      : super._internal(rules, expression, type);
-
-  @override
-  String get name => 'STRONG_MODE_INFERRED_TYPE_ALLOCATION';
-}
-
-/// An inferred type for the wrapped expression, which may need to be
-/// reified into the term.
-abstract class InferredTypeBase extends CoercionInfo {
-  final DartType _type;
-
-  InferredTypeBase._internal(
-      TypeSystem rules, Expression expression, this._type)
-      : super(rules, expression);
-
-  @override
-  List get arguments => [node, type];
-  DartType get convertedType => type;
-  @override
-  String get message => '{0} has inferred type {1}';
-  DartType get type => _type;
-
-  toErrorCode() => new HintCode(name, message);
-}
-
-/// An inferred type for a closure expression.
-class InferredTypeClosure extends InferredTypeBase {
-  InferredTypeClosure(TypeSystem rules, Expression expression, DartType type)
-      : super._internal(rules, expression, type);
-
-  @override
-  String get name => 'STRONG_MODE_INFERRED_TYPE_CLOSURE';
-}
-
-/// An inferred type for a literal expression.
-class InferredTypeLiteral extends InferredTypeBase {
-  InferredTypeLiteral(TypeSystem rules, Expression expression, DartType type)
-      : super._internal(rules, expression, type);
-
-  @override
-  String get name => 'STRONG_MODE_INFERRED_TYPE_LITERAL';
-}
-
-class InvalidFieldOverride extends InvalidOverride {
-  InvalidFieldOverride(AstNode node, ExecutableElement element,
-      InterfaceType base, DartType subType, DartType baseType)
-      : super(node, element, base, subType, baseType);
-
-  String get message => 'Field declaration {3}.{1} cannot be '
-      'overridden in {0}.';
-
-  @override
-  String get name => 'STRONG_MODE_INVALID_FIELD_OVERRIDE';
-}
-
-/// Invalid override due to incompatible type.  I.e., the overridden signature
-/// is not compatible with the original.
-class InvalidMethodOverride extends InvalidOverride {
-  InvalidMethodOverride(AstNode node, ExecutableElement element,
-      InterfaceType base, FunctionType subType, FunctionType baseType)
-      : super(node, element, base, subType, baseType);
-
-  String get message => _messageHelper('Invalid override');
-
-  @override
-  String get name => 'STRONG_MODE_INVALID_METHOD_OVERRIDE';
-}
-
-/// Invalid override of an instance member of a class.
-abstract class InvalidOverride extends StaticError {
-  /// Member declaration with the invalid override.
-  final ExecutableElement element;
-
-  /// Type (class or interface) that provides the base declaration.
-  final InterfaceType base;
-
-  /// Actual type of the overridden member.
-  final DartType subType;
-
-  /// Actual type of the base member.
-  final DartType baseType;
-
-  /// Whether the error comes from combining a base class and an interface
-  final bool fromBaseClass;
-
-  /// Whether the error comes from a mixin (either overriding a base class or an
-  /// interface declaration).
-  final bool fromMixin;
-
-  InvalidOverride(
-      AstNode node, this.element, this.base, this.subType, this.baseType)
-      : fromBaseClass = node is ExtendsClause,
-        fromMixin = node.parent is WithClause,
-        super(node);
-
-  @override
-  List<Object> get arguments =>
-      [parent.name, element.name, subType, base, baseType];
-
-  ClassElement get parent => element.enclosingElement;
-
-  String _messageHelper(String errorName) {
-    var lcErrorName = errorName.toLowerCase();
-    var intro = fromBaseClass
-        ? 'Base class introduces an $lcErrorName'
-        : (fromMixin ? 'Mixin introduces an $lcErrorName' : errorName);
-    return '$intro. The type of {0}.{1} ({2}) is not a '
-        'subtype of {3}.{1} ({4}).';
-  }
-}
-
-class InvalidParameterDeclaration extends StaticError {
-  final DartType expectedType;
-
-  InvalidParameterDeclaration(
-      TypeSystem rules, FormalParameter declaration, this.expectedType)
-      : super(declaration);
-
-  @override
-  List<Object> get arguments => [node, expectedType];
-  @override
-  String get message => 'Type check failed: {0} is not of type {1}';
-  @override
-  String get name => 'STRONG_MODE_INVALID_PARAMETER_DECLARATION';
-}
-
-/// Dart constructors have one weird quirk, illustrated with this example:
-///
-///     class Base {
-///       var x;
-///       Base() : x = print('Base.1') {
-///         print('Base.2');
-///       }
-///     }
-///
-///     class Derived extends Base {
-///       var y, z;
-///       Derived()
-///           : y = print('Derived.1'),
-///             super(),
-///             z = print('Derived.2') {
-///         print('Derived.3');
-///       }
-///     }
-///
-/// The order will be Derived.1, Base.1, Derived.2, Base.2, Derived.3; this
-/// ordering preserves the invariant that code can't observe uninitialized
-/// state, however it results in super constructor body not being run
-/// immediately after super initializers. Normally this isn't observable, but it
-/// could be if initializers have side effects.
-///
-/// Better to have `super` at the end, as required by the Dart style guide:
-/// <https://goo.gl/EY6hDP>
-///
-/// For now this is the only pattern we support.
-class InvalidSuperInvocation extends StaticError {
-  InvalidSuperInvocation(SuperConstructorInvocation node) : super(node);
-
-  @override
-  String get message => "super call must be last in an initializer "
-      "list (see https://goo.gl/EY6hDP): {0}";
-
-  @override
-  String get name => 'STRONG_MODE_INVALID_SUPER_INVOCATION';
-}
-
-class InvalidVariableDeclaration extends StaticError {
-  final DartType expectedType;
-
-  InvalidVariableDeclaration(
-      TypeSystem rules, AstNode declaration, this.expectedType)
-      : super(declaration);
-
-  @override
-  List<Object> get arguments => [expectedType];
-  @override
-  String get message => 'Type check failed: null is not of type {0}';
-
-  @override
-  String get name => 'STRONG_MODE_INVALID_VARIABLE_DECLARATION';
-}
-
-class NonGroundTypeCheckInfo extends StaticInfo {
-  final DartType type;
-  final AstNode node;
-
-  NonGroundTypeCheckInfo(this.node, this.type) {
-    assert(node is IsExpression || node is AsExpression);
-  }
-
-  @override
-  List<Object> get arguments => [type];
-  String get message =>
-      "Runtime check on non-ground type {0} may throw StrongModeError";
-
-  @override
-  String get name => 'STRONG_MODE_NON_GROUND_TYPE_CHECK_INFO';
-
-  toErrorCode() => new HintCode(name, message);
-}
-
-abstract class StaticError extends StaticInfo {
-  final AstNode node;
-
-  StaticError(this.node);
-
-  String get message;
-
-  toErrorCode() => new CompileTimeErrorCode(name, message);
-}
-
-// TODO(jmesserly): this could use some refactoring. These are essentially
-// like ErrorCodes in analyzer, but we're including some details in our message.
-// Analyzer instead has template strings, and replaces '{0}' with the first
-// argument.
-abstract class StaticInfo {
-  /// Strong-mode error code names.
-  ///
-  /// Used for error code configuration validation in an analysis options file.
-  static const List<String> names = const [
-    //
-    // Manually populated.
-    //
-    'STRONG_MODE_ASSIGNMENT_CAST',
-    'STRONG_MODE_DOWN_CAST_COMPOSITE',
-    'STRONG_MODE_DOWN_CAST_IMPLICIT',
-    'STRONG_MODE_DYNAMIC_CAST',
-    'STRONG_MODE_DYNAMIC_INVOKE',
-    'STRONG_MODE_INFERRED_TYPE',
-    'STRONG_MODE_INFERRED_TYPE_ALLOCATION',
-    'STRONG_MODE_INFERRED_TYPE_CLOSURE',
-    'STRONG_MODE_INFERRED_TYPE_LITERAL',
-    'STRONG_MODE_INVALID_FIELD_OVERRIDE',
-    'STRONG_MODE_INVALID_METHOD_OVERRIDE',
-    'STRONG_MODE_INVALID_PARAMETER_DECLARATION',
-    'STRONG_MODE_INVALID_SUPER_INVOCATION',
-    'STRONG_MODE_INVALID_VARIABLE_DECLARATION',
-    'STRONG_MODE_NON_GROUND_TYPE_CHECK_INFO',
-    'STRONG_MODE_STATIC_TYPE_ERROR',
-    'STRONG_MODE_UNINFERRED_CLOSURE',
-  ];
-
-  List<Object> get arguments => [node];
-
-  String get name;
-
-  /// AST Node this info is attached to.
-  AstNode get node;
-
-  AnalysisError toAnalysisError() {
-    int begin = node is AnnotatedNode
-        ? (node as AnnotatedNode).firstTokenAfterCommentAndMetadata.offset
-        : node.offset;
-    int length = node.end - begin;
-    var source = (node.root as CompilationUnit).element.source;
-    return new AnalysisError(source, begin, length, toErrorCode(), arguments);
-  }
-
-  // TODO(jmesserly): review the usage of error codes. We probably want our own,
-  // as well as some DDC specific [ErrorType]s.
-  ErrorCode toErrorCode();
-
-  static bool isKnownFunction(Expression expression) {
-    Element element = null;
-    if (expression is FunctionExpression) {
-      return true;
-    } else if (expression is PropertyAccess) {
-      element = expression.propertyName.staticElement;
-    } else if (expression is Identifier) {
-      element = expression.staticElement;
-    }
-    // First class functions and static methods, where we know the original
-    // declaration, will have an exact type, so we know a downcast will fail.
-    return element is FunctionElement ||
-        element is MethodElement && element.isStatic;
-  }
-}
-
-class StaticTypeError extends StaticError {
-  final DartType baseType;
-  final DartType expectedType;
-
-  StaticTypeError(TypeSystem rules, Expression expression, this.expectedType)
-      : baseType = expression.staticType ?? DynamicTypeImpl.instance,
-        super(expression);
-
-  @override
-  List<Object> get arguments => [node, baseType, expectedType];
-  @override
-  String get message => 'Type check failed: {0} ({1}) is not of type {2}';
-
-  @override
-  String get name => 'STRONG_MODE_STATIC_TYPE_ERROR';
-}
diff --git a/pkg/analyzer/lib/src/task/strong_mode.dart b/pkg/analyzer/lib/src/task/strong_mode.dart
index 2e3d3c0..420fd61 100644
--- a/pkg/analyzer/lib/src/task/strong_mode.dart
+++ b/pkg/analyzer/lib/src/task/strong_mode.dart
@@ -27,15 +27,6 @@
   if (field.initializer != null) {
     (field.initializer as ExecutableElementImpl).returnType = newType;
   }
-  if (field is PropertyInducingElementImpl) {
-    (field.getter as ExecutableElementImpl).returnType = newType;
-    if (!field.isFinal && !field.isConst) {
-      List<ParameterElement> setterParameters = field.setter.parameters;
-      if (setterParameters.isNotEmpty) {
-        (setterParameters[0] as ParameterElementImpl).type = newType;
-      }
-    }
-  }
 }
 
 /**
diff --git a/pkg/analyzer/lib/task/dart.dart b/pkg/analyzer/lib/task/dart.dart
index 3dbea46..4df1c28 100644
--- a/pkg/analyzer/lib/task/dart.dart
+++ b/pkg/analyzer/lib/task/dart.dart
@@ -167,6 +167,9 @@
   }
 
   @override
+  Source get librarySource => library;
+
+  @override
   Source get source => unit;
 
   @override
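The override above implements the new `librarySource` getter that the `model.dart` change below declares on `AnalysisTarget`: a target tied to a library reports the source of that library's defining unit, everything else returns `null`. A hypothetical target illustrating the contract (the class name is invented for this sketch):

```dart
import 'package:analyzer/src/generated/source.dart';
import 'package:analyzer/task/model.dart';

/// Hypothetical target covering a whole library: both the target's source and
/// its library source are the library's defining compilation unit.
class WholeLibraryTarget implements AnalysisTarget {
  final Source library;

  WholeLibraryTarget(this.library);

  @override
  Source get librarySource => library;

  @override
  Source get source => library;
}
```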
diff --git a/pkg/analyzer/lib/task/model.dart b/pkg/analyzer/lib/task/model.dart
index dd1062f..771d422 100644
--- a/pkg/analyzer/lib/task/model.dart
+++ b/pkg/analyzer/lib/task/model.dart
@@ -61,6 +61,9 @@
   AnalysisContextTarget(this.context);
 
   @override
+  Source get librarySource => null;
+
+  @override
   Source get source => null;
 }
 
@@ -73,6 +76,12 @@
  */
 abstract class AnalysisTarget {
   /**
+   * If this target is associated with a library, return the source of the
+   * library's defining compilation unit; otherwise return `null`.
+   */
+  Source get librarySource;
+
+  /**
    * Return the source associated with this target, or `null` if this target is
    * not associated with a source.
    */
@@ -335,6 +344,8 @@
 //      }
     } on AnalysisException {
       rethrow;
+    } on ModificationTimeMismatchError {
+      rethrow;
     } catch (exception, stackTrace) {
       throw new AnalysisException(
           'Unexpected exception while performing $description',
@@ -423,6 +434,18 @@
 }
 
 /**
+ * Instances of this class are thrown when a task detects that the modification
+ * time of a cache entry is not the same as the actual modification time.  This
+ * means that any analysis results based on the content of the target cannot be
+ * used anymore and must be invalidated.
+ */
+class ModificationTimeMismatchError {
+  final Source source;
+
+  ModificationTimeMismatchError(this.source);
+}
+
+/**
  * A policy object that can compute sizes of results and provide the maximum
  * active and idle sizes that can be kept in the cache.
  *
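`ModificationTimeMismatchError` gives tasks a dedicated way to bail out when the modification time recorded for a cache entry no longer matches the source, and the task framework now rethrows it instead of wrapping it in an `AnalysisException`. A hedged sketch of the intended usage; `cachedStamp` is a stand-in for whatever value the cache actually recorded:

```dart
import 'package:analyzer/src/generated/source.dart';
import 'package:analyzer/task/model.dart';

/// Throws if [source] has changed since [cachedStamp] was recorded, so the
/// surrounding task machinery can invalidate the stale results.
void ensureFresh(Source source, int cachedStamp) {
  if (source.modificationStamp != cachedStamp) {
    throw new ModificationTimeMismatchError(source);
  }
}
```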
diff --git a/pkg/analyzer/pubspec.yaml b/pkg/analyzer/pubspec.yaml
index 82bd902..42066a8 100644
--- a/pkg/analyzer/pubspec.yaml
+++ b/pkg/analyzer/pubspec.yaml
@@ -1,5 +1,5 @@
 name: analyzer
-version: 0.27.4-alpha.7.1
+version: 0.27.4-alpha.15
 author: Dart Team <misc@dartlang.org>
 description: Static analyzer for Dart.
 homepage: https://github.com/dart-lang/sdk/tree/master/pkg/analyzer
@@ -7,10 +7,10 @@
   sdk: '>=1.12.0 <2.0.0'
 dependencies:
   args: '>=0.12.1 <0.14.0'
-  crypto: '>=0.9.2 <2.0.0'
+  crypto: '>=1.1.1 <3.0.0'
   glob: ^1.0.3
   html: ^0.12.0
-  package_config: ^0.1.1
+  package_config: ^0.1.5
   path: '>=0.9.0 <2.0.0'
   plugin: ^0.2.0
   watcher: '>=0.9.6 <0.10.0'
diff --git a/pkg/analyzer/test/dart/ast/ast_test.dart b/pkg/analyzer/test/dart/ast/ast_test.dart
index 35ff9a9..122a93e 100644
--- a/pkg/analyzer/test/dart/ast/ast_test.dart
+++ b/pkg/analyzer/test/dart/ast/ast_test.dart
@@ -592,6 +592,13 @@
     }
   }
 
+  void test_inGetterContext_constructorFieldInitializer() {
+    ConstructorFieldInitializer initializer = AstFactory
+        .constructorFieldInitializer(false, 'f', AstFactory.integer(0));
+    SimpleIdentifier identifier = initializer.fieldName;
+    expect(identifier.inGetterContext(), isFalse);
+  }
+
   void test_inGetterContext_forEachLoop() {
     SimpleIdentifier identifier = AstFactory.identifier3("a");
     Expression iterator = AstFactory.listLiteral();
diff --git a/pkg/analyzer/test/file_system/memory_file_system_test.dart b/pkg/analyzer/test/file_system/memory_file_system_test.dart
index b40be43..a6adddd 100644
--- a/pkg/analyzer/test/file_system/memory_file_system_test.dart
+++ b/pkg/analyzer/test/file_system/memory_file_system_test.dart
@@ -48,6 +48,14 @@
 class FileTest {
   MemoryResourceProvider provider = new MemoryResourceProvider();
 
+  void test_delete() {
+    File file = provider.newFile('/foo/file.txt', 'content');
+    expect(file.exists, isTrue);
+    // delete
+    file.delete();
+    expect(file.exists, isFalse);
+  }
+
   void test_equals_beforeAndAfterCreate() {
     String path = '/file.txt';
     File file1 = provider.getResource(path);
@@ -241,6 +249,23 @@
     expect(folder.contains('/foo/bar'), isFalse);
   }
 
+  void test_delete() {
+    Folder folder = provider.newFolder('/foo');
+    Folder barFolder = provider.newFolder('/foo/bar');
+    File aFile = provider.newFile('/foo/bar/a.txt', '');
+    File bFile = provider.newFile('/foo/b.txt', '');
+    expect(folder.exists, isTrue);
+    expect(barFolder.exists, isTrue);
+    expect(aFile.exists, isTrue);
+    expect(bFile.exists, isTrue);
+    // delete 'folder'
+    folder.delete();
+    expect(folder.exists, isFalse);
+    expect(barFolder.exists, isFalse);
+    expect(aFile.exists, isFalse);
+    expect(bFile.exists, isFalse);
+  }
+
   void test_equal_false() {
     String path2 = '/foo/baz';
     Folder folder2 = provider.newFolder(path2);
@@ -272,6 +297,26 @@
     expect(child.exists, isTrue);
   }
 
+  void test_getChildAssumingFile_doesNotExist() {
+    File child = folder.getChildAssumingFile('name');
+    expect(child, isNotNull);
+    expect(child.exists, isFalse);
+  }
+
+  void test_getChildAssumingFile_file() {
+    provider.newFile('/foo/bar/name', 'content');
+    File child = folder.getChildAssumingFile('name');
+    expect(child, isNotNull);
+    expect(child.exists, isTrue);
+  }
+
+  void test_getChildAssumingFile_folder() {
+    provider.newFolder('/foo/bar/name');
+    File child = folder.getChildAssumingFile('name');
+    expect(child, isNotNull);
+    expect(child.exists, isFalse);
+  }
+
   void test_getChildAssumingFolder_doesNotExist() {
     Folder child = folder.getChildAssumingFolder('foldername');
     expect(child, isNotNull);
diff --git a/pkg/analyzer/test/file_system/physical_resource_provider_test.dart b/pkg/analyzer/test/file_system/physical_resource_provider_test.dart
index 5bf3256..790eff7 100644
--- a/pkg/analyzer/test/file_system/physical_resource_provider_test.dart
+++ b/pkg/analyzer/test/file_system/physical_resource_provider_test.dart
@@ -50,6 +50,14 @@
     expect(source.contents.data, 'contents');
   }
 
+  void test_delete() {
+    new io.File(path).writeAsStringSync('contents');
+    expect(file.exists, isTrue);
+    // delete
+    file.delete();
+    expect(file.exists, isFalse);
+  }
+
   void test_equals_differentPaths() {
     String path2 = join(tempPath, 'file2.txt');
     File file2 = PhysicalResourceProvider.INSTANCE.getResource(path2);
@@ -222,6 +230,16 @@
     expect(folder.contains(path), isFalse);
   }
 
+  void test_delete() {
+    new io.File(join(path, 'myFile')).createSync();
+    var child = folder.getChild('myFile');
+    expect(child, _isFile);
+    expect(child.exists, isTrue);
+    // delete "folder"
+    folder.delete();
+    expect(child.exists, isFalse);
+  }
+
   void test_equals_differentPaths() {
     String path2 = join(tempPath, 'folder2');
     new io.Directory(path2).createSync();
@@ -254,6 +272,26 @@
     expect(child.exists, isTrue);
   }
 
+  void test_getChildAssumingFile_doesNotExist() {
+    File child = folder.getChildAssumingFile('no-such-resource');
+    expect(child, isNotNull);
+    expect(child.exists, isFalse);
+  }
+
+  void test_getChildAssumingFile_file() {
+    new io.File(join(path, 'myFile')).createSync();
+    File child = folder.getChildAssumingFile('myFile');
+    expect(child, isNotNull);
+    expect(child.exists, isTrue);
+  }
+
+  void test_getChildAssumingFile_folder() {
+    new io.Directory(join(path, 'myFolder')).createSync();
+    File child = folder.getChildAssumingFile('myFolder');
+    expect(child, isNotNull);
+    expect(child.exists, isFalse);
+  }
+
   void test_getChildAssumingFolder_doesNotExist() {
     Folder child = folder.getChildAssumingFolder('no-such-resource');
     expect(child, isNotNull);
diff --git a/pkg/analyzer/test/file_system/resource_uri_resolver_test.dart b/pkg/analyzer/test/file_system/resource_uri_resolver_test.dart
index 53247c1..6ca5bbe 100644
--- a/pkg/analyzer/test/file_system/resource_uri_resolver_test.dart
+++ b/pkg/analyzer/test/file_system/resource_uri_resolver_test.dart
@@ -29,6 +29,11 @@
     provider.newFolder('/folder');
   }
 
+  void test_creation() {
+    expect(provider, isNotNull);
+    expect(resolver, isNotNull);
+  }
+
   void test_resolveAbsolute_file() {
     var uri = new Uri(scheme: 'file', path: '/test.dart');
     Source source = resolver.resolveAbsolute(uri);
@@ -43,12 +48,18 @@
     expect(source, isNull);
   }
 
-  void test_resolveAbsolute_notFile() {
+  void test_resolveAbsolute_notFile_httpsUri() {
     var uri = new Uri(scheme: 'https', path: '127.0.0.1/test.dart');
     Source source = resolver.resolveAbsolute(uri);
     expect(source, isNull);
   }
 
+  void test_resolveAbsolute_notFile_dartUri() {
+    var uri = new Uri(scheme: 'dart', path: 'core');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNull);
+  }
+
   void test_restoreAbsolute() {
     var uri = new Uri(scheme: 'file', path: '/test.dart');
     Source source = resolver.resolveAbsolute(uri);
diff --git a/pkg/analyzer/test/generated/all_the_rest_test.dart b/pkg/analyzer/test/generated/all_the_rest_test.dart
index c4a47ca..70e1c73 100644
--- a/pkg/analyzer/test/generated/all_the_rest_test.dart
+++ b/pkg/analyzer/test/generated/all_the_rest_test.dart
@@ -8,6 +8,7 @@
 import 'package:analyzer/dart/ast/token.dart';
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
+import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:analyzer/src/dart/ast/ast.dart';
 import 'package:analyzer/src/dart/ast/utilities.dart' hide ConstantEvaluator;
@@ -53,7 +54,6 @@
   runReflectiveTests(ExitDetectorTest);
   runReflectiveTests(ExitDetectorTest2);
   runReflectiveTests(FileBasedSourceTest);
-  runReflectiveTests(FileUriResolverTest);
   runReflectiveTests(ResolveRelativeUriTest);
   runReflectiveTests(SDKLibrariesReaderTest);
   runReflectiveTests(UriKindTest);
@@ -2565,14 +2565,14 @@
 
 @reflectiveTest
 class ElementLocatorTest extends ResolverTestCase {
-  void fail_locate_ExportDirective() {
+  void test_locate_ExportDirective() {
     AstNode id = _findNodeIn("export", "export 'dart:core';");
     Element element = ElementLocator.locate(id);
     EngineTestCase.assertInstanceOf(
-        (obj) => obj is ImportElement, ImportElement, element);
+        (obj) => obj is ExportElement, ExportElement, element);
   }
 
-  void fail_locate_Identifier_libraryDirective() {
+  void test_locate_Identifier_libraryDirective() {
     AstNode id = _findNodeIn("foo", "library foo.bar;");
     Element element = ElementLocator.locate(id);
     EngineTestCase.assertInstanceOf(
@@ -3267,18 +3267,6 @@
  */
 @reflectiveTest
 class ExitDetectorTest extends ParserTestCase {
-  void fail_doStatement_continue_with_label() {
-    _assertFalse("{ x: do { continue x; } while(true); }");
-  }
-
-  void fail_whileStatement_continue_with_label() {
-    _assertFalse("{ x: while (true) { continue x; } }");
-  }
-
-  void fail_whileStatement_doStatement_scopeRequired() {
-    _assertTrue("{ while (true) { x: do { continue x; } while(true); }");
-  }
-
   void test_asExpression() {
     _assertFalse("a as Object;");
   }
@@ -3479,12 +3467,46 @@
     expect(new ExitDetector(), isNotNull);
   }
 
+  void test_doStatement_return() {
+    _assertTrue("{ do { return null; } while (1 == 2); }");
+  }
+
   void test_doStatement_throwCondition() {
     _assertTrue("{ do {} while (throw ''); }");
   }
 
-  void test_doStatement_return() {
-    _assertTrue("{ do { return null; } while (1 == 2); }");
+  void test_doStatement_break_and_throw() {
+    _assertFalse("{ do { if (1==1) break; throw 'T'; } while (0==1); }");
+  }
+
+  void test_doStatement_continue_and_throw() {
+    _assertFalse("{ do { if (1==1) continue; throw 'T'; } while (0==1); }");
+  }
+
+  void test_doStatement_continueInSwitch_and_throw() {
+    _assertFalse('''
+{
+  do {
+    switch (1) {
+      L: case 0: continue;
+      M: case 1: break;
+    }
+    throw 'T';
+  } while (0 == 1);
+}''');
+  }
+
+  void test_doStatement_continueDoInSwitch_and_throw() {
+    _assertFalse('''
+{
+  D: do {
+    switch (1) {
+      L: case 0: continue D;
+      M: case 1: break;
+    }
+    throw 'T';
+  } while (0 == 1);
+}''');
   }
 
   void test_doStatement_true_break() {
@@ -3495,6 +3517,11 @@
     _assertTrue("{ do { continue; } while (true); }");
   }
 
+  void test_doStatement_true_continueWithLabel() {
+    _assertTrue("{ x: do { continue x; } while (true); }");
+  }
+
+
   void test_doStatement_true_if_return() {
     _assertTrue("{ do { if (true) {return null;} } while (true); }");
   }
@@ -3759,6 +3786,19 @@
     _assertTrue("switch (i) { case 0: case 1: return 0; default: return 1; }");
   }
 
+  // The ExitDetector could conceivably follow switch continue labels and
+  // determine that `case 0` exits, `case 1` continues to an exiting case, and
+  // `default` exits, so the switch exits.
+  @failingTest
+  void test_switch_includesContinue() {
+    _assertTrue('''
+switch (i) {
+  zero: case 0: return 0;
+  case 1: continue zero;
+  default: return 1;
+}''');
+  }
+
   void test_switch_noDefault() {
     _assertFalse("switch (i) { case 0: return 0; }");
   }
@@ -3779,16 +3819,85 @@
     _assertFalse("try {} catch (e, s) {} finally {}");
   }
 
+  void test_tryStatement_noReturn_noFinally() {
+    _assertFalse("try {} catch (e, s) {}");
+  }
+
   void test_tryStatement_return_catch() {
     _assertFalse("try {} catch (e, s) { return 1; } finally {}");
   }
 
+  void test_tryStatement_return_catch_noFinally() {
+    _assertFalse("try {} catch (e, s) { return 1; }");
+  }
+
   void test_tryStatement_return_finally() {
     _assertTrue("try {} catch (e, s) {} finally { return 1; }");
   }
 
-  void test_tryStatement_return_try() {
-    _assertTrue("try { return 1; } catch (e, s) {} finally {}");
+  void test_tryStatement_return_try_noCatch() {
+    _assertTrue("try { return 1; } finally {}");
+  }
+
+  void test_tryStatement_return_try_oneCatchDoesNotExit() {
+    _assertFalse("try { return 1; } catch (e, s) {} finally {}");
+  }
+
+  void test_tryStatement_return_try_oneCatchDoesNotExit_noFinally() {
+    _assertFalse("try { return 1; } catch (e, s) {}");
+  }
+
+  void test_tryStatement_return_try_oneCatchExits() {
+    _assertTrue("try { return 1; } catch (e, s) { return 1; } finally {}");
+  }
+
+  void test_tryStatement_return_try_oneCatchExits_noFinally() {
+    _assertTrue("try { return 1; } catch (e, s) { return 1; }");
+  }
+
+  void test_tryStatement_return_try_twoCatchesDoExit() {
+    _assertTrue('''
+try { return 1; }
+on int catch (e, s) { return 1; }
+on String catch (e, s) { return 1; }
+finally {}''');
+  }
+
+  void test_tryStatement_return_try_twoCatchesDoExit_noFinally() {
+    _assertTrue('''
+try { return 1; }
+on int catch (e, s) { return 1; }
+on String catch (e, s) { return 1; }''');
+  }
+
+  void test_tryStatement_return_try_twoCatchesDoNotExit() {
+    _assertFalse('''
+try { return 1; }
+on int catch (e, s) {}
+on String catch (e, s) {}
+finally {}''');
+  }
+
+  void test_tryStatement_return_try_twoCatchesDoNotExit_noFinally() {
+    _assertFalse('''
+try { return 1; }
+on int catch (e, s) {}
+on String catch (e, s) {}''');
+  }
+
+  void test_tryStatement_return_try_twoCatchesMixed() {
+    _assertFalse('''
+try { return 1; }
+on int catch (e, s) {}
+on String catch (e, s) { return 1; }
+finally {}''');
+  }
+
+  void test_tryStatement_return_try_twoCatchesMixed_noFinally() {
+    _assertFalse('''
+try { return 1; }
+on int catch (e, s) {}
+on String catch (e, s) { return 1; }''');
   }
 
   void test_variableDeclarationStatement_noInitializer() {
@@ -3819,6 +3928,14 @@
     _assertTrue("{ while (true) { continue; } }");
   }
 
+  void test_whileStatement_true_continueWithLabel() {
+    _assertTrue("{ x: while (true) { continue x; } }");
+  }
+
+  void test_whileStatement_true_doStatement_scopeRequired() {
+    _assertTrue("{ while (true) { x: do { continue x; } while (true); } }");
+  }
+
   void test_whileStatement_true_if_return() {
     _assertTrue("{ while (true) { if (true) {return null;} } }");
   }
@@ -3835,6 +3952,10 @@
     _assertTrue("{ while (true) { throw ''; } }");
   }
 
+  void test_whileStatement_true_break_and_throw() {
+    _assertFalse("{ while (true) { if (1==1) break; throw 'T'; } }");
+  }
+
   void _assertFalse(String source) {
     _assertHasReturn(false, source);
   }
@@ -3916,10 +4037,10 @@
   }
 }
 ''');
-    _assertNthStatementExits(source, 0);
+    _assertNthStatementDoesNotExit(source, 0);
   }
 
-  void test_switch_withEnum_true_withDefault() {
+  void test_switch_withEnum_true_withExitingDefault() {
     Source source = addSource(r'''
 enum E { A, B }
 String f(E e) {
@@ -3934,6 +4055,22 @@
     _assertNthStatementExits(source, 0);
   }
 
+  void test_switch_withEnum_true_withNonExitingDefault() {
+    Source source = addSource(r'''
+enum E { A, B }
+String f(E e) {
+  var x;
+  switch (e) {
+    case A:
+      return 'A';
+    default:
+      x = '?';
+  }
+}
+''');
+    _assertNthStatementDoesNotExit(source, 1);
+  }
+
   void test_whileStatement_breakWithLabel() {
     Source source = addSource(r'''
 void f() {
@@ -3948,6 +4085,20 @@
     _assertNthStatementDoesNotExit(source, 0);
   }
 
+  void test_whileStatement_switchWithBreakWithLabel() {
+    Source source = addSource(r'''
+void f() {
+  x: while (true) {
+    switch (true) {
+      case false: break;
+      case true: break x;
+    }
+  }
+}
+''');
+    _assertNthStatementDoesNotExit(source, 0);
+  }
+
-  void test_whileStatement_breakWithLabel_afterExting() {
+  void test_whileStatement_breakWithLabel_afterExiting() {
     Source source = addSource(r'''
 void f() {
@@ -3962,6 +4113,42 @@
     _assertNthStatementExits(source, 0);
   }
 
+  void test_yieldStatement_plain() {
+    Source source = addSource(r'''
+void f() sync* {
+  yield 1;
+}
+''');
+    _assertNthStatementDoesNotExit(source, 0);
+  }
+
+  void test_yieldStatement_star_plain() {
+    Source source = addSource(r'''
+void f() sync* {
+  yield* 1;
+}
+''');
+    _assertNthStatementDoesNotExit(source, 0);
+  }
+
+  void test_yieldStatement_star_throw() {
+    Source source = addSource(r'''
+void f() sync* {
+  yield* throw '';
+}
+''');
+    _assertNthStatementExits(source, 0);
+  }
+
+  void test_yieldStatement_throw() {
+    Source source = addSource(r'''
+void f() sync* {
+  yield throw '';
+}
+''');
+    _assertNthStatementExits(source, 0);
+  }
+
   void _assertHasReturn(bool expectedResult, Source source, int n) {
     LibraryElement element = resolve2(source);
     CompilationUnit unit = resolveCompilationUnit(source, element);
@@ -3973,14 +4160,14 @@
 
   // Assert that the [n]th statement in the last function declaration of
-  // [source] exits.
+  // [source] does not exit.
-  void _assertNthStatementExits(Source source, int n) {
-    _assertHasReturn(true, source, n);
+  void _assertNthStatementDoesNotExit(Source source, int n) {
+    _assertHasReturn(false, source, n);
   }
 
   // Assert that the [n]th statement in the last function declaration of
-  // [source] does not exit.
+  // [source] exits.
-  void _assertNthStatementDoesNotExit(Source source, int n) {
-    _assertHasReturn(false, source, n);
+  void _assertNthStatementExits(Source source, int n) {
+    _assertHasReturn(true, source, n);
   }
 }
 
@@ -4051,7 +4238,8 @@
   }
 
   void test_getEncoding() {
-    SourceFactory factory = new SourceFactory([new FileUriResolver()]);
+    SourceFactory factory = new SourceFactory(
+        [new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)]);
     String fullPath = "/does/not/exist.dart";
     JavaFile file = FileUtilities2.createFile(fullPath);
     FileBasedSource source = new FileBasedSource(file);
@@ -4170,41 +4358,6 @@
 }
 
 @reflectiveTest
-class FileUriResolverTest {
-  void test_creation() {
-    expect(new FileUriResolver(), isNotNull);
-  }
-
-  void test_resolve_file() {
-    UriResolver resolver = new FileUriResolver();
-    Source result = resolver
-        .resolveAbsolute(parseUriWithException("file:/does/not/exist.dart"));
-    expect(result, isNotNull);
-    expect(result.fullName,
-        FileUtilities2.createFile("/does/not/exist.dart").getAbsolutePath());
-  }
-
-  void test_resolve_nonFile() {
-    UriResolver resolver = new FileUriResolver();
-    Source result =
-        resolver.resolveAbsolute(parseUriWithException("dart:core"));
-    expect(result, isNull);
-  }
-
-  void test_restore() {
-    UriResolver resolver = new FileUriResolver();
-    Uri uri = parseUriWithException('file:///foo/bar.dart');
-    Source source = resolver.resolveAbsolute(uri);
-    expect(source, isNotNull);
-    expect(resolver.restoreAbsolute(source), uri);
-    expect(
-        resolver.restoreAbsolute(
-            new NonExistingSource(source.fullName, null, null)),
-        uri);
-  }
-}
-
-@reflectiveTest
 class ResolveRelativeUriTest {
   void test_resolveRelative_dart_dartUri() {
     Uri uri = parseUriWithException('dart:foo');
diff --git a/pkg/analyzer/test/generated/bazel_test.dart b/pkg/analyzer/test/generated/bazel_test.dart
new file mode 100644
index 0000000..bac7e66
--- /dev/null
+++ b/pkg/analyzer/test/generated/bazel_test.dart
@@ -0,0 +1,104 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.test.generated.bazel_test;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/file_system/memory_file_system.dart';
+import 'package:analyzer/src/generated/bazel.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:unittest/unittest.dart';
+
+import '../reflective_tests.dart';
+import '../utils.dart';
+
+main() {
+  initializeTestEnvironment();
+  runReflectiveTests(BazelFileUriResolverTest);
+}
+
+@reflectiveTest
+class BazelFileUriResolverTest {
+  MemoryResourceProvider provider;
+  Folder workspace;
+  List<Folder> buildDirs;
+  ResourceUriResolver resolver;
+
+  void setUp() {
+    provider = new MemoryResourceProvider();
+    workspace = provider.newFolder('/workspace');
+    buildDirs = [provider.newFolder('/workspace/one'), provider.newFolder('/workspace/two')];
+    resolver = new BazelFileUriResolver(provider, workspace, buildDirs);
+    provider.newFile('/workspace/test.dart', '');
+    provider.newFile('/workspace/one/gen1.dart', '');
+    provider.newFile('/workspace/two/gen2.dart', '');
+  }
+
+  void test_creation() {
+    expect(provider, isNotNull);
+    expect(workspace, isNotNull);
+    expect(buildDirs, isNotNull);
+    expect(buildDirs.length, 2);
+    expect(resolver, isNotNull);
+  }
+
+  void test_resolveAbsolute_file() {
+    var uri = new Uri(scheme: 'file', path: '/workspace/test.dart');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNotNull);
+    expect(source.exists(), isTrue);
+    expect(source.fullName, '/workspace/test.dart');
+  }
+
+  void test_resolveAbsolute_folder() {
+    var uri = new Uri(scheme: 'file', path: '/workspace');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNull);
+  }
+
+  void test_resolveAbsolute_notFile_httpsUri() {
+    var uri = new Uri(scheme: 'https', path: '127.0.0.1/test.dart');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNull);
+  }
+
+  void test_resolveAbsolute_notFile_dartUri() {
+    var uri = new Uri(scheme: 'dart', path: 'core');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNull);
+  }
+
+  void test_resolveAbsolute_generated_file_exists_one() {
+    var uri = new Uri(scheme: 'file', path: '/workspace/gen1.dart');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNotNull);
+    expect(source.exists(), isTrue);
+    expect(source.fullName, '/workspace/one/gen1.dart');
+  }
+
+  void test_resolveAbsolute_generated_file_exists_two() {
+    var uri = new Uri(scheme: 'file', path: '/workspace/gen2.dart');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNotNull);
+    expect(source.exists(), isTrue);
+    expect(source.fullName, '/workspace/two/gen2.dart');
+  }
+
+  void test_resolveAbsolute_generated_file_does_not_exist_three() {
+    var uri = new Uri(scheme: 'file', path: '/workspace/gen3.dart');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNull);
+  }
+
+  void test_restoreAbsolute() {
+    var uri = new Uri(scheme: 'file', path: '/workspace/test.dart');
+    Source source = resolver.resolveAbsolute(uri);
+    expect(source, isNotNull);
+    expect(resolver.restoreAbsolute(source), uri);
+    expect(
+        resolver.restoreAbsolute(
+            new NonExistingSource(source.fullName, null, null)),
+        uri);
+  }
+}
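The tests above pin down the lookup order BazelFileUriResolver is expected to follow: a file URI under the workspace resolves to the workspace file when it exists, otherwise each configured build directory is probed in turn for a generated file at the same relative path, and resolution fails if none has it. A minimal sketch of that order, using only the Folder API exercised above (_lookupInWorkspace is an illustrative helper, not the resolver's actual implementation):

  File _lookupInWorkspace(
      Folder workspace, List<Folder> buildDirs, String relativePath) {
    File file = workspace.getChildAssumingFile(relativePath);
    if (file.exists) {
      return file; // e.g. /workspace/test.dart
    }
    for (Folder buildDir in buildDirs) {
      File generated = buildDir.getChildAssumingFile(relativePath);
      if (generated.exists) {
        return generated; // e.g. /workspace/one/gen1.dart
      }
    }
    return null; // resolveAbsolute then returns null (the gen3.dart case)
  }
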
diff --git a/pkg/analyzer/test/generated/element_resolver_test.dart b/pkg/analyzer/test/generated/element_resolver_test.dart
index 5eb564a..da95f04 100644
--- a/pkg/analyzer/test/generated/element_resolver_test.dart
+++ b/pkg/analyzer/test/generated/element_resolver_test.dart
@@ -353,7 +353,7 @@
   void test_visitEnumDeclaration() {
     CompilationUnitElementImpl compilationUnitElement =
         ElementFactory.compilationUnit('foo.dart');
-    ClassElementImpl enumElement =
+    EnumElementImpl enumElement =
         ElementFactory.enumElement(_typeProvider, ('E'));
     compilationUnitElement.enums = <ClassElement>[enumElement];
     EnumDeclaration enumNode = AstFactory.enumDeclaration2('E', []);
diff --git a/pkg/analyzer/test/generated/engine_test.dart b/pkg/analyzer/test/generated/engine_test.dart
index e679466..b3ceaa5 100644
--- a/pkg/analyzer/test/generated/engine_test.dart
+++ b/pkg/analyzer/test/generated/engine_test.dart
@@ -8,6 +8,7 @@
 
 import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/source/embedder.dart';
 import 'package:analyzer/src/cancelable_future.dart';
 import 'package:analyzer/src/context/cache.dart';
@@ -234,6 +235,7 @@
     return null;
   }
 
+  @deprecated
   @override
   EmbedderYamlLocator get embedderYamlLocator {
     fail("Unexpected invocation of get embedderYamlLocator");
@@ -247,6 +249,17 @@
   }
 
   @override
+  ResolverProvider get fileResolverProvider {
+    fail("Unexpected invocation of fileResolverProvider");
+    return null;
+  }
+
+  @override
+  void set fileResolverProvider(ResolverProvider resolverProvider) {
+    fail("Unexpected invocation of fileResolverProvider");
+  }
+
+  @override
   List<Source> get htmlSources {
     fail("Unexpected invocation of getHtmlSources");
     return null;
diff --git a/pkg/analyzer/test/generated/error_suppression_test.dart b/pkg/analyzer/test/generated/error_suppression_test.dart
index 275e94c..edcfaf2 100644
--- a/pkg/analyzer/test/generated/error_suppression_test.dart
+++ b/pkg/analyzer/test/generated/error_suppression_test.dart
@@ -82,6 +82,14 @@
     assertErrors(source, [StaticTypeWarningCode.INVALID_ASSIGNMENT]);
   }
 
+  void test_ignore_upper_case() {
+    Source source = addSource('''
+int x = ''; // ignore: INVALID_ASSIGNMENT
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, []);
+  }
+
   void test_invalid_error_code() {
     Source source = addSource('''
 // ignore: right_format_wrong_code
diff --git a/pkg/analyzer/test/generated/hint_code_test.dart b/pkg/analyzer/test/generated/hint_code_test.dart
index 3975270..6644ccd 100644
--- a/pkg/analyzer/test/generated/hint_code_test.dart
+++ b/pkg/analyzer/test/generated/hint_code_test.dart
@@ -21,33 +21,6 @@
 
 @reflectiveTest
 class HintCodeTest extends ResolverTestCase {
-  void fail_deadCode_statementAfterRehrow() {
-    Source source = addSource(r'''
-f() {
-  try {
-    var one = 1;
-  } catch (e) {
-    rethrow;
-    var two = 2;
-  }
-}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.DEAD_CODE]);
-    verify([source]);
-  }
-
-  void fail_deadCode_statementAfterThrow() {
-    Source source = addSource(r'''
-f() {
-  var one = 1;
-  throw 'Stop here';
-  var two = 2;
-}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.DEAD_CODE]);
-    verify([source]);
-  }
-
   void fail_isInt() {
     Source source = addSource("var v = 1 is int;");
     computeLibrarySourceErrors(source);
@@ -404,6 +377,49 @@
     verify([source]);
   }
 
+  void test_deadCode_deadFinalReturnInCase() {
+    Source source = addSource(r'''
+f() {
+  switch (true) {
+  case true:
+    try {
+      int a = 1;
+    } finally {
+      return;
+    }
+    return;
+  default:
+    break;
+  }
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.DEAD_CODE]);
+    verify([source]);
+  }
+
+  void test_deadCode_deadFinalStatementInCase() {
+    Source source = addSource(r'''
+f() {
+  switch (true) {
+  case true:
+    try {
+      int a = 1;
+    } finally {
+      return;
+    }
+    int b = 1;
+  default:
+    break;
+  }
+}''');
+    computeLibrarySourceErrors(source);
+    // A single dead statement at the end of a switch case that is not a
+    // terminating statement will yield two errors.
+    assertErrors(source,
+        [HintCode.DEAD_CODE, StaticWarningCode.CASE_BLOCK_NOT_TERMINATED]);
+    verify([source]);
+  }
+
   void test_deadCode_deadOperandLHS_and() {
     Source source = addSource(r'''
 f() {
@@ -553,6 +569,36 @@
     verify([source]);
   }
 
+  void test_deadCode_statementAfterExitingIf_returns() {
+    Source source = addSource(r'''
+f() {
+  if (1 > 2) {
+    return;
+  } else {
+    return;
+  }
+  var one = 1;
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.DEAD_CODE]);
+    verify([source]);
+  }
+
+  void test_deadCode_statementAfterRethrow() {
+    Source source = addSource(r'''
+f() {
+  try {
+    var one = 1;
+  } catch (e) {
+    rethrow;
+    var two = 2;
+  }
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.DEAD_CODE]);
+    verify([source]);
+  }
+
   void test_deadCode_statementAfterReturn_function() {
     Source source = addSource(r'''
 f() {
@@ -619,6 +665,18 @@
     verify([source]);
   }
 
+  void test_deadCode_statementAfterThrow() {
+    Source source = addSource(r'''
+f() {
+  var one = 1;
+  throw 'Stop here';
+  var two = 2;
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.DEAD_CODE]);
+    verify([source]);
+  }
+
   void test_deprecatedAnnotationUse_assignment() {
     Source source = addSource(r'''
 class A {
@@ -634,12 +692,15 @@
     verify([source]);
   }
 
-  void test_deprecatedAnnotationUse_Deprecated() {
+  void test_deprecatedAnnotationUse_call() {
     Source source = addSource(r'''
 class A {
-  @Deprecated('0.9')
-  m() {}
-  n() {m();}
+  @deprecated
+  call() {}
+  m() {
+    A a = new A();
+    a();
+  }
 }''');
     computeLibrarySourceErrors(source);
     assertErrors(source, [HintCode.DEPRECATED_MEMBER_USE]);
@@ -658,22 +719,12 @@
     verify([source]);
   }
 
-  void test_deprecatedAnnotationUse_positional() {
+  void test_deprecatedAnnotationUse_Deprecated() {
     Source source = addSource(r'''
 class A {
-  m([@deprecated int x]) {}
-  n() {m(1);}
-}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.DEPRECATED_MEMBER_USE]);
-    verify([source]);
-  }
-
-  void test_deprecatedAnnotationUse_named() {
-    Source source = addSource(r'''
-class A {
-  m({@deprecated int x}) {}
-  n() {m(x: 1);}
+  @Deprecated('0.9')
+  m() {}
+  n() {m();}
 }''');
     computeLibrarySourceErrors(source);
     assertErrors(source, [HintCode.DEPRECATED_MEMBER_USE]);
@@ -778,6 +829,17 @@
     verify([source]);
   }
 
+  void test_deprecatedAnnotationUse_named() {
+    Source source = addSource(r'''
+class A {
+  m({@deprecated int x}) {}
+  n() {m(x: 1);}
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.DEPRECATED_MEMBER_USE]);
+    verify([source]);
+  }
+
   void test_deprecatedAnnotationUse_operator() {
     Source source = addSource(r'''
 class A {
@@ -793,6 +855,17 @@
     verify([source]);
   }
 
+  void test_deprecatedAnnotationUse_positional() {
+    Source source = addSource(r'''
+class A {
+  m([@deprecated int x]) {}
+  n() {m(1);}
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.DEPRECATED_MEMBER_USE]);
+    verify([source]);
+  }
+
   void test_deprecatedAnnotationUse_setter() {
     Source source = addSource(r'''
 class A {
@@ -835,21 +908,6 @@
     verify([source]);
   }
 
-  void test_deprecatedAnnotationUse_call() {
-    Source source = addSource(r'''
-class A {
-  @deprecated
-  call() {}
-  m() {
-    A a = new A();
-    a();
-  }
-}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.DEPRECATED_MEMBER_USE]);
-    verify([source]);
-  }
-
   void test_divisionOptimization_double() {
     Source source = addSource(r'''
 f(double x, double y) {
@@ -944,6 +1002,186 @@
     verify([source]);
   }
 
+  void test_factory_expr_return_null_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  @factory
+  State createState() => null;
+}
+
+class State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_abstract_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+abstract class Stateful {
+  @factory
+  State createState();
+}
+
+class State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_bad_return() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  State _s = new State();
+
+  @factory
+  State createState() => _s;
+}
+
+class State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.INVALID_FACTORY_METHOD_IMPL]);
+    verify([source]);
+  }
+
+  void test_factory_block_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  @factory
+  State createState() {
+    return new State();
+  }
+}
+
+class State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_block_return_null_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  @factory
+  State createState() {
+    return null;
+  }
+}
+
+class State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_expr_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  @factory
+  State createState() => new State();
+}
+
+class State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_misplaced_annotation() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+@factory
+class X {
+  @factory
+  int x;
+}
+
+@factory
+main() { }
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [
+      HintCode.INVALID_FACTORY_ANNOTATION,
+      HintCode.INVALID_FACTORY_ANNOTATION,
+      HintCode.INVALID_FACTORY_ANNOTATION
+    ]);
+    verify([source]);
+  }
+
+  void test_factory_no_return_type_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  @factory
+  createState() {
+    return new Stateful();
+  }
+}
+''');
+    computeLibrarySourceErrors(source);
+    // Null return types will get flagged elsewhere; no need to pile on here.
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_subclass_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+abstract class Stateful {
+  @factory
+  State createState();
+}
+
+class MyThing extends Stateful {
+  @override
+  State createState() {
+    print('my state');
+    return new MyState();
+  }
+}
+
+class State { }
+class MyState extends State { }
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_factory_void_return() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class Stateful {
+  @factory
+  void createState() {}
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.INVALID_FACTORY_METHOD_DECL]);
+    verify([source]);
+  }
+
   void test_importDeferredLibraryWithLoadFunction() {
     resolveWithErrors(<String>[
       r'''
@@ -1029,7 +1267,59 @@
     verify([source]);
   }
 
+  void test_invalidUseOfProtectedMember_closure() {
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
+import 'package:meta/meta.dart';
+
+class A {
+  @protected
+  int a() => 42;
+}
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
+void main() {
+  var leak = new A().a;
+  print(leak);
+}
+''');
+    computeLibrarySourceErrors(source2);
+    assertErrors(source2, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source, source2]);
+  }
+
   void test_invalidUseOfProtectedMember_field() {
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
+import 'package:meta/meta.dart';
+class A {
+  @protected
+  int a;
+}
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
+abstract class B {
+  int b() => new A().a;
+}
+''');
+    computeLibrarySourceErrors(source2);
+    assertErrors(source2, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source, source2]);
+  }
+
+  void test_invalidUseOfProtectedMember_field_OK() {
     Source source = addSource(r'''
 import 'package:meta/meta.dart';
 class A {
@@ -1040,11 +1330,36 @@
   int b() => a;
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
     verify([source]);
   }
 
   void test_invalidUseOfProtectedMember_function() {
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
+import 'package:meta/meta.dart';
+class A {
+  @protected
+  void a(){ }
+}
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
+main() {
+  new A().a();
+}
+''');
+    computeLibrarySourceErrors(source2);
+    assertErrors(source2, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source, source2]);
+  }
+
+  void test_invalidUseOfProtectedMember_function_OK2() {
     Source source = addSource(r'''
 import 'package:meta/meta.dart';
 class A {
@@ -1055,11 +1370,53 @@
   new A().a();
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_invalidUseOfProtectedMember_function_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+class A {
+  @protected
+  int a() => 0;
+}
+
+abstract class B implements A {
+  int b() => a();
+}''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
     verify([source]);
   }
 
   void test_invalidUseOfProtectedMember_getter() {
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
+import 'package:meta/meta.dart';
+class A {
+  @protected
+  int get a => 42;
+}
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
+class B {
+  A a;
+  int b() => a.a;
+}
+''');
+    computeLibrarySourceErrors(source2);
+    assertErrors(source2, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source, source2]);
+  }
+
+  void test_invalidUseOfProtectedMember_getter_OK() {
     Source source = addSource(r'''
 import 'package:meta/meta.dart';
 class A {
@@ -1070,53 +1427,103 @@
   int b() => a;
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_invalidUseOfProtectedMember_in_docs_OK() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class A {
+  @protected
+  int a() => c;
+  @protected
+  int get b => a();
+  @protected
+  int c = 42;
+}
+
+/// OK: [A.a], [A.b], [A.c].
+f() {}
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
     verify([source]);
   }
 
   void test_invalidUseOfProtectedMember_message() {
-    Source source = addSource(r'''
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
 import 'package:meta/meta.dart';
 class A {
   @protected
   void a(){ }
 }
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
 class B {
   void b() => new A().a();
-}''');
-    List<AnalysisError> errors = analysisContext2.computeErrors(source);
+}
+''');
+    List<AnalysisError> errors = analysisContext2.computeErrors(source2);
     expect(errors, hasLength(1));
     expect(errors[0].message,
         "The member 'a' can only be used within instance members of subclasses of 'A'");
+    verify([source, source2]);
   }
 
   void test_invalidUseOfProtectedMember_method_1() {
-    Source source = addSource(r'''
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
 import 'package:meta/meta.dart';
 class A {
   @protected
   void a(){ }
 }
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
 class B {
   void b() => new A().a();
-}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
-    verify([source]);
+}
+''');
+
+    computeLibrarySourceErrors(source2);
+    assertErrors(source2, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source, source2]);
   }
 
-  void test_invalidUseOfProtectedMember_method_2() {
+  void test_invalidUseOfProtectedMember_method_OK() {
+    // https://github.com/dart-lang/linter/issues/257
     Source source = addSource(r'''
 import 'package:meta/meta.dart';
-class A {
+
+typedef void VoidCallback();
+
+class State<E> {
   @protected
-  void a(){ }
+  void setState(VoidCallback fn) {}
 }
-abstract class B implements A {
-  void b() => a();
-}''');
+
+class Button extends State<Object> {
+  void handleSomething() {
+    setState(() {});
+  }
+}
+''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1131,7 +1538,7 @@
   void b() => a();
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1146,7 +1553,7 @@
   void b() => a();
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1160,7 +1567,7 @@
   static m2(A a) => a.m1();
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1178,7 +1585,7 @@
   new B().a();
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1194,7 +1601,7 @@
 }
 ''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1210,7 +1617,7 @@
 }
 ''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1228,11 +1635,56 @@
 }
 ''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, []);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_invalidUseOfProtectedMember_OK_setter_2() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+class A {
+  int _a;
+  @protected
+  void set a(int a) { _a = a; }
+  A(int a) {
+    this.a = a;
+  }
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
     verify([source]);
   }
 
   void test_invalidUseOfProtectedMember_setter() {
+    Source source = addNamedSource(
+        '/lib1.dart',
+        r'''
+import 'package:meta/meta.dart';
+class A {
+  @protected
+  void set a(int i) { }
+}
+''');
+    Source source2 = addNamedSource(
+        '/lib2.dart',
+        r'''
+import 'lib1.dart';
+
+class B {
+  A a;
+  b(int i) {
+    a.a = i;
+  }
+}
+''');
+    computeLibrarySourceErrors(source2);
+    assertErrors(source2, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
+    verify([source, source2]);
+  }
+
+  void test_invalidUseOfProtectedMember_setter_OK() {
     Source source = addSource(r'''
 import 'package:meta/meta.dart';
 class A {
@@ -1245,7 +1697,7 @@
   }
 }''');
     computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.INVALID_USE_OF_PROTECTED_MEMBER]);
+    assertNoErrors(source);
     verify([source]);
   }
 
@@ -1353,13 +1805,6 @@
     verify([source]);
   }
 
-  void test_missingReturn_function() {
-    Source source = addSource("int f() {}");
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.MISSING_RETURN]);
-    verify([source]);
-  }
-
   void test_missingReturn_factory() {
     Source source = addSource(r'''
 class A {
@@ -1371,6 +1816,13 @@
     verify([source]);
   }
 
+  void test_missingReturn_function() {
+    Source source = addSource("int f() {}");
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.MISSING_RETURN]);
+    verify([source]);
+  }
+
   void test_missingReturn_method() {
     Source source = addSource(r'''
 class A {
@@ -1708,6 +2160,37 @@
     verify([source]);
   }
 
+  void test_required_constructor_param_redirecting_cons_call() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class C {
+  C({@required int x});
+  C.named() : this();
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.MISSING_REQUIRED_PARAM]);
+    verify([source]);
+  }
+
+  void test_required_constructor_param_super_call() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+class C {
+  C({@Required('must specify an `a`') int a}) {}
+}
+
+class D extends C {
+  D() : super();
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.MISSING_REQUIRED_PARAM_WITH_DETAILS]);
+    verify([source]);
+  }
+
   void test_required_function_param() {
     Source source = addSource(r'''
 import 'package:meta/meta.dart';
@@ -1761,6 +2244,23 @@
     verify([source]);
   }
 
+  void test_required_typedef_function_param() {
+    Source source = addSource(r'''
+import 'package:meta/meta.dart';
+
+String test(C c) => c.m()();
+
+typedef String F({@required String x});
+
+class C {
+  F m() => ({@required String x}) => null;
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.MISSING_REQUIRED_PARAM]);
+    verify([source]);
+  }
+
   void test_typeCheck_type_is_Null() {
     Source source = addSource(r'''
 m(i) {
@@ -1781,6 +2281,46 @@
     verify([source]);
   }
 
+  void test_undefinedGetter() {
+    Source source = addSource(r'''
+class A {}
+f(var a) {
+  if(a is A) {
+    return a.m;
+  }
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNDEFINED_GETTER]);
+  }
+
+  void test_undefinedGetter_message() {
+    // The implementation of HintCode.UNDEFINED_GETTER assumes that
+    // UNDEFINED_GETTER in StaticTypeWarningCode and StaticWarningCode are the
+    // same; this verifies that assumption.
+    expect(StaticWarningCode.UNDEFINED_GETTER.message,
+        StaticTypeWarningCode.UNDEFINED_GETTER.message);
+  }
+
+  void test_undefinedIdentifier_exportHide() {
+    Source source = addSource(r'''
+library L;
+export 'lib1.dart' hide a;''');
+    addNamedSource("/lib1.dart", "library lib1;");
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNDEFINED_HIDDEN_NAME]);
+    verify([source]);
+  }
+
+  void test_undefinedIdentifier_exportShow() {
+    Source source = addSource(r'''
+library L;
+export 'lib1.dart' show a;''');
+    addNamedSource("/lib1.dart", "library lib1;");
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNDEFINED_SHOWN_NAME]);
+    verify([source]);
+  }
+
   void test_undefinedIdentifier_importHide() {
     Source source = addSource(r'''
 library L;
@@ -1803,46 +2343,6 @@
     verify([source]);
   }
 
-  void test_undefinedIdentifier_exportHide() {
-    Source source = addSource(r'''
-library L;
-export 'lib1.dart' hide a;''');
-    addNamedSource("/lib1.dart", "library lib1;");
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.UNDEFINED_HIDDEN_NAME]);
-    verify([source]);
-  }
-
-  void test_undefinedIdentifier_exportShow() {
-    Source source = addSource(r'''
-library L;
-export 'lib1.dart' show a;''');
-    addNamedSource("/lib1.dart", "library lib1;");
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.UNDEFINED_SHOWN_NAME]);
-    verify([source]);
-  }
-
-  void test_undefinedGetter() {
-    Source source = addSource(r'''
-class A {}
-f(var a) {
-  if(a is A) {
-    return a.m;
-  }
-}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.UNDEFINED_GETTER]);
-  }
-
-  void test_undefinedGetter_message() {
-    // The implementation of HintCode.UNDEFINED_SETTER assumes that
-    // UNDEFINED_SETTER in StaticTypeWarningCode and StaticWarningCode are the
-    // same, this verifies that assumption.
-    expect(StaticWarningCode.UNDEFINED_GETTER.message,
-        StaticTypeWarningCode.UNDEFINED_GETTER.message);
-  }
-
   void test_undefinedMethod() {
     Source source = addSource(r'''
 f() {
@@ -2886,6 +3386,30 @@
     verify([source]);
   }
 
+  void test_unusedField_notUsed_constructorFieldInitializers() {
+    enableUnusedElement = true;
+    Source source = addSource(r'''
+class A {
+  int _f;
+  A() : _f = 0;
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNUSED_FIELD]);
+    verify([source]);
+  }
+
+  void test_unusedField_notUsed_fieldFormalParameter() {
+    enableUnusedElement = true;
+    Source source = addSource(r'''
+class A {
+  int _f;
+  A(this._f);
+}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNUSED_FIELD]);
+    verify([source]);
+  }
+
   void test_unusedField_notUsed_noReference() {
     enableUnusedElement = true;
     Source source = addSource(r'''
@@ -3016,84 +3540,6 @@
     verify([source, source2]);
   }
 
-  void test_unusedShownName() {
-    Source source = addSource(r'''
-library L;
-import 'lib1.dart' show A, B;
-A a;''');
-    Source source2 = addNamedSource(
-        "/lib1.dart",
-        r'''
-library lib1;
-class A {}
-class B {}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.UNUSED_SHOWN_NAME]);
-    assertNoErrors(source2);
-    verify([source, source2]);
-  }
-
-  void test_unusedShownName_topLevelVariable() {
-    Source source = addSource(r'''
-library L;
-import 'lib1.dart' show var1, var2;
-import 'lib1.dart' show var3, var4;
-int a = var1;
-int b = var2;
-int c = var3;''');
-    Source source2 = addNamedSource(
-        "/lib1.dart",
-        r'''
-library lib1;
-const int var1 = 1;
-const int var2 = 2;
-const int var3 = 3;
-const int var4 = 4;''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.UNUSED_SHOWN_NAME]);
-    assertNoErrors(source2);
-    verify([source, source2]);
-  }
-
-  void test_unusedShownName_as() {
-    Source source = addSource(r'''
-library L;
-import 'lib1.dart' as p show A, B;
-p.A a;''');
-    Source source2 = addNamedSource(
-        "/lib1.dart",
-        r'''
-library lib1;
-class A {}
-class B {}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(source, [HintCode.UNUSED_SHOWN_NAME]);
-    assertNoErrors(source2);
-    verify([source, source2]);
-  }
-
-  void test_unusedShownName_duplicates() {
-    Source source = addSource(r'''
-library L;
-import 'lib1.dart' show A, B;
-import 'lib1.dart' show C, D;
-A a;
-C c;''');
-    Source source2 = addNamedSource(
-        "/lib1.dart",
-        r'''
-library lib1;
-class A {}
-class B {}
-class C {}
-class D {}''');
-    computeLibrarySourceErrors(source);
-    assertErrors(
-        source, [HintCode.UNUSED_SHOWN_NAME, HintCode.UNUSED_SHOWN_NAME]);
-    assertNoErrors(source2);
-    verify([source, source2]);
-  }
-
   void test_unusedLocalVariable_inCatch_exception() {
     enableUnusedLocalVariable = true;
     Source source = addSource(r'''
@@ -3281,6 +3727,84 @@
     verify([source]);
   }
 
+  void test_unusedShownName() {
+    Source source = addSource(r'''
+library L;
+import 'lib1.dart' show A, B;
+A a;''');
+    Source source2 = addNamedSource(
+        "/lib1.dart",
+        r'''
+library lib1;
+class A {}
+class B {}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNUSED_SHOWN_NAME]);
+    assertNoErrors(source2);
+    verify([source, source2]);
+  }
+
+  void test_unusedShownName_as() {
+    Source source = addSource(r'''
+library L;
+import 'lib1.dart' as p show A, B;
+p.A a;''');
+    Source source2 = addNamedSource(
+        "/lib1.dart",
+        r'''
+library lib1;
+class A {}
+class B {}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNUSED_SHOWN_NAME]);
+    assertNoErrors(source2);
+    verify([source, source2]);
+  }
+
+  void test_unusedShownName_duplicates() {
+    Source source = addSource(r'''
+library L;
+import 'lib1.dart' show A, B;
+import 'lib1.dart' show C, D;
+A a;
+C c;''');
+    Source source2 = addNamedSource(
+        "/lib1.dart",
+        r'''
+library lib1;
+class A {}
+class B {}
+class C {}
+class D {}''');
+    computeLibrarySourceErrors(source);
+    assertErrors(
+        source, [HintCode.UNUSED_SHOWN_NAME, HintCode.UNUSED_SHOWN_NAME]);
+    assertNoErrors(source2);
+    verify([source, source2]);
+  }
+
+  void test_unusedShownName_topLevelVariable() {
+    Source source = addSource(r'''
+library L;
+import 'lib1.dart' show var1, var2;
+import 'lib1.dart' show var3, var4;
+int a = var1;
+int b = var2;
+int c = var3;''');
+    Source source2 = addNamedSource(
+        "/lib1.dart",
+        r'''
+library lib1;
+const int var1 = 1;
+const int var2 = 2;
+const int var3 = 3;
+const int var4 = 4;''');
+    computeLibrarySourceErrors(source);
+    assertErrors(source, [HintCode.UNUSED_SHOWN_NAME]);
+    assertNoErrors(source2);
+    verify([source, source2]);
+  }
+
   void test_useOfVoidResult_assignmentExpression_function() {
     Source source = addSource(r'''
 void f() {}
diff --git a/pkg/analyzer/test/generated/incremental_resolver_test.dart b/pkg/analyzer/test/generated/incremental_resolver_test.dart
index 518f8c8a..f0b0107 100644
--- a/pkg/analyzer/test/generated/incremental_resolver_test.dart
+++ b/pkg/analyzer/test/generated/incremental_resolver_test.dart
@@ -9,7 +9,6 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/src/context/cache.dart';
 import 'package:analyzer/src/dart/ast/utilities.dart';
-import 'package:analyzer/src/dart/element/builder.dart';
 import 'package:analyzer/src/dart/element/element.dart';
 import 'package:analyzer/src/dart/scanner/reader.dart';
 import 'package:analyzer/src/dart/scanner/scanner.dart';
@@ -37,7 +36,6 @@
 
 main() {
   initializeTestEnvironment();
-  runReflectiveTests(DeclarationMatcherTest);
   runReflectiveTests(IncrementalResolverTest);
   runReflectiveTests(PoorMansIncrementalResolutionTest);
   runReflectiveTests(ResolutionContextBuilderTest);
@@ -138,2976 +136,6 @@
 }
 
 @reflectiveTest
-class DeclarationMatcherTest extends ResolverTestCase {
-  void setUp() {
-    super.setUp();
-    test_resolveApiChanges = true;
-  }
-
-  void test_false_class_annotation_accessor_edit() {
-    _assertDoesNotMatch(
-        r'''
-const my_annotationA = const Object();
-const my_annotationB = const Object();
-@my_annotationA
-class A {
-}
-''',
-        r'''
-const my_annotationA = const Object();
-const my_annotationB = const Object();
-@my_annotationB
-class A {
-}
-''');
-  }
-
-  void test_false_class_annotation_constructor_edit() {
-    _assertDoesNotMatch(
-        r'''
-class MyAnnotationA {
-  const MyAnnotationA();
-}
-class MyAnnotationB {
-  const MyAnnotationB();
-}
-@MyAnnotationA()
-class A {
-}
-''',
-        r'''
-class MyAnnotationA {
-  const MyAnnotationA();
-}
-class MyAnnotationB {
-  const MyAnnotationB();
-}
-@MyAnnotationB()
-class A {
-}
-''');
-  }
-
-  void test_false_class_annotations_add() {
-    _assertDoesNotMatch(
-        r'''
-const my_annotation = const Object();
-class A {
-}
-''',
-        r'''
-const my_annotation = const Object();
-@my_annotation
-class A {
-}
-''');
-  }
-
-  void test_false_class_annotations_remove() {
-    _assertDoesNotMatch(
-        r'''
-const my_annotation = const Object();
-@my_annotation
-class A {
-}
-''',
-        r'''
-const my_annotation = const Object();
-class A {
-}
-''');
-  }
-
-  void test_false_class_list_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-''',
-        r'''
-class A {}
-class B {}
-class C {}
-''');
-  }
-
-  void test_false_class_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-class C {}
-''',
-        r'''
-class A {}
-class B {}
-''');
-  }
-
-  void test_false_class_typeParameters_bounds_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B<T> {
-  T f;
-}
-''',
-        r'''
-class A {}
-class B<T extends A> {
-  T f;
-}
-''');
-  }
-
-  void test_false_class_typeParameters_bounds_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B<T extends A> {
-  T f;
-}
-''',
-        r'''
-class A {}
-class B<T> {
-  T f;
-}
-''');
-  }
-
-  void test_false_classMemberAccessor_list_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-}
-''',
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-  get c => 3;
-}
-''');
-  }
-
-  void test_false_classMemberAccessor_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-  get c => 3;
-}
-''',
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-}
-''');
-  }
-
-  void test_false_classMemberAccessor_wasGetter() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  get a => 1;
-}
-''',
-        r'''
-class A {
-  set a(x) {}
-}
-''');
-  }
-
-  void test_false_classMemberAccessor_wasInstance() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  get a => 1;
-}
-''',
-        r'''
-class A {
-  static get a => 1;
-}
-''');
-  }
-
-  void test_false_classMemberAccessor_wasSetter() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  set a(x) {}
-}
-''',
-        r'''
-class A {
-  get a => 1;
-}
-''');
-  }
-
-  void test_false_classMemberAccessor_wasStatic() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  static get a => 1;
-}
-''',
-        r'''
-class A {
-  get a => 1;
-}
-''');
-  }
-
-  void test_false_classTypeAlias_list_add() {
-    _assertDoesNotMatch(
-        r'''
-class M {}
-class A = Object with M;
-''',
-        r'''
-class M {}
-class A = Object with M;
-class B = Object with M;
-''');
-  }
-
-  void test_false_classTypeAlias_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-class M {}
-class A = Object with M;
-class B = Object with M;
-''',
-        r'''
-class M {}
-class A = Object with M;
-''');
-  }
-
-  void test_false_classTypeAlias_typeParameters_bounds_add() {
-    _assertDoesNotMatch(
-        r'''
-class M<T> {}
-class A {}
-class B<T> = Object with M<T>;
-''',
-        r'''
-class M<T> {}
-class A {}
-class B<T extends A> = Object with M<T>;
-''');
-  }
-
-  void test_false_classTypeAlias_typeParameters_bounds_remove() {
-    _assertDoesNotMatch(
-        r'''
-class M<T> {}
-class A {}
-class B<T extends A> = Object with M<T>;
-''',
-        r'''
-class M<T> {}
-class A {}
-class B<T> = Object with M<T>;
-''');
-  }
-
-  void test_false_constructor_keywordConst_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A();
-}
-''',
-        r'''
-class A {
-  const A();
-}
-''');
-  }
-
-  void test_false_constructor_keywordConst_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  const A();
-}
-''',
-        r'''
-class A {
-  A();
-}
-''');
-  }
-
-  void test_false_constructor_keywordFactory_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A();
-  A.foo() {
-    return new A();
-  }
-}
-''',
-        r'''
-class A {
-  A();
-  factory A.foo() {
-    return new A();
-  }
-}
-''');
-  }
-
-  void test_false_constructor_keywordFactory_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A();
-  factory A.foo() {
-    return new A();
-  }
-}
-''',
-        r'''
-class A {
-  A();
-  A.foo() {
-    return new A();
-  }
-}
-''');
-  }
-
-  void test_false_constructor_parameters_list_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A();
-}
-''',
-        r'''
-class A {
-  A(int p);
-}
-''');
-  }
-
-  void test_false_constructor_parameters_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A(int p);
-}
-''',
-        r'''
-class A {
-  A();
-}
-''');
-  }
-
-  void test_false_constructor_parameters_name() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A(int a);
-}
-''',
-        r'''
-class A {
-  A(int b);
-}
-''');
-  }
-
-  void test_false_constructor_parameters_type_edit() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A(int p);
-}
-''',
-        r'''
-class A {
-  A(String p);
-}
-''');
-  }
-
-  void test_false_constructor_unnamed_add_hadParameters() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-}
-''',
-        r'''
-class A {
-  A(int p) {}
-}
-''');
-  }
-
-  void test_false_constructor_unnamed_remove_hadParameters() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  A(int p) {}
-}
-''',
-        r'''
-class A {
-}
-''');
-  }
-
-  void test_false_defaultFieldFormalParameterElement_wasSimple() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  int field;
-  A(int field);
-}
-''',
-        r'''
-class A {
-  int field;
-  A([this.field = 0]);
-}
-''');
-  }
-
-  void test_false_enum_constants_add() {
-    _assertDoesNotMatch(
-        r'''
-enum E {A, B}
-''',
-        r'''
-enum E {A, B, C}
-''');
-  }
-
-  void test_false_enum_constants_remove() {
-    _assertDoesNotMatch(
-        r'''
-enum E {A, B, C}
-''',
-        r'''
-enum E {A, B}
-''');
-  }
-
-  void test_false_export_hide_add() {
-    _assertDoesNotMatch(
-        r'''
-export 'dart:async' hide Future;
-''',
-        r'''
-export 'dart:async' hide Future, Stream;
-''');
-  }
-
-  void test_false_export_hide_remove() {
-    _assertDoesNotMatch(
-        r'''
-export 'dart:async' hide Future, Stream;
-''',
-        r'''
-export 'dart:async' hide Future;
-''');
-  }
-
-  void test_false_export_list_add() {
-    _assertDoesNotMatch(
-        r'''
-export 'dart:async';
-''',
-        r'''
-export 'dart:async';
-export 'dart:math';
-''');
-  }
-
-  void test_false_export_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-export 'dart:async';
-export 'dart:math';
-''',
-        r'''
-export 'dart:async';
-''');
-  }
-
-  void test_false_export_show_add() {
-    _assertDoesNotMatch(
-        r'''
-export 'dart:async' show Future;
-''',
-        r'''
-export 'dart:async' show Future, Stream;
-''');
-  }
-
-  void test_false_export_show_remove() {
-    _assertDoesNotMatch(
-        r'''
-export 'dart:async' show Future, Stream;
-''',
-        r'''
-export 'dart:async' show Future;
-''');
-  }
-
-  void test_false_extendsClause_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-''',
-        r'''
-class A {}
-class B extends A {}
-''');
-  }
-
-  void test_false_extendsClause_different() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-class C extends A {}
-''',
-        r'''
-class A {}
-class B {}
-class C extends B {}
-''');
-  }
-
-  void test_false_extendsClause_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B extends A{}
-''',
-        r'''
-class A {}
-class B {}
-''');
-  }
-
-  void test_false_field_list_add() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  int A = 1;
-  int C = 3;
-}
-''',
-        r'''
-class T {
-  int A = 1;
-  int B = 2;
-  int C = 3;
-}
-''');
-  }
-
-  void test_false_field_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  int A = 1;
-  int B = 2;
-  int C = 3;
-}
-''',
-        r'''
-class T {
-  int A = 1;
-  int C = 3;
-}
-''');
-  }
-
-  void test_false_field_modifier_isConst() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  static final A = 1;
-}
-''',
-        r'''
-class T {
-  static const A = 1;
-}
-''');
-  }
-
-  void test_false_field_modifier_isFinal() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  int A = 1;
-}
-''',
-        r'''
-class T {
-  final int A = 1;
-}
-''');
-  }
-
-  void test_false_field_modifier_isStatic() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  int A = 1;
-}
-''',
-        r'''
-class T {
-  static int A = 1;
-}
-''');
-  }
-
-  void test_false_field_modifier_wasConst() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  static const A = 1;
-}
-''',
-        r'''
-class T {
-  static final A = 1;
-}
-''');
-  }
-
-  void test_false_field_modifier_wasFinal() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  final int A = 1;
-}
-''',
-        r'''
-class T {
-  int A = 1;
-}
-''');
-  }
-
-  void test_false_field_modifier_wasStatic() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  static int A = 1;
-}
-''',
-        r'''
-class T {
-  int A = 1;
-}
-''');
-  }
-
-  void test_false_field_type_differentArgs() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  List<int> A;
-}
-''',
-        r'''
-class T {
-  List<String> A;
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(field);
-}
-''',
-        r'''
-class A {
-  final field;
-  A(this.field);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_add_function() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(field(a));
-}
-''',
-        r'''
-class A {
-  final field;
-  A(this.field(a));
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_changeName_wasUnresolvedField() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final fff;
-  A(this.unresolved);
-}
-''',
-        r'''
-class A {
-  final fff;
-  A(this.fff);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_differentField() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final aaa;
-  final bbb;
-  A(this.aaa, this.bbb);
-}
-''',
-        r'''
-class A {
-  final aaa;
-  final bbb;
-  A(this.bbb, this.aaa);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_parameters_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(this.field(a));
-}
-''',
-        r'''
-class A {
-  final field;
-  A(this.field(a, b));
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_parameters_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(this.field(a, b));
-}
-''',
-        r'''
-class A {
-  final field;
-  A(this.field(a));
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_parameters_typeEdit() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(this.field(int p));
-}
-''',
-        r'''
-class A {
-  final field;
-  A(this.field(String p));
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_remove_default() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A([this.field = 0]);
-}
-''',
-        r'''
-class A {
-  final field;
-  A([field = 0]);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_remove_function() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(this.field(a));
-}
-''',
-        r'''
-class A {
-  final field;
-  A(field(a));
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_remove_normal() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final field;
-  A(this.field);
-}
-''',
-        r'''
-class A {
-  final field;
-  A(field);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_typeAdd() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final fff;
-  A(this.fff);
-}
-''',
-        r'''
-class A {
-  final fff;
-  A(int this.fff);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_typeEdit() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final fff;
-  A(int this.fff);
-}
-''',
-        r'''
-class A {
-  final fff;
-  A(String this.fff);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameter_typeRemove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  final fff;
-  A(int this.fff);
-}
-''',
-        r'''
-class A {
-  final fff;
-  A(this.fff);
-}
-''');
-  }
-
-  void test_false_fieldFormalParameterElement_wasSimple() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  int field;
-  A(int field);
-}
-''',
-        r'''
-class A {
-  int field;
-  A(this.field);
-}
-''');
-  }
-
-  void test_false_final_type_different() {
-    _assertDoesNotMatch(
-        r'''
-class T {
-  int A;
-}
-''',
-        r'''
-class T {
-  String A;
-}
-''');
-  }
-
-  void test_false_function_async_add() {
-    _assertDoesNotMatch(
-        r'''
-main() {}
-''',
-        r'''
-main() async {}
-''');
-  }
-
-  void test_false_function_async_remove() {
-    _assertDoesNotMatch(
-        r'''
-main() async {}
-''',
-        r'''
-main() {}
-''');
-  }
-
-  void test_false_function_generator_add() {
-    _assertDoesNotMatch(
-        r'''
-main() async {}
-''',
-        r'''
-main() async* {}
-''');
-  }
-
-  void test_false_function_generator_remove() {
-    _assertDoesNotMatch(
-        r'''
-main() async* {}
-''',
-        r'''
-main() async {}
-''');
-  }
-
-  void test_false_functionTypeAlias_list_add() {
-    _assertDoesNotMatch(
-        r'''
-typedef A(int pa);
-typedef B(String pb);
-''',
-        r'''
-typedef A(int pa);
-typedef B(String pb);
-typedef C(pc);
-''');
-  }
-
-  void test_false_functionTypeAlias_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-typedef A(int pa);
-typedef B(String pb);
-typedef C(pc);
-''',
-        r'''
-typedef A(int pa);
-typedef B(String pb);
-''');
-  }
-
-  void test_false_functionTypeAlias_parameters_list_add() {
-    _assertDoesNotMatch(
-        r'''
-typedef A(a);
-''',
-        r'''
-typedef A(a, b);
-''');
-  }
-
-  void test_false_functionTypeAlias_parameters_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-typedef A(a, b);
-''',
-        r'''
-typedef A(a);
-''');
-  }
-
-  void test_false_functionTypeAlias_parameters_type_edit() {
-    _assertDoesNotMatch(
-        r'''
-typedef A(int p);
-''',
-        r'''
-typedef A(String p);
-''');
-  }
-
-  void test_false_functionTypeAlias_returnType_edit() {
-    _assertDoesNotMatch(
-        r'''
-typedef int A();
-''',
-        r'''
-typedef String A();
-''');
-  }
-
-  void test_false_functionTypeAlias_typeParameters_bounds_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-typedef F<T>();
-''',
-        r'''
-class A {}
-typedef F<T extends A>();
-''');
-  }
-
-  void test_false_functionTypeAlias_typeParameters_bounds_edit() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-typedef F<T extends A>();
-''',
-        r'''
-class A {}
-typedef F<T extends B>();
-''');
-  }
-
-  void test_false_functionTypeAlias_typeParameters_bounds_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-typedef F<T extends A>();
-''',
-        r'''
-class A {}
-typedef F<T>();
-''');
-  }
-
-  void test_false_functionTypeAlias_typeParameters_list_add() {
-    _assertDoesNotMatch(
-        r'''
-typedef F<A>();
-''',
-        r'''
-typedef F<A, B>();
-''');
-  }
-
-  void test_false_functionTypeAlias_typeParameters_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-typedef F<A, B>();
-''',
-        r'''
-typedef F<A>();
-''');
-  }
-
-  void test_false_FunctionTypedFormalParameter_parameters_list_add() {
-    _assertDoesNotMatch(
-        r'''
-main(int callback(int a)) {
-}
-''',
-        r'''
-main(int callback(int a, String b)) {
-}
-''');
-  }
-
-  void test_false_FunctionTypedFormalParameter_parameters_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-main(int callback(int a, String b)) {
-}
-''',
-        r'''
-main(int callback(int a)) {
-}
-''');
-  }
-
-  void test_false_FunctionTypedFormalParameter_parameterType() {
-    _assertDoesNotMatch(
-        r'''
-main(int callback(int p)) {
-}
-''',
-        r'''
-main(int callback(String p)) {
-}
-''');
-  }
-
-  void test_false_FunctionTypedFormalParameter_returnType() {
-    _assertDoesNotMatch(
-        r'''
-main(int callback()) {
-}
-''',
-        r'''
-main(String callback()) {
-}
-''');
-  }
-
-  void test_false_FunctionTypedFormalParameter_wasSimple() {
-    _assertDoesNotMatch(
-        r'''
-main(int callback) {
-}
-''',
-        r'''
-main(int callback(int a, String b)) {
-}
-''');
-  }
-
-  void test_false_getter_body_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  int get foo;
-}
-''',
-        r'''
-class A {
-  int get foo => 0;
-}
-''');
-  }
-
-  void test_false_getter_body_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  int get foo => 0;
-}
-''',
-        r'''
-class A {
-  int get foo;
-}
-''');
-  }
-
-  void test_false_implementsClause_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-''',
-        r'''
-class A {}
-class B implements A {}
-''');
-  }
-
-  void test_false_implementsClause_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B implements A {}
-''',
-        r'''
-class A {}
-class B {}
-''');
-  }
-
-  void test_false_implementsClause_reorder() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-class C implements A, B {}
-''',
-        r'''
-class A {}
-class B {}
-class C implements B, A {}
-''');
-  }
-
-  void test_false_import_hide_add() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async' hide Future;
-''',
-        r'''
-import 'dart:async' hide Future, Stream;
-''');
-  }
-
-  void test_false_import_hide_remove() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async' hide Future, Stream;
-''',
-        r'''
-import 'dart:async' hide Future;
-''');
-  }
-
-  void test_false_import_list_add() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async';
-''',
-        r'''
-import 'dart:async';
-import 'dart:math';
-''');
-  }
-
-  void test_false_import_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async';
-import 'dart:math';
-''',
-        r'''
-import 'dart:async';
-''');
-  }
-
-  void test_false_import_prefix_add() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async';
-''',
-        r'''
-import 'dart:async' as async;
-''');
-  }
-
-  void test_false_import_prefix_edit() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async' as oldPrefix;
-''',
-        r'''
-import 'dart:async' as newPrefix;
-''');
-  }
-
-  void test_false_import_prefix_remove() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async' as async;
-''',
-        r'''
-import 'dart:async';
-''');
-  }
-
-  void test_false_import_show_add() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async' show Future;
-''',
-        r'''
-import 'dart:async' show Future, Stream;
-''');
-  }
-
-  void test_false_import_show_remove() {
-    _assertDoesNotMatch(
-        r'''
-import 'dart:async' show Future, Stream;
-''',
-        r'''
-import 'dart:async' show Future;
-''');
-  }
-
-  void test_false_method_annotation_edit() {
-    _assertDoesNotMatchOK(
-        r'''
-const my_annotationA = const Object();
-const my_annotationB = const Object();
-class A {
-  @my_annotationA
-  void m() {}
-}
-''',
-        r'''
-const my_annotationA = const Object();
-const my_annotationB = const Object();
-class A {
-  @my_annotationB
-  void m() {}
-}
-''');
-  }
-
-  void test_false_method_annotations_add() {
-    _assertDoesNotMatchOK(
-        r'''
-const my_annotation = const Object();
-class A {
-  void m() {}
-}
-''',
-        r'''
-const my_annotation = const Object();
-class A {
-  @my_annotation
-  void m() {}
-}
-''');
-  }
-
-  void test_false_method_annotations_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-const my_annotation = const Object();
-class A {
-  @my_annotation
-  void m() {}
-}
-''',
-        r'''
-const my_annotation = const Object();
-class A {
-  void m() {}
-}
-''');
-  }
-
-  void test_false_method_async_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  m() {}
-}
-''',
-        r'''
-class A {
-  m() async {}
-}
-''');
-  }
-
-  void test_false_method_async_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  m() async {}
-}
-''',
-        r'''
-class A {
-  m() {}
-}
-''');
-  }
-
-  void test_false_method_body_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  void foo();
-}
-''',
-        r'''
-class A {
-  void foo() {}
-}
-''');
-  }
-
-  void test_false_method_body_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  void foo() {}
-}
-''',
-        r'''
-class A {
-  void foo();
-}
-''');
-  }
-
-  void test_false_method_generator_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  m() async {}
-}
-''',
-        r'''
-class A {
-  m() async* {}
-}
-''');
-  }
-
-  void test_false_method_generator_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  m() async* {}
-}
-''',
-        r'''
-class A {
-  m() async {}
-}
-''');
-  }
-
-  void test_false_method_getKeyword_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  void foo() {}
-}
-''',
-        r'''
-class A {
-  void get foo {}
-}
-''');
-  }
-
-  void test_false_method_getKeyword_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  void get foo {}
-}
-''',
-        r'''
-class A {
-  void foo() {}
-}
-''');
-  }
-
-  void test_false_method_list_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  a() {}
-  b() {}
-}
-''',
-        r'''
-class A {
-  a() {}
-  b() {}
-  c() {}
-}
-''');
-  }
-
-  void test_false_method_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {
-  a() {}
-  b() {}
-  c() {}
-}
-''',
-        r'''
-class A {
-  a() {}
-  b() {}
-}
-''');
-  }
-
-  void test_false_method_parameters_type_edit() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  m(int p) {
-  }
-}
-''',
-        r'''
-class A {
-  m(String p) {
-  }
-}
-''');
-  }
-
-  void test_false_method_parameters_type_edit_insertImportPrefix() {
-    _assertDoesNotMatchOK(
-        r'''
-import 'dart:async' as a;
-
-class C {
-  void foo(Future f) {}
-}
-
-class Future {}
-
-bar(C c, a.Future f) {
-  c.foo(f);
-}
-''',
-        r'''
-import 'dart:async' as a;
-
-class C {
-  void foo(a.Future f) {}
-}
-
-class Future {}
-
-bar(C c, a.Future f) {
-  c.foo(f);
-}
-''');
-  }
-
-  void test_false_method_returnType_edit() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  int m() {}
-}
-''',
-        r'''
-class A {
-  String m() {}
-}
-''');
-  }
-
-  void test_false_method_setKeyword_add() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  void foo(x) {}
-}
-''',
-        r'''
-class A {
-  void set foo(x) {}
-}
-''');
-  }
-
-  void test_false_method_setKeyword_remove() {
-    _assertDoesNotMatchOK(
-        r'''
-class A {
-  void set foo(x) {}
-}
-''',
-        r'''
-class A {
-  void foo(x) {}
-}
-''');
-  }
-
-  void test_false_part_list_add() {
-    addNamedSource('/unitA.dart', 'part of lib; class A {}');
-    addNamedSource('/unitB.dart', 'part of lib; class B {}');
-    _assertDoesNotMatch(
-        r'''
-library lib;
-part 'unitA.dart';
-''',
-        r'''
-library lib;
-part 'unitA.dart';
-part 'unitB.dart';
-''');
-  }
-
-  void test_false_part_list_remove() {
-    addNamedSource('/unitA.dart', 'part of lib; class A {}');
-    addNamedSource('/unitB.dart', 'part of lib; class B {}');
-    _assertDoesNotMatch(
-        r'''
-library lib;
-part 'unitA.dart';
-part 'unitB.dart';
-''',
-        r'''
-library lib;
-part 'unitA.dart';
-''');
-  }
-
-  void test_false_SimpleFormalParameter_named_differentName() {
-    _assertDoesNotMatch(
-        r'''
-main({int oldName}) {
-}
-''',
-        r'''
-main({int newName}) {
-}
-''');
-  }
-
-  void test_false_SimpleFormalParameter_namedDefault_addValue() {
-    _assertDoesNotMatch(
-        r'''
-main({int p}) {
-}
-''',
-        r'''
-main({int p: 2}) {
-}
-''');
-  }
-
-  void test_false_SimpleFormalParameter_namedDefault_differentValue() {
-    _assertDoesNotMatch(
-        r'''
-main({int p: 1}) {
-}
-''',
-        r'''
-main({int p: 2}) {
-}
-''');
-  }
-
-  void test_false_SimpleFormalParameter_namedDefault_removeValue() {
-    _assertDoesNotMatch(
-        r'''
-main({int p: 1}) {
-}
-''',
-        r'''
-main({int p}) {
-}
-''');
-  }
-
-  void test_false_SimpleFormalParameter_optionalDefault_addValue() {
-    _assertDoesNotMatch(
-        r'''
-main([int p]) {
-}
-''',
-        r'''
-main([int p = 2]) {
-}
-''');
-  }
-
-  void test_false_SimpleFormalParameter_optionalDefault_differentValue() {
-    _assertDoesNotMatch(
-        r'''
-main([int p = 1]) {
-}
-''',
-        r'''
-main([int p = 2]) {
-}
-''');
-  }
-
-  void test_false_SimpleFormalParameter_optionalDefault_removeValue() {
-    _assertDoesNotMatch(
-        r'''
-main([int p = 1]) {
-}
-''',
-        r'''
-main([int p]) {
-}
-''');
-  }
-
-  void test_false_topLevelAccessor_list_add() {
-    _assertDoesNotMatch(
-        r'''
-get a => 1;
-get b => 2;
-''',
-        r'''
-get a => 1;
-get b => 2;
-get c => 3;
-''');
-  }
-
-  void test_false_topLevelAccessor_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-get a => 1;
-get b => 2;
-get c => 3;
-''',
-        r'''
-get a => 1;
-get b => 2;
-''');
-  }
-
-  void test_false_topLevelAccessor_wasGetter() {
-    _assertDoesNotMatch(
-        r'''
-get a => 1;
-''',
-        r'''
-set a(x) {}
-''');
-  }
-
-  void test_false_topLevelAccessor_wasSetter() {
-    _assertDoesNotMatch(
-        r'''
-set a(x) {}
-''',
-        r'''
-get a => 1;
-''');
-  }
-
-  void test_false_topLevelFunction_list_add() {
-    _assertDoesNotMatch(
-        r'''
-a() {}
-b() {}
-''',
-        r'''
-a() {}
-b() {}
-c() {}
-''');
-  }
-
-  void test_false_topLevelFunction_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-a() {}
-b() {}
-c() {}
-''',
-        r'''
-a() {}
-b() {}
-''');
-  }
-
-  void test_false_topLevelFunction_parameters_list_add() {
-    _assertDoesNotMatch(
-        r'''
-main(int a, int b) {
-}
-''',
-        r'''
-main(int a, int b, int c) {
-}
-''');
-  }
-
-  void test_false_topLevelFunction_parameters_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-main(int a, int b, int c) {
-}
-''',
-        r'''
-main(int a, int b) {
-}
-''');
-  }
-
-  void test_false_topLevelFunction_parameters_type_edit() {
-    _assertDoesNotMatch(
-        r'''
-main(int a, int b, int c) {
-}
-''',
-        r'''
-main(int a, String b, int c) {
-}
-''');
-  }
-
-  void test_false_topLevelFunction_returnType_edit() {
-    _assertDoesNotMatch(
-        r'''
-int a() {}
-''',
-        r'''
-String a() {}
-''');
-  }
-
-  void test_false_topLevelVariable_list_add() {
-    _assertDoesNotMatch(
-        r'''
-const int A = 1;
-const int C = 3;
-''',
-        r'''
-const int A = 1;
-const int B = 2;
-const int C = 3;
-''');
-  }
-
-  void test_false_topLevelVariable_list_remove() {
-    _assertDoesNotMatch(
-        r'''
-const int A = 1;
-const int B = 2;
-const int C = 3;
-''',
-        r'''
-const int A = 1;
-const int C = 3;
-''');
-  }
-
-  void test_false_topLevelVariable_modifier_isConst() {
-    _assertDoesNotMatch(
-        r'''
-final int A = 1;
-''',
-        r'''
-const int A = 1;
-''');
-  }
-
-  void test_false_topLevelVariable_modifier_isFinal() {
-    _assertDoesNotMatch(
-        r'''
-int A = 1;
-''',
-        r'''
-final int A = 1;
-''');
-  }
-
-  void test_false_topLevelVariable_modifier_wasConst() {
-    _assertDoesNotMatch(
-        r'''
-const int A = 1;
-''',
-        r'''
-final int A = 1;
-''');
-  }
-
-  void test_false_topLevelVariable_modifier_wasFinal() {
-    _assertDoesNotMatch(
-        r'''
-final int A = 1;
-''',
-        r'''
-int A = 1;
-''');
-  }
-
-  void test_false_topLevelVariable_synthetic_wasGetter() {
-    _assertDoesNotMatch(
-        r'''
-int get A => 1;
-''',
-        r'''
-final int A = 1;
-''');
-  }
-
-  void test_false_topLevelVariable_type_different() {
-    _assertDoesNotMatch(
-        r'''
-int A;
-''',
-        r'''
-String A;
-''');
-  }
-
-  void test_false_topLevelVariable_type_differentArgs() {
-    _assertDoesNotMatch(
-        r'''
-List<int> A;
-''',
-        r'''
-List<String> A;
-''');
-  }
-
-  void test_false_type_noTypeArguments_hadTypeArguments() {
-    _assertDoesNotMatch(
-        r'''
-class A<T> {}
-A<int> main() {
-}
-''',
-        r'''
-class A<T> {}
-A main() {
-}
-''');
-  }
-
-  void test_false_withClause_add() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-''',
-        r'''
-class A {}
-class B extends Object with A {}
-''');
-  }
-
-  void test_false_withClause_remove() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B extends Object with A {}
-''',
-        r'''
-class A {}
-class B {}
-''');
-  }
-
-  void test_false_withClause_reorder() {
-    _assertDoesNotMatch(
-        r'''
-class A {}
-class B {}
-class C extends Object with A, B {}
-''',
-        r'''
-class A {}
-class B {}
-class C extends Object with B, A {}
-''');
-  }
-
-  void test_true_class_annotations_same() {
-    _assertMatches(
-        r'''
-const my_annotation = const Object();
-@my_annotation
-class A {
-}
-''',
-        r'''
-const my_annotation = const Object();
-@my_annotation
-class A {
-}
-''');
-  }
-
-  void test_true_class_list_reorder() {
-    _assertMatches(
-        r'''
-class A {}
-class B {}
-class C {}
-''',
-        r'''
-class C {}
-class A {}
-class B {}
-''');
-  }
-
-  void test_true_class_list_same() {
-    _assertMatches(
-        r'''
-class A {}
-class B {}
-class C {}
-''',
-        r'''
-class A {}
-class B {}
-class C {}
-''');
-  }
-
-  void test_true_class_typeParameters_same() {
-    _assertMatches(
-        r'''
-class A<T> {}
-''',
-        r'''
-class A<T> {}
-''');
-  }
-
-  void test_true_classMemberAccessor_getterSetter() {
-    _assertMatches(
-        r'''
-class A {
-  int _test;
-  get test => _test;
-  set test(v) {
-    _test = v;
-  }
-}
-''',
-        r'''
-class A {
-  int _test;
-  get test => _test;
-  set test(v) {
-    _test = v;
-  }
-}
-''');
-  }
-
-  void test_true_classMemberAccessor_list_reorder() {
-    _assertMatches(
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-  get c => 3;
-}
-''',
-        r'''
-class A {
-  get c => 3;
-  get a => 1;
-  get b => 2;
-}
-''');
-  }
-
-  void test_true_classMemberAccessor_list_same() {
-    _assertMatches(
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-  get c => 3;
-}
-''',
-        r'''
-class A {
-  get a => 1;
-  get b => 2;
-  get c => 3;
-}
-''');
-  }
-
-  void test_true_classTypeAlias_list_reorder() {
-    _assertMatches(
-        r'''
-class M {}
-class A = Object with M;
-class B = Object with M;
-class C = Object with M;
-''',
-        r'''
-class M {}
-class C = Object with M;
-class A = Object with M;
-class B = Object with M;
-''');
-  }
-
-  void test_true_classTypeAlias_list_same() {
-    _assertMatches(
-        r'''
-class M {}
-class A = Object with M;
-class B = Object with M;
-class C = Object with M;
-''',
-        r'''
-class M {}
-class A = Object with M;
-class B = Object with M;
-class C = Object with M;
-''');
-  }
-
-  void test_true_classTypeAlias_typeParameters_same() {
-    _assertMatches(
-        r'''
-class M<T> {}
-class A<T> {}
-class B<T> = A<T> with M<T>;
-''',
-        r'''
-class M<T> {}
-class A<T> {}
-class B<T> = A<T> with M<T>;
-''');
-  }
-
-  void test_true_constructor_body_add() {
-    _assertMatches(
-        r'''
-class A {
-  A(int p);
-}
-''',
-        r'''
-class A {
-  A(int p) {}
-}
-''');
-  }
-
-  void test_true_constructor_body_remove() {
-    _assertMatches(
-        r'''
-class A {
-  A(int p) {}
-}
-''',
-        r'''
-class A {
-  A(int p);
-}
-''');
-  }
-
-  void test_true_constructor_named_same() {
-    _assertMatches(
-        r'''
-class A {
-  A.name(int p);
-}
-''',
-        r'''
-class A {
-  A.name(int p);
-}
-''');
-  }
-
-  void test_true_constructor_unnamed_add_noParameters() {
-    _assertMatches(
-        r'''
-class A {
-}
-''',
-        r'''
-class A {
-  A() {}
-}
-''');
-  }
-
-  void test_true_constructor_unnamed_remove_noParameters() {
-    _assertMatches(
-        r'''
-class A {
-  A() {}
-}
-''',
-        r'''
-class A {
-}
-''');
-  }
-
-  void test_true_constructor_unnamed_same() {
-    _assertMatches(
-        r'''
-class A {
-  A(int p);
-}
-''',
-        r'''
-class A {
-  A(int p);
-}
-''');
-  }
-
-  void test_true_defaultFieldFormalParameterElement() {
-    _assertMatches(
-        r'''
-class A {
-  int field;
-  A([this.field = 0]);
-}
-''',
-        r'''
-class A {
-  int field;
-  A([this.field = 0]);
-}
-''');
-  }
-
-  void test_true_enum_constants_reorder() {
-    _assertMatches(
-        r'''
-enum E {A, B, C}
-''',
-        r'''
-enum E {C, A, B}
-''');
-  }
-
-  void test_true_enum_list_reorder() {
-    _assertMatches(
-        r'''
-enum A {A1, A2, A3}
-enum B {B1, B2, B3}
-enum C {C1, C2, C3}
-''',
-        r'''
-enum C {C1, C2, C3}
-enum A {A1, A2, A3}
-enum B {B1, B2, B3}
-''');
-  }
-
-  void test_true_enum_list_same() {
-    _assertMatches(
-        r'''
-enum A {A1, A2, A3}
-enum B {B1, B2, B3}
-enum C {C1, C2, C3}
-''',
-        r'''
-enum A {A1, A2, A3}
-enum B {B1, B2, B3}
-enum C {C1, C2, C3}
-''');
-  }
-
-  void test_true_executable_same_hasLabel() {
-    _assertMatches(
-        r'''
-main() {
-  label: return 42;
-}
-''',
-        r'''
-main() {
-  label: return 42;
-}
-''');
-  }
-
-  void test_true_executable_same_hasLocalVariable() {
-    _assertMatches(
-        r'''
-main() {
-  int a = 42;
-}
-''',
-        r'''
-main() {
-  int a = 42;
-}
-''');
-  }
-
-  void test_true_export_hide_reorder() {
-    _assertMatches(
-        r'''
-export 'dart:async' hide Future, Stream;
-''',
-        r'''
-export 'dart:async' hide Stream, Future;
-''');
-  }
-
-  void test_true_export_list_reorder() {
-    _assertMatches(
-        r'''
-export 'dart:async';
-export 'dart:math';
-''',
-        r'''
-export 'dart:math';
-export 'dart:async';
-''');
-  }
-
-  void test_true_export_list_same() {
-    _assertMatches(
-        r'''
-export 'dart:async';
-export 'dart:math';
-''',
-        r'''
-export 'dart:async';
-export 'dart:math';
-''');
-  }
-
-  void test_true_export_show_reorder() {
-    _assertMatches(
-        r'''
-export 'dart:async' show Future, Stream;
-''',
-        r'''
-export 'dart:async' show Stream, Future;
-''');
-  }
-
-  void test_true_extendsClause_same() {
-    _assertMatches(
-        r'''
-class A {}
-class B extends A {}
-''',
-        r'''
-class A {}
-class B extends A {}
-''');
-  }
-
-  void test_true_field_list_reorder() {
-    _assertMatches(
-        r'''
-class T {
-  int A = 1;
-  int B = 2;
-  int C = 3;
-}
-''',
-        r'''
-class T {
-  int C = 3;
-  int A = 1;
-  int B = 2;
-}
-''');
-  }
-
-  void test_true_field_list_same() {
-    _assertMatches(
-        r'''
-class T {
-  int A = 1;
-  int B = 2;
-  int C = 3;
-}
-''',
-        r'''
-class T {
-  int A = 1;
-  int B = 2;
-  int C = 3;
-}
-''');
-  }
-
-  void test_true_fieldFormalParameter() {
-    _assertMatches(
-        r'''
-class A {
-  int field;
-  A(this.field);
-}
-''',
-        r'''
-class A {
-  int field;
-  A(this.field);
-}
-''');
-  }
-
-  void test_true_fieldFormalParameter_function() {
-    _assertMatches(
-        r'''
-class A {
-  final field;
-  A(this.field(int a, String b));
-}
-''',
-        r'''
-class A {
-  final field;
-  A(this.field(int a, String b));
-}
-''');
-  }
-
-  void test_true_functionTypeAlias_list_reorder() {
-    _assertMatches(
-        r'''
-typedef A(int pa);
-typedef B(String pb);
-typedef C(pc);
-''',
-        r'''
-typedef C(pc);
-typedef A(int pa);
-typedef B(String pb);
-''');
-  }
-
-  void test_true_functionTypeAlias_list_same() {
-    _assertMatches(
-        r'''
-typedef String A(int pa);
-typedef int B(String pb);
-typedef C(pc);
-''',
-        r'''
-typedef String A(int pa);
-typedef int B(String pb);
-typedef C(pc);
-''');
-  }
-
-  void test_true_functionTypeAlias_typeParameters_list_same() {
-    _assertMatches(
-        r'''
-typedef F<A, B, C>();
-''',
-        r'''
-typedef F<A, B, C>();
-''');
-  }
-
-  void test_true_FunctionTypedFormalParameter() {
-    _assertMatches(
-        r'''
-main(int callback(int a, String b)) {
-}
-''',
-        r'''
-main(int callback(int a, String b)) {
-}
-''');
-  }
-
-  void test_true_implementsClause_same() {
-    _assertMatches(
-        r'''
-class A {}
-class B implements A {}
-''',
-        r'''
-class A {}
-class B implements A {}
-''');
-  }
-
-  void test_true_import_hide_reorder() {
-    _assertMatches(
-        r'''
-import 'dart:async' hide Future, Stream;
-''',
-        r'''
-import 'dart:async' hide Stream, Future;
-''');
-  }
-
-  void test_true_import_list_reorder() {
-    _assertMatches(
-        r'''
-import 'dart:async';
-import 'dart:math';
-''',
-        r'''
-import 'dart:math';
-import 'dart:async';
-''');
-  }
-
-  void test_true_import_list_same() {
-    _assertMatches(
-        r'''
-import 'dart:async';
-import 'dart:math';
-''',
-        r'''
-import 'dart:async';
-import 'dart:math';
-''');
-  }
-
-  void test_true_import_prefix() {
-    _assertMatches(
-        r'''
-import 'dart:async' as async;
-''',
-        r'''
-import 'dart:async' as async;
-''');
-  }
-
-  void test_true_import_show_reorder() {
-    _assertMatches(
-        r'''
-import 'dart:async' show Future, Stream;
-''',
-        r'''
-import 'dart:async' show Stream, Future;
-''');
-  }
-
-  void test_true_method_annotation_accessor_same() {
-    _assertMatches(
-        r'''
-const my_annotation = const Object();
-class A {
-  @my_annotation
-  void m() {}
-}
-''',
-        r'''
-const my_annotation = const Object();
-class A {
-  @my_annotation
-  void m() {}
-}
-''');
-  }
-
-  void test_true_method_annotation_constructor_same() {
-    _assertMatches(
-        r'''
-class MyAnnotation {
-  const MyAnnotation();
-}
-class A {
-  @MyAnnotation()
-  void m() {}
-}
-''',
-        r'''
-class MyAnnotation {
-  const MyAnnotation();
-}
-class A {
-  @MyAnnotation()
-  void m() {}
-}
-''');
-  }
-
-  void test_true_method_async() {
-    _assertMatches(
-        r'''
-class A {
-  m() async {}
-}
-''',
-        r'''
-class A {
-  m() async {}
-}
-''');
-  }
-
-  void test_true_method_list_reorder() {
-    _assertMatches(
-        r'''
-class A {
-  a() {}
-  b() {}
-  c() {}
-}
-''',
-        r'''
-class A {
-  c() {}
-  a() {}
-  b() {}
-}
-''');
-  }
-
-  void test_true_method_list_same() {
-    _assertMatches(
-        r'''
-class A {
-  a() {}
-  b() {}
-  c() {}
-}
-''',
-        r'''
-class A {
-  a() {}
-  b() {}
-  c() {}
-}
-''');
-  }
-
-  void test_true_method_operator_minus() {
-    _assertMatches(
-        r'''
-class A {
-  operator -(other) {}
-}
-''',
-        r'''
-class A {
-  operator -(other) {}
-}
-''');
-  }
-
-  void test_true_method_operator_minusUnary() {
-    _assertMatches(
-        r'''
-class A {
-  operator -() {}
-}
-''',
-        r'''
-class A {
-  operator -() {}
-}
-''');
-  }
-
-  void test_true_method_operator_plus() {
-    _assertMatches(
-        r'''
-class A {
-  operator +(other) {}
-}
-''',
-        r'''
-class A {
-  operator +(other) {}
-}
-''');
-  }
-
-  void test_true_method_parameters_type_functionType() {
-    _assertMatches(
-        r'''
-typedef F();
-class A {
-  m(F p) {}
-}
-''',
-        r'''
-typedef F();
-class A {
-  m(F p) {}
-}
-''');
-  }
-
-  void test_true_method_parameters_type_sameImportPrefix() {
-    _assertMatches(
-        r'''
-import 'dart:async' as a;
-
-bar(a.Future f) {
-  print(f);
-}
-''',
-        r'''
-import 'dart:async' as a;
-
-bar(a.Future ff) {
-  print(ff);
-}
-''');
-  }
-
-  void test_true_part_list_reorder() {
-    addNamedSource('/unitA.dart', 'part of lib; class A {}');
-    addNamedSource('/unitB.dart', 'part of lib; class B {}');
-    _assertMatches(
-        r'''
-library lib;
-part 'unitA.dart';
-part 'unitB.dart';
-''',
-        r'''
-library lib;
-part 'unitB.dart';
-part 'unitA.dart';
-''');
-  }
-
-  void test_true_part_list_same() {
-    addNamedSource('/unitA.dart', 'part of lib; class A {}');
-    addNamedSource('/unitB.dart', 'part of lib; class B {}');
-    _assertMatches(
-        r'''
-library lib;
-part 'unitA.dart';
-part 'unitB.dart';
-''',
-        r'''
-library lib;
-part 'unitA.dart';
-part 'unitB.dart';
-''');
-  }
-
-  void test_true_SimpleFormalParameter_optional_differentName() {
-    _assertMatches(
-        r'''
-main([int oldName]) {
-}
-''',
-        r'''
-main([int newName]) {
-}
-''');
-  }
-
-  void test_true_SimpleFormalParameter_optionalDefault_differentName() {
-    _assertMatches(
-        r'''
-main([int oldName = 1]) {
-}
-''',
-        r'''
-main([int newName = 1]) {
-}
-''');
-  }
-
-  void test_true_SimpleFormalParameter_required_differentName() {
-    _assertMatches(
-        r'''
-main(int oldName) {
-}
-''',
-        r'''
-main(int newName) {
-}
-''');
-  }
-
-  void test_true_topLevelAccessor_list_reorder() {
-    _assertMatches(
-        r'''
-set a(x) {}
-set b(x) {}
-set c(x) {}
-''',
-        r'''
-set c(x) {}
-set a(x) {}
-set b(x) {}
-''');
-  }
-
-  void test_true_topLevelAccessor_list_same() {
-    _assertMatches(
-        r'''
-get a => 1;
-get b => 2;
-get c => 3;
-''',
-        r'''
-get a => 1;
-get b => 2;
-get c => 3;
-''');
-  }
-
-  void test_true_topLevelFunction_list_reorder() {
-    _assertMatches(
-        r'''
-a() {}
-b() {}
-c() {}
-''',
-        r'''
-c() {}
-a() {}
-b() {}
-''');
-  }
-
-  void test_true_topLevelFunction_list_same() {
-    _assertMatches(
-        r'''
-a() {}
-b() {}
-c() {}
-''',
-        r'''
-a() {}
-b() {}
-c() {}
-''');
-  }
-
-  void test_true_topLevelVariable_list_reorder() {
-    _assertMatches(
-        r'''
-const int A = 1;
-const int B = 2;
-const int C = 3;
-''',
-        r'''
-const int C = 3;
-const int A = 1;
-const int B = 2;
-''');
-  }
-
-  void test_true_topLevelVariable_list_same() {
-    _assertMatches(
-        r'''
-const int A = 1;
-const int B = 2;
-const int C = 3;
-''',
-        r'''
-const int A = 1;
-const int B = 2;
-const int C = 3;
-''');
-  }
-
-  void test_true_topLevelVariable_type_sameArgs() {
-    _assertMatches(
-        r'''
-Map<int, String> A;
-''',
-        r'''
-Map<int, String> A;
-''');
-  }
-
-  void test_true_type_dynamic() {
-    _assertMatches(
-        r'''
-dynamic a() {}
-''',
-        r'''
-dynamic a() {}
-''');
-  }
-
-  void test_true_type_hasImportPrefix() {
-    _assertMatches(
-        r'''
-import 'dart:async' as async;
-async.Future F;
-''',
-        r'''
-import 'dart:async' as async;
-async.Future F;
-''');
-  }
-
-  void test_true_type_noTypeArguments_implyAllDynamic() {
-    _assertMatches(
-        r'''
-class A<T> {}
-A main() {
-}
-''',
-        r'''
-class A<T> {}
-A main() {
-}
-''');
-  }
-
-  void test_true_type_void() {
-    _assertMatches(
-        r'''
-void a() {}
-''',
-        r'''
-void a() {}
-''');
-  }
-
-  void test_true_withClause_same() {
-    _assertMatches(
-        r'''
-class A {}
-class B extends Object with A {}
-''',
-        r'''
-class A {}
-class B extends Object with A {}
-''');
-  }
-
-  void _assertDoesNotMatch(String oldContent, String newContent) {
-    _assertMatchKind(DeclarationMatchKind.MISMATCH, oldContent, newContent);
-  }
-
-  void _assertDoesNotMatchOK(String oldContent, String newContent) {
-    _assertMatchKind(DeclarationMatchKind.MISMATCH_OK, oldContent, newContent);
-  }
-
-  void _assertMatches(String oldContent, String newContent) {
-    _assertMatchKind(DeclarationMatchKind.MATCH, oldContent, newContent);
-  }
-
-  void _assertMatchKind(
-      DeclarationMatchKind expectMatch, String oldContent, String newContent) {
-    Source source = addSource(oldContent);
-    LibraryElement library = resolve2(source);
-    CompilationUnit oldUnit = resolveCompilationUnit(source, library);
-    // parse
-    CompilationUnit newUnit = IncrementalResolverTest._parseUnit(newContent);
-    // build elements
-    {
-      ElementHolder holder = new ElementHolder();
-      ElementBuilder builder = new ElementBuilder(holder, oldUnit.element);
-      newUnit.accept(builder);
-    }
-    // match
-    DeclarationMatcher matcher = new DeclarationMatcher();
-    DeclarationMatchKind matchKind = matcher.matches(newUnit, oldUnit.element);
-    expect(matchKind, same(expectMatch));
-  }
-}
-
-@reflectiveTest
 class IncrementalResolverTest extends ResolverTestCase {
   Source source;
   String code;
@@ -3126,7 +154,6 @@
 
   void setUp() {
     super.setUp();
-    test_resolveApiChanges = true;
     logging.logger = logging.NULL_LOGGER;
   }
 
@@ -3166,26 +193,6 @@
     _resolve(_editString('+', '*'), _isFunctionBody);
   }
 
-  void test_constructor_fieldInitializer_add() {
-    _resolveUnit(r'''
-class A {
-  int f;
-  A(int a, int b);
-}''');
-    _resolve(_editString(');', ') : f = a + b;'), _isClassMember);
-  }
-
-  void test_constructor_fieldInitializer_edit() {
-    _resolveUnit(r'''
-class A {
-  int f;
-  A(int a, int b) : f = a + b {
-    int a = 42;
-  }
-}''');
-    _resolve(_editString('+', '*'), _isExpression);
-  }
-
   void test_constructor_label_add() {
     _resolveUnit(r'''
 class A {
@@ -3208,18 +215,6 @@
     _resolve(_editString('42;', 'var res = 42;'), _isBlock);
   }
 
-  void test_constructor_superConstructorInvocation() {
-    _resolveUnit(r'''
-class A {
-  A(int p);
-}
-class B extends A {
-  B(int a, int b) : super(a + b);
-}
-''');
-    _resolve(_editString('+', '*'), _isExpression);
-  }
-
   void test_function_localFunction_add() {
     _resolveUnit(r'''
 int main() {
@@ -3238,13 +233,6 @@
     _resolve(_editString('+', '*'), _isFunctionBody);
   }
 
-  void test_functionBody_expression() {
-    _resolveUnit(r'''
-main(int a, int b) => a + b;
-''');
-    _resolve(_editString('+', '*'), _isExpression);
-  }
-
   void test_functionBody_statement() {
     _resolveUnit(r'''
 main(int a, int b) {
@@ -3304,23 +292,6 @@
         _isBlock);
   }
 
-  void test_method_parameter_rename() {
-    _resolveUnit(r'''
-class A {
-  int m(int a, int b, int c) {
-    return a + b + c;
-  }
-}
-''');
-    _resolve(
-        _editString(
-            r'''(int a, int b, int c) {
-    return a + b + c;''',
-            r'''(int a, int second, int c) {
-    return a + second + c;'''),
-        _isDeclaration);
-  }
-
   void test_superInvocation() {
     _resolveUnit(r'''
 class A {
@@ -3386,36 +357,6 @@
     _resolve(_editString('int res = a * b;', ''), _isBlock);
   }
 
-  void test_topLevelFunction_parameter_inFunctionTyped_rename() {
-    _resolveUnit(r'''
-test(f(int a, int b)) {
-}
-''');
-    _resolve(_editString('test(f(int a', 'test(f2(int a2'), _isDeclaration);
-  }
-
-  void test_topLevelFunction_parameter_rename() {
-    _resolveUnit(r'''
-int main(int a, int b) {
-  return a + b;
-}
-''');
-    _resolve(
-        _editString(
-            r'''(int a, int b) {
-  return a + b;''',
-            r'''(int first, int b) {
-  return first + b;'''),
-        _isDeclaration);
-  }
-
-  void test_topLevelVariable_initializer() {
-    _resolveUnit(r'''
-int C = 1 + 2;
-''');
-    _resolve(_editString('+', '*'), _isExpression);
-  }
-
   void test_updateElementOffset() {
     _resolveUnit(r'''
 class A {
@@ -3487,8 +428,11 @@
     LibrarySpecificUnit lsu = new LibrarySpecificUnit(source, source);
     resolver = new IncrementalResolver(cache, cache.get(source), cache.get(lsu),
         unit.element, updateOffset, updateEndOld, updateOldNew);
-    bool success = resolver.resolve(newNode);
-    expect(success, isTrue);
+
+    BlockFunctionBody body = newNode.getAncestor((n) => n is BlockFunctionBody);
+    expect(body, isNotNull);
+
+    resolver.resolve(body);
     _checkCacheEntries(cache);
 
     List<AnalysisError> newErrors = analysisContext.computeErrors(source);
@@ -3536,12 +480,6 @@
 
   static bool _isBlock(AstNode node) => node is Block;
 
-  static bool _isClassMember(AstNode node) => node is ClassMember;
-
-  static bool _isDeclaration(AstNode node) => node is Declaration;
-
-  static bool _isExpression(AstNode node) => node is Expression;
-
   static bool _isFunctionBody(AstNode node) => node is FunctionBody;
 
   static bool _isStatement(AstNode node) => node is Statement;
@@ -3556,10 +494,13 @@
   }
 
   static void _shiftTokens(Token token, int afterOffset, int delta) {
-    while (token.type != TokenType.EOF) {
-      if (token.offset >= afterOffset) {
+    while (true) {
+      if (token.offset > afterOffset) {
         token.applyDelta(delta);
       }
+      if (token.type == TokenType.EOF) {
+        break;
+      }
       token = token.next;
     }
   }
@@ -3579,23 +520,6 @@
   CompilationUnit oldUnit;
   CompilationUnitElement oldUnitElement;
 
-  void fail_updateErrors_removeExisting_duplicateMethodDeclaration() {
-    // TODO(scheglov) We fail to remove the second "foo" declaration.
-    // So, we still have the same duplicate declaration problem.
-    _resolveUnit(r'''
-class A {
-  void foo() {}
-  void foo() {}
-}
-''');
-    _updateAndValidate(r'''
-class A {
-  void foo() {}
-  void foo2() {}
-}
-''');
-  }
-
   @override
   void setUp() {
     super.setUp();
@@ -4174,6 +1098,59 @@
         expectedSuccess: false);
   }
 
+  void test_false_wholeConstructor_addInitializer() {
+    _resolveUnit(r'''
+class A {
+  int field;
+  A();
+}
+''');
+    _updateAndValidate(
+        r'''
+class A {
+  int field;
+  A() : field = 5;
+}
+''',
+        expectedSuccess: false);
+  }
+
+  void test_false_wholeFunction() {
+    _resolveUnit(r'''
+foo() {}
+main(int a) {
+  print(a);
+}
+''');
+    _updateAndValidate(
+        r'''
+foo() {}
+main(int b) {
+  print(b);
+}
+''',
+        expectedSuccess: false);
+  }
+
+  void test_false_wholeMethod() {
+    _resolveUnit(r'''
+class A {
+  main(int a) {
+    print(a);
+  }
+}
+''');
+    _updateAndValidate(
+        r'''
+class A {
+  main(int b) {
+    print(b);
+  }
+}
+''',
+        expectedSuccess: false);
+  }
+
   void test_fieldClassField_propagatedType() {
     _resolveUnit(r'''
 class A {
@@ -4411,66 +1388,6 @@
     _assertEqualErrors(newErrors, oldErrors);
   }
 
-  void test_true_wholeConstructor_addInitializer() {
-    _resolveUnit(r'''
-class A {
-  int field;
-  A();
-}
-''');
-    _updateAndValidate(r'''
-class A {
-  int field;
-  A() : field = 5;
-}
-''');
-  }
-
-  void test_true_wholeFunction() {
-    _resolveUnit(r'''
-foo() {}
-main(int a) {
-  print(a);
-}
-''');
-    _updateAndValidate(r'''
-foo() {}
-main(int b) {
-  print(b);
-}
-''');
-  }
-
-  void test_true_wholeFunction_firstTokenInUnit() {
-    _resolveUnit(r'''
-main(int a) {
-  print(a);
-}
-''');
-    _updateAndValidate(r'''
-main(int b) {
-  print(b);
-}
-''');
-  }
-
-  void test_true_wholeMethod() {
-    _resolveUnit(r'''
-class A {
-  main(int a) {
-    print(a);
-  }
-}
-''');
-    _updateAndValidate(r'''
-class A {
-  main(int b) {
-    print(b);
-  }
-}
-''');
-  }
-
   void test_unusedHint_add_wasUsedOnlyInPart() {
     Source partSource = addNamedSource(
         '/my_unit.dart',
diff --git a/pkg/analyzer/test/generated/non_error_resolver_test.dart b/pkg/analyzer/test/generated/non_error_resolver_test.dart
index 41a5185..1be8c5b 100644
--- a/pkg/analyzer/test/generated/non_error_resolver_test.dart
+++ b/pkg/analyzer/test/generated/non_error_resolver_test.dart
@@ -5125,6 +5125,19 @@
     verify([source]);
   }
 
+  void test_typeArgumentNotMatchingBounds_ofFunctionTypeAlias_hasBound2() {
+    Source source = addSource(r'''
+class MyClass<T> {}
+typedef MyFunction<T, P extends MyClass<T>>();
+class A<T, P extends MyClass<T>> {
+  MyFunction<T, P> f;
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
   void test_typeArgumentNotMatchingBounds_ofFunctionTypeAlias_noBound() {
     Source source = addSource(r'''
 typedef F<T>();
diff --git a/pkg/analyzer/test/generated/non_hint_code_test.dart b/pkg/analyzer/test/generated/non_hint_code_test.dart
index 9fc1dfb..13abb03 100644
--- a/pkg/analyzer/test/generated/non_hint_code_test.dart
+++ b/pkg/analyzer/test/generated/non_hint_code_test.dart
@@ -18,6 +18,25 @@
 
 @reflectiveTest
 class NonHintCodeTest extends ResolverTestCase {
+  void test_deadCode_afterTryCatch() {
+    Source source = addSource('''
+main() {
+  try {
+    return f();
+  } catch (e) {
+    print(e);
+  }
+  print('not dead');
+}
+f() {
+  throw 'foo';
+}
+''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
   void test_deadCode_deadBlock_conditionalElse_debugConst() {
     Source source = addSource(r'''
 const bool DEBUG = true;
@@ -158,6 +177,39 @@
     verify([source]);
   }
 
+  void test_deadCode_statementAfterIfWithoutElse() {
+    Source source = addSource(r'''
+f() {
+  if (1 < 0) {
+    return;
+  }
+  int a = 1;
+}''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
+  void test_deadCode_deadFinalBreakInCase() {
+    Source source = addSource(r'''
+f() {
+  switch (true) {
+  case true:
+    try {
+      int a = 1;
+    } finally {
+      return;
+    }
+    break;
+  default:
+    break;
+  }
+}''');
+    computeLibrarySourceErrors(source);
+    assertNoErrors(source);
+    verify([source]);
+  }
+
   void test_deprecatedMemberUse_inDeprecatedClass() {
     Source source = addSource(r'''
 @deprecated
diff --git a/pkg/analyzer/test/generated/parser_test.dart b/pkg/analyzer/test/generated/parser_test.dart
index 47be73f..58c7c6f 100644
--- a/pkg/analyzer/test/generated/parser_test.dart
+++ b/pkg/analyzer/test/generated/parser_test.dart
@@ -1216,6 +1216,50 @@
         "external typedef F();", [ParserErrorCode.EXTERNAL_TYPEDEF]);
   }
 
+  void test_extraCommaInParameterList() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "(int a, , int b)",
+        [ParserErrorCode.MISSING_IDENTIFIER, ParserErrorCode.EXPECTED_TOKEN]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "(int a, , int b)",
+        [ParserErrorCode.MISSING_IDENTIFIER, ParserErrorCode.EXPECTED_TOKEN]);
+  }
+
+  void test_extraCommaTrailingNamedParameterGroup() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "({int b},)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "({int b},)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+  }
+
+  void test_extraCommaTrailingPositionalParameterGroup() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "([int b],)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "([int b],)", [
+      ParserErrorCode.MISSING_IDENTIFIER,
+      ParserErrorCode.NORMAL_BEFORE_OPTIONAL_PARAMETERS
+    ]);
+  }
+
+  void test_extraTrailingCommaInParameterList() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "(a,,)",
+        [ParserErrorCode.MISSING_IDENTIFIER]);
+    parseTrailingCommas = false;
+    parse4("parseFormalParameterList", "(a,,)",
+        [ParserErrorCode.MISSING_IDENTIFIER, ParserErrorCode.EXPECTED_TOKEN]);
+  }
+
   void test_factoryTopLevelDeclaration_class() {
     ParserTestCase.parseCompilationUnit(
         "factory class C {}", [ParserErrorCode.FACTORY_TOP_LEVEL_DECLARATION]);
@@ -1322,7 +1366,7 @@
         "0++", [ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE]);
   }
 
-  void test_illegalAssignmentToNonAssignable_postfix_plusPlus_parethesized() {
+  void test_illegalAssignmentToNonAssignable_postfix_plusPlus_parenthesized() {
     parseExpression(
         "(x)++", [ParserErrorCode.ILLEGAL_ASSIGNMENT_TO_NON_ASSIGNABLE]);
   }
@@ -1715,6 +1759,12 @@
     expect(expression.isSynthetic, isTrue);
   }
 
+  void test_missingIdentifierForParameterGroup() {
+    parseTrailingCommas = true;
+    parse4("parseFormalParameterList", "(,)",
+        [ParserErrorCode.MISSING_IDENTIFIER]);
+  }
+
   void test_missingKeywordOperator() {
     parse3("parseOperator", <Object>[emptyCommentAndMetadata(), null, null],
         "+(x) {}", [ParserErrorCode.MISSING_KEYWORD_OPERATOR]);
@@ -2777,6 +2827,12 @@
   bool enableGenericMethodComments = false;
 
   /**
+   * A flag indicating whether parsing trailing commas in parameter and argument
+   * lists should be enabled for this test.
+   */
+  bool parseTrailingCommas = false;
+
+  /**
    * Return a CommentAndMetadata object with the given values that can be used for testing.
    *
    * @param comment the comment to be wrapped in the object
@@ -2830,6 +2886,7 @@
     parser.parseGenericMethods = enableGenericMethods;
     parser.parseGenericMethodComments = enableGenericMethodComments;
     parser.parseFunctionBodies = parseFunctionBodies;
+    parser.parseTrailingCommas = parseTrailingCommas;
     Object result =
         invokeParserMethodImpl(parser, methodName, objects, tokenStream);
     //
@@ -4755,6 +4812,13 @@
     expect(arguments, hasLength(2));
   }
 
+  void test_parseArgumentList_trailing_comma() {
+    parseTrailingCommas = true;
+    ArgumentList argumentList = parse4("parseArgumentList", "(x, y, z,)");
+    NodeList<Expression> arguments = argumentList.arguments;
+    expect(arguments, hasLength(3));
+  }
+
   void test_parseAssertStatement() {
     AssertStatement statement = parse4("parseAssertStatement", "assert (x);");
     expect(statement.assertKeyword, isNotNull);
@@ -5937,6 +6001,22 @@
     expect(method.body, isNotNull);
   }
 
+  void test_parseClassMember_method_trailing_commas() {
+    parseTrailingCommas = true;
+    MethodDeclaration method =
+        parse("parseClassMember", <Object>["C"], "void f(int x, int y,) {}");
+    expect(method.documentationComment, isNull);
+    expect(method.externalKeyword, isNull);
+    expect(method.modifierKeyword, isNull);
+    expect(method.propertyKeyword, isNull);
+    expect(method.returnType, isNotNull);
+    expect(method.name, isNotNull);
+    expect(method.operatorKeyword, isNull);
+    expect(method.typeParameters, isNull);
+    expect(method.parameters, isNotNull);
+    expect(method.body, isNotNull);
+  }
+
   void test_parseClassMember_operator_index() {
     MethodDeclaration method =
         parse("parseClassMember", <Object>["C"], "int operator [](int i) {}");
@@ -6121,7 +6201,7 @@
     CommentAndMetadata commentAndMetadata =
         parse4("parseCommentAndMetadata", "/** 1 */ void");
     expect(commentAndMetadata.comment, isNotNull);
-    expect(commentAndMetadata.metadata, hasLength(0));
+    expect(commentAndMetadata.metadata, isNull);
   }
 
   void test_parseCommentAndMetadata_cmc() {
@@ -6177,7 +6257,7 @@
     CommentAndMetadata commentAndMetadata =
         parse4("parseCommentAndMetadata", "void");
     expect(commentAndMetadata.comment, isNull);
-    expect(commentAndMetadata.metadata, hasLength(0));
+    expect(commentAndMetadata.metadata, isNull);
   }
 
   void test_parseCommentAndMetadata_singleLine() {
@@ -6188,7 +6268,7 @@
 /// 2
 void''');
     expect(commentAndMetadata.comment, isNotNull);
-    expect(commentAndMetadata.metadata, hasLength(0));
+    expect(commentAndMetadata.metadata, isNull);
   }
 
   void test_parseCommentReference_new_prefixed() {
@@ -6263,6 +6343,10 @@
     expect(identifier.token, isNotNull);
     expect(identifier.name, "");
     expect(identifier.offset, 5);
+    // Should end with EOF token.
+    Token nextToken = identifier.token.next;
+    expect(nextToken, isNotNull);
+    expect(nextToken.type, TokenType.EOF);
   }
 
   void test_parseCommentReferences_multiLine() {
@@ -6298,35 +6382,51 @@
   }
 
   void test_parseCommentReferences_notClosed_noIdentifier() {
-    List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[
-      new DocumentationCommentToken(
-          TokenType.MULTI_LINE_COMMENT, "/** [ some text", 5)
-    ];
-    List<CommentReference> references =
-        parse("parseCommentReferences", <Object>[tokens], "")
-        as List<CommentReference>;
+    DocumentationCommentToken docToken = new DocumentationCommentToken(
+        TokenType.MULTI_LINE_COMMENT, "/** [ some text", 5);
+    List<CommentReference> references = parse(
+        "parseCommentReferences",
+        <Object>[
+          <DocumentationCommentToken>[docToken]
+        ],
+        "") as List<CommentReference>;
+    expect(docToken.references, hasLength(1));
     expect(references, hasLength(1));
+    Token referenceToken = docToken.references[0];
     CommentReference reference = references[0];
     expect(reference, isNotNull);
+    expect(docToken.references[0], same(reference.beginToken));
     expect(reference.identifier, isNotNull);
     expect(reference.identifier.isSynthetic, isTrue);
     expect(reference.identifier.name, "");
+    // Should end with EOF token.
+    Token nextToken = referenceToken.next;
+    expect(nextToken, isNotNull);
+    expect(nextToken.type, TokenType.EOF);
   }
 
   void test_parseCommentReferences_notClosed_withIdentifier() {
-    List<DocumentationCommentToken> tokens = <DocumentationCommentToken>[
-      new DocumentationCommentToken(
-          TokenType.MULTI_LINE_COMMENT, "/** [namePrefix some text", 5)
-    ];
-    List<CommentReference> references =
-        parse("parseCommentReferences", <Object>[tokens], "")
-        as List<CommentReference>;
+    DocumentationCommentToken docToken = new DocumentationCommentToken(
+        TokenType.MULTI_LINE_COMMENT, "/** [namePrefix some text", 5);
+    List<CommentReference> references = parse(
+        "parseCommentReferences",
+        <Object>[
+          <DocumentationCommentToken>[docToken]
+        ],
+        "") as List<CommentReference>;
+    expect(docToken.references, hasLength(1));
     expect(references, hasLength(1));
+    Token referenceToken = docToken.references[0];
     CommentReference reference = references[0];
     expect(reference, isNotNull);
+    expect(referenceToken, same(reference.beginToken));
     expect(reference.identifier, isNotNull);
     expect(reference.identifier.isSynthetic, isFalse);
     expect(reference.identifier.name, "namePrefix");
+    // Should end with EOF token.
+    Token nextToken = referenceToken.next;
+    expect(nextToken, isNotNull);
+    expect(nextToken.type, TokenType.EOF);
   }
 
   void test_parseCommentReferences_singleLine() {
@@ -6999,7 +7099,7 @@
 
   void test_parseConstructorFieldInitializer_qualified() {
     ConstructorFieldInitializer invocation =
-        parse4("parseConstructorFieldInitializer", "this.a = b");
+        parse2("parseConstructorFieldInitializer", [true], "this.a = b");
     expect(invocation.equals, isNotNull);
     expect(invocation.expression, isNotNull);
     expect(invocation.fieldName, isNotNull);
@@ -7009,7 +7109,7 @@
 
   void test_parseConstructorFieldInitializer_unqualified() {
     ConstructorFieldInitializer invocation =
-        parse4("parseConstructorFieldInitializer", "a = b");
+        parse2("parseConstructorFieldInitializer", [false], "a = b");
     expect(invocation.equals, isNotNull);
     expect(invocation.expression, isNotNull);
     expect(invocation.fieldName, isNotNull);
@@ -7758,6 +7858,17 @@
     expect(parameterList.rightParenthesis, isNotNull);
   }
 
+  void test_parseFormalParameterList_named_trailing_comma() {
+    parseTrailingCommas = true;
+    FormalParameterList parameterList =
+        parse4("parseFormalParameterList", "(A a, {B b,})");
+    expect(parameterList.leftParenthesis, isNotNull);
+    expect(parameterList.leftDelimiter, isNotNull);
+    expect(parameterList.parameters, hasLength(2));
+    expect(parameterList.rightDelimiter, isNotNull);
+    expect(parameterList.rightParenthesis, isNotNull);
+  }
+
   void test_parseFormalParameterList_normal_multiple() {
     FormalParameterList parameterList =
         parse4("parseFormalParameterList", "(A a, B b, C c)");
@@ -7798,6 +7909,17 @@
     expect(parameterList.rightParenthesis, isNotNull);
   }
 
+  void test_parseFormalParameterList_normal_single_trailing_comma() {
+    parseTrailingCommas = true;
+    FormalParameterList parameterList =
+        parse4("parseFormalParameterList", "(A a,)");
+    expect(parameterList.leftParenthesis, isNotNull);
+    expect(parameterList.leftDelimiter, isNull);
+    expect(parameterList.parameters, hasLength(1));
+    expect(parameterList.rightDelimiter, isNull);
+    expect(parameterList.rightParenthesis, isNotNull);
+  }
+
   void test_parseFormalParameterList_positional_multiple() {
     FormalParameterList parameterList =
         parse4("parseFormalParameterList", "([A a = null, B b, C c = null])");
@@ -7818,6 +7940,17 @@
     expect(parameterList.rightParenthesis, isNotNull);
   }
 
+  void test_parseFormalParameterList_positional_trailing_comma() {
+    parseTrailingCommas = true;
+    FormalParameterList parameterList =
+        parse4("parseFormalParameterList", "(A a, [B b,])");
+    expect(parameterList.leftParenthesis, isNotNull);
+    expect(parameterList.leftDelimiter, isNotNull);
+    expect(parameterList.parameters, hasLength(2));
+    expect(parameterList.rightDelimiter, isNotNull);
+    expect(parameterList.rightParenthesis, isNotNull);
+  }
+
   void test_parseFormalParameterList_prefixedType() {
     FormalParameterList parameterList =
         parse4("parseFormalParameterList", "(io.File f)");
@@ -9691,7 +9824,7 @@
 
   void test_parseRedirectingConstructorInvocation_named() {
     RedirectingConstructorInvocation invocation =
-        parse4("parseRedirectingConstructorInvocation", "this.a()");
+        parse2("parseRedirectingConstructorInvocation", [true], "this.a()");
     expect(invocation.argumentList, isNotNull);
     expect(invocation.constructorName, isNotNull);
     expect(invocation.thisKeyword, isNotNull);
@@ -9700,7 +9833,7 @@
 
   void test_parseRedirectingConstructorInvocation_unnamed() {
     RedirectingConstructorInvocation invocation =
-        parse4("parseRedirectingConstructorInvocation", "this()");
+        parse2("parseRedirectingConstructorInvocation", [false], "this()");
     expect(invocation.argumentList, isNotNull);
     expect(invocation.constructorName, isNull);
     expect(invocation.thisKeyword, isNotNull);
diff --git a/pkg/analyzer/test/generated/resolver_test.dart b/pkg/analyzer/test/generated/resolver_test.dart
index fd39ad5..e3756ac 100644
--- a/pkg/analyzer/test/generated/resolver_test.dart
+++ b/pkg/analyzer/test/generated/resolver_test.dart
@@ -42,6 +42,7 @@
   runReflectiveTests(ErrorResolverTest);
   runReflectiveTests(LibraryImportScopeTest);
   runReflectiveTests(LibraryScopeTest);
+  runReflectiveTests(PrefixedNamespaceTest);
   runReflectiveTests(ScopeTest);
   runReflectiveTests(StrictModeTest);
   runReflectiveTests(SubtypeManagerTest);
@@ -535,6 +536,35 @@
   }
 }
 
+@reflectiveTest
+class PrefixedNamespaceTest extends ResolverTestCase {
+  void test_lookup_missing() {
+    ClassElement element = ElementFactory.classElement2('A');
+    PrefixedNamespace namespace = new PrefixedNamespace('p', _toMap([element]));
+    expect(namespace.get('p.B'), isNull);
+  }
+
+  void test_lookup_missing_matchesPrefix() {
+    ClassElement element = ElementFactory.classElement2('A');
+    PrefixedNamespace namespace = new PrefixedNamespace('p', _toMap([element]));
+    expect(namespace.get('p'), isNull);
+  }
+
+  void test_lookup_valid() {
+    ClassElement element = ElementFactory.classElement2('A');
+    PrefixedNamespace namespace = new PrefixedNamespace('p', _toMap([element]));
+    expect(namespace.get('p.A'), same(element));
+  }
+
+  HashMap<String, Element> _toMap(List<Element> elements) {
+    HashMap<String, Element> map = new HashMap<String, Element>();
+    for (Element element in elements) {
+      map[element.name] = element;
+    }
+    return map;
+  }
+}
+
 class Scope_EnclosedScopeTest_test_define_duplicate extends Scope {
   GatheringErrorListener listener;
 
@@ -2890,8 +2920,12 @@
         context, AstFactory.libraryIdentifier2(["dart.core"]));
     coreLibrary.definingCompilationUnit = coreUnit;
 
-    LibraryElementImpl mockAsyncLib =
-        (context as AnalysisContextImpl).createMockAsyncLib(coreLibrary);
+    Source asyncSource = new NonExistingSource(
+        'async.dart', Uri.parse('dart:async'), UriKind.DART_URI);
+    LibraryElementImpl mockAsyncLib = (context as AnalysisContextImpl)
+        .createMockAsyncLib(coreLibrary, asyncSource);
+    expect(mockAsyncLib.source, same(asyncSource));
+    expect(mockAsyncLib.definingCompilationUnit.source, same(asyncSource));
     expect(mockAsyncLib.publicNamespace, isNotNull);
 
     //
@@ -2922,8 +2956,6 @@
     ClassElementImpl element =
         new ClassElementImpl.forNode(AstFactory.identifier3(typeName));
     element.supertype = superclassType;
-    InterfaceTypeImpl type = new InterfaceTypeImpl(element);
-    element.type = type;
     if (parameterNames != null) {
       int count = parameterNames.length;
       if (count > 0) {
@@ -2940,7 +2972,6 @@
           typeParameter.type = typeArguments[i];
         }
         element.typeParameters = typeParameters;
-        type.typeArguments = typeArguments;
       }
     }
     return element;
diff --git a/pkg/analyzer/test/generated/sdk_test.dart b/pkg/analyzer/test/generated/sdk_test.dart
new file mode 100644
index 0000000..8976b9c
--- /dev/null
+++ b/pkg/analyzer/test/generated/sdk_test.dart
@@ -0,0 +1,132 @@
+// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.test.generated.sdk_test;
+
+import 'package:analyzer/dart/ast/token.dart';
+import 'package:analyzer/src/dart/ast/token.dart';
+import 'package:analyzer/src/dart/scanner/reader.dart';
+import 'package:analyzer/src/dart/scanner/scanner.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/error.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:unittest/unittest.dart';
+
+import '../reflective_tests.dart';
+import '../src/context/mock_sdk.dart';
+import '../utils.dart';
+import 'test_support.dart';
+
+main() {
+  initializeTestEnvironment();
+  runReflectiveTests(DartSdkManagerTest);
+  runReflectiveTests(SdkDescriptionTest);
+}
+
+@reflectiveTest
+class DartSdkManagerTest extends EngineTestCase {
+  void test_anySdk() {
+    DartSdkManager manager =
+        new DartSdkManager('/a/b/c', false, _failIfCreated);
+    expect(manager.anySdk, isNull);
+
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription description = new SdkDescription(<String>['/c/d'], options);
+    DartSdk sdk = new MockSdk();
+    manager.getSdk(description, () => sdk);
+    expect(manager.anySdk, same(sdk));
+  }
+
+  void test_getSdk_differentDescriptors() {
+    DartSdkManager manager =
+        new DartSdkManager('/a/b/c', false, _failIfCreated);
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription description1 = new SdkDescription(<String>['/c/d'], options);
+    DartSdk sdk1 = new MockSdk();
+    DartSdk result1 = manager.getSdk(description1, () => sdk1);
+    expect(result1, same(sdk1));
+    SdkDescription description2 = new SdkDescription(<String>['/e/f'], options);
+    DartSdk sdk2 = new MockSdk();
+    DartSdk result2 = manager.getSdk(description2, () => sdk2);
+    expect(result2, same(sdk2));
+
+    manager.getSdk(description1, _failIfAbsent);
+    manager.getSdk(description2, _failIfAbsent);
+  }
+
+  void test_getSdk_sameDescriptor() {
+    DartSdkManager manager =
+        new DartSdkManager('/a/b/c', false, _failIfCreated);
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription description = new SdkDescription(<String>['/c/d'], options);
+    DartSdk sdk = new MockSdk();
+    DartSdk result = manager.getSdk(description, () => sdk);
+    expect(result, same(sdk));
+    manager.getSdk(description, _failIfAbsent);
+  }
+
+  DartSdk _failIfAbsent() {
+    fail('Use of ifAbsent function');
+    return null;
+  }
+
+  DartSdk _failIfCreated(_) {
+    fail('Use of sdkCreator');
+    return null;
+  }
+}
+
+@reflectiveTest
+class SdkDescriptionTest extends EngineTestCase {
+  void test_equals_differentPaths_nested() {
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription left = new SdkDescription(<String>['/a/b/c'], options);
+    SdkDescription right = new SdkDescription(<String>['/a/b'], options);
+    expect(left == right, isFalse);
+  }
+
+  void test_equals_differentPaths_unrelated() {
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription left = new SdkDescription(<String>['/a/b/c'], options);
+    SdkDescription right = new SdkDescription(<String>['/d/e'], options);
+    expect(left == right, isFalse);
+  }
+
+  void test_equals_noPaths() {
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription left = new SdkDescription(<String>[], options);
+    SdkDescription right = new SdkDescription(<String>[], options);
+    expect(left == right, isTrue);
+  }
+
+  void test_equals_samePaths_differentOptions() {
+    String path = '/a/b/c';
+    AnalysisOptionsImpl leftOptions = new AnalysisOptionsImpl();
+    AnalysisOptionsImpl rightOptions = new AnalysisOptionsImpl();
+    rightOptions.strongMode = !leftOptions.strongMode;
+    SdkDescription left = new SdkDescription(<String>[path], leftOptions);
+    SdkDescription right = new SdkDescription(<String>[path], rightOptions);
+    expect(left == right, isFalse);
+  }
+
+  void test_equals_samePaths_sameOptions_multiple() {
+    String leftPath = '/a/b/c';
+    String rightPath = '/d/e';
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription left =
+        new SdkDescription(<String>[leftPath, rightPath], options);
+    SdkDescription right =
+        new SdkDescription(<String>[leftPath, rightPath], options);
+    expect(left == right, isTrue);
+  }
+
+  void test_equals_samePaths_sameOptions_single() {
+    String path = '/a/b/c';
+    AnalysisOptions options = new AnalysisOptionsImpl();
+    SdkDescription left = new SdkDescription(<String>[path], options);
+    SdkDescription right = new SdkDescription(<String>[path], options);
+    expect(left == right, isTrue);
+  }
+}
diff --git a/pkg/analyzer/test/generated/source_factory_test.dart b/pkg/analyzer/test/generated/source_factory_test.dart
index 9932cf8..9aa7fc1 100644
--- a/pkg/analyzer/test/generated/source_factory_test.dart
+++ b/pkg/analyzer/test/generated/source_factory_test.dart
@@ -7,6 +7,7 @@
 import 'dart:convert';
 
 import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/file_system/physical_file_system.dart';
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/source/package_map_resolver.dart';
 import 'package:analyzer/src/generated/engine.dart' show AnalysisEngine, Logger;
@@ -40,7 +41,9 @@
 
 void runPackageMapTests() {
   final Uri baseUri = new Uri.file('test/base');
-  final List<UriResolver> testResolvers = [new FileUriResolver()];
+  final List<UriResolver> testResolvers = [
+    new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)
+  ];
 
   Packages createPackageMap(Uri base, String configFileContents) {
     List<int> bytes = UTF8.encode(configFileContents);
diff --git a/pkg/analyzer/test/generated/static_type_analyzer_test.dart b/pkg/analyzer/test/generated/static_type_analyzer_test.dart
index 0e0e3a1..23fac6b 100644
--- a/pkg/analyzer/test/generated/static_type_analyzer_test.dart
+++ b/pkg/analyzer/test/generated/static_type_analyzer_test.dart
@@ -990,9 +990,6 @@
     String constructorName = "m";
     ConstructorElementImpl constructor =
         ElementFactory.constructorElement2(classElement, constructorName);
-    constructor.returnType = classElement.type;
-    FunctionTypeImpl constructorType = new FunctionTypeImpl(constructor);
-    constructor.type = constructorType;
     classElement.constructors = <ConstructorElement>[constructor];
     InstanceCreationExpression node = AstFactory.instanceCreationExpression2(
         null,
@@ -1010,9 +1007,6 @@
     ConstructorElementImpl constructor =
         ElementFactory.constructorElement2(elementC, null);
     elementC.constructors = <ConstructorElement>[constructor];
-    constructor.returnType = elementC.type;
-    FunctionTypeImpl constructorType = new FunctionTypeImpl(constructor);
-    constructor.type = constructorType;
     TypeName typeName =
         AstFactory.typeName(elementC, [AstFactory.typeName(elementI)]);
     typeName.type = elementC.type.instantiate(<DartType>[elementI.type]);
@@ -1031,9 +1025,6 @@
     ClassElementImpl classElement = ElementFactory.classElement2("C");
     ConstructorElementImpl constructor =
         ElementFactory.constructorElement2(classElement, null);
-    constructor.returnType = classElement.type;
-    FunctionTypeImpl constructorType = new FunctionTypeImpl(constructor);
-    constructor.type = constructorType;
     classElement.constructors = <ConstructorElement>[constructor];
     InstanceCreationExpression node = AstFactory.instanceCreationExpression2(
         null, AstFactory.typeName(classElement));
@@ -1087,6 +1078,29 @@
     _listener.assertNoErrors();
   }
 
+  void test_visitListLiteral_unresolved() {
+    _analyzer = _createAnalyzer(strongMode: true);
+    // [a] // where 'a' is not resolved
+    Identifier identifier = AstFactory.identifier3('a');
+    Expression node = AstFactory.listLiteral([identifier]);
+    DartType resultType = _analyze(node);
+    expect(resultType, isNull);
+    _listener.assertNoErrors();
+  }
+
+  void test_visitListLiteral_unresolved_multiple() {
+    _analyzer = _createAnalyzer(strongMode: true);
+    // [0, a, 1] // where 'a' is not resolved
+    Identifier identifier = AstFactory.identifier3('a');
+    Expression node = AstFactory
+        .listLiteral([_resolvedInteger(0), identifier, _resolvedInteger(1)]);
+    DartType resultType = _analyze(node);
+    _assertType2(
+        _typeProvider.listType.instantiate(<DartType>[_typeProvider.intType]),
+        resultType);
+    _listener.assertNoErrors();
+  }
+
   void test_visitMapLiteral_empty() {
     // {}
     Expression node = AstFactory.mapLiteral2();
@@ -1497,11 +1511,16 @@
 
   /**
    * Create the analyzer used by the tests.
-   *
-   * @return the analyzer to be used by the tests
    */
-  StaticTypeAnalyzer _createAnalyzer() {
-    InternalAnalysisContext context = AnalysisContextFactory.contextWithCore();
+  StaticTypeAnalyzer _createAnalyzer({bool strongMode: false}) {
+    InternalAnalysisContext context;
+    if (strongMode) {
+      AnalysisOptionsImpl options = new AnalysisOptionsImpl();
+      options.strongMode = true;
+      context = AnalysisContextFactory.contextWithCoreAndOptions(options);
+    } else {
+      context = AnalysisContextFactory.contextWithCore();
+    }
     FileBasedSource source =
         new FileBasedSource(FileUtilities2.createFile("/lib.dart"));
     CompilationUnitElementImpl definingCompilationUnit =
diff --git a/pkg/analyzer/test/generated/static_type_warning_code_test.dart b/pkg/analyzer/test/generated/static_type_warning_code_test.dart
index 381313e..241e3af 100644
--- a/pkg/analyzer/test/generated/static_type_warning_code_test.dart
+++ b/pkg/analyzer/test/generated/static_type_warning_code_test.dart
@@ -1381,7 +1381,7 @@
         [StaticTypeWarningCode.TYPE_ARGUMENT_NOT_MATCHING_BOUNDS]);
   }
 
-  void test_typeArgumentNotMatchingBounds_ofFunctionTypeAlias() {
+  void fail_typeArgumentNotMatchingBounds_ofFunctionTypeAlias() {
     assertErrorsInCode(
         r'''
 class A {}
diff --git a/pkg/analyzer/test/generated/strong_mode_test.dart b/pkg/analyzer/test/generated/strong_mode_test.dart
index e169167..277f8bb 100644
--- a/pkg/analyzer/test/generated/strong_mode_test.dart
+++ b/pkg/analyzer/test/generated/strong_mode_test.dart
@@ -1795,10 +1795,12 @@
     errors.sort((AnalysisError e1, AnalysisError e2) =>
         e1.errorCode.name.compareTo(e2.errorCode.name));
 
-    expect(errors.map((e) => e.errorCode.name), [
-      'INVALID_METHOD_OVERRIDE_RETURN_TYPE',
-      'STRONG_MODE_INVALID_METHOD_OVERRIDE'
-    ]);
+    expect(
+        errors.map((e) => e.errorCode.name),
+        unorderedEquals([
+          'INVALID_METHOD_OVERRIDE_RETURN_TYPE',
+          'STRONG_MODE_INVALID_METHOD_OVERRIDE'
+        ]));
     expect(errors[0].message, contains('Iterable<S>'),
         reason: 'errors should be in terms of the type parameters '
             'at the error location');
@@ -1818,11 +1820,12 @@
     // TODO(jmesserly): this is modified code from assertErrors, which we can't
     // use directly because STRONG_MODE_* errors don't have working equality.
     List<AnalysisError> errors = analysisContext2.computeErrors(source);
-    List errorNames = errors.map((e) => e.errorCode.name).toList();
-    expect(errorNames, hasLength(2));
-    expect(errorNames, contains('STRONG_MODE_INVALID_METHOD_OVERRIDE'));
     expect(
-        errorNames, contains('INVALID_METHOD_OVERRIDE_TYPE_PARAMETER_BOUND'));
+        errors.map((e) => e.errorCode.name),
+        unorderedEquals([
+          'INVALID_METHOD_OVERRIDE_TYPE_PARAMETER_BOUND',
+          'STRONG_MODE_INVALID_METHOD_OVERRIDE'
+        ]));
     verify([source]);
   }
 
@@ -1837,10 +1840,12 @@
     // TODO(jmesserly): we can't use assertErrors because STRONG_MODE_* errors
     // from CodeChecker don't have working equality.
     List<AnalysisError> errors = analysisContext2.computeErrors(source);
-    expect(errors.map((e) => e.errorCode.name), [
-      'STRONG_MODE_INVALID_METHOD_OVERRIDE',
-      'INVALID_METHOD_OVERRIDE_TYPE_PARAMETERS'
-    ]);
+    expect(
+        errors.map((e) => e.errorCode.name),
+        unorderedEquals([
+          'STRONG_MODE_INVALID_METHOD_OVERRIDE',
+          'INVALID_METHOD_OVERRIDE_TYPE_PARAMETERS'
+        ]));
     verify([source]);
   }
 
diff --git a/pkg/analyzer/test/generated/test_all.dart b/pkg/analyzer/test/generated/test_all.dart
index 3536c10..01ccba9 100644
--- a/pkg/analyzer/test/generated/test_all.dart
+++ b/pkg/analyzer/test/generated/test_all.dart
@@ -8,6 +8,7 @@
 
 import '../utils.dart';
 import 'all_the_rest_test.dart' as all_the_rest;
+import 'bazel_test.dart' as bazel_test;
 import 'checked_mode_compile_time_error_code_test.dart'
     as checked_mode_compile_time_error_code_test;
 import 'compile_time_error_code_test.dart' as compile_time_error_code_test;
@@ -27,6 +28,7 @@
 import 'parser_test.dart' as parser_test;
 import 'resolver_test.dart' as resolver_test;
 import 'scanner_test.dart' as scanner_test;
+import 'sdk_test.dart' as sdk_test;
 import 'simple_resolver_test.dart' as simple_resolver_test;
 import 'source_factory_test.dart' as source_factory_test;
 import 'static_type_analyzer_test.dart' as static_type_analyzer_test;
@@ -41,6 +43,7 @@
   initializeTestEnvironment();
   group('generated tests', () {
     all_the_rest.main();
+    bazel_test.main();
     checked_mode_compile_time_error_code_test.main();
     compile_time_error_code_test.main();
     constant_test.main();
@@ -59,6 +62,7 @@
     parser_test.main();
     resolver_test.main();
     scanner_test.main();
+    sdk_test.main();
     simple_resolver_test.main();
     source_factory_test.main();
     static_type_analyzer_test.main();
diff --git a/pkg/analyzer/test/generated/test_support.dart b/pkg/analyzer/test/generated/test_support.dart
index 1168c8b..93da8f0 100644
--- a/pkg/analyzer/test/generated/test_support.dart
+++ b/pkg/analyzer/test/generated/test_support.dart
@@ -600,9 +600,7 @@
     return _name;
   }
 
-  Uri get uri {
-    throw new UnsupportedOperationException();
-  }
+  Uri get uri => new Uri.file(_name);
 
   UriKind get uriKind {
     throw new UnsupportedOperationException();
diff --git a/pkg/analyzer/test/generated/utilities_test.dart b/pkg/analyzer/test/generated/utilities_test.dart
index 344612b..a4de8c0 100644
--- a/pkg/analyzer/test/generated/utilities_test.dart
+++ b/pkg/analyzer/test/generated/utilities_test.dart
@@ -59,6 +59,12 @@
       fail('Failed to copy token: ${first.lexeme} (${first.offset})');
       return false;
     }
+    if (first is TokenWithComment) {
+      CommentToken comment = first.precedingComments;
+      if (comment.parent != first) {
+        fail('Failed to link the comment "$comment" with the token "$first".');
+      }
+    }
     return super.isEqualTokens(first, second);
   }
 }
@@ -77,6 +83,13 @@
     _assertCloneUnitMember('@A.c() main() {}');
   }
 
+  void test_visitAnnotation_withComment() {
+    CompilationUnitMember clazz =
+        _parseUnitMember('/** comment */ @deprecated class A {}');
+    Annotation annotation = clazz.metadata.single;
+    _assertClone(annotation);
+  }
+
   void test_visitArgumentList() {
     _assertCloneExpression('m(a, b)');
   }
diff --git a/pkg/analyzer/test/reflective_tests.dart b/pkg/analyzer/test/reflective_tests.dart
index 4788776..9a8bb66 100644
--- a/pkg/analyzer/test/reflective_tests.dart
+++ b/pkg/analyzer/test/reflective_tests.dart
@@ -30,6 +30,11 @@
 const ReflectiveTest reflectiveTest = const ReflectiveTest();
 
 /**
+ * Test classes annotated with this annotation are run using [solo_group].
+ */
+const _SoloTest soloTest = const _SoloTest();
+
+/**
  * Is `true` if the application is running in checked mode.
  */
 final bool _isCheckedMode = () {
@@ -65,8 +70,7 @@
     throw new Exception('Class $name must have annotation "@reflectiveTest" '
         'in order to be run by runReflectiveTests.');
   }
-  String className = MirrorSystem.getName(classMirror.simpleName);
-  group(className, () {
+  void runMembers() {
     classMirror.instanceMembers
         .forEach((Symbol symbol, MethodMirror memberMirror) {
       // we need only methods
@@ -105,7 +109,13 @@
         });
       }
     });
-  });
+  }
+  String className = MirrorSystem.getName(classMirror.simpleName);
+  if (_hasAnnotationInstance(classMirror, soloTest)) {
+    solo_group(className, runMembers);
+  } else {
+    group(className, runMembers);
+  }
 }
 
 bool _hasAnnotationInstance(DeclarationMirror declaration, instance) =>
@@ -178,3 +188,11 @@
 class _FailingTest {
   const _FailingTest();
 }
+
+/**
+ * A marker annotation used to annotate a test class to run it using
+ * [solo_group].
+ */
+class _SoloTest {
+  const _SoloTest();
+}
diff --git a/pkg/analyzer/test/source/embedder_test.dart b/pkg/analyzer/test/source/embedder_test.dart
index 80072b8..c16e6f9 100644
--- a/pkg/analyzer/test/source/embedder_test.dart
+++ b/pkg/analyzer/test/source/embedder_test.dart
@@ -9,149 +9,59 @@
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/file_system/memory_file_system.dart';
 import 'package:analyzer/source/embedder.dart';
+import 'package:analyzer/src/generated/sdk.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:path/path.dart' as path;
 import 'package:unittest/unittest.dart';
 
+import '../reflective_tests.dart';
 import '../resource_utils.dart';
 import '../utils.dart';
 
 main() {
-  group('EmbedderUriResolverTest', () {
-    setUp(() {
-      initializeTestEnvironment(path.context);
-      buildResourceProvider();
-    });
-    tearDown(() {
-      initializeTestEnvironment();
-      clearResourceProvider();
-    });
-    test('test_NullEmbedderYamls', () {
-      var resolver = new EmbedderUriResolver(null);
-      expect(resolver.length, 0);
-    });
-    test('test_NoEmbedderYamls', () {
-      var locator = new EmbedderYamlLocator({
-        'fox': [pathTranslator.getResource('/empty')]
-      });
-      expect(locator.embedderYamls.length, 0);
-    });
-    test('test_EmbedderYaml', () {
-      var locator = new EmbedderYamlLocator({
-        'fox': [pathTranslator.getResource('/tmp')]
-      });
-      var resolver = new EmbedderUriResolver(locator.embedderYamls);
-
-      expectResolved(dartUri, posixPath) {
-        Source source = resolver.resolveAbsolute(Uri.parse(dartUri));
-        expect(source, isNotNull, reason: dartUri);
-        expect(source.fullName, posixToOSPath(posixPath));
-      }
-
-      // We have five mappings.
-      expect(resolver.length, 5);
-      // Check that they map to the correct paths.
-      expectResolved('dart:core', '/tmp/core.dart');
-      expectResolved('dart:fox', '/tmp/slippy.dart');
-      expectResolved('dart:bear', '/tmp/grizzly.dart');
-      expectResolved('dart:relative', '/relative.dart');
-      expectResolved('dart:deep', '/tmp/deep/directory/file.dart');
-    });
-    test('test_BadYAML', () {
-      var locator = new EmbedderYamlLocator(null);
-      locator.addEmbedderYaml(null, r'''{{{,{{}}},}}''');
-      expect(locator.embedderYamls.length, 0);
-    });
-    test('test_restoreAbsolute', () {
-      var locator = new EmbedderYamlLocator({
-        'fox': [pathTranslator.getResource('/tmp')]
-      });
-      var resolver = new EmbedderUriResolver(locator.embedderYamls);
-
-      expectRestore(String dartUri, [String expected]) {
-        var parsedUri = Uri.parse(dartUri);
-        var source = resolver.resolveAbsolute(parsedUri);
-        expect(source, isNotNull);
-        // Restore source's uri.
-        var restoreUri = resolver.restoreAbsolute(source);
-        expect(restoreUri, isNotNull, reason: dartUri);
-        // Verify that it is 'dart:fox'.
-        expect(restoreUri.toString(), expected ?? dartUri);
-        List<String> split = (expected ?? dartUri).split(':');
-        expect(restoreUri.scheme, split[0]);
-        expect(restoreUri.path, split[1]);
-      }
-
-      expectRestore('dart:deep');
-      expectRestore('dart:deep/file.dart', 'dart:deep');
-      expectRestore('dart:deep/part.dart');
-      expectRestore('dart:deep/deep/file.dart');
-    });
-
-    test('test_EmbedderSdk_fromFileUri', () {
-      var locator = new EmbedderYamlLocator({
-        'fox': [pathTranslator.getResource('/tmp')]
-      });
-      var resolver = new EmbedderUriResolver(locator.embedderYamls);
-      var sdk = resolver.dartSdk;
-
-      expectSource(String posixPath, String dartUri) {
-        var uri = Uri.parse(posixToOSFileUri(posixPath));
-        var source = sdk.fromFileUri(uri);
-        expect(source, isNotNull, reason: posixPath);
-        expect(source.uri.toString(), dartUri);
-        expect(source.fullName, posixToOSPath(posixPath));
-      }
-
-      expectSource('/tmp/slippy.dart', 'dart:fox');
-      expectSource('/tmp/deep/directory/file.dart', 'dart:deep');
-      expectSource('/tmp/deep/directory/part.dart', 'dart:deep/part.dart');
-    });
-    test('test_EmbedderSdk_getSdkLibrary', () {
-      var locator = new EmbedderYamlLocator({
-        'fox': [pathTranslator.getResource('/tmp')]
-      });
-      var resolver = new EmbedderUriResolver(locator.embedderYamls);
-      var sdk = resolver.dartSdk;
-      var lib = sdk.getSdkLibrary('dart:fox');
-      expect(lib, isNotNull);
-      expect(lib.path, posixToOSPath('/tmp/slippy.dart'));
-      expect(lib.shortName, 'dart:fox');
-    });
-    test('test_EmbedderSdk_mapDartUri', () {
-      var locator = new EmbedderYamlLocator({
-        'fox': [pathTranslator.getResource('/tmp')]
-      });
-      var resolver = new EmbedderUriResolver(locator.embedderYamls);
-      var sdk = resolver.dartSdk;
-
-      expectSource(String dartUri, String posixPath) {
-        var source = sdk.mapDartUri(dartUri);
-        expect(source, isNotNull, reason: posixPath);
-        expect(source.uri.toString(), dartUri);
-        expect(source.fullName, posixToOSPath(posixPath));
-      }
-
-      expectSource('dart:core', '/tmp/core.dart');
-      expectSource('dart:fox', '/tmp/slippy.dart');
-      expectSource('dart:deep', '/tmp/deep/directory/file.dart');
-      expectSource('dart:deep/part.dart', '/tmp/deep/directory/part.dart');
-    });
-  });
+  runReflectiveTests(DartUriResolverTest);
+  runReflectiveTests(EmbedderSdkTest);
+  runReflectiveTests(EmbedderUriResolverTest);
+  runReflectiveTests(EmbedderYamlLocatorTest);
 }
 
-TestPathTranslator pathTranslator;
-ResourceProvider resourceProvider;
+@reflectiveTest
+class DartUriResolverTest extends EmbedderRelatedTest {
+  void test_embedderYaml() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    EmbedderSdk sdk = new EmbedderSdk(locator.embedderYamls);
+    DartUriResolver resolver = new DartUriResolver(sdk);
 
-buildResourceProvider() {
-  var rawProvider = new MemoryResourceProvider(isWindows: isWindows);
-  resourceProvider = new TestResourceProvider(rawProvider);
-  pathTranslator = new TestPathTranslator(rawProvider)
-    ..newFolder('/empty')
-    ..newFolder('/tmp')
-    ..newFile(
-        '/tmp/_embedder.yaml',
-        r'''
+    void expectResolved(dartUri, posixPath) {
+      Source source = resolver.resolveAbsolute(Uri.parse(dartUri));
+      expect(source, isNotNull, reason: dartUri);
+      expect(source.fullName, posixToOSPath(posixPath));
+    }
+    // Check that the embedded libraries map to the correct paths.
+    expectResolved('dart:core', '/tmp/core.dart');
+    expectResolved('dart:fox', '/tmp/slippy.dart');
+    expectResolved('dart:bear', '/tmp/grizzly.dart');
+    expectResolved('dart:relative', '/relative.dart');
+    expectResolved('dart:deep', '/tmp/deep/directory/file.dart');
+  }
+}
+
+abstract class EmbedderRelatedTest {
+  TestPathTranslator pathTranslator;
+  ResourceProvider resourceProvider;
+
+  buildResourceProvider() {
+    MemoryResourceProvider rawProvider =
+        new MemoryResourceProvider(isWindows: isWindows);
+    resourceProvider = new TestResourceProvider(rawProvider);
+    pathTranslator = new TestPathTranslator(rawProvider)
+      ..newFolder('/empty')
+      ..newFolder('/tmp')
+      ..newFile(
+          '/tmp/_embedder.yaml',
+          r'''
 embedded_libs:
   "dart:core" : "core.dart"
   "dart:fox": "slippy.dart"
@@ -160,9 +70,161 @@
   "dart:deep": "deep/directory/file.dart"
   "fart:loudly": "nomatter.dart"
 ''');
+  }
+
+  clearResourceProvider() {
+    resourceProvider = null;
+    pathTranslator = null;
+  }
+
+  void setUp() {
+    initializeTestEnvironment(path.context);
+    buildResourceProvider();
+  }
+
+  void tearDown() {
+    initializeTestEnvironment();
+    clearResourceProvider();
+  }
 }
 
-clearResourceProvider() {
-  resourceProvider = null;
-  pathTranslator = null;
+@reflectiveTest
+class EmbedderSdkTest extends EmbedderRelatedTest {
+  void test_creation() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    EmbedderSdk sdk = new EmbedderSdk(locator.embedderYamls);
+
+    expect(sdk.urlMappings, hasLength(5));
+  }
+
+  void test_fromFileUri() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    EmbedderSdk sdk = new EmbedderSdk(locator.embedderYamls);
+
+    expectSource(String posixPath, String dartUri) {
+      Uri uri = Uri.parse(posixToOSFileUri(posixPath));
+      Source source = sdk.fromFileUri(uri);
+      expect(source, isNotNull, reason: posixPath);
+      expect(source.uri.toString(), dartUri);
+      expect(source.fullName, posixToOSPath(posixPath));
+    }
+
+    expectSource('/tmp/slippy.dart', 'dart:fox');
+    expectSource('/tmp/deep/directory/file.dart', 'dart:deep');
+    expectSource('/tmp/deep/directory/part.dart', 'dart:deep/part.dart');
+  }
+
+  void test_getSdkLibrary() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    EmbedderSdk sdk = new EmbedderSdk(locator.embedderYamls);
+
+    SdkLibrary lib = sdk.getSdkLibrary('dart:fox');
+    expect(lib, isNotNull);
+    expect(lib.path, posixToOSPath('/tmp/slippy.dart'));
+    expect(lib.shortName, 'dart:fox');
+  }
+
+  void test_mapDartUri() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    EmbedderSdk sdk = new EmbedderSdk(locator.embedderYamls);
+
+    void expectSource(String dartUri, String posixPath) {
+      Source source = sdk.mapDartUri(dartUri);
+      expect(source, isNotNull, reason: posixPath);
+      expect(source.uri.toString(), dartUri);
+      expect(source.fullName, posixToOSPath(posixPath));
+    }
+
+    expectSource('dart:core', '/tmp/core.dart');
+    expectSource('dart:fox', '/tmp/slippy.dart');
+    expectSource('dart:deep', '/tmp/deep/directory/file.dart');
+    expectSource('dart:deep/part.dart', '/tmp/deep/directory/part.dart');
+  }
+}
+
+@reflectiveTest
+class EmbedderUriResolverTest extends EmbedderRelatedTest {
+  void test_embedderYaml() {
+    var locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    var resolver = new EmbedderUriResolver(locator.embedderYamls);
+
+    expectResolved(dartUri, posixPath) {
+      Source source = resolver.resolveAbsolute(Uri.parse(dartUri));
+      expect(source, isNotNull, reason: dartUri);
+      expect(source.fullName, posixToOSPath(posixPath));
+    }
+
+    // We have five mappings.
+    expect(resolver, hasLength(5));
+    // Check that they map to the correct paths.
+    expectResolved('dart:core', '/tmp/core.dart');
+    expectResolved('dart:fox', '/tmp/slippy.dart');
+    expectResolved('dart:bear', '/tmp/grizzly.dart');
+    expectResolved('dart:relative', '/relative.dart');
+    expectResolved('dart:deep', '/tmp/deep/directory/file.dart');
+  }
+
+  void test_nullEmbedderYamls() {
+    var resolver = new EmbedderUriResolver(null);
+    expect(resolver, hasLength(0));
+  }
+
+  void test_restoreAbsolute() {
+    var locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    var resolver = new EmbedderUriResolver(locator.embedderYamls);
+
+    expectRestore(String dartUri, [String expected]) {
+      var parsedUri = Uri.parse(dartUri);
+      var source = resolver.resolveAbsolute(parsedUri);
+      expect(source, isNotNull);
+      // Restore source's uri.
+      var restoreUri = resolver.restoreAbsolute(source);
+      expect(restoreUri, isNotNull, reason: dartUri);
+      // Verify that the restored URI matches the expected value.
+      expect(restoreUri.toString(), expected ?? dartUri);
+      List<String> split = (expected ?? dartUri).split(':');
+      expect(restoreUri.scheme, split[0]);
+      expect(restoreUri.path, split[1]);
+    }
+
+    expectRestore('dart:deep');
+    expectRestore('dart:deep/file.dart', 'dart:deep');
+    expectRestore('dart:deep/part.dart');
+    expectRestore('dart:deep/deep/file.dart');
+  }
+}
+
+@reflectiveTest
+class EmbedderYamlLocatorTest extends EmbedderRelatedTest {
+  void test_empty() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/empty')]
+    });
+    expect(locator.embedderYamls, hasLength(0));
+  }
+
+  void test_invalid() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator(null);
+    locator.addEmbedderYaml(null, r'''{{{,{{}}},}}''');
+    expect(locator.embedderYamls, hasLength(0));
+  }
+
+  void test_valid() {
+    EmbedderYamlLocator locator = new EmbedderYamlLocator({
+      'fox': [pathTranslator.getResource('/tmp')]
+    });
+    expect(locator.embedderYamls, hasLength(1));
+  }
 }
diff --git a/pkg/analyzer/test/src/context/builder_test.dart b/pkg/analyzer/test/src/context/builder_test.dart
new file mode 100644
index 0000000..758a03c
--- /dev/null
+++ b/pkg/analyzer/test/src/context/builder_test.dart
@@ -0,0 +1,340 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.test.src.context.context_builder_test;
+
+import 'dart:io' as io;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/file_system/memory_file_system.dart';
+import 'package:analyzer/file_system/physical_file_system.dart';
+import 'package:analyzer/src/context/builder.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:package_config/packages.dart';
+import 'package:package_config/src/packages_impl.dart';
+import 'package:path/path.dart' as path;
+import 'package:unittest/unittest.dart';
+
+import '../../generated/test_support.dart';
+import '../../reflective_tests.dart';
+import '../../utils.dart';
+import 'mock_sdk.dart';
+
+main() {
+  initializeTestEnvironment();
+  runReflectiveTests(ContextBuilderTest_WithDisk);
+  runReflectiveTests(ContextBuilderTest_WithoutDisk);
+}
+
+@reflectiveTest
+class ContextBuilderTest_WithDisk extends EngineTestCase {
+  /**
+   * The resource provider to be used by tests.
+   */
+  PhysicalResourceProvider resourceProvider;
+
+  /**
+   * The path context used to manipulate file paths.
+   */
+  path.Context pathContext;
+
+  /**
+   * The SDK manager used by the tests.
+   */
+  DartSdkManager sdkManager;
+
+  /**
+   * The content cache used by the tests.
+   */
+  ContentCache contentCache;
+
+  @override
+  void setUp() {
+    resourceProvider = PhysicalResourceProvider.INSTANCE;
+    pathContext = resourceProvider.pathContext;
+    sdkManager = new DartSdkManager('', false, (_) => new MockSdk());
+    contentCache = new ContentCache();
+  }
+
+  void test_createPackageMap_fromPackageDirectory_explicit() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package directory that is outside the project directory.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageDirPath = pathContext.join(rootPath, 'packages');
+      String fooName = 'foo';
+      String fooPath = pathContext.join(packageDirPath, fooName);
+      String barName = 'bar';
+      String barPath = pathContext.join(packageDirPath, barName);
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.Directory(fooPath).createSync(recursive: true);
+      new io.Directory(barPath).createSync(recursive: true);
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      builder.defaultPackagesDirectoryPath = packageDirPath;
+
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map[fooName], new Uri.directory(fooPath));
+      expect(map[barName], new Uri.directory(barPath));
+    });
+  }
+
+  void test_createPackageMap_fromPackageDirectory_inRoot() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package directory that is inside the project directory.
+      String projectPath = tempDir.path;
+      String packageDirPath = pathContext.join(projectPath, 'packages');
+      String fooName = 'foo';
+      String fooPath = pathContext.join(packageDirPath, fooName);
+      String barName = 'bar';
+      String barPath = pathContext.join(packageDirPath, barName);
+      new io.Directory(fooPath).createSync(recursive: true);
+      new io.Directory(barPath).createSync(recursive: true);
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map[fooName], new Uri.directory(fooPath));
+      expect(map[barName], new Uri.directory(barPath));
+    });
+  }
+
+  void test_createPackageMap_fromPackageFile_explicit() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package file that is outside the project directory's hierarchy.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageFilePath = pathContext.join(rootPath, 'child', '.packages');
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.File(packageFilePath)
+        ..createSync(recursive: true)
+        ..writeAsStringSync(r'''
+foo:/pkg/foo
+bar:/pkg/bar
+''');
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      builder.defaultPackageFilePath = packageFilePath;
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map['foo'], new Uri.directory('/pkg/foo'));
+      expect(map['bar'], new Uri.directory('/pkg/bar'));
+    });
+  }
+
+  void test_createPackageMap_fromPackageFile_inParentOfRoot() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package file that is inside the parent of the project directory.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageFilePath = pathContext.join(rootPath, '.packages');
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.File(packageFilePath)
+        ..createSync(recursive: true)
+        ..writeAsStringSync(r'''
+foo:/pkg/foo
+bar:/pkg/bar
+''');
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map['foo'], new Uri.directory('/pkg/foo'));
+      expect(map['bar'], new Uri.directory('/pkg/bar'));
+    });
+  }
+
+  void test_createPackageMap_fromPackageFile_inRoot() {
+    withTempDir((io.Directory tempDir) {
+      // Use a package file that is inside the project directory.
+      String rootPath = tempDir.path;
+      String projectPath = pathContext.join(rootPath, 'project');
+      String packageFilePath = pathContext.join(projectPath, '.packages');
+      new io.Directory(projectPath).createSync(recursive: true);
+      new io.File(packageFilePath)
+        ..createSync(recursive: true)
+        ..writeAsStringSync(r'''
+foo:/pkg/foo
+bar:/pkg/bar
+''');
+
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(projectPath);
+      expect(packages, isNotNull);
+      Map<String, Uri> map = packages.asMap();
+      expect(map, hasLength(2));
+      expect(map['foo'], new Uri.directory('/pkg/foo'));
+      expect(map['bar'], new Uri.directory('/pkg/bar'));
+    });
+  }
+
+  void test_createPackageMap_none() {
+    withTempDir((io.Directory tempDir) {
+      ContextBuilder builder =
+          new ContextBuilder(resourceProvider, sdkManager, contentCache);
+      Packages packages = builder.createPackageMap(tempDir.path);
+      expect(packages, same(Packages.noPackages));
+    });
+  }
+
+  /**
+   * Execute the [test] function with a temporary [directory]. The test function
+   * can perform any disk operations within the directory; the directory (and
+   * its contents) will be removed after the function returns.
+   */
+  void withTempDir(test(io.Directory directory)) {
+    io.Directory directory =
+        io.Directory.systemTemp.createTempSync('analyzer_');
+    try {
+      test(directory);
+    } finally {
+      directory.deleteSync(recursive: true);
+    }
+  }
+}
+
+@reflectiveTest
+class ContextBuilderTest_WithoutDisk extends EngineTestCase {
+  /**
+   * The resource provider to be used by tests.
+   */
+  MemoryResourceProvider resourceProvider;
+
+  /**
+   * The SDK manager used by the tests.
+   */
+  DartSdkManager sdkManager;
+
+  /**
+   * The content cache used by the tests.
+   */
+  ContentCache contentCache;
+
+  void fail_createSourceFactory() {
+    fail('Incomplete test');
+  }
+
+  void fail_findSdkResolver() {
+    fail('Incomplete test');
+  }
+
+  @override
+  void setUp() {
+    resourceProvider = new MemoryResourceProvider();
+    sdkManager = new DartSdkManager('', false, (_) => new MockSdk());
+    contentCache = new ContentCache();
+  }
+
+  void test_convertPackagesToMap_noPackages() {
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    expect(builder.convertPackagesToMap(Packages.noPackages), isNull);
+  }
+
+  void test_convertPackagesToMap_null() {
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    expect(builder.convertPackagesToMap(null), isNull);
+  }
+
+  void test_convertPackagesToMap_packages() {
+    String fooName = 'foo';
+    String fooPath = '/pkg/foo';
+    Uri fooUri = new Uri.directory(fooPath);
+    String barName = 'bar';
+    String barPath = '/pkg/bar';
+    Uri barUri = new Uri.directory(barPath);
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    MapPackages packages = new MapPackages({fooName: fooUri, barName: barUri});
+    Map<String, List<Folder>> result = builder.convertPackagesToMap(packages);
+    expect(result, isNotNull);
+    expect(result, hasLength(2));
+    expect(result[fooName], hasLength(1));
+    expect(result[fooName][0].path, fooPath);
+    expect(result[barName], hasLength(1));
+    expect(result[barName][0].path, barPath);
+  }
+
+  void test_getOptionsFile_explicit() {
+    String path = '/some/directory/path';
+    String filePath = '/options/analysis.yaml';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    builder.defaultAnalysisOptionsFilePath = filePath;
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inParentOfRoot_new() {
+    String parentPath = '/some/directory';
+    String path = '$parentPath/path';
+    String filePath =
+        '$parentPath/${AnalysisEngine.ANALYSIS_OPTIONS_YAML_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inParentOfRoot_old() {
+    String parentPath = '/some/directory';
+    String path = '$parentPath/path';
+    String filePath = '$parentPath/${AnalysisEngine.ANALYSIS_OPTIONS_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inRoot_new() {
+    String path = '/some/directory/path';
+    String filePath = '$path/${AnalysisEngine.ANALYSIS_OPTIONS_YAML_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+
+  void test_getOptionsFile_inRoot_old() {
+    String path = '/some/directory/path';
+    String filePath = '$path/${AnalysisEngine.ANALYSIS_OPTIONS_FILE}';
+    resourceProvider.newFile(filePath, '');
+
+    ContextBuilder builder =
+        new ContextBuilder(resourceProvider, sdkManager, contentCache);
+    File result = builder.getOptionsFile(path);
+    expect(result, isNotNull);
+    expect(result.path, filePath);
+  }
+}
diff --git a/pkg/analyzer/test/src/context/cache_test.dart b/pkg/analyzer/test/src/context/cache_test.dart
index 57ff94e..b03791a 100644
--- a/pkg/analyzer/test/src/context/cache_test.dart
+++ b/pkg/analyzer/test/src/context/cache_test.dart
@@ -52,6 +52,27 @@
     expect(cache, isNotNull);
   }
 
+  test_flush() {
+    AnalysisTarget target = new TestSource();
+    ResultDescriptor resultA = new ResultDescriptor('A', null);
+    ResultDescriptor resultB = new ResultDescriptor('B', null);
+    CacheEntry entry = new CacheEntry(target);
+    cache.put(entry);
+    // put values
+    entry.setValue(resultA, 'a', TargetedResult.EMPTY_LIST);
+    entry.setValue(resultB, 'b', TargetedResult.EMPTY_LIST);
+    expect(cache.getState(target, resultA), CacheState.VALID);
+    expect(cache.getState(target, resultB), CacheState.VALID);
+    expect(cache.getValue(target, resultA), 'a');
+    expect(cache.getValue(target, resultB), 'b');
+    // flush A
+    cache.flush((target, result) => result == resultA);
+    expect(cache.getState(target, resultA), CacheState.FLUSHED);
+    expect(cache.getState(target, resultB), CacheState.VALID);
+    expect(cache.getValue(target, resultA), isNull);
+    expect(cache.getValue(target, resultB), 'b');
+  }
+
   void test_get() {
     AnalysisTarget target = new TestSource();
     expect(cache.get(target), isNull);
@@ -297,6 +318,27 @@
     expect(entry.exception, isNull);
   }
 
+  test_flush() {
+    AnalysisTarget target = new TestSource();
+    ResultDescriptor resultA = new ResultDescriptor('A', null);
+    ResultDescriptor resultB = new ResultDescriptor('B', null);
+    CacheEntry entry = new CacheEntry(target);
+    cache.put(entry);
+    // put values
+    entry.setValue(resultA, 'a', TargetedResult.EMPTY_LIST);
+    entry.setValue(resultB, 'b', TargetedResult.EMPTY_LIST);
+    expect(entry.getState(resultA), CacheState.VALID);
+    expect(entry.getState(resultB), CacheState.VALID);
+    expect(entry.getValue(resultA), 'a');
+    expect(entry.getValue(resultB), 'b');
+    // flush A
+    entry.flush((target, result) => result == resultA);
+    expect(entry.getState(resultA), CacheState.FLUSHED);
+    expect(entry.getState(resultB), CacheState.VALID);
+    expect(entry.getValue(resultA), isNull);
+    expect(entry.getValue(resultB), 'b');
+  }
+
   test_getState() {
     AnalysisTarget target = new TestSource();
     ResultDescriptor result = new ResultDescriptor('test', null);
@@ -1146,5 +1188,8 @@
 
 class _TestAnalysisTarget implements AnalysisTarget {
   @override
+  Source get librarySource => null;
+
+  @override
   Source get source => null;
 }
diff --git a/pkg/analyzer/test/src/context/context_factory_test.dart b/pkg/analyzer/test/src/context/context_factory_test.dart
new file mode 100644
index 0000000..a116d29
--- /dev/null
+++ b/pkg/analyzer/test/src/context/context_factory_test.dart
@@ -0,0 +1,159 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer.test.src.context.context_factory_test;
+
+import 'package:analyzer/file_system/file_system.dart';
+import 'package:analyzer/file_system/memory_file_system.dart';
+import 'package:analyzer/src/context/context_factory.dart';
+import 'package:unittest/unittest.dart';
+
+import '../../reflective_tests.dart';
+import '../../utils.dart';
+
+main() {
+  initializeTestEnvironment();
+  runReflectiveTests(PackageMapProcessorTest);
+}
+
+@reflectiveTest
+class PackageMapProcessorTest {
+  MemoryResourceProvider resourceProvider;
+
+  Folder empty;
+  Folder tmp_sdk_ext;
+  Folder tmp_embedder;
+  Map<String, List<Folder>> packageMap;
+
+  void setUp() {
+    resourceProvider = new MemoryResourceProvider();
+    empty = resourceProvider.newFolder('/empty');
+    tmp_sdk_ext = resourceProvider.newFolder('/tmp_sdk_ext');
+    tmp_embedder = resourceProvider.newFolder('/tmp_embedder');
+    packageMap = <String, List<Folder>>{
+      'empty': [empty],
+      'tmp_embedder': [tmp_embedder],
+      'tmp_sdk_ext': [tmp_sdk_ext]
+    };
+  }
+
+  void test_basic_processing() {
+    resourceProvider.newFile(
+        '/tmp_sdk_ext/_sdkext',
+        r'''
+  {
+    "dart:ui": "ui.dart"
+  }''');
+    resourceProvider.newFile(
+        '/tmp_embedder/_embedder.yaml',
+        r'''
+embedded_libs:
+  "dart:core" : "core.dart"
+  "dart:fox": "slippy.dart"
+  "dart:bear": "grizzly.dart"
+  "dart:relative": "../relative.dart"
+  "dart:deep": "deep/directory/file.dart"
+''');
+
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 5);
+    expect(proc.embeddedLibraries.getLibrary('dart:core').path,
+        '/tmp_embedder/core.dart');
+    expect(proc.extendedLibraries.size(), 1);
+    expect(proc.extendedLibraries.getLibrary('dart:ui').path,
+        '/tmp_sdk_ext/ui.dart');
+  }
+
+  void test_empty_package_map() {
+    PackageMapProcessor proc =
+        new PackageMapProcessor(<String, List<Folder>>{});
+    expect(proc.embeddedLibraries.size(), 0);
+    expect(proc.extendedLibraries.size(), 0);
+    expect(proc.libraryMap.size(), 0);
+  }
+
+  void test_extenders_do_not_override() {
+    resourceProvider.newFile(
+        '/tmp_sdk_ext/_sdkext',
+        r'''
+  {
+    "dart:ui": "ui2.dart"
+  }''');
+    resourceProvider.newFile(
+        '/tmp_embedder/_embedder.yaml',
+        r'''
+embedded_libs:
+  "dart:core" : "core.dart"
+  "dart:ui": "ui.dart"
+''');
+
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 2);
+    expect(proc.extendedLibraries.size(), 1);
+    expect(proc.libraryMap.size(), 2);
+    expect(proc.libraryMap.getLibrary('dart:ui').path, '/tmp_embedder/ui.dart');
+  }
+
+  void test_invalid_embedder() {
+    resourceProvider.newFile(
+        '/tmp_embedder/_embedder.yaml',
+        r'''
+invalid contents, will not parse
+''');
+
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 0);
+    expect(proc.extendedLibraries.size(), 0);
+    expect(proc.libraryMap.size(), 0);
+  }
+
+  void test_invalid_extender() {
+    resourceProvider.newFile(
+        '/tmp_sdk_ext/_sdkext',
+        r'''
+invalid contents, will not parse
+''');
+
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 0);
+    expect(proc.extendedLibraries.size(), 0);
+    expect(proc.libraryMap.size(), 0);
+  }
+
+  void test_no_embedder() {
+    resourceProvider.newFile(
+        '/tmp_sdk_ext/_sdkext',
+        r'''
+  {
+    "dart:ui": "ui2.dart"
+  }''');
+
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 0);
+    expect(proc.extendedLibraries.size(), 1);
+    expect(proc.libraryMap.size(), 1);
+  }
+
+  void test_no_embedder_or_extender() {
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 0);
+    expect(proc.extendedLibraries.size(), 0);
+    expect(proc.libraryMap.size(), 0);
+  }
+
+  void test_no_extender() {
+    resourceProvider.newFile(
+        '/tmp_embedder/_embedder.yaml',
+        r'''
+embedded_libs:
+  "dart:core" : "core.dart"
+  "dart:ui": "ui.dart"
+''');
+
+    PackageMapProcessor proc = new PackageMapProcessor(packageMap);
+    expect(proc.embeddedLibraries.size(), 2);
+    expect(proc.extendedLibraries.size(), 0);
+    expect(proc.libraryMap.size(), 2);
+  }
+}
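For orientation, a minimal sketch of how the PackageMapProcessor exercised above might be driven outside the test harness. The package name 'pkg' and the file contents are illustrative assumptions; PackageMapProcessor, embeddedLibraries, getLibrary, and the returned path are only the members the tests rely on, so the sketch holds only if the new context_factory.dart API behaves as those tests expect.

import 'package:analyzer/file_system/file_system.dart';
import 'package:analyzer/file_system/memory_file_system.dart';
import 'package:analyzer/src/context/context_factory.dart';

void describeEmbeddedLibraries() {
  // Mirror setUp() above: one package folder carrying an _embedder.yaml file.
  MemoryResourceProvider provider = new MemoryResourceProvider();
  Folder pkg = provider.newFolder('/pkg');
  provider.newFile(
      '/pkg/_embedder.yaml', 'embedded_libs:\n  "dart:core": "core.dart"\n');
  Map<String, List<Folder>> packageMap = <String, List<Folder>>{
    'pkg': [pkg]
  };
  // Entries from _embedder.yaml win over _sdkext entries for the same
  // 'dart:' URI, as test_extenders_do_not_override checks.
  PackageMapProcessor proc = new PackageMapProcessor(packageMap);
  print(proc.embeddedLibraries.getLibrary('dart:core').path); // /pkg/core.dart
}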
diff --git a/pkg/analyzer/test/src/context/context_test.dart b/pkg/analyzer/test/src/context/context_test.dart
index ea41470..a2f3cb3 100644
--- a/pkg/analyzer/test/src/context/context_test.dart
+++ b/pkg/analyzer/test/src/context/context_test.dart
@@ -26,6 +26,7 @@
 import 'package:analyzer/src/task/dart.dart';
 import 'package:analyzer/src/task/html.dart';
 import 'package:analyzer/task/dart.dart';
+import 'package:analyzer/task/general.dart';
 import 'package:analyzer/task/model.dart';
 import 'package:html/dom.dart' show Document;
 import 'package:unittest/unittest.dart';
@@ -300,6 +301,39 @@
     expect(context.sourcesNeedingProcessing, hasLength(0));
   }
 
+  void test_applyChanges_recompute_exportNamespace() {
+    Source libSource = addSource(
+        "/lib.dart",
+        r'''
+class A {}
+''');
+    Source exporterSource = addSource(
+        "/exporter.dart",
+        r'''
+export 'lib.dart';
+''');
+    _performPendingAnalysisTasks();
+    // initially: A
+    {
+      LibraryElement libraryElement =
+          context.getResult(exporterSource, LIBRARY_ELEMENT1) as LibraryElement;
+      expect(libraryElement.exportNamespace.definedNames.keys,
+          unorderedEquals(['A']));
+    }
+    // after update: B
+    context.setContents(
+        libSource,
+        r'''
+class B {}''');
+    _performPendingAnalysisTasks();
+    {
+      LibraryElement libraryElement =
+          context.getResult(exporterSource, LIBRARY_ELEMENT1) as LibraryElement;
+      expect(libraryElement.exportNamespace.definedNames.keys,
+          unorderedEquals(['B']));
+    }
+  }
+
   Future test_applyChanges_remove() {
     SourcesChangedListener listener = new SourcesChangedListener();
     context.onSourcesChanged.listen(listener.onData);
@@ -856,6 +890,57 @@
         isTrue);
   }
 
+  void test_flushResolvedUnit_updateFile_dontNotify() {
+    String oldCode = '';
+    String newCode = r'''
+import 'dart:async';
+''';
+    String path = '/test.dart';
+    Source source = resourceProvider.newFile(path, oldCode).createSource();
+    context.applyChanges(new ChangeSet()..addedSource(source));
+    context.resolveCompilationUnit2(source, source);
+    // Flush all RESOLVED_UNIT results.
+    context.analysisCache.flush((target, result) {
+      if (target.source == source) {
+        return RESOLVED_UNIT_RESULTS.contains(result);
+      }
+      return false;
+    });
+    // Update the file, but don't notify the context.
+    resourceProvider.updateFile(path, newCode);
+    // Driver must detect that the file was changed and recover.
+    CompilationUnit unit = context.resolveCompilationUnit2(source, source);
+    expect(unit, isNotNull);
+  }
+
+  void test_flushResolvedUnit_updateFile_dontNotify2() {
+    String oldCode = r'''
+main() {}
+''';
+    String newCode = r'''
+import 'dart:async';
+main() {}
+''';
+    String path = '/test.dart';
+    Source source = resourceProvider.newFile(path, oldCode).createSource();
+    context.applyChanges(new ChangeSet()..addedSource(source));
+    context.resolveCompilationUnit2(source, source);
+    // Flush all RESOLVED_UNIT results.
+    context.analysisCache.flush((target, result) {
+      if (target.source == source) {
+        return RESOLVED_UNIT_RESULTS.contains(result);
+      }
+      return false;
+    });
+    // Update the file, but don't notify the context.
+    resourceProvider.updateFile(path, newCode);
+    // Driver must detect that the file was changed and recover.
+    CompilationUnit unit = context.resolveCompilationUnit2(source, source);
+    expect(unit, isNotNull);
+  }
+
   void test_getAnalysisOptions() {
     expect(context.analysisOptions, isNotNull);
   }
@@ -1094,7 +1179,7 @@
     List<Source> sources = context.launchableClientLibrarySources;
     expect(sources, isEmpty);
     addSource(
-        "/a.dart",
+        '/a.dart',
         r'''
 import 'dart:html';
 ''');
@@ -1112,7 +1197,7 @@
     List<Source> sources = context.launchableClientLibrarySources;
     expect(sources, isEmpty);
     addSource(
-        "/a.dart",
+        '/a.dart',
         r'''
 export 'dart:html';
 ''');
@@ -1413,6 +1498,44 @@
     expect(context.getLibraryElement(source), isNull);
   }
 
+  void test_handleContentsChanged_noOriginal_sameAsFile() {
+    ContentCache contentCache = new ContentCache();
+    context.contentCache = contentCache;
+    // Add the source.
+    String code = 'foo() {}';
+    Source source = addSource("/test.dart", code);
+    _analyzeAll_assertFinished();
+    expect(context.getResolvedCompilationUnit2(source, source), isNotNull);
+    // Update the content cache, and notify that we updated the source.
+    // We pass "null" as "originalContents" because there was none.
+    contentCache.setContents(source, code);
+    context.handleContentsChanged(source, null, code, true);
+    expect(context.getResolvedCompilationUnit2(source, source), isNotNull);
+  }
+
+  void test_handleContentsChanged_noOriginal_sameAsFile_butFileUpdated() {
+    ContentCache contentCache = new ContentCache();
+    context.contentCache = contentCache;
+    // Add the source.
+    String oldCode = 'foo() {}';
+    String newCode = 'bar() {}';
+    var file = resourceProvider.newFile('/test.dart', oldCode);
+    Source source = file.createSource();
+    context.applyChanges(new ChangeSet()..addedSource(source));
+    _analyzeAll_assertFinished();
+    expect(context.getResolvedCompilationUnit2(source, source), isNotNull);
+    // Test for the race condition.
+    // 1. Update the file.
+    // 2. Update the content cache.
+    // 3. Notify the context; because this is the first time we update the
+    //    content cache, we don't know "originalContents".
+    // The source must be invalidated, because it has different contents now.
+    resourceProvider.updateFile('/test.dart', newCode);
+    contentCache.setContents(source, newCode);
+    context.handleContentsChanged(source, null, newCode, true);
+    expect(context.getResolvedCompilationUnit2(source, source), isNull);
+  }
+
   Future test_implicitAnalysisEvents_added() async {
     AnalyzedSourcesListener listener = new AnalyzedSourcesListener();
     context.implicitAnalysisEvents.listen(listener.onData);
@@ -1775,7 +1898,7 @@
         reason: "part resolved 1");
     // update and analyze #1
     context.setContents(partSource, "part of lib; // 1");
-    if (AnalysisEngine.instance.limitInvalidationInTaskModel) {
+    if (context.analysisOptions.finerGrainedInvalidation) {
       expect(
           context.getResolvedCompilationUnit2(libSource, libSource), isNotNull,
           reason: "library changed 2");
@@ -1797,7 +1920,7 @@
     }
     // update and analyze #2
     context.setContents(partSource, "part of lib; // 12");
-    if (AnalysisEngine.instance.limitInvalidationInTaskModel) {
+    if (context.analysisOptions.finerGrainedInvalidation) {
       expect(
           context.getResolvedCompilationUnit2(libSource, libSource), isNotNull,
           reason: "library changed 3");
@@ -1922,7 +2045,7 @@
 
   void test_performAnalysisTask_interruptBy_setContents() {
     Source sourceA = addSource(
-        "/a.dart",
+        '/a.dart',
         r'''
 library expectedToFindSemicolon
 ''');
@@ -1953,10 +2076,10 @@
     _changeSource(source, "");
     source.generateExceptionOnRead = true;
     _analyzeAll_assertFinished();
-    if (AnalysisEngine.instance.limitInvalidationInTaskModel) {
+    if (context.analysisOptions.finerGrainedInvalidation) {
       expect(source.readCount, 7);
     } else {
-      expect(source.readCount, 5);
+      expect(source.readCount, 4);
     }
   }
 
@@ -2171,6 +2294,17 @@
         ["dart.core", "dart.async", "dart.math", "libA", "libB"]);
   }
 
+//  void test_resolveCompilationUnit_sourceChangeDuringResolution() {
+//    _context = new _AnalysisContext_sourceChangeDuringResolution();
+//    AnalysisContextFactory.initContextWithCore(_context);
+//    _sourceFactory = _context.sourceFactory;
+//    Source source = _addSource("/lib.dart", "library lib;");
+//    CompilationUnit compilationUnit =
+//        _context.resolveCompilationUnit2(source, source);
+//    expect(compilationUnit, isNotNull);
+//    expect(_context.getLineInfo(source), isNotNull);
+//  }
+
   void test_resolveCompilationUnit_library() {
     Source source = addSource("/lib.dart", "library lib;");
     LibraryElement library = context.computeLibraryElement(source);
@@ -2187,17 +2321,6 @@
     expect(compilationUnit, isNotNull);
   }
 
-//  void test_resolveCompilationUnit_sourceChangeDuringResolution() {
-//    _context = new _AnalysisContext_sourceChangeDuringResolution();
-//    AnalysisContextFactory.initContextWithCore(_context);
-//    _sourceFactory = _context.sourceFactory;
-//    Source source = _addSource("/lib.dart", "library lib;");
-//    CompilationUnit compilationUnit =
-//        _context.resolveCompilationUnit2(source, source);
-//    expect(compilationUnit, isNotNull);
-//    expect(_context.getLineInfo(source), isNotNull);
-//  }
-
   void test_setAnalysisOptions() {
     AnalysisOptionsImpl options = new AnalysisOptionsImpl();
     options.cacheSize = 42;
@@ -2438,16 +2561,24 @@
    * Perform analysis tasks up to 512 times and assert that it was enough.
    */
   void _analyzeAll_assertFinished([int maxIterations = 512]) {
+    bool finishedAnalyzing = false;
     for (int i = 0; i < maxIterations; i++) {
       List<ChangeNotice> notice = context.performAnalysisTask().changeNotices;
       if (notice == null) {
+        finishedAnalyzing = true;
         bool inconsistent = context.validateCacheConsistency();
         if (!inconsistent) {
           return;
         }
       }
     }
-    fail("performAnalysisTask failed to terminate after analyzing all sources");
+    if (finishedAnalyzing) {
+      fail(
+          "performAnalysisTask failed to finish analyzing all sources after $maxIterations iterations");
+    } else {
+      fail(
+          "performAnalysisTask failed to terminate after analyzing all sources");
+    }
   }
 
   void _assertNoExceptions() {
@@ -2536,159 +2667,355 @@
 class LimitedInvalidateTest extends AbstractContextTest {
   @override
   void setUp() {
-    AnalysisEngine.instance.limitInvalidationInTaskModel = true;
     super.setUp();
     AnalysisOptionsImpl options =
         new AnalysisOptionsImpl.from(context.analysisOptions);
     options.incremental = true;
+    options.finerGrainedInvalidation = true;
     context.analysisOptions = options;
   }
 
-  @override
-  void tearDown() {
-    AnalysisEngine.instance.limitInvalidationInTaskModel = false;
-    super.tearDown();
+  void test_class_addMethod_useClass() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+class B extends A {
+  foo() {}
+}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+B b = null;
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: add B.bar.
+    //   b.dart is valid, because though it uses B, B is still the same element.
+    context.setContents(
+        a,
+        r'''
+class A {}
+class B extends A {
+  foo() {}
+  bar() {}
+}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
   }
 
-  void test_noChange_thenChange() {
-    Source sourceA = addSource(
-        "/a.dart",
+  void test_class_method_change_notUsed() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
-
 class A {
-  A();
-}
-class B {
-  B();
+  foo() {}
+  bar() {}
 }
 ''');
-    Source sourceB = addSource(
-        "/b.dart",
+    Source b = addSource(
+        '/b.dart',
         r'''
-library lib_b;
 import 'a.dart';
-main() {
-  new A();
+main(A a) {
+  a.foo();
 }
 ''');
     _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceA).errors, hasLength(0));
-    expect(context.getErrors(sourceB).errors, hasLength(0));
-    var unitA = context.getResolvedCompilationUnit2(sourceA, sourceA);
-    var unitElementA = unitA.element;
-    var libraryElementA = unitElementA.library;
-    // Update a.dart, no declaration changes.
+    // Update a.dart: remove A.bar, add A.bar2.
+    //   b.dart is valid, because it doesn't reference 'bar' or 'bar2'.
     context.setContents(
-        sourceA,
+        a,
         r'''
-library lib_a;
 class A {
-  A();
-}
-class B {
-  B();
+  foo() {}
+  bar2() {}
 }
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertValid(sourceB, LIBRARY_ELEMENT);
-    // The a.dart's unit and element are updated incrementally.
-    // They are the same instances as initially.
-    // So, all the references from other units are still valid.
-    {
-      LibrarySpecificUnit target = new LibrarySpecificUnit(sourceA, sourceA);
-      expect(analysisCache.getValue(target, RESOLVED_UNIT1), same(unitA));
-      expect(unitA.element, same(unitElementA));
-      expect(unitElementA.library, same(libraryElementA));
-    }
-    // Analyze.
-    _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceA).errors, hasLength(0));
-    expect(context.getErrors(sourceB).errors, hasLength(0));
-    // The a.dart's unit and element are the same.
-    {
-      LibrarySpecificUnit target = new LibrarySpecificUnit(sourceA, sourceA);
-      expect(analysisCache.getValue(target, RESOLVED_UNIT), same(unitA));
-      expect(unitA.element, same(unitElementA));
-      expect(unitElementA.library, same(libraryElementA));
-    }
-    // Add method to a.dart. This invalidates b.dart, so
-    // we know that the previous update did not damage dependencies.
-    context.setContents(
-        sourceA,
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_class_method_change_notUsed_throughSubclass_extends() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
 class A {
-  A();
+  foo() {}
+  bar() {}
+}
+class B extends A {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main(B b) {
+  b.foo();
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: remove A.bar, add A.bar2.
+    //   b.dart is valid, because it doesn't reference 'bar' or 'bar2'.
+    context.setContents(
+        a,
+        r'''
+class A {
+  foo() {}
+  bar2() {}
+}
+class B extends A {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_class_method_definedInSuper_sameLibrary() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {
   m() {}
 }
-class B {
-  B();
+class B extends A {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main(B b) {
+  b.m();
 }
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceB, LIBRARY_ELEMENT);
-    // The a.dart's unit and element are the same.
-    {
-      LibrarySpecificUnit target = new LibrarySpecificUnit(sourceA, sourceA);
-      expect(analysisCache.getValue(target, RESOLVED_UNIT1), same(unitA));
-      expect(unitA.element, same(unitElementA));
-      expect(unitElementA.library, same(libraryElementA));
-    }
-    // Analyze.
     _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceA).errors, hasLength(0));
-    expect(context.getErrors(sourceB).errors, hasLength(0));
+    // Update a.dart: rename A.m to A.m2.
+    //   This changes B as well, because B inherits m from A.
+    //   b.dart is invalid, because it references B.
+    context.setContents(
+        a,
+        r'''
+class A {
+  m2() {}
+}
+class B extends A {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
   }
 
-  void test_unusedName() {
-    Source sourceA = addSource(
-        "/a.dart",
+  void test_class_private_member() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
-class A {}
-class B {}
-class C {}
+class A {
+  A();
+  A._privateConstructor();
+
+  foo() {}
+
+  int _privateField;
+  _privateMethod() {}
+  int get _privateGetter => null;
+  void set _privateSetter(_) {}
+}
 ''');
-    Source sourceB = addSource(
-        "/b.dart",
+    Source b = addSource(
+        '/b.dart',
         r'''
-library lib_b;
+import 'a.dart';
+main(A a) {
+  a.foo();
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: rename the private members of A.
+    //   b.dart is valid, because it cannot see these private members.
+    context.setContents(
+        a,
+        r'''
+class A {
+  A();
+  A._privateConstructor2();
+
+  foo() {}
+
+  int _privateField2;
+  _privateMethod2() {}
+  int get _privateGetter2 => null;
+  void set _privateSetter2(_) {}
+}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_class_super_makeAbstract_instantiate() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+abstract class I {
+ void m();
+}
+class A implements I {
+ void m() {}
+}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+class B extends A {}
+''');
+    Source c = addSource(
+        '/c.dart',
+        r'''
+import 'b.dart';
+main() {
+  new B();
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: rename A.m to A.m2, so A no longer implements I.m.
+    //   b.dart is invalid, because B extends A.
+    //   c.dart is invalid, because it instantiates B.
+    context.setContents(
+        a,
+        r'''
+abstract class I {
+ void m();
+}
+class A implements I {
+ void m2() {}
+}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+
+    _assertValidForDependentLibrary(c);
+    _assertInvalid(c, LIBRARY_ERRORS_READY);
+  }
+
+  void test_private_class() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class _A {}
+class _B2 {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
 import 'a.dart';
 main() {
-  new A();
-  new C();
+  new _A();
+  new _B();
 }
 ''');
     _performPendingAnalysisTasks();
-    // Update A.
+    // Update a.dart: rename _A to _A2 and _B2 to _B.
+    //   b.dart is valid, because _A, _B, _A2 and _B2 are all private,
+    //   so b.dart cannot see them.
     context.setContents(
-        sourceA,
+        a,
         r'''
-library lib_a;
-class A {}
-class B2 {}
-class C {}
+class _A2 {}
+class _B {}
 ''');
-    // Only a.dart is invalidated.
-    // Because b.dart does not use B, so it is valid.
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertValid(sourceB, LIBRARY_ERRORS_READY);
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
   }
 
-  void test_usedName_directUser() {
-    Source sourceA = addSource(
-        "/a.dart",
+  void test_private_topLevelVariable() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
+int _V = 1;
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  print(_V);
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: change _V
+    //   b.dart is valid, because _V is private and b.dart cannot see it.
+    context.setContents(
+        a,
+        r'''
+int _V = 2;
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_private_topLevelVariable_throughPublic() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+int _A = 1;
+int B = _A + 1;
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  print(B);
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: change _A
+    //   b.dart is invalid, because it uses B, which uses _A.
+    context.setContents(
+        a,
+        r'''
+int _A = 2;
+int B = _A + 1;
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_sequence_class_give_take() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
 class A {}
 class B {}
 class C {}
 ''');
-    Source sourceB = addSource(
-        "/b.dart",
+    Source b = addSource(
+        '/b.dart',
         r'''
-library lib_b;
 import 'a.dart';
 main() {
   new A();
@@ -2696,50 +3023,342 @@
 }
 ''');
     _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceB).errors, hasLength(1));
-    // Update a.dart, invalidates b.dart because it references "C2".
+    expect(context.getErrors(b).errors, hasLength(1));
+    // Update a.dart: remove C, add C2.
+    //   b.dart is invalid, because it references C2.
     context.setContents(
-        sourceA,
+        a,
         r'''
-library lib_a;
 class A {}
 class B {}
 class C2 {}
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceB, LIBRARY_ERRORS_READY);
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
     // Now b.dart is analyzed and the error is fixed.
     _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceB).errors, hasLength(0));
-    // Update a.dart, invalidates b.dart because it references "C".
+    expect(context.getErrors(b).errors, hasLength(0));
+    // Update a.dart: remove C2, add C.
+    //   b.dart is invalid, because it references C2.
     context.setContents(
-        sourceA,
+        a,
         r'''
-library lib_a;
 class A {}
 class B {}
 class C {}
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceB, LIBRARY_ERRORS_READY);
-    _performPendingAnalysisTasks();
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
     // Now b.dart is analyzed and it again has the error.
-    expect(context.getErrors(sourceB).errors, hasLength(1));
+    _performPendingAnalysisTasks();
+    expect(context.getErrors(b).errors, hasLength(1));
   }
 
-  void test_usedName_directUser_withIncremental() {
-    Source sourceA = addSource(
-        "/a.dart",
+  void test_sequence_noChange_thenChange() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
+class A {
+  A();
+}
+
+class B {
+  B();
+}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  new A();
+}
+''');
+    _performPendingAnalysisTasks();
+    expect(context.getErrors(a).errors, hasLength(0));
+    expect(context.getErrors(b).errors, hasLength(0));
+    var unitA = context.getResolvedCompilationUnit2(a, a);
+    var unitElementA = unitA.element;
+    var libraryElementA = unitElementA.library;
+    // Update a.dart, no declaration changes.
+    context.setContents(
+        a,
+        r'''
+class A {
+  A();
+}
+class B {
+  B();
+}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+    // The a.dart's unit and element are updated incrementally.
+    // They are the same instances as initially.
+    // So, all the references from other units are still valid.
+    {
+      LibrarySpecificUnit target = new LibrarySpecificUnit(a, a);
+      expect(analysisCache.getValue(target, RESOLVED_UNIT1), same(unitA));
+      expect(unitA.element, same(unitElementA));
+      expect(unitElementA.library, same(libraryElementA));
+    }
+    // Analyze.
+    _performPendingAnalysisTasks();
+    expect(context.getErrors(a).errors, hasLength(0));
+    expect(context.getErrors(b).errors, hasLength(0));
+    // The a.dart's unit and element are the same.
+    {
+      LibrarySpecificUnit target = new LibrarySpecificUnit(a, a);
+      expect(analysisCache.getValue(target, RESOLVED_UNIT), same(unitA));
+      expect(unitA.element, same(unitElementA));
+      expect(unitElementA.library, same(libraryElementA));
+    }
+    // Add a new method to a.dart. This invalidates b.dart, so
+    // we know that the previous update did not damage dependencies.
+    context.setContents(
+        a,
+        r'''
+class A {
+  A();
+  m() {}
+}
+class B {
+  B();
+}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+    // The a.dart's unit and element are the same.
+    {
+      LibrarySpecificUnit target = new LibrarySpecificUnit(a, a);
+      expect(analysisCache.getValue(target, RESOLVED_UNIT1), same(unitA));
+      expect(unitA.element, same(unitElementA));
+      expect(unitElementA.library, same(libraryElementA));
+    }
+    // Analyze.
+    _performPendingAnalysisTasks();
+    expect(context.getErrors(a).errors, hasLength(0));
+    expect(context.getErrors(b).errors, hasLength(0));
+  }
+
+  void test_sequence_useAnyResolvedUnit() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+class B {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  new A();
+}
+''');
+    _performPendingAnalysisTasks();
+    _assertValid(a, LIBRARY_ERRORS_READY);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+    // Flush all RESOLVED_UNIT results for a.dart.
+    CacheEntry entryA = context.getCacheEntry(new LibrarySpecificUnit(a, a));
+    entryA.setState(RESOLVED_UNIT, CacheState.FLUSHED);
+    entryA.setState(RESOLVED_UNIT1, CacheState.FLUSHED);
+    entryA.setState(RESOLVED_UNIT2, CacheState.FLUSHED);
+    entryA.setState(RESOLVED_UNIT3, CacheState.FLUSHED);
+    context.setContents(
+        a,
+        r'''
+class A {}
+class B2 {}
+''');
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_unusedName_class_add() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+class B {}
+class C {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  new A();
+  new C();
+}
+''');
+    _performPendingAnalysisTasks();
+    _assertValid(a, LINE_INFO);
+    // Update a.dart: rename B to B2. The class B is not referenced by b.dart.
+    //   a.dart is invalid.
+    //   b.dart is valid.
+    context.setContents(
+        a,
+        r'''
+class A {}
+class B2 {}
+class C {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertUnitInvalid(a, RESOLVED_UNIT);
+    _assertValidForDependentLibrary(b);
+    _assertValidAllLibraryUnitResults(b);
+    _assertValid(b, LIBRARY_ERRORS_READY);
+  }
+
+  void test_usedName_class_name_asHole_inBody() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+class B {}
+class C {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  new A();
+  new C2();
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: remove C, add C2.
+    //   b.dart is invalid, because it references C2.
+    context.setContents(
+        a,
+        r'''
+class A {}
+class B {}
+class C2 {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+  }
+
+  void test_usedName_class_name_asSuper() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+class B extends A {}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: remove A, add A2.
+    //   b.dart is invalid, because it references A.
+    context.setContents(
+        a,
+        r'''
+class A2 {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+  }
+
+  void test_usedName_class_name_asTypeBound() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+class B<T extends A> {
+  T f;
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: remove A, add A2.
+    //   b.dart is invalid, because it references A.
+    context.setContents(
+        a,
+        r'''
+class A2 {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+  }
+
+  void test_usedName_class_name_inBody() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
+class A {}
+class B {}
+class C {}
+''');
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+main() {
+  new A();
+  new C();
+}
+''');
+    _performPendingAnalysisTasks();
+    // Update a.dart: remove C, add C2.
+    //   b.dart is invalid, because it references C.
+    context.setContents(
+        a,
+        r'''
+class A {}
+class B {}
+class C2 {}
+''');
+    _assertValidForChangedLibrary(a);
+    _assertValidForDependentLibrary(b);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+  }
+
+  void test_usedName_classMethod_name_inBody() {
+    Source a = addSource(
+        '/a.dart',
+        r'''
 class A {
   m() {}
 }
 ''');
-    Source sourceB = addSource(
-        "/b.dart",
+    Source b = addSource(
+        '/b.dart',
         r'''
-library lib_b;
 import 'a.dart';
 main() {
   A a = new A();
@@ -2747,80 +3366,122 @@
 }
 ''');
     _performPendingAnalysisTasks();
-    // Update A.
+    // Update a.dart: remove A.m, add A.m2.
+    //   b.dart is invalid, because it references 'm'.
     context.setContents(
-        sourceA,
+        a,
         r'''
-library lib_a;
 class A {
   m2() {}
 }
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceB, LIBRARY_ERRORS_READY);
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+    _assertValidForDependentLibrary(b);
+    _assertInvalidLibraryElements(b, LIBRARY_ELEMENT4);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
   }
 
-  void test_usedName_indirectUser() {
-    Source sourceA = addSource(
-        "/a.dart",
+  void test_usedName_indirect_classMethod_name_inBody() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
 class A {
   m() {}
 }
 ''');
-    Source sourceB = addSource(
-        "/b.dart",
+    Source b = addSource(
+        '/b.dart',
         r'''
-library lib_b;
 import 'a.dart';
 class B extends A {}
 ''');
-    Source sourceC = addSource(
-        "/c.dart",
+    Source c = addSource(
+        '/c.dart',
         r'''
-library lib_c;
 import 'b.dart';
-class C extends B {
-  main() {
-    m();
-  }
+main() {
+  B b = new B();
+  b.m();
 }
 ''');
-    // No errors, "A.m" exists.
     _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceC).errors, hasLength(0));
-    // Replace "A.m" with "A.m2", invalidate both b.dart and c.dart files.
+    // Update a.dart: remove A.m, add A.m2.
+    //   b.dart is invalid, because B extends A.
+    //   c.dart is invalid, because 'main' references B.
     context.setContents(
-        sourceA,
+        a,
         r'''
-library lib_a;
 class A {
   m2() {}
 }
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceB, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceC, LIBRARY_ERRORS_READY);
-    // There is an error in c.dart, "A.m" does not exist.
-    _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceB).errors, hasLength(0));
-    expect(context.getErrors(sourceC).errors, hasLength(1));
-    // Restore "A.m", invalidate both b.dart and c.dart files.
-    context.setContents(
-        sourceA,
+    _assertValidForChangedLibrary(a);
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+
+    _assertValidForDependentLibrary(b);
+    _assertInvalidLibraryElements(b, LIBRARY_ELEMENT4);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+
+    _assertValidForDependentLibrary(c);
+    _assertInvalidLibraryElements(c, LIBRARY_ELEMENT5);
+    _assertInvalidUnits(c, RESOLVED_UNIT4);
+    _assertInvalid(c, LIBRARY_ERRORS_READY);
+  }
+
+  void test_usedName_indirect_classMethod_returnType_inBody() {
+    Source a = addSource(
+        '/a.dart',
         r'''
-library lib_a;
 class A {
-  m() {}
+  int m() {
+    return 1;
+  }
 }
 ''');
-    _assertInvalid(sourceA, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceB, LIBRARY_ERRORS_READY);
-    _assertInvalid(sourceC, LIBRARY_ERRORS_READY);
-    // No errors, "A.m" exists.
+    Source b = addSource(
+        '/b.dart',
+        r'''
+import 'a.dart';
+class B extends A {}
+''');
+    Source c = addSource(
+        '/c.dart',
+        r'''
+import 'b.dart';
+main() {
+  B b = new B();
+  b.m();
+}
+''');
     _performPendingAnalysisTasks();
-    expect(context.getErrors(sourceC).errors, hasLength(0));
+    // Update a.dart: change the return type of A.m from int to double.
+    //   b.dart is invalid, because B extends A.
+    //   c.dart is invalid, because 'main' references 'm'.
+    context.setContents(
+        a,
+        r'''
+class A {
+  double m() {
+    return 1.2;
+  }
+}
+''');
+    _assertInvalid(a, LIBRARY_ERRORS_READY);
+
+    // TODO(scheglov) In theory b.dart is not affected, because it does not
+    // call A.m, does not override it, etc.
+    _assertValidForDependentLibrary(b);
+    _assertInvalidLibraryElements(b, LIBRARY_ELEMENT4);
+    _assertInvalidUnits(b, RESOLVED_UNIT4);
+    _assertInvalid(b, LIBRARY_ERRORS_READY);
+
+    _assertValidForDependentLibrary(c);
+    _assertInvalidLibraryElements(c, LIBRARY_ELEMENT5);
+    _assertInvalidUnits(c, RESOLVED_UNIT4);
+    _assertInvalid(c, LIBRARY_ERRORS_READY);
   }
 
   void _assertInvalid(AnalysisTarget target, ResultDescriptor descriptor) {
@@ -2830,9 +3491,102 @@
     }
   }
 
+  /**
+   * Assert that the [LIBRARY_ELEMENT_RESULTS] from [first] onward are invalid.
+   */
+  void _assertInvalidLibraryElements(
+      Source source, ResultDescriptor<LibraryElement> first) {
+    bool foundFirst = false;
+    for (ResultDescriptor<LibraryElement> result in LIBRARY_ELEMENT_RESULTS) {
+      foundFirst = foundFirst || result == first;
+      if (foundFirst) {
+        _assertInvalid(source, result);
+      }
+    }
+  }
+
+  void _assertInvalidUnits(Source unit, ResultDescriptor<CompilationUnit> first,
+      {Source library}) {
+    var target = new LibrarySpecificUnit(library ?? unit, unit);
+    bool foundFirst = false;
+    for (ResultDescriptor<CompilationUnit> result in RESOLVED_UNIT_RESULTS) {
+      foundFirst = foundFirst || result == first;
+      if (foundFirst) {
+        _assertInvalid(target, result);
+      }
+    }
+  }
+
+  void _assertUnitInvalid(Source unitSource, ResultDescriptor descriptor,
+      {Source librarySource}) {
+    librarySource ??= unitSource;
+    _assertInvalid(
+        new LibrarySpecificUnit(librarySource, unitSource), descriptor);
+  }
+
+  void _assertUnitValid(Source unitSource, ResultDescriptor descriptor,
+      {Source librarySource}) {
+    librarySource ??= unitSource;
+    _assertValid(
+        new LibrarySpecificUnit(librarySource, unitSource), descriptor);
+  }
+
+  void _assertUnitValidTaskResults(Source unitSource, TaskDescriptor descriptor,
+      {Source librarySource}) {
+    librarySource ??= unitSource;
+    for (ResultDescriptor result in descriptor.results) {
+      _assertUnitValid(unitSource, result, librarySource: librarySource);
+    }
+  }
+
   void _assertValid(AnalysisTarget target, ResultDescriptor descriptor) {
     CacheState state = analysisCache.getState(target, descriptor);
-    expect(state, CacheState.VALID);
+    expect(state, isIn([CacheState.VALID, CacheState.FLUSHED]),
+        reason: '$descriptor in $target');
+  }
+
+  void _assertValidAllLibraryUnitResults(Source source, {Source library}) {
+    for (ResultDescriptor<LibraryElement> result in LIBRARY_ELEMENT_RESULTS) {
+      _assertValid(source, result);
+    }
+    library ??= source;
+    LibrarySpecificUnit target = new LibrarySpecificUnit(library, source);
+    for (ResultDescriptor<CompilationUnit> result in RESOLVED_UNIT_RESULTS) {
+      _assertValid(target, result);
+    }
+  }
+
+  void _assertValidForAnyLibrary(Source source) {
+    // Source results.
+    _assertValidTaskResults(source, ScanDartTask.DESCRIPTOR);
+    // Library results.
+    _assertValidTaskResults(source, BuildLibraryElementTask.DESCRIPTOR);
+    _assertValidTaskResults(source, BuildDirectiveElementsTask.DESCRIPTOR);
+    _assertValidTaskResults(source, BuildSourceExportClosureTask.DESCRIPTOR);
+    _assertValidTaskResults(source, ReadyLibraryElement2Task.DESCRIPTOR);
+    _assertValidTaskResults(source, ComputeLibraryCycleTask.DESCRIPTOR);
+    // Unit results.
+    _assertUnitValidTaskResults(
+        source, BuildCompilationUnitElementTask.DESCRIPTOR);
+    _assertUnitValidTaskResults(
+        source, ResolveDirectiveElementsTask.DESCRIPTOR);
+    _assertUnitValidTaskResults(source, BuildEnumMemberElementsTask.DESCRIPTOR);
+  }
+
+  void _assertValidForChangedLibrary(Source source) {
+    _assertValidForAnyLibrary(source);
+  }
+
+  void _assertValidForDependentLibrary(Source source) {
+    _assertValidForAnyLibrary(source);
+    // Library results.
+    _assertValidTaskResults(source, BuildPublicNamespaceTask.DESCRIPTOR);
+  }
+
+  void _assertValidTaskResults(AnalysisTarget target, TaskDescriptor task) {
+    for (ResultDescriptor result in task.results) {
+      _assertValid(target, result);
+    }
   }
 
   void _performPendingAnalysisTasks([int maxTasks = 512]) {
diff --git a/pkg/analyzer/test/src/context/test_all.dart b/pkg/analyzer/test/src/context/test_all.dart
index 301b13c..33c2986 100644
--- a/pkg/analyzer/test/src/context/test_all.dart
+++ b/pkg/analyzer/test/src/context/test_all.dart
@@ -7,14 +7,18 @@
 import 'package:unittest/unittest.dart';
 
 import '../../utils.dart';
+import 'builder_test.dart' as builder_test;
 import 'cache_test.dart' as cache_test;
+import 'context_factory_test.dart' as context_factory_test;
 import 'context_test.dart' as context_test;
 
 /// Utility for manually running all tests.
 main() {
   initializeTestEnvironment();
   group('context tests', () {
+    builder_test.main();
     cache_test.main();
+    context_factory_test.main();
     context_test.main();
   });
 }
diff --git a/pkg/analyzer/test/src/dart/element/element_test.dart b/pkg/analyzer/test/src/dart/element/element_test.dart
index ff2f9568..a6e718d 100644
--- a/pkg/analyzer/test/src/dart/element/element_test.dart
+++ b/pkg/analyzer/test/src/dart/element/element_test.dart
@@ -44,6 +44,7 @@
   runReflectiveTests(MethodElementImplTest);
   runReflectiveTests(MultiplyDefinedElementImplTest);
   runReflectiveTests(ParameterElementImplTest);
+  runReflectiveTests(PropertyAccessorElementImplTest);
   runReflectiveTests(TopLevelVariableElementImplTest);
 }
 
@@ -308,7 +309,7 @@
   void test_isEnum() {
     String firstConst = "A";
     String secondConst = "B";
-    ClassElementImpl enumE = ElementFactory
+    EnumElementImpl enumE = ElementFactory
         .enumElement(new TestTypeProvider(), "E", [firstConst, secondConst]);
 
     // E is an enum
@@ -4184,6 +4185,39 @@
   }
 }
 
+@reflectiveTest
+class PropertyAccessorElementImplTest extends EngineTestCase {
+  void test_matchesHandle_getter() {
+    CompilationUnitElementImpl compilationUnitElement =
+        ElementFactory.compilationUnit('foo.dart');
+    ElementFactory.library(null, '')
+      ..definingCompilationUnit = compilationUnitElement;
+    PropertyAccessorElementImpl element =
+        ElementFactory.getterElement('x', true, DynamicTypeImpl.instance);
+    compilationUnitElement.accessors = <PropertyAccessorElement>[element];
+    PropertyAccessorElementHandle handle =
+        new PropertyAccessorElementHandle(null, element.location);
+    expect(element.hashCode, handle.hashCode);
+    expect(element == handle, isTrue);
+    expect(handle == element, isTrue);
+  }
+
+  void test_matchesHandle_setter() {
+    CompilationUnitElementImpl compilationUnitElement =
+        ElementFactory.compilationUnit('foo.dart');
+    ElementFactory.library(null, '')
+      ..definingCompilationUnit = compilationUnitElement;
+    PropertyAccessorElementImpl element =
+        ElementFactory.setterElement('x', true, DynamicTypeImpl.instance);
+    compilationUnitElement.accessors = <PropertyAccessorElement>[element];
+    PropertyAccessorElementHandle handle =
+        new PropertyAccessorElementHandle(null, element.location);
+    expect(element.hashCode, handle.hashCode);
+    expect(element == handle, isTrue);
+    expect(handle == element, isTrue);
+  }
+}
+
 class TestElementResynthesizer extends ElementResynthesizer {
   Map<ElementLocation, Element> locationMap;
 
diff --git a/pkg/analyzer/test/src/summary/incremental_cache_test.dart b/pkg/analyzer/test/src/summary/incremental_cache_test.dart
index bf7620a..19a6197 100644
--- a/pkg/analyzer/test/src/summary/incremental_cache_test.dart
+++ b/pkg/analyzer/test/src/summary/incremental_cache_test.dart
@@ -3,18 +3,68 @@
 // BSD-style license that can be found in the LICENSE file.
 
 import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/src/generated/error.dart';
 import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/summary/incremental_cache.dart';
 import 'package:unittest/unittest.dart';
 
+import '../../generated/test_support.dart';
 import '../../reflective_tests.dart';
 import '../abstract_single_unit.dart';
 
 main() {
   groupSep = ' | ';
+  runReflectiveTests(ComparePathsTest);
   runReflectiveTests(IncrementalCacheTest);
 }
 
+@reflectiveTest
+class ComparePathsTest extends AbstractSingleUnitTest {
+  void test_empty() {
+    expect(comparePaths('', ''), 0);
+  }
+
+  void test_equal() {
+    expect(comparePaths('abc', 'abc'), 0);
+  }
+
+  void test_longer_suffixAfter() {
+    expect(comparePaths('aab', 'aa'), 1);
+  }
+
+  void test_longer_suffixBefore() {
+    expect(comparePaths('aaa', 'ab'), -1);
+  }
+
+  void test_longer_suffixSame() {
+    expect(comparePaths('aaa', 'aa'), 1);
+  }
+
+  void test_sameLength_before0() {
+    expect(comparePaths('aaa', 'bbb'), -1);
+  }
+
+  void test_sameLength_before1() {
+    expect(comparePaths('aaa', 'bba'), -1);
+  }
+
+  void test_sameLength_before2() {
+    expect(comparePaths('aaa', 'bba'), -1);
+  }
+
+  void test_shorter_suffixAfter() {
+    expect(comparePaths('ab', 'aaa'), 1);
+  }
+
+  void test_shorter_suffixBefore() {
+    expect(comparePaths('aa', 'aab'), -1);
+  }
+
+  void test_shorter_suffixSame() {
+    expect(comparePaths('aa', 'aaa'), -1);
+  }
+}
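Taken together, the expectations above are consistent with comparePaths behaving like a sign-normalized lexicographic comparison. A rough sketch under that assumption follows; the real comparePaths lives in incremental_cache.dart and may order paths differently in cases these tests do not cover.

int comparePathsSketch(String a, String b) {
  // Clamp String.compareTo to -1, 0, or 1; this reproduces every expectation
  // in ComparePathsTest above.
  int c = a.compareTo(b);
  return c < 0 ? -1 : (c > 0 ? 1 : 0);
}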
+
 /**
  * TODO(scheglov) write more tests for invalidation.
  */
@@ -119,6 +169,63 @@
     expect(bundles, isNotNull);
   }
 
+  void test_getLibraryParts_hasParts() {
+    Source part1Source = addSource('/part1.dart', r'part of test;');
+    Source part2Source = addSource('/part2.dart', r'part of test;');
+    putTestLibrary(r'''
+library test;
+part 'part1.dart';
+part 'part2.dart';
+''');
+    expect(cache.getLibraryParts(testSource),
+        unorderedEquals([part1Source, part2Source]));
+  }
+
+  void test_getLibraryParts_noParts() {
+    putTestLibrary(r'''
+main() {}
+''');
+    expect(cache.getLibraryParts(testSource), isEmpty);
+  }
+
+  void test_getSourceErrorsInLibrary_library() {
+    verifyNoTestUnitErrors = false;
+    putTestLibrary(r'''
+main() {
+  int unusedVar = 42;
+}
+''');
+    List<AnalysisError> computedErrors = context.computeErrors(testSource);
+    cache.putSourceErrorsInLibrary(testSource, testSource, computedErrors);
+    List<AnalysisError> readErrors =
+        cache.getSourceErrorsInLibrary(testSource, testSource);
+    new GatheringErrorListener()
+      ..addAll(readErrors)
+      ..assertErrors(computedErrors);
+  }
+
+  void test_getSourceErrorsInLibrary_part() {
+    verifyNoTestUnitErrors = false;
+    Source partSource = addSource(
+        '/foo.dart',
+        r'''
+main() {
+  int unusedVar = 42;
+}
+''');
+    putTestLibrary(r'''
+library lib;
+part 'foo.dart';
+''');
+    List<AnalysisError> computedErrors = context.computeErrors(partSource);
+    cache.putSourceErrorsInLibrary(testSource, partSource, computedErrors);
+    List<AnalysisError> readErrors =
+        cache.getSourceErrorsInLibrary(testSource, partSource);
+    new GatheringErrorListener()
+      ..addAll(readErrors)
+      ..assertErrors(computedErrors);
+  }
+
   void test_getSourceKind_library() {
     putTestLibrary(r'''
 main() {}
@@ -176,6 +283,9 @@
   final Map<String, List<int>> map = <String, List<int>>{};
 
   @override
+  void compact() {}
+
+  @override
   List<int> get(String key) {
     return map[key];
   }
diff --git a/pkg/analyzer/test/src/summary/index_unit_test.dart b/pkg/analyzer/test/src/summary/index_unit_test.dart
index 9a269b0..1bf2cf5 100644
--- a/pkg/analyzer/test/src/summary/index_unit_test.dart
+++ b/pkg/analyzer/test/src/summary/index_unit_test.dart
@@ -699,7 +699,7 @@
   new A(field: 4);
 }
 ''');
-    FieldElement field = findElement('field');
+    FieldElement field = findElement('field', ElementKind.FIELD);
     PropertyAccessorElement getter = field.getter;
     PropertyAccessorElement setter = field.setter;
     // A()
@@ -729,7 +729,7 @@
 ''');
     // aaa
     {
-      FieldElement field = findElement('aaa');
+      FieldElement field = findElement('aaa', ElementKind.FIELD);
       PropertyAccessorElement getter = field.getter;
       PropertyAccessorElement setter = field.setter;
       assertThat(field)..isWrittenAt('aaa, ', true);
@@ -738,7 +738,7 @@
     }
     // bbb
     {
-      FieldElement field = findElement('bbb');
+      FieldElement field = findElement('bbb', ElementKind.FIELD);
       PropertyAccessorElement getter = field.getter;
       PropertyAccessorElement setter = field.setter;
       assertThat(field)..isWrittenAt('bbb) {}', true);
@@ -964,7 +964,7 @@
   A.bar() : field = 5;
 }
 ''');
-    FieldElement element = findElement('field');
+    FieldElement element = findElement('field', ElementKind.FIELD);
     assertThat(element)
       ..isWrittenAt('field})', true)
       ..isWrittenAt('field = 5', true);
diff --git a/pkg/analyzer/test/src/summary/linker_test.dart b/pkg/analyzer/test/src/summary/linker_test.dart
index f64a448..674ce9b 100644
--- a/pkg/analyzer/test/src/summary/linker_test.dart
+++ b/pkg/analyzer/test/src/summary/linker_test.dart
@@ -194,16 +194,32 @@
 var y = x;
 ''');
     LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
-    expect(
-        library
-            .getContainedName('y')
-            .asTypeInferenceNode
-            .variableElement
-            .inferredType
-            .toString(),
+    expect(_getVariable(library.getContainedName('y')).inferredType.toString(),
         '() → dynamic');
   }
 
+  void test_inferredType_closure_fromBundle_identifierSequence() {
+    var bundle = createPackageBundle(
+        '''
+class C {
+  static final x = (D d) => d.e;
+}
+class D {
+  E e;
+}
+class E {}
+''',
+        path: '/a.dart');
+    addBundle(bundle);
+    createLinker('''
+import 'a.dart';
+var y = C.x;
+''');
+    LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
+    expect(_getVariable(library.getContainedName('y')).inferredType.toString(),
+        '(D) → E');
+  }
+
   void test_inferredType_instanceField_dynamic() {
     createLinker('''
 var x;
@@ -277,12 +293,10 @@
 }
 ''');
     expect(
-        linker
-            .getLibrary(linkerInputs.testDartUri)
-            .getContainedName('C')
-            .getContainedName('y')
-            .asTypeInferenceNode
-            .variableElement
+        _getVariable(linker
+                .getLibrary(linkerInputs.testDartUri)
+                .getContainedName('C')
+                .getContainedName('y'))
             .inferredType
             .toString(),
         'dynamic');
@@ -294,11 +308,9 @@
 var y = x;
 ''');
     expect(
-        linker
-            .getLibrary(linkerInputs.testDartUri)
-            .getContainedName('y')
-            .asTypeInferenceNode
-            .variableElement
+        _getVariable(linker
+                .getLibrary(linkerInputs.testDartUri)
+                .getContainedName('y'))
             .inferredType
             .toString(),
         'dynamic');
@@ -317,13 +329,7 @@
 var z = y; // Inferred type: dynamic
 ''');
     LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
-    expect(
-        library
-            .getContainedName('z')
-            .asTypeInferenceNode
-            .variableElement
-            .inferredType
-            .toString(),
+    expect(_getVariable(library.getContainedName('z')).inferredType.toString(),
         'dynamic');
   }
 
@@ -341,13 +347,7 @@
 var x = new C().f; // Inferred type: int
 ''');
     LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
-    expect(
-        library
-            .getContainedName('x')
-            .asTypeInferenceNode
-            .variableElement
-            .inferredType
-            .toString(),
+    expect(_getVariable(library.getContainedName('x')).inferredType.toString(),
         'int');
   }
 
@@ -388,13 +388,7 @@
 var x = new C().f(0); // Inferred type: int
 ''');
     LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
-    expect(
-        library
-            .getContainedName('x')
-            .asTypeInferenceNode
-            .variableElement
-            .inferredType
-            .toString(),
+    expect(_getVariable(library.getContainedName('x')).inferredType.toString(),
         'int');
   }
 
@@ -442,13 +436,7 @@
 var x = new C().f(); // Inferred type: int
 ''');
     LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
-    expect(
-        library
-            .getContainedName('x')
-            .asTypeInferenceNode
-            .variableElement
-            .inferredType
-            .toString(),
+    expect(_getVariable(library.getContainedName('x')).inferredType.toString(),
         'int');
   }
 
@@ -483,11 +471,9 @@
     addBundle(bundle);
     createLinker('import "a.dart"; var x = C.f;', path: '/b.dart');
     expect(
-        linker
-            .getLibrary(linkerInputs.testDartUri)
-            .getContainedName('x')
-            .asTypeInferenceNode
-            .variableElement
+        _getVariable(linker
+                .getLibrary(linkerInputs.testDartUri)
+                .getContainedName('x'))
             .inferredType
             .toString(),
         'int');
@@ -498,11 +484,9 @@
     addBundle(bundle);
     createLinker('import "a.dart"; var b = a;', path: '/b.dart');
     expect(
-        linker
-            .getLibrary(linkerInputs.testDartUri)
-            .getContainedName('b')
-            .asTypeInferenceNode
-            .variableElement
+        _getVariable(linker
+                .getLibrary(linkerInputs.testDartUri)
+                .getContainedName('b'))
             .inferredType
             .toString(),
         'int');
@@ -674,16 +658,19 @@
 var y = x;
 ''');
     LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
-    expect(
-        library
-            .getContainedName('y')
-            .asTypeInferenceNode
-            .variableElement
-            .inferredType
-            .toString(),
+    expect(_getVariable(library.getContainedName('y')).inferredType.toString(),
         'dynamic');
   }
 
+  @failingTest
+  void test_methodCall_withTypeArguments_topLevelVariable() {
+    // The following code is incorrect, but it shouldn't crash the analyzer.
+    // TODO(paulberry): fix this.
+    createLinker('var f = f/*<int>*/();');
+    LibraryElementForLink library = linker.getLibrary(linkerInputs.testDartUri);
+    library.libraryCycleForLink.ensureLinked();
+  }
+
   void test_multiplyInheritedExecutable_differentSignatures() {
     createLinker('''
 class B {
@@ -823,4 +810,8 @@
     PropertyAccessorElementForLink_Variable v = library.getContainedName('v');
     expect(v.variable.initializer, isNotNull);
   }
+
+  VariableElementForLink _getVariable(ReferenceableElementForLink element) {
+    return (element as PropertyAccessorElementForLink_Variable).variable;
+  }
 }
diff --git a/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart b/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart
index 164d8a7..1adaae0 100644
--- a/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart
+++ b/pkg/analyzer/test/src/summary/resynthesize_ast_test.dart
@@ -47,9 +47,6 @@
   bool get mayCheckTypesOfLocals => false;
 
   @override
-  bool get skipBrokenAstInference => true;
-
-  @override
   void addFile(String content, {String name: '/main.dart'}) {
     addLibrarySource(name, content);
   }
@@ -154,6 +151,12 @@
 
   @override
   @failingTest
+  void test_circularReference_viaClosures_initializerTypes() {
+    super.test_circularReference_viaClosures_initializerTypes();
+  }
+
+  @override
+  @failingTest
   void test_genericMethods_inferJSBuiltin() {
     super.test_genericMethods_inferJSBuiltin();
   }
diff --git a/pkg/analyzer/test/src/summary/resynthesize_test.dart b/pkg/analyzer/test/src/summary/resynthesize_test.dart
index 100dbcb..881ad8f 100644
--- a/pkg/analyzer/test/src/summary/resynthesize_test.dart
+++ b/pkg/analyzer/test/src/summary/resynthesize_test.dart
@@ -51,16 +51,6 @@
 
   bool get checkPropagatedTypes => true;
 
-  /**
-   * Derived classes can override this getter to return `true` in order to
-   * cause certain checks to be skipped if they are known to fail with
-   * AST-based type inference.
-   *
-   * TODO(paulberry): remove this flag once AST-based type inference is fully
-   * working.
-   */
-  bool get skipBrokenAstInference => false;
-
   void addLibrary(String uri) {
     otherLibrarySources.add(context.sourceFactory.forUri(uri));
   }
@@ -211,10 +201,7 @@
     }
   }
 
-  void compareClassElements(
-      ClassElement resynthesized, ClassElement original, String desc) {
-    ClassElementImpl r = ClassElementImpl.getImpl(resynthesized);
-    ClassElementImpl o = ClassElementImpl.getImpl(original);
+  void compareClassElements(ClassElement r, ClassElement o, String desc) {
     compareElements(r, o, desc);
     expect(r.fields.length, o.fields.length, reason: '$desc fields.length');
     for (int i = 0; i < r.fields.length; i++) {
@@ -222,16 +209,18 @@
       compareFieldElements(r.fields[i], o.fields[i], '$desc.field $name');
     }
     compareTypes(r.supertype, o.supertype, '$desc supertype');
-    expect(r.interfaces.length, o.interfaces.length);
+    expect(r.interfaces.length, o.interfaces.length,
+        reason: '$desc interfaces.length');
     for (int i = 0; i < r.interfaces.length; i++) {
       compareTypes(r.interfaces[i], o.interfaces[i],
           '$desc interface ${o.interfaces[i].name}');
     }
-    expect(r.mixins.length, o.mixins.length);
+    expect(r.mixins.length, o.mixins.length, reason: '$desc mixins.length');
     for (int i = 0; i < r.mixins.length; i++) {
       compareTypes(r.mixins[i], o.mixins[i], '$desc mixin ${o.mixins[i].name}');
     }
-    expect(r.typeParameters.length, o.typeParameters.length);
+    expect(r.typeParameters.length, o.typeParameters.length,
+        reason: '$desc typeParameters.length');
     for (int i = 0; i < r.typeParameters.length; i++) {
       compareTypeParameterElements(r.typeParameters[i], o.typeParameters[i],
           '$desc type parameter ${o.typeParameters[i].name}');
@@ -242,18 +231,23 @@
       compareConstructorElements(r.constructors[i], o.constructors[i],
           '$desc constructor ${o.constructors[i].name}');
     }
-    expect(r.accessors.length, o.accessors.length);
+    expect(r.accessors.length, o.accessors.length,
+        reason: '$desc accessors.length');
+    List<PropertyAccessorElement> rAccessors = _getSortedPropertyAccessors(r);
+    List<PropertyAccessorElement> oAccessors = _getSortedPropertyAccessors(o);
     for (int i = 0; i < r.accessors.length; i++) {
-      comparePropertyAccessorElements(r.accessors[i], o.accessors[i],
-          '$desc accessor ${o.accessors[i].name}');
+      comparePropertyAccessorElements(
+          rAccessors[i], oAccessors[i], '$desc accessor ${oAccessors[i].name}');
     }
-    expect(r.methods.length, o.methods.length);
+    expect(r.methods.length, o.methods.length, reason: '$desc methods.length');
     for (int i = 0; i < r.methods.length; i++) {
       compareMethodElements(
           r.methods[i], o.methods[i], '$desc.${o.methods[i].name}');
     }
     compareTypes(r.type, o.type, desc);
-    expect(r.hasBeenInferred, o.hasBeenInferred, reason: desc);
+    if (r is ClassElementImpl && o is ClassElementImpl) {
+      expect(r.hasBeenInferred, o.hasBeenInferred, reason: desc);
+    }
   }
 
   void compareCompilationUnitElements(CompilationUnitElementImpl resynthesized,
@@ -677,7 +671,9 @@
       expect(rImpl.evaluationResult, isNull);
     } else {
       Type rRuntimeType;
-      if (rImpl is FunctionElementImpl) {
+      if (rImpl is ConstFieldElementImpl) {
+        rRuntimeType = ConstFieldElementImpl;
+      } else if (rImpl is FunctionElementImpl) {
         rRuntimeType = FunctionElementImpl;
       } else {
         rRuntimeType = rImpl.runtimeType;
@@ -1040,7 +1036,7 @@
         expect(resynthesized.element.type, same(resynthesized));
       }
       expect(resynthesized.typeArguments.length, original.typeArguments.length,
-          reason: desc);
+          reason: '$desc typeArguments.length');
       for (int i = 0; i < resynthesized.typeArguments.length; i++) {
         if (resynthesized.typeArguments[i].isDynamic &&
             original.typeArguments[i] is TypeParameterType) {
@@ -1108,10 +1104,8 @@
     VariableElementImpl resynthesizedActual =
         getActualElement(resynthesized, desc);
     VariableElementImpl originalActual = getActualElement(original, desc);
-    if (!skipBrokenAstInference) {
-      compareFunctionElements(resynthesizedActual.initializer,
-          originalActual.initializer, '$desc initializer');
-    }
+    compareFunctionElements(resynthesizedActual.initializer,
+        originalActual.initializer, '$desc initializer');
     if (originalActual is ConstVariableElement) {
       Element oEnclosing = original.enclosingElement;
       if (oEnclosing is ClassElement && oEnclosing.isEnum) {
@@ -1247,6 +1241,13 @@
     expect(identifier.staticElement, isNull, reason: desc);
   }
 
+  List<PropertyAccessorElement> _getSortedPropertyAccessors(
+      ClassElement classElement) {
+    List<PropertyAccessorElement> accessors = classElement.accessors.toList();
+    accessors.sort((a, b) => a.displayName.compareTo(b.displayName));
+    return accessors;
+  }
+
   bool _hasModifier(Element element, Modifier modifier) {
     if (modifier == Modifier.ABSTRACT) {
       if (element is ClassElement) {
@@ -3662,7 +3663,8 @@
   }
 
   test_localLabels_inConstructor() {
-    checkLibrary(r'''
+    checkLibrary(
+        r'''
 class C {
   C() {
     aaa: while (true) {}
@@ -3672,11 +3674,13 @@
     }
   }
 }
-''');
+''',
+        allowErrors: true);
   }
 
   test_localLabels_inMethod() {
-    checkLibrary(r'''
+    checkLibrary(
+        r'''
 class C {
   m() {
     aaa: while (true) {}
@@ -3686,11 +3690,13 @@
     }
   }
 }
-''');
+''',
+        allowErrors: true);
   }
 
   test_localLabels_inTopLevelFunction() {
-    checkLibrary(r'''
+    checkLibrary(
+        r'''
 main() {
   aaa: while (true) {}
   bbb: switch (42) {
@@ -3698,7 +3704,8 @@
       break;
   }
 }
-''');
+''',
+        allowErrors: true);
   }
 
   test_localVariables_inConstructor() {
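
compareClassElements now sorts both accessor lists by display name before
comparing them index by index, so the check no longer depends on the order
in which the two element models happen to enumerate accessors. A standalone
sketch of that sorted pairwise comparison, using plain strings in place of
PropertyAccessorElement:

    List<String> sortedCopy(List<String> names) {
      var copy = names.toList();
      copy.sort((a, b) => a.compareTo(b));
      return copy;
    }

    void main() {
      var resynthesized = ['b', 'a', 'c='];
      var original = ['a', 'c=', 'b'];
      var r = sortedCopy(resynthesized);
      var o = sortedCopy(original);
      for (int i = 0; i < r.length; i++) {
        if (r[i] != o[i]) {
          throw new StateError('accessor $i differs: ${r[i]} vs ${o[i]}');
        }
      }
      print('accessor lists match regardless of declaration order');
    }
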
diff --git a/pkg/analyzer/test/src/summary/summary_common.dart b/pkg/analyzer/test/src/summary/summary_common.dart
index d50f60d..a3e2695 100644
--- a/pkg/analyzer/test/src/summary/summary_common.dart
+++ b/pkg/analyzer/test/src/summary/summary_common.dart
@@ -1790,7 +1790,8 @@
           0,
           0,
           0,
-          2
+          2,
+          0
         ],
         referenceValidators: [
           (EntityRef r) => checkTypeRef(r, null, null, 'foo',
@@ -1821,7 +1822,8 @@
           0,
           1,
           0,
-          3
+          3,
+          0
         ],
         referenceValidators: [
           (EntityRef r) => checkTypeRef(r, null, null, 'foo',
@@ -2322,7 +2324,8 @@
     ], ints: [
       42,
       0,
-      2
+      2,
+      0
     ], referenceValidators: [
       (EntityRef r) {
         checkTypeRef(r, 'dart:core', 'dart:core', 'identical',
@@ -6881,9 +6884,9 @@
           UnlinkedConstOperation.cascadeSectionEnd,
         ],
         ints: [
-          5, 0, 1, // m(5)
-          0, 0, // abs()
-          6, 0, 1, // m(5)
+          5, 0, 1, 0, // m(5)
+          0, 0, 0, // abs()
+          6, 0, 1, 0, // m(6)
         ],
         strings: [
           'm',
@@ -6977,7 +6980,8 @@
           0,
           0,
           0,
-          2
+          2,
+          0
         ],
         referenceValidators: [
           (EntityRef r) => checkTypeRef(r, null, null, 'foo',
@@ -7008,7 +7012,8 @@
           0,
           1,
           0,
-          3
+          3,
+          0
         ],
         referenceValidators: [
           (EntityRef r) => checkTypeRef(r, null, null, 'foo',
@@ -7101,6 +7106,53 @@
         operators: [UnlinkedConstOperation.pushParameter], strings: ['x']);
   }
 
+  test_expr_inClosure_refersToParam_methodCall() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable = serializeVariableText('var v = (x) => x.f();');
+    _assertUnlinkedConst(variable.initializer.localFunctions[0].bodyExpr,
+        isValidConst: false,
+        operators: [
+          UnlinkedConstOperation.pushParameter,
+          UnlinkedConstOperation.invokeMethod
+        ],
+        strings: [
+          'x',
+          'f'
+        ],
+        ints: [
+          0,
+          0,
+          0
+        ]);
+  }
+
+  test_expr_inClosure_refersToParam_methodCall_prefixed() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable =
+        serializeVariableText('var v = (x) => x.y.f();');
+    _assertUnlinkedConst(variable.initializer.localFunctions[0].bodyExpr,
+        isValidConst: false,
+        operators: [
+          UnlinkedConstOperation.pushParameter,
+          UnlinkedConstOperation.extractProperty,
+          UnlinkedConstOperation.invokeMethod
+        ],
+        strings: [
+          'x',
+          'y',
+          'f'
+        ],
+        ints: [
+          0,
+          0,
+          0
+        ]);
+  }
+
   test_expr_inClosure_refersToParam_outOfScope() {
     if (skipNonConstInitializers) {
       return;
@@ -7128,6 +7180,86 @@
         ]);
   }
 
+  test_expr_inClosure_refersToParam_prefixedIdentifier() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable = serializeVariableText('var v = (x) => x.y;');
+    _assertUnlinkedConst(variable.initializer.localFunctions[0].bodyExpr,
+        operators: [
+          UnlinkedConstOperation.pushParameter,
+          UnlinkedConstOperation.extractProperty
+        ],
+        strings: [
+          'x',
+          'y'
+        ]);
+  }
+
+  test_expr_inClosure_refersToParam_prefixedIdentifier_assign() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable =
+        serializeVariableText('var v = (x) => x.y = null;');
+    _assertUnlinkedConst(variable.initializer.localFunctions[0].bodyExpr,
+        isValidConst: false,
+        operators: [
+          UnlinkedConstOperation.pushNull,
+          UnlinkedConstOperation.pushParameter,
+          UnlinkedConstOperation.assignToProperty
+        ],
+        strings: [
+          'x',
+          'y'
+        ],
+        assignmentOperators: [
+          UnlinkedExprAssignOperator.assign
+        ]);
+  }
+
+  test_expr_inClosure_refersToParam_prefixedPrefixedIdentifier() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable = serializeVariableText('var v = (x) => x.y.z;');
+    _assertUnlinkedConst(variable.initializer.localFunctions[0].bodyExpr,
+        operators: [
+          UnlinkedConstOperation.pushParameter,
+          UnlinkedConstOperation.extractProperty,
+          UnlinkedConstOperation.extractProperty
+        ],
+        strings: [
+          'x',
+          'y',
+          'z'
+        ]);
+  }
+
+  test_expr_inClosure_refersToParam_prefixedPrefixedIdentifier_assign() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable =
+        serializeVariableText('var v = (x) => x.y.z = null;');
+    _assertUnlinkedConst(variable.initializer.localFunctions[0].bodyExpr,
+        isValidConst: false,
+        operators: [
+          UnlinkedConstOperation.pushNull,
+          UnlinkedConstOperation.pushParameter,
+          UnlinkedConstOperation.extractProperty,
+          UnlinkedConstOperation.assignToProperty
+        ],
+        strings: [
+          'x',
+          'y',
+          'z'
+        ],
+        assignmentOperators: [
+          UnlinkedExprAssignOperator.assign
+        ]);
+  }
+
   test_expr_invokeMethod_instance() {
     if (skipNonConstInitializers) {
       return;
@@ -7154,7 +7286,8 @@
           2,
           3,
           2,
-          1
+          1,
+          0
         ],
         strings: [
           'b',
@@ -7167,6 +7300,39 @@
         ]);
   }
 
+  test_expr_invokeMethod_withTypeParameters() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable = serializeVariableText('''
+class C {
+  f<T, U>() => null;
+}
+final v = new C().f<int, String>();
+''');
+    _assertUnlinkedConst(variable.initializer.bodyExpr,
+        isValidConst: false,
+        operators: [
+          UnlinkedConstOperation.invokeConstructor,
+          UnlinkedConstOperation.invokeMethod
+        ],
+        ints: [
+          0,
+          0,
+          0,
+          0,
+          2
+        ],
+        strings: [
+          'f'
+        ],
+        referenceValidators: [
+          (EntityRef r) => checkTypeRef(r, null, null, 'C'),
+          (EntityRef r) => checkTypeRef(r, 'dart:core', 'dart:core', 'int'),
+          (EntityRef r) => checkTypeRef(r, 'dart:core', 'dart:core', 'String')
+        ]);
+  }
+
   test_expr_invokeMethodRef_instance() {
     if (skipNonConstInitializers) {
       return;
@@ -7195,7 +7361,8 @@
           10,
           20,
           0,
-          2
+          2,
+          0
         ],
         strings: [],
         referenceValidators: [
@@ -7232,6 +7399,7 @@
         ],
         ints: [
           0,
+          0,
           0
         ],
         strings: [],
@@ -7263,7 +7431,8 @@
         ],
         ints: [
           0,
-          1
+          1,
+          0
         ],
         referenceValidators: [
           (EntityRef r) => checkTypeRef(r, null, null, 'u',
@@ -7273,6 +7442,33 @@
         ]);
   }
 
+  test_expr_invokeMethodRef_withTypeParameters() {
+    if (skipNonConstInitializers) {
+      return;
+    }
+    UnlinkedVariable variable = serializeVariableText('''
+f<T, U>() => null;
+final v = f<int, String>();
+''');
+    _assertUnlinkedConst(variable.initializer.bodyExpr,
+        isValidConst: false,
+        operators: [
+          UnlinkedConstOperation.invokeMethodRef
+        ],
+        ints: [
+          0,
+          0,
+          2
+        ],
+        referenceValidators: [
+          (EntityRef r) => checkTypeRef(r, null, null, 'f',
+              expectedKind: ReferenceKind.topLevelFunction,
+              numTypeParameters: 2),
+          (EntityRef r) => checkTypeRef(r, 'dart:core', 'dart:core', 'int'),
+          (EntityRef r) => checkTypeRef(r, 'dart:core', 'dart:core', 'String')
+        ]);
+  }
+
   test_expr_throwException() {
     if (skipNonConstInitializers) {
       return;
@@ -7399,6 +7595,7 @@
         ints: [
           1,
           0,
+          0,
           0
         ],
         strings: [],
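
Across the hunks above, each serialized invokeMethod / invokeMethodRef gains
one extra integer (the added trailing 0), and the new withTypeParameters
tests record a 2 in that slot together with two type references, so the
extra value appears to count the explicit type arguments of the invocation.
A standalone sketch of the kind of source those new tests serialize
(ordinary generic-method Dart, not SDK harness code; the bodies here are
illustrative):

    class C {
      U f<T, U>(U value) => value;
    }

    U g<T, U>(U value) => value;

    void main() {
      // Explicit type arguments on an instance method and on a top-level
      // function, analogous to new C().f<int, String>() and f<int, String>()
      // in the tests above.
      print(new C().f<int, String>('via method'));
      print(g<int, String>('via function'));
    }
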
diff --git a/pkg/analyzer/test/src/task/dart_test.dart b/pkg/analyzer/test/src/task/dart_test.dart
index cddc662..25a6bef 100644
--- a/pkg/analyzer/test/src/task/dart_test.dart
+++ b/pkg/analyzer/test/src/task/dart_test.dart
@@ -21,7 +21,7 @@
 import 'package:analyzer/src/services/lint.dart';
 import 'package:analyzer/src/task/dart.dart';
 import 'package:analyzer/src/task/html.dart';
-import 'package:analyzer/src/task/strong/info.dart';
+import 'package:analyzer/src/task/strong/ast_properties.dart' as strong_ast;
 import 'package:analyzer/task/dart.dart';
 import 'package:analyzer/task/general.dart';
 import 'package:analyzer/task/model.dart';
@@ -66,6 +66,7 @@
   runReflectiveTests(PartiallyResolveUnitReferencesTaskTest);
   runReflectiveTests(PropagateVariableTypesInUnitTaskTest);
   runReflectiveTests(PropagateVariableTypeTaskTest);
+  runReflectiveTests(ReferencedNamesBuilderTest);
   runReflectiveTests(ResolveDirectiveElementsTaskTest);
   runReflectiveTests(ResolveInstanceFieldsInUnitTaskTest);
   runReflectiveTests(ResolveLibraryTaskTest);
@@ -134,6 +135,8 @@
     new isInstanceOf<PropagateVariableTypeTask>();
 isInstanceOf isResolveDirectiveElementsTask =
     new isInstanceOf<ResolveDirectiveElementsTask>();
+isInstanceOf isResolveLibraryReferencesTask =
+    new isInstanceOf<ResolveLibraryReferencesTask>();
 isInstanceOf isResolveLibraryTask = new isInstanceOf<ResolveLibraryTask>();
 isInstanceOf isResolveLibraryTypeNamesTask =
     new isInstanceOf<ResolveLibraryTypeNamesTask>();
@@ -769,10 +772,11 @@
 part of lib;
 '''
     });
-    expect(outputs, hasLength(3));
+    expect(outputs, hasLength(4));
     // simple outputs
     expect(outputs[BUILD_LIBRARY_ERRORS], isEmpty);
     expect(outputs[IS_LAUNCHABLE], isFalse);
+    expect(outputs[REFERENCED_NAMES], isNotNull);
     // LibraryElement output
     expect(libraryElement, isNotNull);
     expect(libraryElement.entryPoint, isNull);
@@ -1570,28 +1574,30 @@
 library my_lib3;
 import 'my_lib2.dart';
 ''');
-    AnalysisTarget lib1Target = new LibrarySpecificUnit(lib1Source, lib1Source);
-    AnalysisTarget lib2Target = new LibrarySpecificUnit(lib2Source, lib2Source);
-    AnalysisTarget lib3Target = new LibrarySpecificUnit(lib3Source, lib3Source);
-    computeResult(lib1Target, LIBRARY_CYCLE);
+
+    computeResult(lib1Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
-    computeResult(lib2Target, LIBRARY_CYCLE);
+    computeResult(lib2Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
-    computeResult(lib3Target, LIBRARY_CYCLE);
+    computeResult(lib3Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
 
-    // complete the cycle
+    // create a cycle
     context.setContents(
         lib1Source,
         '''
 library my_lib1;
 import 'my_lib3.dart';
 ''');
-    computeResult(lib1Target, LIBRARY_CYCLE);
+    _expectInvalid(lib1Source);
+    _expectInvalid(lib2Source);
+    _expectInvalid(lib3Source);
+
+    computeResult(lib1Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(3));
-    computeResult(lib2Target, LIBRARY_CYCLE);
+    computeResult(lib2Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(3));
-    computeResult(lib3Target, LIBRARY_CYCLE);
+    computeResult(lib3Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(3));
 
     // break the cycle again
@@ -1600,11 +1606,15 @@
         '''
 library my_lib1;
 ''');
-    computeResult(lib1Target, LIBRARY_CYCLE);
+    _expectInvalid(lib1Source);
+    _expectInvalid(lib2Source);
+    _expectInvalid(lib3Source);
+
+    computeResult(lib1Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
-    computeResult(lib2Target, LIBRARY_CYCLE);
+    computeResult(lib2Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
-    computeResult(lib3Target, LIBRARY_CYCLE);
+    computeResult(lib3Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
   }
 
@@ -1627,12 +1637,10 @@
 library my_lib3;
 import 'my_lib2.dart';
 ''');
-    AnalysisTarget lib1Target = new LibrarySpecificUnit(lib1Source, lib1Source);
-    AnalysisTarget lib2Target = new LibrarySpecificUnit(lib2Source, lib2Source);
-    AnalysisTarget lib3Target = new LibrarySpecificUnit(lib3Source, lib3Source);
-    computeResult(lib1Target, LIBRARY_CYCLE);
+
+    computeResult(lib1Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
-    computeResult(lib2Target, LIBRARY_CYCLE);
+    computeResult(lib2Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(1));
     // lib3 is not reachable, so we have not yet computed its library
     // cycles
@@ -1644,13 +1652,17 @@
 library my_lib1;
 import 'my_lib3.dart';
 ''');
+    _expectInvalid(lib1Source);
+    _expectInvalid(lib2Source);
+    _expectInvalid(lib3Source);
+
     // Ensure that invalidation correctly invalidated everything reachable
     // through lib3
-    computeResult(lib1Target, LIBRARY_CYCLE);
+    computeResult(lib1Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(3));
-    computeResult(lib2Target, LIBRARY_CYCLE);
+    computeResult(lib2Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(3));
-    computeResult(lib3Target, LIBRARY_CYCLE);
+    computeResult(lib3Source, LIBRARY_CYCLE);
     expect(outputs[LIBRARY_CYCLE], hasLength(3));
   }
 
@@ -1763,6 +1775,9 @@
 import 'my_lib3.dart';
 var foo = 123;
 ''');
+    _expectInvalid(lib1Source);
+    _expectInvalid(lib2Source);
+    _expectInvalid(lib3Source);
 
     computeResult(lib1Target, RESOLVED_UNIT);
     computeResult(lib2Target, RESOLVED_UNIT);
@@ -1797,7 +1812,7 @@
         '''
 import 'dart:core';
 ''');
-    computeResult(new LibrarySpecificUnit(source, source), LIBRARY_CYCLE);
+    computeResult(source, LIBRARY_CYCLE);
     List<LibraryElement> component = getLibraryCycle(outputs);
     List<CompilationUnitElement> units = getLibraryCycleUnits(outputs);
     List<CompilationUnitElement> deps = getLibraryCycleDependencies(outputs);
@@ -1922,82 +1937,47 @@
       '/db.dart': '''
 '''
     });
-    computeResult(
-        new LibrarySpecificUnit(sources[0], sources[0]), LIBRARY_CYCLE);
+    computeResult(sources[0], LIBRARY_CYCLE);
     Map<ResultDescriptor, dynamic> results0 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[1], sources[1]), LIBRARY_CYCLE);
+    computeResult(sources[1], LIBRARY_CYCLE);
     Map<ResultDescriptor, dynamic> results1 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[0], sources[2]), LIBRARY_CYCLE);
-    Map<ResultDescriptor, dynamic> results2 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[0], sources[3]), LIBRARY_CYCLE);
-    Map<ResultDescriptor, dynamic> results3 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[4], sources[4]), LIBRARY_CYCLE);
+    computeResult(sources[4], LIBRARY_CYCLE);
     Map<ResultDescriptor, dynamic> results4 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[5], sources[5]), LIBRARY_CYCLE);
+    computeResult(sources[5], LIBRARY_CYCLE);
     Map<ResultDescriptor, dynamic> results5 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[5], sources[6]), LIBRARY_CYCLE);
-    Map<ResultDescriptor, dynamic> results6 = outputs;
-    computeResult(
-        new LibrarySpecificUnit(sources[5], sources[7]), LIBRARY_CYCLE);
-    Map<ResultDescriptor, dynamic> results7 = outputs;
 
     List<LibraryElement> component0 = getLibraryCycle(results0);
     List<LibraryElement> component1 = getLibraryCycle(results1);
-    List<LibraryElement> component2 = getLibraryCycle(results2);
-    List<LibraryElement> component3 = getLibraryCycle(results3);
     List<LibraryElement> component4 = getLibraryCycle(results4);
     List<LibraryElement> component5 = getLibraryCycle(results5);
-    List<LibraryElement> component6 = getLibraryCycle(results6);
-    List<LibraryElement> component7 = getLibraryCycle(results7);
 
     expect(component0, hasLength(2));
     expect(component1, hasLength(2));
-    expect(component2, hasLength(2));
-    expect(component3, hasLength(2));
     expect(component4, hasLength(2));
     expect(component5, hasLength(2));
-    expect(component6, hasLength(2));
-    expect(component7, hasLength(2));
 
     List<CompilationUnitElement> units0 = getLibraryCycleUnits(results0);
     List<CompilationUnitElement> units1 = getLibraryCycleUnits(results1);
-    List<CompilationUnitElement> units2 = getLibraryCycleUnits(results2);
-    List<CompilationUnitElement> units3 = getLibraryCycleUnits(results3);
     List<CompilationUnitElement> units4 = getLibraryCycleUnits(results4);
     List<CompilationUnitElement> units5 = getLibraryCycleUnits(results5);
-    List<CompilationUnitElement> units6 = getLibraryCycleUnits(results6);
-    List<CompilationUnitElement> units7 = getLibraryCycleUnits(results7);
     expect(units0, hasLength(4));
     expect(units1, hasLength(4));
-    expect(units2, hasLength(4));
-    expect(units3, hasLength(4));
     expect(units4, hasLength(4));
     expect(units5, hasLength(4));
-    expect(units6, hasLength(4));
-    expect(units7, hasLength(4));
 
     List<CompilationUnitElement> dep0 = getLibraryCycleDependencies(results0);
     List<CompilationUnitElement> dep1 = getLibraryCycleDependencies(results1);
-    List<CompilationUnitElement> dep2 = getLibraryCycleDependencies(results2);
-    List<CompilationUnitElement> dep3 = getLibraryCycleDependencies(results3);
     List<CompilationUnitElement> dep4 = getLibraryCycleDependencies(results4);
     List<CompilationUnitElement> dep5 = getLibraryCycleDependencies(results5);
-    List<CompilationUnitElement> dep6 = getLibraryCycleDependencies(results6);
-    List<CompilationUnitElement> dep7 = getLibraryCycleDependencies(results7);
     expect(dep0, hasLength(1)); // dart:core
     expect(dep1, hasLength(1)); // dart:core
-    expect(dep2, hasLength(1)); // dart:core
-    expect(dep3, hasLength(1)); // dart:core
     expect(dep4, hasLength(5)); // dart:core, a.dart, aa.dart, ab.dart, b.dart
     expect(dep5, hasLength(5)); // dart:core, a.dart, aa.dart, ab.dart, b.dart
-    expect(dep6, hasLength(5)); // dart:core, a.dart, aa.dart, ab.dart, b.dart
-    expect(dep7, hasLength(5)); // dart:core, a.dart, aa.dart, ab.dart, b.dart
+  }
+
+  void _expectInvalid(Source librarySource) {
+    CacheEntry entry = context.getCacheEntry(librarySource);
+    expect(entry.getState(LIBRARY_CYCLE), CacheState.INVALID);
   }
 }
 
@@ -3686,6 +3666,452 @@
 }
 
 @reflectiveTest
+class ReferencedNamesBuilderTest extends _AbstractDartTaskTest {
+  void setUp() {
+    super.setUp();
+    context.analysisOptions = new AnalysisOptionsImpl()
+      ..enableGenericMethods = true
+      ..strongMode = true;
+  }
+
+  test_class_constructor() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  U.named(A a, B b) {
+    C c = null;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B']));
+  }
+
+  test_class_field() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  A f = new B();
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['B']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B']));
+  }
+
+  test_class_getter() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  A get a => new B();
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['B']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A']));
+  }
+
+  test_class_members() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  int a;
+  int get b;
+  set c(_) {}
+  m(D d) {
+    a;
+    b;
+    c = 1;
+    m();
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['int', 'D']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['int', 'D']));
+  }
+
+  test_class_members_dontHideQualified() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  int a;
+  int get b;
+  set c(_) {}
+  m(D d) {
+    d.a;
+    d.b;
+    d.c;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['int', 'D', 'a', 'b', 'c']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['int', 'D']));
+  }
+
+  test_class_method() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  A m(B p) {
+    C v = 0;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B']));
+  }
+
+  test_class_method_localVariables() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  A m() {
+    B b = null;
+    b;
+    {
+      C c = null;
+      b;
+      c;
+    }
+    d;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C', 'd']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A']));
+  }
+
+  test_class_method_parameters() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  m(A a) {
+    a;
+    b;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'b']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A']));
+  }
+
+  test_class_method_typeParameters() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  A m<T>(B b, T t) {
+    C c = 0;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B']));
+  }
+
+  test_class_setter() {
+    ReferencedNames info = _computeReferencedNames('''
+class U {
+  set a(A a) {
+    B b = null;
+  }
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A']));
+  }
+
+  test_class_typeParameters() {
+    ReferencedNames info = _computeReferencedNames('''
+class U<T> {
+  T f = new A<T>();
+}
+''');
+    expect(info.names, unorderedEquals(['A']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['A']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A']));
+  }
+
+  test_instantiatedNames_importPrefix() {
+    ReferencedNames info = _computeReferencedNames('''
+import 'a.dart' as p1;
+import 'b.dart' as p2;
+main() {
+  new p1.A();
+  new p1.A.c1();
+  new p1.B();
+  new p2.C();
+  new D();
+  new D.c2();
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C', 'D', 'c1', 'c2']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['A', 'B', 'C', 'D']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['main']));
+    expect(info.userToDependsOn['main'], isEmpty);
+  }
+
+  test_localFunction() {
+    ReferencedNames info = _computeReferencedNames('''
+f(A a) {
+  g(B b) {}
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A']));
+  }
+
+  test_superToSubs_importPrefix() {
+    ReferencedNames info = _computeReferencedNames('''
+import 'a.dart' as p1;
+import 'b.dart' as p2;
+class U extends p1.A with p2.B implements p2.C {}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs['A'], unorderedEquals(['U']));
+    expect(info.superToSubs['B'], unorderedEquals(['U']));
+    expect(info.superToSubs['C'], unorderedEquals(['U']));
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B', 'C']));
+  }
+
+  test_topLevelVariable() {
+    ReferencedNames info = _computeReferencedNames('''
+A v = new B(c);
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'c']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['B']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['v']));
+    expect(info.userToDependsOn['v'], unorderedEquals(['A', 'B', 'c']));
+  }
+
+  test_topLevelVariable_multiple() {
+    ReferencedNames info = _computeReferencedNames('''
+A v1 = new B(c), v2 = new D<E>(f);
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'c', 'D', 'E', 'f']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['B', 'D']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['v1', 'v2']));
+    expect(info.userToDependsOn['v1'], unorderedEquals(['A', 'B', 'c']));
+    expect(info.userToDependsOn['v2'], unorderedEquals(['A', 'D', 'E', 'f']));
+  }
+
+  test_unit_classTypeAlias() {
+    ReferencedNames info = _computeReferencedNames('''
+class U = A with B implements C;
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs['A'], unorderedEquals(['U']));
+    expect(info.superToSubs['B'], unorderedEquals(['U']));
+    expect(info.superToSubs['C'], unorderedEquals(['U']));
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B', 'C']));
+  }
+
+  test_unit_classTypeAlias_typeParameters() {
+    ReferencedNames info = _computeReferencedNames('''
+class U<T1, T2 extends D> = A<T1> with B<T2> implements C<T1, T2>;
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C', 'D']));
+    expect(info.superToSubs.keys, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs['A'], unorderedEquals(['U']));
+    expect(info.superToSubs['B'], unorderedEquals(['U']));
+    expect(info.superToSubs['C'], unorderedEquals(['U']));
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['U']));
+    expect(info.userToDependsOn['U'], unorderedEquals(['A', 'B', 'C', 'D']));
+  }
+
+  test_unit_function() {
+    ReferencedNames info = _computeReferencedNames('''
+A f(B b) {
+  C c = 0;
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A', 'B']));
+  }
+
+  test_unit_function_doc() {
+    ReferencedNames info = _computeReferencedNames('''
+/**
+ * Documentation [C.d] reference.
+ */
+A f(B b) {}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C', 'd']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A', 'B']));
+  }
+
+  test_unit_function_localFunctions() {
+    ReferencedNames info = _computeReferencedNames('''
+A f() {
+  B b = null;
+  C g() {}
+  g();
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A']));
+  }
+
+  test_unit_function_localsDontHideQualified() {
+    ReferencedNames info = _computeReferencedNames('''
+f(A a, B b) {
+  var v = 0;
+  a.v;
+  a.b;
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'v', 'b']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A', 'B']));
+  }
+
+  test_unit_function_localVariables() {
+    ReferencedNames info = _computeReferencedNames('''
+A f() {
+  B b = null;
+  b;
+  {
+    C c = null;
+    b;
+    c;
+  }
+  d;
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C', 'd']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A']));
+  }
+
+  test_unit_function_parameters() {
+    ReferencedNames info = _computeReferencedNames('''
+A f(B b) {
+  C c = 0;
+  b;
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A', 'B']));
+  }
+
+  test_unit_function_typeParameters() {
+    ReferencedNames info = _computeReferencedNames('''
+A f<T>(B b, T t) {
+  C c = 0;
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['f']));
+    expect(info.userToDependsOn['f'], unorderedEquals(['A', 'B']));
+  }
+
+  test_unit_functionTypeAlias() {
+    ReferencedNames info = _computeReferencedNames('''
+typedef A F(B B, C c(D d));
+''');
+    expect(info.names, unorderedEquals(['A', 'B', 'C', 'D']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['F']));
+    expect(info.userToDependsOn['F'], unorderedEquals(['A', 'B', 'C', 'D']));
+  }
+
+  test_unit_functionTypeAlias_typeParameters() {
+    ReferencedNames info = _computeReferencedNames('''
+typedef A F<T>(B b, T t);
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['F']));
+    expect(info.userToDependsOn['F'], unorderedEquals(['A', 'B']));
+  }
+
+  test_unit_getter() {
+    ReferencedNames info = _computeReferencedNames('''
+A get aaa {
+  return new B();
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, unorderedEquals(['B']));
+    expect(info.userToDependsOn.keys, unorderedEquals(['aaa']));
+    expect(info.userToDependsOn['aaa'], unorderedEquals(['A']));
+  }
+
+  test_unit_setter() {
+    ReferencedNames info = _computeReferencedNames('''
+set aaa(A a) {
+  B b = null;
+}
+''');
+    expect(info.names, unorderedEquals(['A', 'B']));
+    expect(info.superToSubs.keys, isEmpty);
+    expect(info.instantiatedNames, isEmpty);
+    expect(info.userToDependsOn.keys, unorderedEquals(['aaa']));
+    expect(info.userToDependsOn['aaa'], unorderedEquals(['A']));
+  }
+
+  ReferencedNames _computeReferencedNames(String code) {
+    Source source = newSource('/test.dart', code);
+    computeResult(source, REFERENCED_NAMES, matcher: isBuildLibraryElementTask);
+    return outputs[REFERENCED_NAMES];
+  }
+}
+
+@reflectiveTest
 class ResolveDirectiveElementsTaskTest extends _AbstractDartTaskTest {
   test_perform() {
     List<Source> sources = newSources({
@@ -4464,20 +4890,41 @@
 
 @reflectiveTest
 class ScanDartTaskTest extends _AbstractDartTaskTest {
+  test_ignore_info() {
+    _performScanTask('''
+//ignore: error_code
+var x = '';
+foo(); // ignore:   error_code_2
+bar(); //ignore: error_code, error_code_2
+''');
+
+    IgnoreInfo info = outputs[IGNORE_INFO];
+    expect(info.ignores.keys, hasLength(3));
+    expect(info.ignores[1].first, 'error_code');
+    expect(info.ignores[3].first, 'error_code_2');
+    expect(info.ignores[4], unorderedEquals(['error_code', 'error_code_2']));
+  }
+
   test_perform_errors() {
     _performScanTask('import "');
-    expect(outputs, hasLength(3));
+    expect(outputs, hasLength(4));
     expect(outputs[LINE_INFO], isNotNull);
     expect(outputs[SCAN_ERRORS], hasLength(1));
     expect(outputs[TOKEN_STREAM], isNotNull);
+    IgnoreInfo ignoreInfo = outputs[IGNORE_INFO];
+    expect(ignoreInfo, isNotNull);
+    expect(ignoreInfo.hasIgnores, isFalse);
   }
 
   test_perform_noErrors() {
     _performScanTask('class A {}');
-    expect(outputs, hasLength(3));
+    expect(outputs, hasLength(4));
     expect(outputs[LINE_INFO], isNotNull);
     expect(outputs[SCAN_ERRORS], hasLength(0));
     expect(outputs[TOKEN_STREAM], isNotNull);
+    IgnoreInfo ignoreInfo = outputs[IGNORE_INFO];
+    expect(ignoreInfo, isNotNull);
+    expect(ignoreInfo.hasIgnores, isFalse);
   }
 
   test_perform_script() {
@@ -5047,9 +5494,7 @@
         AstFinder.getStatementsInTopLevelFunction(unit, "main");
     ExpressionStatement statement = statements[1];
     IndexExpression idx = statement.expression;
-    expect(DynamicInvoke.get(idx.target), isNotNull);
-    expect(DynamicInvoke.get(idx.target), isNotNull);
-    expect(DynamicInvoke.get(idx.target), isTrue);
+    expect(strong_ast.isDynamicInvoke(idx.target), isTrue);
   }
 
   void test_perform_verifyError() {
@@ -5289,8 +5734,7 @@
       List<Source> sources, ResultDescriptor result,
       {isInstanceOf matcher: null}) {
     Map<ResultDescriptor, dynamic> compute(Source source) {
-      computeResult(new LibrarySpecificUnit(source, source), result,
-          matcher: matcher);
+      computeResult(source, result, matcher: matcher);
       return outputs;
     }
     return sources.map(compute).toList();
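
The new IGNORE_INFO output exercised by test_ignore_info records ignore
comments per line, including several codes on one line. For reference, this
is how such comments look in ordinary user code; the diagnostic names below
are placeholders rather than real analyzer codes:

    var x = ''; // ignore: some_error_code
    // ignore: first_code, second_code
    var y = 0;

    void main() {
      print('$x $y');
    }
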
diff --git a/pkg/analyzer/test/src/task/driver_test.dart b/pkg/analyzer/test/src/task/driver_test.dart
index efbf9ee..39e513b 100644
--- a/pkg/analyzer/test/src/task/driver_test.dart
+++ b/pkg/analyzer/test/src/task/driver_test.dart
@@ -819,6 +819,9 @@
   AnalysisCache analysisCache;
 
   @override
+  final AnalysisOptionsImpl analysisOptions = new AnalysisOptionsImpl();
+
+  @override
   List<AnalysisTarget> explicitTargets = <AnalysisTarget>[];
 
   @override
diff --git a/pkg/analyzer/test/src/task/incremental_element_builder_test.dart b/pkg/analyzer/test/src/task/incremental_element_builder_test.dart
index 8ef225a..8d495af 100644
--- a/pkg/analyzer/test/src/task/incremental_element_builder_test.dart
+++ b/pkg/analyzer/test/src/task/incremental_element_builder_test.dart
@@ -6,8 +6,13 @@
 
 import 'package:analyzer/dart/ast/ast.dart';
 import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/src/dart/ast/utilities.dart';
+import 'package:analyzer/src/dart/element/element.dart';
+import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/task/dart.dart';
 import 'package:analyzer/src/task/incremental_element_builder.dart';
+import 'package:analyzer/task/dart.dart';
 import 'package:unittest/unittest.dart';
 
 import '../../reflective_tests.dart';
@@ -36,6 +41,879 @@
     return newCode.substring(node.offset, node.end);
   }
 
+  test_classDelta_constructor_0to1() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+}
+''');
+    helper.initOld(oldUnit);
+    ConstructorElement oldConstructorElement =
+        helper.element.unnamedConstructor;
+    _buildNewUnit(r'''
+class A {
+  A.a();
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember newConstructorNode = helper.newMembers[0];
+    // elements
+    ConstructorElement newConstructorElement = newConstructorNode.element;
+    expect(newConstructorElement, isNotNull);
+    expect(newConstructorElement.name, 'a');
+    // classElement.constructors
+    ClassElement classElement = helper.element;
+    expect(classElement.constructors, unorderedEquals([newConstructorElement]));
+    // verify delta
+    expect(helper.delta.addedConstructors,
+        unorderedEquals([newConstructorElement]));
+    expect(helper.delta.removedConstructors,
+        unorderedEquals([oldConstructorElement]));
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_constructor_1to0() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  A.a();
+}
+''');
+    helper.initOld(oldUnit);
+    ConstructorElement oldElementA = helper.element.getNamedConstructor('a');
+    _buildNewUnit(r'''
+class A {
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // classElement.constructors
+    ClassElement classElement = helper.element;
+    {
+      List<ConstructorElement> constructors = classElement.constructors;
+      expect(constructors, hasLength(1));
+      expect(constructors[0].isDefaultConstructor, isTrue);
+      expect(constructors[0].isSynthetic, isTrue);
+    }
+    // verify delta
+    expect(helper.delta.addedConstructors, unorderedEquals([]));
+    expect(helper.delta.removedConstructors, unorderedEquals([oldElementA]));
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_constructor_1to2() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  A.a();
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  A.a();
+  A.b();
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember nodeA = helper.newMembers[0];
+    ClassMember nodeB = helper.newMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    ConstructorElement elementA = nodeA.element;
+    ConstructorElement elementB = nodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementB, isNotNull);
+    expect(elementA.name, 'a');
+    expect(elementB.name, 'b');
+    // classElement.constructors
+    ClassElement classElement = helper.element;
+    expect(classElement.constructors, unorderedEquals([elementA, elementB]));
+    // verify delta
+    expect(helper.delta.addedConstructors, unorderedEquals([elementB]));
+    expect(helper.delta.removedConstructors, unorderedEquals([]));
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_constructor_2to1() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  A.a();
+  A.b();
+}
+''');
+    helper.initOld(oldUnit);
+    ConstructorElement oldElementA = helper.element.getNamedConstructor('a');
+    _buildNewUnit(r'''
+class A {
+  A.b();
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember nodeB = helper.newMembers[0];
+    expect(nodeB, same(helper.oldMembers[1]));
+    // elements
+    ConstructorElement elementB = nodeB.element;
+    expect(elementB, isNotNull);
+    expect(elementB.name, 'b');
+    // classElement.constructors
+    ClassElement classElement = helper.element;
+    expect(classElement.constructors, unorderedEquals([elementB]));
+    // verify delta
+    expect(helper.delta.addedConstructors, unorderedEquals([]));
+    expect(helper.delta.removedConstructors, unorderedEquals([oldElementA]));
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_constructor_2to2_reorder() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  A.a();
+  A.b();
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  A.b();
+  A.a();
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember nodeB = helper.newMembers[0];
+    ClassMember nodeA = helper.newMembers[1];
+    expect(nodeB, same(helper.oldMembers[1]));
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    ConstructorElement elementB = nodeB.element;
+    ConstructorElement elementA = nodeA.element;
+    expect(elementB, isNotNull);
+    expect(elementA, isNotNull);
+    expect(elementB.name, 'b');
+    expect(elementA.name, 'a');
+    // classElement.constructors
+    ClassElement classElement = helper.element;
+    expect(classElement.constructors, unorderedEquals([elementB, elementA]));
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_constructor_fieldReference_initializer() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  final int f;
+  A() : f = 1 {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  final int f;
+  A() : f = 1;
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+  }
+
+  test_classDelta_constructor_fieldReference_parameter() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  final int f;
+  A(this.f) {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  final int f;
+  A(this.f);
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+  }
+
+  test_classDelta_constructor_fieldReference_parameter_default() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  final int f;
+  A([this.f = 1]) {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  final int f;
+  A([this.f = 1]);
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+  }
+
+  test_classDelta_duplicate_constructor() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  A() {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  A() {}
+  A() {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ConstructorDeclaration oldNode = helper.oldMembers[0];
+    ConstructorDeclaration newNode1 = helper.newMembers[0];
+    ConstructorDeclaration newNode2 = helper.newMembers[1];
+    // elements
+    ConstructorElement oldElement = oldNode.element;
+    ConstructorElement newElement1 = newNode1.element;
+    ConstructorElement newElement2 = newNode2.element;
+    expect(newElement1, same(oldElement));
+    expect(newElement2, isNot(same(oldElement)));
+    expect(oldElement.name, '');
+    expect(newElement1.name, '');
+    expect(newElement2.name, '');
+    // verify delta
+    expect(helper.delta.addedConstructors, unorderedEquals([newElement2]));
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_duplicate_method() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  m() {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  m() {}
+  m() {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    MethodDeclaration oldNode = helper.oldMembers[0];
+    MethodDeclaration newNode1 = helper.newMembers[0];
+    MethodDeclaration newNode2 = helper.newMembers[1];
+    // elements
+    MethodElement oldElement = oldNode.element;
+    MethodElement newElement1 = newNode1.element;
+    MethodElement newElement2 = newNode2.element;
+    expect(newElement1, same(oldElement));
+    expect(newElement2, isNot(same(oldElement)));
+    expect(oldElement.name, 'm');
+    expect(newElement1.name, 'm');
+    expect(newElement2.name, 'm');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, unorderedEquals([newElement2]));
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_field_add() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  int aaa;
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  int aaa;
+  int bbb;
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    FieldDeclaration nodeA = helper.newMembers[0];
+    FieldDeclaration newNodeB = helper.newMembers[1];
+    List<VariableDeclaration> newFieldsB = newNodeB.fields.variables;
+    expect(nodeA, same(helper.oldMembers[0]));
+    expect(newFieldsB, hasLength(1));
+    // elements
+    FieldElement newFieldElementB = newFieldsB[0].name.staticElement;
+    expect(newFieldElementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors,
+        unorderedEquals([newFieldElementB.getter, newFieldElementB.setter]));
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_field_remove() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  int aaa;
+  int bbb;
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  int aaa;
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    FieldDeclaration nodeA = helper.newMembers[0];
+    FieldDeclaration oldNodeB = helper.oldMembers[1];
+    List<VariableDeclaration> oldFieldsB = oldNodeB.fields.variables;
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    FieldElement oldFieldElementB = oldFieldsB[0].name.staticElement;
+    expect(oldFieldElementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors,
+        unorderedEquals([oldFieldElementB.getter, oldFieldElementB.setter]));
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_getter_add() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  int get aaa => 1;
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  int get aaa => 1;
+  int get bbb => 2;
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    MethodDeclaration nodeA = helper.oldMembers[0];
+    MethodDeclaration newNodeB = helper.newMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    PropertyAccessorElement elementA = nodeA.element;
+    PropertyAccessorElement newElementB = newNodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementA.name, 'aaa');
+    expect(newElementB, isNotNull);
+    expect(newElementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, unorderedEquals([newElementB]));
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_getter_remove() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  int get aaa => 1;
+  int get bbb => 2;
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  int get aaa => 1;
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    MethodDeclaration nodeA = helper.oldMembers[0];
+    MethodDeclaration oldNodeB = helper.oldMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    PropertyAccessorElement elementA = nodeA.element;
+    PropertyAccessorElement oldElementB = oldNodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementA.name, 'aaa');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, unorderedEquals([oldElementB]));
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_method_add() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  aaa() {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  aaa() {}
+  bbb() {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember nodeA = helper.oldMembers[0];
+    ClassMember newNodeB = helper.newMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    MethodElement elementA = nodeA.element;
+    MethodElement newElementB = newNodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementA.name, 'aaa');
+    expect(newElementB, isNotNull);
+    expect(newElementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, unorderedEquals([newElementB]));
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_method_addParameter() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  aaa() {}
+  bbb() {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  aaa(int p) {}
+  bbb() {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember oldNodeA = helper.oldMembers[0];
+    ClassMember newNodeA = helper.newMembers[0];
+    ClassMember nodeB = helper.newMembers[1];
+    expect(newNodeA, isNot(same(oldNodeA)));
+    expect(nodeB, same(helper.oldMembers[1]));
+    // elements
+    MethodElement oldElementA = oldNodeA.element;
+    MethodElement newElementA = newNodeA.element;
+    MethodElement elementB = nodeB.element;
+    expect(newElementA, isNotNull);
+    expect(newElementA.name, 'aaa');
+    expect(oldElementA.parameters, hasLength(0));
+    expect(newElementA.parameters, hasLength(1));
+    expect(elementB, isNotNull);
+    expect(elementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, unorderedEquals([newElementA]));
+    expect(helper.delta.removedMethods, unorderedEquals([oldElementA]));
+  }
+
+  test_classDelta_method_changeName() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  aaa(int ap) {
+    int av = 1;
+    af(afp) {}
+  }
+  bbb(int bp) {
+    int bv = 1;
+    bf(bfp) {}
+  }
+}
+''');
+    helper.initOld(oldUnit);
+    ConstructorElement oldConstructor = helper.element.unnamedConstructor;
+    _buildNewUnit(r'''
+class A {
+  aaa2(int ap) {
+    int av = 1;
+    af(afp) {}
+  }
+  bbb(int bp) {
+    int bv = 1;
+    bf(bfp) {}
+  }
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    expect(helper.element.unnamedConstructor, same(oldConstructor));
+    // nodes
+    ClassMember oldNodeA = helper.oldMembers[0];
+    ClassMember newNodeA = helper.newMembers[0];
+    ClassMember nodeB = helper.newMembers[1];
+    expect(nodeB, same(helper.oldMembers[1]));
+    // elements
+    MethodElement oldElementA = oldNodeA.element;
+    MethodElement newElementA = newNodeA.element;
+    MethodElement elementB = nodeB.element;
+    expect(newElementA, isNotNull);
+    expect(newElementA.name, 'aaa2');
+    expect(elementB, isNotNull);
+    expect(elementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, unorderedEquals([newElementA]));
+    expect(helper.delta.removedMethods, unorderedEquals([oldElementA]));
+  }
+
+  test_classDelta_method_remove() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  aaa() {}
+  bbb() {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  aaa() {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember nodeA = helper.oldMembers[0];
+    ClassMember oldNodeB = helper.oldMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    MethodElement elementA = nodeA.element;
+    MethodElement oldElementB = oldNodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementA.name, 'aaa');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, unorderedEquals([oldElementB]));
+  }
+
+  test_classDelta_method_removeParameter() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  aaa(int p) {}
+  bbb() {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  aaa() {}
+  bbb() {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    ClassMember oldNodeA = helper.oldMembers[0];
+    ClassMember newNodeA = helper.newMembers[0];
+    ClassMember nodeB = helper.newMembers[1];
+    expect(newNodeA, isNot(same(oldNodeA)));
+    expect(nodeB, same(helper.oldMembers[1]));
+    // elements
+    MethodElement oldElementA = oldNodeA.element;
+    MethodElement newElementA = newNodeA.element;
+    MethodElement elementB = nodeB.element;
+    expect(newElementA, isNotNull);
+    expect(newElementA.name, 'aaa');
+    expect(oldElementA.parameters, hasLength(1));
+    expect(newElementA.parameters, hasLength(0));
+    expect(elementB, isNotNull);
+    expect(elementB.name, 'bbb');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, unorderedEquals([newElementA]));
+    expect(helper.delta.removedMethods, unorderedEquals([oldElementA]));
+  }
+
+  test_classDelta_null_extendsClause_add() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class B {}
+''',
+        r'''
+class A {}
+class B extends A {}
+''');
+  }
+
+  test_classDelta_null_extendsClause_change() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A1 {}
+class A2 {}
+class B extends A1 {}
+''',
+        r'''
+class A1 {}
+class A2 {}
+class B extends A2 {}
+''');
+  }
+
+  test_classDelta_null_extendsClause_remove() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class B extends A {}
+''',
+        r'''
+class A {}
+class B {}
+''');
+  }
+
+  test_classDelta_null_implementsClause_add() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class B {}
+''',
+        r'''
+class A {}
+class B implements A {}
+''');
+  }
+
+  test_classDelta_null_implementsClause_change() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A1 {}
+class A2 {}
+class B implements A1 {}
+''',
+        r'''
+class A1 {}
+class A2 {}
+class B implements A2 {}
+''');
+  }
+
+  test_classDelta_null_implementsClause_remove() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class B implements A {}
+''',
+        r'''
+class A {}
+class B {}
+''');
+  }
+
+  test_classDelta_null_typeParameters_change() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class B<T> {}
+''',
+        r'''
+class A {}
+class B<T extends A> {}
+''');
+  }
+
+  test_classDelta_null_withClause_add() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class M {}
+class B extends A {}
+''',
+        r'''
+class A {}
+class M {}
+class B extends A with M {}
+''');
+  }
+
+  test_classDelta_null_withClause_change1() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class M1 {}
+class M2 {}
+class B extends A with M1 {}
+''',
+        r'''
+class A {}
+class M1 {}
+class M2 {}
+class B extends A with M2 {}
+''');
+  }
+
+  test_classDelta_null_withClause_change2() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class M1 {}
+class M2 {}
+class B extends A with M1, M2 {}
+''',
+        r'''
+class A {}
+class M1 {}
+class M2 {}
+class B extends A with M2, M1 {}
+''');
+  }
+
+  test_classDelta_null_withClause_remove() {
+    _verifyNoClassDeltaForTheLast(
+        r'''
+class A {}
+class M {}
+class B extends A with M {}
+''',
+        r'''
+class A {}
+class M {}
+class B extends A {}
+''');
+  }
+
+  test_classDelta_setter_add() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  void set aaa(int pa) {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  void set aaa(int pa) {}
+  void set bbb(int pb) {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    MethodDeclaration nodeA = helper.oldMembers[0];
+    MethodDeclaration newNodeB = helper.newMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    PropertyAccessorElement elementA = nodeA.element;
+    PropertyAccessorElement newElementB = newNodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementA.name, 'aaa=');
+    expect(newElementB, isNotNull);
+    expect(newElementB.name, 'bbb=');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, unorderedEquals([newElementB]));
+    expect(helper.delta.removedAccessors, isEmpty);
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_setter_remove() {
+    var helper = new _ClassDeltaHelper('A');
+    _buildOldUnit(r'''
+class A {
+  void set aaa(int pa) {}
+  void set bbb(int pb) {}
+}
+''');
+    helper.initOld(oldUnit);
+    _buildNewUnit(r'''
+class A {
+  void set aaa(int pa) {}
+}
+''');
+    helper.initNew(newUnit, unitDelta);
+    // nodes
+    MethodDeclaration nodeA = helper.oldMembers[0];
+    MethodDeclaration oldNodeB = helper.oldMembers[1];
+    expect(nodeA, same(helper.oldMembers[0]));
+    // elements
+    PropertyAccessorElement elementA = nodeA.element;
+    PropertyAccessorElement oldElementB = oldNodeB.element;
+    expect(elementA, isNotNull);
+    expect(elementA.name, 'aaa=');
+    // verify delta
+    expect(helper.delta.addedConstructors, isEmpty);
+    expect(helper.delta.removedConstructors, isEmpty);
+    expect(helper.delta.addedAccessors, isEmpty);
+    expect(helper.delta.removedAccessors, unorderedEquals([oldElementB]));
+    expect(helper.delta.addedMethods, isEmpty);
+    expect(helper.delta.removedMethods, isEmpty);
+  }
+
+  test_classDelta_typeParameter_same() {
+    _buildOldUnit(r'''
+class A<T> {
+  m() {}
+}
+''');
+    _buildNewUnit(r'''
+class A<T> {
+  m2() {}
+}
+''');
+  }
+
   test_directives_add() {
     _buildOldUnit(r'''
 library test;
@@ -670,15 +1548,160 @@
     expect(unitDelta.removedDeclarations, unorderedEquals([]));
   }
 
+  test_update_addIdentifier_beforeConstructorWithComment() {
+    _buildOldUnit(r'''
+class A {
+  /// CCC
+  A();
+}
+''');
+    _buildNewUnit(r'''
+class A {
+  b
+
+  /// CCC
+  A();
+}
+''');
+  }
+
+  test_update_beforeClassWithDelta_nameOffset() {
+    _buildOldUnit(r'''
+class A {}
+
+class B {
+  A a;
+}
+''');
+    _buildNewUnit(r'''
+class A2 {}
+
+class B {
+  A2 a;
+}
+''');
+  }
+
+  test_update_changeDuplicatingOffsetsMapping() {
+    _buildOldUnit(r'''
+class A {
+  m() {
+  }
+}
+
+/// X
+class C {}
+''');
+    _buildNewUnit(r'''
+class A {
+  m2() {
+    b
+  }
+}
+
+/// X
+class C {}
+''');
+  }
+
+  test_update_closuresOfSyntheticInitializer() {
+    _buildOldUnit(r'''
+f1() {
+  print(1);
+}
+f2() {
+  B b = new B((C c) {});
+}
+''');
+    _buildNewUnit(r'''
+f1() {
+  print(12);
+}
+f2() {
+  B b = new B((C c) {});
+}
+''');
+  }
+
+  test_update_commentReference_empty() {
+    _buildOldUnit(r'''
+/// Empty [] reference.
+class A {}
+''');
+    _buildNewUnit(r'''
+/// Empty [] reference.
+class A {}
+''');
+  }
+
+  test_update_commentReference_notClosed() {
+    _buildOldUnit(r'''
+/// [c)
+class A {}
+''');
+    _buildNewUnit(r'''
+int a;
+/// [c)
+class A {}
+''');
+  }
+
+  test_update_rewrittenConstructorName() {
+    _buildOldUnit(r'''
+class A {
+  A();
+  A.named();
+}
+
+foo() {}
+
+main() {
+  new A();
+  new A.named();
+}
+''');
+    _buildNewUnit(r'''
+class A {
+  A();
+  A.named();
+}
+
+bar() {}
+
+main() {
+  new A();
+  new A.named();
+}
+''');
+  }
+
   void _buildNewUnit(String newCode) {
     this.newCode = newCode;
-    context.setContents(source, newCode);
-    newUnit = context.parseCompilationUnit(source);
-    IncrementalCompilationUnitElementBuilder builder =
-        new IncrementalCompilationUnitElementBuilder(oldUnit, newUnit);
-    builder.build();
-    unitDelta = builder.unitDelta;
-    expect(newUnit.element, unitElement);
+    AnalysisOptionsImpl analysisOptions = context.analysisOptions;
+    analysisOptions.finerGrainedInvalidation = false;
+    try {
+      context.setContents(source, newCode);
+      newUnit = context.parseCompilationUnit(source);
+      IncrementalCompilationUnitElementBuilder builder =
+          new IncrementalCompilationUnitElementBuilder(oldUnit, newUnit);
+      builder.build();
+      unitDelta = builder.unitDelta;
+      expect(newUnit.element, unitElement);
+      // Flush all tokens, ASTs and elements.
+      context.analysisCache.flush((target, result) {
+        return result == TOKEN_STREAM ||
+            result == PARSED_UNIT ||
+            RESOLVED_UNIT_RESULTS.contains(result) ||
+            LIBRARY_ELEMENT_RESULTS.contains(result);
+      });
+      // Compute a new AST with built elements.
+      CompilationUnit newUnitFull = context.computeResult(
+          new LibrarySpecificUnit(source, source), RESOLVED_UNIT1);
+      expect(newUnitFull, isNot(same(newUnit)));
+      new _BuiltElementsValidator().isEqualNodes(newUnitFull, newUnit);
+    } finally {
+      analysisOptions.finerGrainedInvalidation = true;
+    }
   }
 
   void _buildOldUnit(String oldCode, [Source libSource]) {
@@ -691,4 +1714,147 @@
     unitElement = oldUnit.element;
     expect(unitElement, isNotNull);
   }
+
+  void _verifyNoClassDeltaForTheLast(String oldCode, String newCode) {
+    _buildOldUnit(oldCode);
+    List<CompilationUnitMember> oldMembers = oldUnit.declarations.toList();
+    Element oldElementLast = oldMembers.last.element;
+    _buildNewUnit(newCode);
+    List<CompilationUnitMember> newMembers = newUnit.declarations;
+    Element newElementLast = newMembers.last.element;
+    expect(newElementLast, isNot(same(oldElementLast)));
+    expect(unitDelta.classDeltas, isEmpty);
+    expect(unitDelta.removedDeclarations, unorderedEquals([oldElementLast]));
+    expect(unitDelta.addedDeclarations, unorderedEquals([newElementLast]));
+  }
+}
+
+/**
+ * Compares tokens and ASTs, and the elements built for declared identifiers.
+ */
+class _BuiltElementsValidator extends AstComparator {
+  @override
+  bool isEqualNodes(AstNode expected, AstNode actual) {
+    // Elements of nodes which are children of ClassDeclaration(s) must be
+    // linked to the corresponding ClassElement(s).
+    if (actual is TypeParameter) {
+      TypeParameterElement element = actual.element;
+      ClassDeclaration classNode = actual.parent.parent;
+      expect(element.enclosingElement, same(classNode.element));
+    } else if (actual is FieldDeclaration) {
+      for (VariableDeclaration field in actual.fields.variables) {
+        Element element = field.element;
+        ClassDeclaration classNode = actual.parent;
+        expect(element.enclosingElement, same(classNode.element));
+      }
+    } else if (actual is ClassMember) {
+      Element element = actual.element;
+      ClassDeclaration classNode = actual.parent;
+      expect(element.enclosingElement, same(classNode.element));
+    }
+    // Field elements referenced by field formal parameters of constructors
+    // must be fields of the enclosing class element.
+    if (actual is FieldFormalParameter) {
+      FieldFormalParameterElement parameterElement = actual.element;
+      FieldElement element = parameterElement.field;
+      ClassDeclaration classNode =
+          actual.getAncestor((n) => n is ClassDeclaration);
+      expect(element.enclosingElement, same(classNode.element));
+    }
+    // Identifiers like 'a.b' in 'new a.b()' might be rewritten if the resolver
+    // sees that 'a' is actually a class name, so 'b' is a constructor name.
+    //
+    if (expected is ConstructorName && actual is ConstructorName) {
+      Identifier expectedTypeName = expected.type.name;
+      Identifier actualTypeName = actual.type.name;
+      if (expectedTypeName is PrefixedIdentifier &&
+          actualTypeName is SimpleIdentifier) {
+        return isEqualNodes(expectedTypeName.prefix, actualTypeName) &&
+            isEqualNodes(expectedTypeName.identifier, actual.name);
+      }
+    }
+    // Compare nodes.
+    bool result = super.isEqualNodes(expected, actual);
+    if (!result) {
+      fail('|$actual| != expected |$expected|');
+    }
+    // Verify that declared identifiers have equal elements.
+    if (expected is SimpleIdentifier && actual is SimpleIdentifier) {
+      if (expected.inDeclarationContext()) {
+        expect(actual.inDeclarationContext(), isTrue);
+        Element expectedElement = expected.staticElement;
+        Element actualElement = actual.staticElement;
+        _verifyElement(expectedElement, actualElement, 'staticElement');
+      }
+    }
+    return true;
+  }
+
+  void _verifyElement(Element expected, Element actual, String desc) {
+    if (expected == null && actual == null) {
+      return;
+    }
+    // Prefixes are built later.
+    if (actual is PrefixElement) {
+      return;
+    }
+    // Compare properties.
+    _verifyEqual('$desc name', expected.name, actual.name);
+    _verifyEqual('$desc nameOffset', expected.nameOffset, actual.nameOffset);
+    if (expected is ElementImpl && actual is ElementImpl) {
+      _verifyEqual('$desc codeOffset', expected.codeOffset, actual.codeOffset);
+      _verifyEqual('$desc codeLength', expected.codeLength, actual.codeLength);
+    }
+    if (expected is LocalElement && actual is LocalElement) {
+      _verifyEqual(
+          '$desc visibleRange', expected.visibleRange, actual.visibleRange);
+    }
+    _verifyEqual('$desc documentationComment', expected.documentationComment,
+        actual.documentationComment);
+    {
+      var expectedEnclosing = expected.enclosingElement;
+      var actualEnclosing = actual.enclosingElement;
+      if (expectedEnclosing != null) {
+        expect(actualEnclosing, isNotNull, reason: '$desc enclosingElement');
+        _verifyElement(expectedEnclosing, actualEnclosing,
+            '${expectedEnclosing.name}.$desc');
+      }
+    }
+  }
+
+  void _verifyEqual(String name, expected, actual) {
+    if (actual != expected) {
+      fail('$name\nExpected: $expected\n  Actual: $actual');
+    }
+  }
+}
+
+class _ClassDeltaHelper {
+  final String name;
+
+  ClassElementDelta delta;
+  ClassElement element;
+  List<ClassMember> oldMembers;
+  List<ClassMember> newMembers;
+
+  _ClassDeltaHelper(this.name);
+
+  void initNew(CompilationUnit newUnit, CompilationUnitElementDelta unitDelta) {
+    ClassDeclaration newClass = _findClassNode(newUnit, name);
+    expect(newClass, isNotNull);
+    newMembers = newClass.members.toList();
+    delta = unitDelta.classDeltas[name];
+    expect(delta, isNotNull, reason: 'No delta for class: $name');
+  }
+
+  void initOld(CompilationUnit oldUnit) {
+    ClassDeclaration oldClass = _findClassNode(oldUnit, name);
+    expect(oldClass, isNotNull);
+    element = oldClass.element;
+    oldMembers = oldClass.members.toList();
+  }
+
+  ClassDeclaration _findClassNode(CompilationUnit unit, String name) =>
+      unit.declarations.singleWhere((unitMember) =>
+          unitMember is ClassDeclaration && unitMember.name.name == name);
 }
diff --git a/pkg/analyzer/test/src/task/strong/checker_test.dart b/pkg/analyzer/test/src/task/strong/checker_test.dart
index 5fbc192..0b1a115 100644
--- a/pkg/analyzer/test/src/task/strong/checker_test.dart
+++ b/pkg/analyzer/test/src/task/strong/checker_test.dart
@@ -2,3315 +2,3543 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-// TODO(jmesserly): this file needs to be refactored, it's a port from
-// package:dev_compiler's tests
-/// General type checking tests
 library analyzer.test.src.task.strong.checker_test;
 
-import 'package:unittest/unittest.dart';
-
+import '../../../reflective_tests.dart';
 import 'strong_test_helper.dart';
 
 void main() {
   initStrongModeTests();
+  runReflectiveTests(CheckerTest);
+}
 
-  test('ternary operator', () {
+@reflectiveTest
+class CheckerTest {
+  void test_awaitForInCastsStreamElementToVariable() {
     checkFile('''
-        abstract class Comparable<T> {
-          int compareTo(T other);
-          static int compare(Comparable a, Comparable b) => a.compareTo(b);
-        }
-        typedef int Comparator<T>(T a, T b);
+import 'dart:async';
+main() async {
+  // Don't choke if sequence is not a stream.
+  await for (var i in /*error:FOR_IN_OF_INVALID_TYPE*/1234) {}
 
-        typedef bool _Predicate<T>(T value);
+  // Dynamic cast.
+  await for (String /*info:DYNAMIC_CAST*/s in new Stream<dynamic>()) {}
 
-        class SplayTreeMap<K, V> {
-          Comparator<K> _comparator;
-          _Predicate _validKey;
+  // Identity cast.
+  await for (String s in new Stream<String>()) {}
 
-          // The warning on assigning to _comparator is legitimate. Since K has
-          // no bound, all we know is that it's object. _comparator's function
-          // type is effectively:              (Object, Object) -> int
-          // We are assigning it a fn of type: (Comparable, Comparable) -> int
-          // There's no telling if that will work. For example, consider:
-          //
-          //     new SplayTreeMap<Uri>();
-          //
-          // This would end up calling .compareTo() on a Uri, which doesn't
-          // define that since it doesn't implement Comparable.
-          SplayTreeMap([int compare(K key1, K key2),
-                        bool isValidKey(potentialKey)])
-            : _comparator = /*warning:DOWN_CAST_COMPOSITE*/(compare == null) ? Comparable.compare : compare,
-              _validKey = (isValidKey != null) ? isValidKey : ((v) => true) {
-            _Predicate<Object> v = (isValidKey != null)
-                ? isValidKey : (/*info:INFERRED_TYPE_CLOSURE*/(_) => true);
+  // Untyped.
+  await for (var s in new Stream<String>()) {}
 
-            v = (isValidKey != null)
-                 ? v : (/*info:INFERRED_TYPE_CLOSURE*/(_) => true);
-          }
-        }
-        void main() {
-          Object obj = 42;
-          dynamic dyn = 42;
-          int i = 42;
+  // Downcast.
+  await for (int /*info:DOWN_CAST_IMPLICIT*/i in new Stream<num>()) {}
+}
+''');
+  }
 
-          // Check the boolean conversion of the condition.
-          print(/*warning:NON_BOOL_CONDITION*/i ? false : true);
-          print((/*info:DOWN_CAST_IMPLICIT*/obj) ? false : true);
-          print((/*info:DYNAMIC_CAST*/dyn) ? false : true);
-        }
-      ''');
-  });
-
-  test('least upper bounds', () {
+  void test_awaitForInCastsSupertypeSequenceToStream() {
     checkFile('''
-      typedef T Returns<T>();
+main() async {
+  dynamic d;
+  await for (var i in /*info:DYNAMIC_CAST*/d) {}
 
-      // regression test for https://github.com/dart-lang/sdk/issues/26094
-      class A <S extends  Returns<S>, T extends Returns<T>> {
-        int test(bool b) {
-          S s;
-          T t;
-          if (b) {
-            return /*warning:RETURN_OF_INVALID_TYPE*/b ? s : t;
-          } else {
-            return /*warning:RETURN_OF_INVALID_TYPE*/s ?? t;
-          }
-        }
-      }
+  Object o;
+  await for (var i in /*info:DOWN_CAST_IMPLICIT*/o) {}
+}
+''');
+  }
 
-      class B<S, T extends S> {
-        T t;
-        S s;
-        int test(bool b) {
-          return /*warning:RETURN_OF_INVALID_TYPE*/b ? t : s;
-        }
-      }
-
-      class C {
-        // Check that the least upper bound of two types with the same
-        // class but different type arguments produces the pointwise
-        // least upper bound of the type arguments
-        int test1(bool b) {
-          List<int> li;
-          List<double> ld;
-          return /*warning:RETURN_OF_INVALID_TYPE*/b ? li : ld;
-        }
-        // TODO(leafp): This case isn't handled yet.  This test checks
-        // the case where two related classes are instantiated with related
-        // but different types.
-        Iterable<num> test2(bool b) {
-          List<int> li;
-          Iterable<double> id;
-          int x =
-              /*info:ASSIGNMENT_CAST should be warning:INVALID_ASSIGNMENT*/
-              b ? li : id;
-          return /*warning:DOWN_CAST_COMPOSITE should be pass*/b ? li : id;
-        }
-      }
-      ''');
-  });
-
-  test('setter return types', () {
+  void test_binaryAndIndexOperators() {
     checkFile('''
-      void voidFn() => null;
-      class A {
-        set a(y) => 4;
-        set b(y) => voidFn();
-        void set c(y) => /*warning:RETURN_OF_INVALID_TYPE*/4;
-        void set d(y) => voidFn();
-        /*warning:NON_VOID_RETURN_FOR_SETTER*/int set e(y) => 4;
-        /*warning:NON_VOID_RETURN_FOR_SETTER*/int set f(y) =>
-            /*warning:RETURN_OF_INVALID_TYPE*/voidFn();
-        set g(y) {return /*warning:RETURN_OF_INVALID_TYPE*/4;}
-        void set h(y) {return /*warning:RETURN_OF_INVALID_TYPE*/4;}
-        /*warning:NON_VOID_RETURN_FOR_SETTER*/int set i(y) {return 4;}
-      }
-    ''');
-  });
+class A {
+  A operator *(B b) => null;
+  A operator /(B b) => null;
+  A operator ~/(B b) => null;
+  A operator %(B b) => null;
+  A operator +(B b) => null;
+  A operator -(B b) => null;
+  A operator <<(B b) => null;
+  A operator >>(B b) => null;
+  A operator &(B b) => null;
+  A operator ^(B b) => null;
+  A operator |(B b) => null;
+  A operator[](B b) => null;
+}
 
-  test('if/for/do/while statements use boolean conversion', () {
+class B {
+  A operator -(B b) => null;
+}
+
+foo() => new A();
+
+test() {
+  A a = new A();
+  B b = new B();
+  var c = foo();
+  a = a * b;
+  a = a * /*info:DYNAMIC_CAST*/c;
+  a = a / b;
+  a = a ~/ b;
+  a = a % b;
+  a = a + b;
+  a = a + /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/a;
+  a = a - b;
+  b = /*error:INVALID_ASSIGNMENT*/b - b;
+  a = a << b;
+  a = a >> b;
+  a = a & b;
+  a = a ^ b;
+  a = a | b;
+  c = (/*info:DYNAMIC_INVOKE*/c + b);
+
+  String x = 'hello';
+  int y = 42;
+  x = x + x;
+  x = x + /*info:DYNAMIC_CAST*/c;
+  x = x + /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/y;
+
+  bool p = true;
+  p = p && p;
+  p = p && /*info:DYNAMIC_CAST*/c;
+  p = (/*info:DYNAMIC_CAST*/c) && p;
+  p = (/*info:DYNAMIC_CAST*/c) && /*info:DYNAMIC_CAST*/c;
+  p = /*error:NON_BOOL_OPERAND*/y && p;
+  p = c == y;
+
+  a = a[b];
+  a = a[/*info:DYNAMIC_CAST*/c];
+  c = (/*info:DYNAMIC_INVOKE*/c[b]);
+  a[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/y];
+}
+''');
+  }
+
+  void test_castsInConditions() {
     checkFile('''
-      main() {
-        dynamic dyn = 42;
-        Object obj = 42;
-        int i = 42;
-        bool b = false;
+main() {
+  bool b = true;
+  num x = b ? 1 : 2.3;
+  int y = /*info:ASSIGNMENT_CAST*/b ? 1 : 2.3;
+  String z = !b ? "hello" : null;
+  z = b ? null : "hello";
+}
+''');
+  }
 
-        if (b) {}
-        if (/*info:DYNAMIC_CAST*/dyn) {}
-        if (/*info:DOWN_CAST_IMPLICIT*/obj) {}
-        if (/*warning:NON_BOOL_CONDITION*/i) {}
-
-        while (b) {}
-        while (/*info:DYNAMIC_CAST*/dyn) {}
-        while (/*info:DOWN_CAST_IMPLICIT*/obj) {}
-        while (/*warning:NON_BOOL_CONDITION*/i) {}
-
-        do {} while (b);
-        do {} while (/*info:DYNAMIC_CAST*/dyn);
-        do {} while (/*info:DOWN_CAST_IMPLICIT*/obj);
-        do {} while (/*warning:NON_BOOL_CONDITION*/i);
-
-        for (;b;) {}
-        for (;/*info:DYNAMIC_CAST*/dyn;) {}
-        for (;/*info:DOWN_CAST_IMPLICIT*/obj;) {}
-        for (;/*warning:NON_BOOL_CONDITION*/i;) {}
-      }
-    ''');
-  });
-
-  test('for-in casts supertype sequence to iterable', () {
+  void test_castsInConstantContexts() {
     checkFile('''
-      main() {
-        dynamic d;
-        for (var i in /*info:DYNAMIC_CAST*/d) {}
+class A {
+  static const num n = 3.0;
+  // The severe error is from constant evaluation where we know the
+  // concrete type.
+  static const int /*error:VARIABLE_TYPE_MISMATCH*/i = /*info:ASSIGNMENT_CAST*/n;
+  final int fi;
+  const A(num a) : this.fi = /*info:DOWN_CAST_IMPLICIT*/a;
+}
+class B extends A {
+  const B(Object a) : super(/*info:DOWN_CAST_IMPLICIT*/a);
+}
+void foo(Object o) {
+  var a = const A(/*info:DOWN_CAST_IMPLICIT, error:CONST_WITH_NON_CONSTANT_ARGUMENT, error:INVALID_CONSTANT*/o);
+}
+''');
+  }
 
-        Object o;
-        for (var i in /*info:DOWN_CAST_IMPLICIT*/o) {}
-      }
-    ''');
-  });
-
-  test('await for-in casts supertype sequence to stream', () {
+  void test_classOverrideOfGrandInterface_interfaceOfAbstractSuperclass() {
     checkFile('''
-      main() async {
-        dynamic d;
-        await for (var i in /*info:DYNAMIC_CAST*/d) {}
+class A {}
+class B {}
 
-        Object o;
-        await for (var i in /*info:DOWN_CAST_IMPLICIT*/o) {}
-      }
-    ''');
-  });
+abstract class I1 {
+    m(A a);
+}
+abstract class Base implements I1 {}
 
-  test('for-in casts iterable element to variable', () {
+class T1 extends Base {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
+
+  void test_classOverrideOfGrandInterface_interfaceOfConcreteSuperclass() {
     checkFile('''
-      main() {
-        // Don't choke if sequence is not iterable.
-        for (var i in /*warning:FOR_IN_OF_INVALID_TYPE*/1234) {}
+class A {}
+class B {}
 
-        // Dynamic cast.
-        for (String /*info:DYNAMIC_CAST*/s in <dynamic>[]) {}
+abstract class I1 {
+    m(A a);
+}
 
-        // Identity cast.
-        for (String s in <String>[]) {}
+class /*error:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/Base
+    implements I1 {}
 
-        // Untyped.
-        for (var s in <String>[]) {}
+class T1 extends Base {
+    // Technically not reported, because if the class is concrete it should
+    // implement all its interfaces, and hence it is sufficient to check
+    // overrides against it.
+    m(/*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
 
-        // Downcast.
-        for (int /*info:DOWN_CAST_IMPLICIT*/i in <num>[]) {}
-      }
-    ''');
-  });
-
-  test('await for-in casts stream element to variable', () {
+  void test_classOverrideOfGrandInterface_interfaceOfInterfaceOfChild() {
     checkFile('''
-      import 'dart:async';
-      main() async {
-        // Don't choke if sequence is not stream.
-        await for (var i in /*warning:FOR_IN_OF_INVALID_TYPE*/1234) {}
+class A {}
+class B {}
 
-        // Dynamic cast.
-        await for (String /*info:DYNAMIC_CAST*/s in new Stream<dynamic>()) {}
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 implements I1 {}
 
-        // Identity cast.
-        await for (String s in new Stream<String>()) {}
+class T1 implements I2 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
 
-        // Untyped.
-        await for (var s in new Stream<String>()) {}
-
-        // Downcast.
-        await for (int /*info:DOWN_CAST_IMPLICIT*/i in new Stream<num>()) {}
-      }
-    ''');
-  });
-
-  test('dynamic invocation', () {
+  void test_classOverrideOfGrandInterface_mixinOfInterfaceOfChild() {
     checkFile('''
-      typedef dynamic A(dynamic x);
-      class B {
-        int call(int x) => x;
-        double col(double x) => x;
-      }
-      void main() {
-        {
-          B f = new B();
-          int x;
-          double y;
-          x = f(3);
-          x = /*warning:INVALID_ASSIGNMENT*/f.col(3.0);
-          y = /*warning:INVALID_ASSIGNMENT*/f(3);
-          y = f.col(3.0);
-          f(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3.0);
-          f.col(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
-        }
-        {
-          Function f = new B();
-          int x;
-          double y;
-          x = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
-          x = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE, info:INVALID_ASSIGNMENT*/f.col(3.0);
-          y = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
-          y = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f.col(3.0);
-          /*info:DYNAMIC_INVOKE*/f(3.0);
-          // Through type propagation, we know f is actually a B, hence the
-          // hint.
-          /*info:DYNAMIC_INVOKE*/f.col(/*info:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
-        }
-        {
-          A f = new B();
-          int x;
-          double y;
-          x = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
-          y = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
-          /*info:DYNAMIC_INVOKE*/f(3.0);
-        }
-        {
-          dynamic g = new B();
-          /*info:DYNAMIC_INVOKE*/g.call(/*info:ARGUMENT_TYPE_NOT_ASSIGNABLE*/32.0);
-          /*info:DYNAMIC_INVOKE*/g.col(42.0);
-          /*info:DYNAMIC_INVOKE*/g.foo(42.0);
-          /*info:DYNAMIC_INVOKE*/g./*info:UNDEFINED_GETTER*/x;
-          A f = new B();
-          /*info:DYNAMIC_INVOKE*/f.col(42.0);
-          /*info:DYNAMIC_INVOKE*/f.foo(42.0);
-          /*info:DYNAMIC_INVOKE*/f./*warning:UNDEFINED_GETTER*/x;
-        }
-      }
-    ''');
-  });
+class A {}
+class B {}
 
-  test('conversion and dynamic invoke', () {
+abstract class M1 {
+    m(A a);
+}
+abstract class I2 extends Object with M1 {}
+
+class T1 implements I2 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
+
+  void test_classOverrideOfGrandInterface_superclassOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 extends I1 {}
+
+class T1 implements I2 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
+
+  void test_compoundAssignments() {
+    checkFile('''
+class A {
+  A operator *(B b) => null;
+  A operator /(B b) => null;
+  A operator ~/(B b) => null;
+  A operator %(B b) => null;
+  A operator +(B b) => null;
+  A operator -(B b) => null;
+  A operator <<(B b) => null;
+  A operator >>(B b) => null;
+  A operator &(B b) => null;
+  A operator ^(B b) => null;
+  A operator |(B b) => null;
+  D operator [](B index) => null;
+  void operator []=(B index, D value) => null;
+}
+
+class B {
+  A operator -(B b) => null;
+}
+
+class D {
+  D operator +(D d) => null;
+}
+
+foo() => new A();
+
+test() {
+  int x = 0;
+  x += 5;
+  /*error:STATIC_TYPE_ERROR*/x += 3.14;
+
+  double y = 0.0;
+  y += 5;
+  y += 3.14;
+
+  num z = 0;
+  z += 5;
+  z += 3.14;
+
+  x = /*info:DOWN_CAST_IMPLICIT*/x + z;
+  x += /*info:DOWN_CAST_IMPLICIT*/z;
+  y = y + z;
+  y += z;
+
+  dynamic w = 42;
+  x += /*info:DYNAMIC_CAST*/w;
+  y += /*info:DYNAMIC_CAST*/w;
+  z += /*info:DYNAMIC_CAST*/w;
+
+  A a = new A();
+  B b = new B();
+  var c = foo();
+  a = a * b;
+  a *= b;
+  a *= /*info:DYNAMIC_CAST*/c;
+  a /= b;
+  a ~/= b;
+  a %= b;
+  a += b;
+  a += /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/a;
+  a -= b;
+  /*error:STATIC_TYPE_ERROR*/b -= /*error:INVALID_ASSIGNMENT*/b;
+  a <<= b;
+  a >>= b;
+  a &= b;
+  a ^= b;
+  a |= b;
+  /*info:DYNAMIC_INVOKE*/c += b;
+
+  var d = new D();
+  a[b] += d;
+  a[/*info:DYNAMIC_CAST*/c] += d;
+  a[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/z] += d;
+  a[b] += /*info:DYNAMIC_CAST*/c;
+  a[b] += /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/z;
+  /*info:DYNAMIC_INVOKE,info:DYNAMIC_INVOKE*/c[b] += d;
+}
+''');
+  }
+
+  void test_constructorInvalid() {
+    // Regression test for https://github.com/dart-lang/sdk/issues/26695
+    checkFile('''
+class A {
+  B({ /*error:FIELD_INITIALIZER_OUTSIDE_CONSTRUCTOR*/this.test: 1.0 }) {}
+  final double test = 0.0;
+}
+''');
+  }
+
+  void test_constructors() {
+    checkFile('''
+const num z = 25;
+Object obj = "world";
+
+class A {
+  int x;
+  String y;
+
+  A(this.x) : this.y = /*error:FIELD_INITIALIZER_NOT_ASSIGNABLE*/42;
+
+  A.c1(p): this.x = /*info:DOWN_CAST_IMPLICIT*/z, this.y = /*info:DYNAMIC_CAST*/p;
+
+  A.c2(this.x, this.y);
+
+  A.c3(/*error:INVALID_PARAMETER_DECLARATION*/num this.x, String this.y);
+}
+
+class B extends A {
+  B() : super(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
+
+  B.c2(int x, String y) : super.c2(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/y,
+                                   /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/x);
+
+  B.c3(num x, Object y) : super.c3(x, /*info:DOWN_CAST_IMPLICIT*/y);
+}
+
+void main() {
+   A a = new A.c2(/*info:DOWN_CAST_IMPLICIT*/z, /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/z);
+   var b = new B.c2(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello", /*info:DOWN_CAST_IMPLICIT*/obj);
+}
+''');
+  }
+
+  void test_conversionAndDynamicInvoke() {
     addFile(
         '''
-      dynamic toString = (int x) => x + 42;
-      dynamic hashCode = "hello";
-      ''',
+dynamic toString = (int x) => x + 42;
+dynamic hashCode = "hello";
+''',
         name: '/helper.dart');
     checkFile('''
-      import 'helper.dart' as helper;
+import 'helper.dart' as helper;
 
-      class A {
-        String x = "hello world";
+class A {
+  String x = "hello world";
 
-        void baz1(y) { x + /*info:DYNAMIC_CAST*/y; }
-        static baz2(y) => /*info:DYNAMIC_INVOKE*/y + y;
-      }
+  void baz1(y) { x + /*info:DYNAMIC_CAST*/y; }
+  static baz2(y) => /*info:DYNAMIC_INVOKE*/y + y;
+}
 
-      void foo(String str) {
-        print(str);
-      }
+void foo(String str) {
+  print(str);
+}
 
-      class B {
-        String toString([int arg]) => arg.toString();
-      }
+class B {
+  String toString([int arg]) => arg.toString();
+}
 
-      void bar(a) {
-        foo(/*info:DYNAMIC_CAST,info:DYNAMIC_INVOKE*/a.x);
-      }
+void bar(a) {
+  foo(/*info:DYNAMIC_CAST,info:DYNAMIC_INVOKE*/a.x);
+}
 
-      baz() => new B();
+baz() => new B();
 
-      typedef DynFun(x);
-      typedef StrFun(String x);
+typedef DynFun(x);
+typedef StrFun(String x);
 
-      var bar1 = bar;
+var bar1 = bar;
 
-      void main() {
-        var a = new A();
-        bar(a);
-        (/*info:DYNAMIC_INVOKE*/bar1(a));
-        var b = bar;
-        (/*info:DYNAMIC_INVOKE*/b(a));
-        var f1 = foo;
-        f1("hello");
-        dynamic f2 = foo;
-        (/*info:DYNAMIC_INVOKE*/f2("hello"));
-        DynFun f3 = foo;
-        (/*info:DYNAMIC_INVOKE*/f3("hello"));
-        (/*info:DYNAMIC_INVOKE*/f3(42));
-        StrFun f4 = foo;
-        f4("hello");
-        a.baz1("hello");
-        var b1 = a.baz1;
-        (/*info:DYNAMIC_INVOKE*/b1("hello"));
-        A.baz2("hello");
-        var b2 = A.baz2;
-        (/*info:DYNAMIC_INVOKE*/b2("hello"));
+void main() {
+  var a = new A();
+  bar(a);
+  (/*info:DYNAMIC_INVOKE*/bar1(a));
+  var b = bar;
+  (/*info:DYNAMIC_INVOKE*/b(a));
+  var f1 = foo;
+  f1("hello");
+  dynamic f2 = foo;
+  (/*info:DYNAMIC_INVOKE*/f2("hello"));
+  DynFun f3 = foo;
+  (/*info:DYNAMIC_INVOKE*/f3("hello"));
+  (/*info:DYNAMIC_INVOKE*/f3(42));
+  StrFun f4 = foo;
+  f4("hello");
+  a.baz1("hello");
+  var b1 = a.baz1;
+  (/*info:DYNAMIC_INVOKE*/b1("hello"));
+  A.baz2("hello");
+  var b2 = A.baz2;
+  (/*info:DYNAMIC_INVOKE*/b2("hello"));
 
-        dynamic a1 = new B();
-        (/*info:DYNAMIC_INVOKE*/a1./*info:UNDEFINED_GETTER*/x);
-        a1.toString();
-        (/*info:DYNAMIC_INVOKE*/a1.toString(42));
-        var toStringClosure = a1.toString;
-        (/*info:DYNAMIC_INVOKE*/a1.toStringClosure());
-        (/*info:DYNAMIC_INVOKE*/a1.toStringClosure(42));
-        (/*info:DYNAMIC_INVOKE*/a1.toStringClosure("hello"));
-        a1.hashCode;
+  dynamic a1 = new B();
+  (/*info:DYNAMIC_INVOKE*/a1./*info:UNDEFINED_GETTER*/x);
+  a1.toString();
+  (/*info:DYNAMIC_INVOKE*/a1.toString(42));
+  var toStringClosure = a1.toString;
+  (/*info:DYNAMIC_INVOKE*/a1.toStringClosure());
+  (/*info:DYNAMIC_INVOKE*/a1.toStringClosure(42));
+  (/*info:DYNAMIC_INVOKE*/a1.toStringClosure("hello"));
+  a1.hashCode;
 
-        dynamic toString = () => null;
-        (/*info:DYNAMIC_INVOKE*/toString());
+  dynamic toString = () => null;
+  (/*info:DYNAMIC_INVOKE*/toString());
 
-        (/*info:DYNAMIC_INVOKE*/helper.toString());
-        var toStringClosure2 = helper.toString;
-        (/*info:DYNAMIC_INVOKE*/toStringClosure2());
-        int hashCode = /*info:DYNAMIC_CAST*/helper.hashCode;
+  (/*info:DYNAMIC_INVOKE*/helper.toString());
+  var toStringClosure2 = helper.toString;
+  (/*info:DYNAMIC_INVOKE*/toStringClosure2());
+  int hashCode = /*info:DYNAMIC_CAST*/helper.hashCode;
 
-        baz().toString();
-        baz().hashCode;
-      }
-    ''');
-  });
+  baz().toString();
+  baz().hashCode;
+}
+''');
+  }
 
-  test('Constructors', () {
+  void test_dynamicInvocation() {
     checkFile('''
-      const num z = 25;
-      Object obj = "world";
+typedef dynamic A(dynamic x);
+class B {
+  int call(int x) => x;
+  double col(double x) => x;
+}
+void main() {
+  {
+    B f = new B();
+    int x;
+    double y;
+    x = f(3);
+    x = /*error:INVALID_ASSIGNMENT*/f.col(3.0);
+    y = /*error:INVALID_ASSIGNMENT*/f(3);
+    y = f.col(3.0);
+    f(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3.0);
+    f.col(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+  }
+  {
+    Function f = new B();
+    int x;
+    double y;
+    x = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
+    x = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE, info:INVALID_ASSIGNMENT*/f.col(3.0);
+    y = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
+    y = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f.col(3.0);
+    /*info:DYNAMIC_INVOKE*/f(3.0);
+    // Through type propagation, we know f is actually a B, hence the
+    // hint.
+    /*info:DYNAMIC_INVOKE*/f.col(/*info:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+  }
+  {
+    A f = new B();
+    int x;
+    double y;
+    x = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
+    y = /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/f(3);
+    /*info:DYNAMIC_INVOKE*/f(3.0);
+  }
+  {
+    dynamic g = new B();
+    /*info:DYNAMIC_INVOKE*/g.call(/*info:ARGUMENT_TYPE_NOT_ASSIGNABLE*/32.0);
+    /*info:DYNAMIC_INVOKE*/g.col(42.0);
+    /*info:DYNAMIC_INVOKE*/g.foo(42.0);
+    /*info:DYNAMIC_INVOKE*/g./*info:UNDEFINED_GETTER*/x;
+    A f = new B();
+    /*info:DYNAMIC_INVOKE*/f.col(42.0);
+    /*info:DYNAMIC_INVOKE*/f.foo(42.0);
+    /*info:DYNAMIC_INVOKE*/f./*error:UNDEFINED_GETTER*/x;
+  }
+}
+''');
+  }
 
-      class A {
-        int x;
-        String y;
+  void test_factoryConstructorDowncast() {
+    checkFile(r'''
+class Animal {
+  Animal();
+  factory Animal.cat() => new Cat();
+}
 
-        A(this.x) : this.y = /*warning:FIELD_INITIALIZER_NOT_ASSIGNABLE*/42;
+class Cat extends Animal {}
 
-        A.c1(p): this.x = /*info:DOWN_CAST_IMPLICIT*/z, this.y = /*info:DYNAMIC_CAST*/p;
+void main() {
+  Cat c = /*info:ASSIGNMENT_CAST*/new Animal.cat();
+  c = /*error:STATIC_TYPE_ERROR*/new Animal();
+}''');
+  }
 
-        A.c2(this.x, this.y);
-
-        A.c3(/*severe:INVALID_PARAMETER_DECLARATION*/num this.x, String this.y);
-      }
-
-      class B extends A {
-        B() : super(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
-
-        B.c2(int x, String y) : super.c2(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/y,
-                                         /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/x);
-
-        B.c3(num x, Object y) : super.c3(x, /*info:DOWN_CAST_IMPLICIT*/y);
-      }
-
-      void main() {
-         A a = new A.c2(/*info:DOWN_CAST_IMPLICIT*/z, /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/z);
-         var b = new B.c2(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello", /*info:DOWN_CAST_IMPLICIT*/obj);
-      }
-   ''');
-  });
-
-  test('Unbound variable', () {
+  void test_fieldFieldOverride() {
     checkFile('''
-      void main() {
-         dynamic y = /*warning:UNDEFINED_IDENTIFIER should be error*/unboundVariable;
-      }
-   ''');
-  });
+class A {}
+class B extends A {}
+class C extends B {}
 
-  test('Unbound type name', () {
+class Base {
+  B f1;
+  B f2;
+  B f3;
+  B f4;
+}
+
+class Child extends Base {
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/A f1; // invalid for getter
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/C f2; // invalid for setter
+  /*error:INVALID_FIELD_OVERRIDE*/var f3;
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/dynamic f4;
+}
+
+class Child2 implements Base {
+  /*error:INVALID_METHOD_OVERRIDE*/A f1; // invalid for getter
+  /*error:INVALID_METHOD_OVERRIDE*/C f2; // invalid for setter
+  var f3;
+  /*error:INVALID_METHOD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/dynamic f4;
+}
+''');
+  }
+
+  void test_fieldGetterOverride() {
     checkFile('''
-      void main() {
-         /*warning:UNDEFINED_CLASS should be error*/AToB y;
-      }
-   ''');
-  });
+class A {}
+class B extends A {}
+class C extends B {}
 
-  // Regression test for https://github.com/dart-lang/sdk/issues/25069
-  test('Void subtyping', () {
+abstract class Base {
+  B f1;
+  B f2;
+  B f3;
+  B f4;
+}
+
+class Child extends Base {
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/A get f1 => null;
+  /*error:INVALID_FIELD_OVERRIDE*/C get f2 => null;
+  /*error:INVALID_FIELD_OVERRIDE*/get f3 => null;
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/dynamic get f4 => null;
+}
+
+class /*error:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_FOUR*/Child2 implements Base {
+  /*error:INVALID_METHOD_OVERRIDE*/A get f1 => null;
+  C get f2 => null;
+  get f3 => null;
+  /*error:INVALID_METHOD_OVERRIDE*/dynamic get f4 => null;
+}
+''');
+  }
+
+  void test_fieldOverride_fuzzyArrows() {
     checkFile('''
-      typedef int Foo();
-      void foo() {}
-      void main () {
-        Foo x = /*warning:INVALID_ASSIGNMENT,info:USE_OF_VOID_RESULT*/foo();
-      }
-   ''');
-  });
+typedef void ToVoid<T>(T x);
+class F {
+  final ToVoid<dynamic> f = null;
+  final ToVoid<int> g = null;
+}
 
-  group('Ground type subtyping:', () {
-    test('dynamic is top', () {
-      checkFile('''
+class G extends F {
+  /*error:INVALID_FIELD_OVERRIDE*/final ToVoid<int> f = null;
+  /*error:INVALID_FIELD_OVERRIDE, error:INVALID_METHOD_OVERRIDE*/final ToVoid<dynamic> g = null;
+}
 
-      class A {}
-      class B extends A {}
+class H implements F {
+  final ToVoid<int> f = null;
+  /*error:INVALID_METHOD_OVERRIDE*/final ToVoid<dynamic> g = null;
+}
+ ''');
+  }
 
-      void main() {
-         dynamic y;
-         Object o;
-         int i = 0;
-         double d = 0.0;
-         num n;
-         A a;
-         B b;
-         y = o;
-         y = i;
-         y = d;
-         y = n;
-         y = a;
-         y = b;
-      }
-   ''');
-    });
+  void test_fieldSetterOverride() {
+    checkFile('''
+class A {}
+class B extends A {}
+class C extends B {}
 
-    test('dynamic downcasts', () {
-      checkFile('''
+class Base {
+  B f1;
+  B f2;
+  B f3;
+  B f4;
+  B f5;
+}
 
-      class A {}
-      class B extends A {}
+class Child extends Base {
+  /*error:INVALID_FIELD_OVERRIDE*/B get f1 => null;
+  /*error:INVALID_FIELD_OVERRIDE*/B get f2 => null;
+  /*error:INVALID_FIELD_OVERRIDE*/B get f3 => null;
+  /*error:INVALID_FIELD_OVERRIDE*/B get f4 => null;
+  /*error:INVALID_FIELD_OVERRIDE*/B get f5 => null;
 
-      void main() {
-         dynamic y;
-         Object o;
-         int i = 0;
-         double d = 0.0;
-         num n;
-         A a;
-         B b;
-         o = y;
-         i = /*info:DYNAMIC_CAST*/y;
-         d = /*info:DYNAMIC_CAST*/y;
-         n = /*info:DYNAMIC_CAST*/y;
-         a = /*info:DYNAMIC_CAST*/y;
-         b = /*info:DYNAMIC_CAST*/y;
-      }
-   ''');
-    });
+  /*error:INVALID_FIELD_OVERRIDE*/void set f1(A value) {}
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/void set f2(C value) {}
+  /*error:INVALID_FIELD_OVERRIDE*/void set f3(value) {}
+  /*error:INVALID_FIELD_OVERRIDE,error:INVALID_METHOD_OVERRIDE*/void set f4(dynamic value) {}
+  /*error:INVALID_FIELD_OVERRIDE*/set f5(B value) {}
+}
 
-    test('assigning a class', () {
-      checkFile('''
+class Child2 implements Base {
+  B get f1 => null;
+  B get f2 => null;
+  B get f3 => null;
+  B get f4 => null;
+  B get f5 => null;
 
-      class A {}
-      class B extends A {}
+  void set f1(A value) {}
+  /*error:INVALID_METHOD_OVERRIDE*/void set f2(C value) {}
+  void set f3(value) {}
+  /*error:INVALID_METHOD_OVERRIDE*/void set f4(dynamic value) {}
+  set f5(B value) {}
+}
+''');
+  }
 
-      void main() {
-         dynamic y;
-         Object o;
-         int i = 0;
-         double d = 0.0;
-         num n;
-         A a;
-         B b;
-         y = a;
-         o = a;
-         i = /*warning:INVALID_ASSIGNMENT*/a;
-         d = /*warning:INVALID_ASSIGNMENT*/a;
-         n = /*warning:INVALID_ASSIGNMENT*/a;
-         a = a;
-         b = /*info:DOWN_CAST_IMPLICIT*/a;
-      }
-   ''');
-    });
+  void test_forInCastsIterateElementToVariable() {
+    checkFile('''
+main() {
+  // Don't choke if sequence is not iterable.
+  for (var i in /*error:FOR_IN_OF_INVALID_TYPE*/1234) {}
 
-    test('assigning a subclass', () {
-      checkFile('''
+  // Dynamic cast.
+  for (String /*info:DYNAMIC_CAST*/s in <dynamic>[]) {}
 
-      class A {}
-      class B extends A {}
-      class C extends A {}
+  // Identity cast.
+  for (String s in <String>[]) {}
 
-      void main() {
-         dynamic y;
-         Object o;
-         int i = 0;
-         double d = 0.0;
-         num n;
-         A a;
-         B b;
-         C c;
-         y = b;
-         o = b;
-         i = /*warning:INVALID_ASSIGNMENT*/b;
-         d = /*warning:INVALID_ASSIGNMENT*/b;
-         n = /*warning:INVALID_ASSIGNMENT*/b;
-         a = b;
-         b = b;
-         c = /*warning:INVALID_ASSIGNMENT*/b;
-      }
-   ''');
-    });
+  // Untyped.
+  for (var s in <String>[]) {}
 
-    test('interfaces', () {
-      checkFile('''
+  // Downcast.
+  for (int /*info:DOWN_CAST_IMPLICIT*/i in <num>[]) {}
+}
+''');
+  }
 
-      class A {}
-      class B extends A {}
-      class C extends A {}
-      class D extends B implements C {}
+  void test_forInCastsSupertypeSequenceToIterate() {
+    checkFile('''
+main() {
+  dynamic d;
+  for (var i in /*info:DYNAMIC_CAST*/d) {}
 
-      void main() {
-         A top;
-         B left;
-         C right;
-         D bot;
-         {
-           top = top;
-           top = left;
-           top = right;
-           top = bot;
-         }
-         {
-           left = /*info:DOWN_CAST_IMPLICIT*/top;
-           left = left;
-           left = /*warning:INVALID_ASSIGNMENT*/right;
-           left = bot;
-         }
-         {
-           right = /*info:DOWN_CAST_IMPLICIT*/top;
-           right = /*warning:INVALID_ASSIGNMENT*/left;
-           right = right;
-           right = bot;
-         }
-         {
-           bot = /*info:DOWN_CAST_IMPLICIT*/top;
-           bot = /*info:DOWN_CAST_IMPLICIT*/left;
-           bot = /*info:DOWN_CAST_IMPLICIT*/right;
-           bot = bot;
-         }
-      }
-   ''');
-    });
-  });
+  Object o;
+  for (var i in /*info:DOWN_CAST_IMPLICIT*/o) {}
+}
+''');
+  }
 
-  group('Function typing and subtyping:', () {
-    test('int and object', () {
-      checkFile('''
+  void test_forLoopVariable() {
+    checkFile('''
+foo() {
+  for (int i = 0; i < 10; i++) {
+    i = /*error:INVALID_ASSIGNMENT*/"hi";
+  }
+}
+bar() {
+  for (var i = 0; i < 10; i++) {
+    int j = i + 1;
+  }
+}
+''');
+  }
 
-      typedef Object Top(int x);      // Top of the lattice
-      typedef int Left(int x);        // Left branch
-      typedef int Left2(int x);       // Left branch
-      typedef Object Right(Object x); // Right branch
-      typedef int Bot(Object x);      // Bottom of the lattice
+  void test_functionModifiers_async() {
+    checkFile('''
+import 'dart:async';
+import 'dart:math' show Random;
 
-      Object globalTop(int x) => x;
-      int globalLeft(int x) => x;
-      Object globalRight(Object x) => x;
-      int bot_(Object x) => /*info:DOWN_CAST_IMPLICIT*/x;
-      int globalBot(Object x) => x as int;
+dynamic x;
 
-      void main() {
-        // Note: use locals so we only know the type, not that it's a specific
-        // function declaration. (we can issue better errors in that case.)
-        var top = globalTop;
-        var left = globalLeft;
-        var right = globalRight;
-        var bot = globalBot;
+foo1() async => x;
+Future foo2() async => x;
+Future<int> foo3() async => /*info:DYNAMIC_CAST*/x;
+Future<int> foo4() async => new Future<int>.value(/*info:DYNAMIC_CAST*/x);
+Future<int> foo5() async =>
+    /*error:RETURN_OF_INVALID_TYPE*/new Future<String>.value(/*info:DYNAMIC_CAST*/x);
 
-        { // Check typedef equality
-          Left f = left;
-          Left2 g = f;
-        }
-        {
-          Top f;
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Left f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/top;
-          f = left;
-          f = /*warning:DOWN_CAST_COMPOSITE*/right; // Should we reject this?
-          f = bot;
-        }
-        {
-          Right f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/top;
-          f = /*warning:DOWN_CAST_COMPOSITE*/left; // Should we reject this?
-          f = right;
-          f = bot;
-        }
-        {
-          Bot f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/top;
-          f = /*warning:DOWN_CAST_COMPOSITE*/left;
-          f = /*warning:DOWN_CAST_COMPOSITE*/right;
-          f = bot;
-        }
-      }
-   ''');
-    });
+bar1() async { return x; }
+Future bar2() async { return x; }
+Future<int> bar3() async { return /*info:DYNAMIC_CAST*/x; }
+Future<int> bar4() async { return new Future<int>.value(/*info:DYNAMIC_CAST*/x); }
+Future<int> bar5() async {
+  return /*error:RETURN_OF_INVALID_TYPE*/new Future<String>.value(/*info:DYNAMIC_CAST*/x);
+}
 
-    test('classes', () {
-      checkFile('''
+int y;
+Future<int> z;
 
-      class A {}
-      class B extends A {}
+baz() async {
+  int a = /*info:DYNAMIC_CAST*/await x;
+  int b = await y;
+  int c = await z;
+  String d = /*error:INVALID_ASSIGNMENT*/await z;
+}
 
-      typedef A Top(B x);   // Top of the lattice
-      typedef B Left(B x);  // Left branch
-      typedef B Left2(B x); // Left branch
-      typedef A Right(A x); // Right branch
-      typedef B Bot(A x);   // Bottom of the lattice
+Future<bool> get issue_264 async {
+  await 42;
+  if (new Random().nextBool()) {
+    return true;
+  } else {
+    return new Future<bool>.value(false);
+  }
+}
+''');
+  }
 
-      B left(B x) => x;
-      B bot_(A x) => /*info:DOWN_CAST_IMPLICIT*/x;
-      B bot(A x) => x as B;
-      A top(B x) => x;
-      A right(A x) => x;
+  void test_functionModifiers_asyncStar() {
+    checkFile('''
+import 'dart:async';
 
-      void main() {
-        { // Check typedef equality
-          Left f = left;
-          Left2 g = f;
-        }
-        {
-          Top f;
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Left f;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-        {
-          Right f;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = right;
-          f = bot;
-        }
-        {
-          Bot f;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-      }
-   ''');
-    });
+dynamic x;
 
-    test('dynamic', () {
-      checkFile('''
+bar1() async* { yield x; }
+Stream bar2() async* { yield x; }
+Stream<int> bar3() async* { yield /*info:DYNAMIC_CAST*/x; }
+Stream<int> bar4() async* { yield /*error:YIELD_OF_INVALID_TYPE*/new Stream<int>(); }
 
-      class A {}
+baz1() async* { yield* /*info:DYNAMIC_CAST*/x; }
+Stream baz2() async* { yield* /*info:DYNAMIC_CAST*/x; }
+Stream<int> baz3() async* { yield* /*warning:DOWN_CAST_COMPOSITE*/x; }
+Stream<int> baz4() async* { yield* new Stream<int>(); }
+Stream<int> baz5() async* { yield* /*info:INFERRED_TYPE_ALLOCATION*/new Stream(); }
+''');
+  }
 
-      typedef dynamic Top(dynamic x);     // Top of the lattice
-      typedef dynamic Left(A x);          // Left branch
-      typedef A Right(dynamic x);         // Right branch
-      typedef A Bottom(A x);              // Bottom of the lattice
+  void test_functionModifiers_syncStar() {
+    checkFile('''
+dynamic x;
 
-      void main() {
-        Top top;
-        Left left;
-        Right right;
-        Bottom bot;
-        {
-          Top f;
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Left f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/top;
-          f = left;
-          f = /*warning:DOWN_CAST_COMPOSITE*/right;
-          f = bot;
-        }
-        {
-          Right f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/top;
-          f = /*warning:DOWN_CAST_COMPOSITE*/left;
-          f = right;
-          f = bot;
-        }
-        {
-          Bottom f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/top;
-          f = /*warning:DOWN_CAST_COMPOSITE*/left;
-          f = /*warning:DOWN_CAST_COMPOSITE*/right;
-          f = bot;
-        }
-      }
-   ''');
-    });
+bar1() sync* { yield x; }
+Iterable bar2() sync* { yield x; }
+Iterable<int> bar3() sync* { yield /*info:DYNAMIC_CAST*/x; }
+Iterable<int> bar4() sync* { yield /*error:YIELD_OF_INVALID_TYPE*/bar3(); }
 
-    test('dynamic functions - closures are not fuzzy', () {
-      // Regression test for
-      // https://github.com/dart-lang/sdk/issues/26118
-      // https://github.com/dart-lang/sdk/issues/26156
-      checkFile('''
-        void takesF(void f(int x)) {}
+baz1() sync* { yield* /*info:DYNAMIC_CAST*/x; }
+Iterable baz2() sync* { yield* /*info:DYNAMIC_CAST*/x; }
+Iterable<int> baz3() sync* { yield* /*warning:DOWN_CAST_COMPOSITE*/x; }
+Iterable<int> baz4() sync* { yield* bar3(); }
+Iterable<int> baz5() sync* { yield* /*info:INFERRED_TYPE_ALLOCATION*/new List(); }
+''');
+  }
 
-        typedef void TakesInt(int x);
+  void test_functionTypingAndSubtyping_classes() {
+    checkFile('''
+class A {}
+class B extends A {}
 
-        void update(_) {}
-        void updateOpt([_]) {}
-        void updateOptNum([num x]) {}
+typedef A Top(B x);   // Top of the lattice
+typedef B Left(B x);  // Left branch
+typedef B Left2(B x); // Left branch
+typedef A Right(A x); // Right branch
+typedef B Bot(A x);   // Bottom of the lattice
 
-        class A {
-          TakesInt f;
-          A(TakesInt g) {
-            f = update;
-            f = updateOpt;
-            f = updateOptNum;
-          }
-          TakesInt g(bool a, bool b) {
-            if (a) {
-              return update;
-            } else if (b) {
-              return updateOpt;
-            } else {
-              return updateOptNum;
-            }
-          }
-        }
+B left(B x) => x;
+B bot_(A x) => /*info:DOWN_CAST_IMPLICIT*/x;
+B bot(A x) => x as B;
+A top(B x) => x;
+A right(A x) => x;
 
-        void test0() {
-          takesF(update);
-          takesF(updateOpt);
-          takesF(updateOptNum);
-          TakesInt f;
-          f = update;
-          f = updateOpt;
-          f = updateOptNum;
-          new A(update);
-          new A(updateOpt);
-          new A(updateOptNum);
-        }
+void main() {
+  { // Check typedef equality
+    Left f = left;
+    Left2 g = f;
+  }
+  {
+    Top f;
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Left f;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+  {
+    Right f;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = right;
+    f = bot;
+  }
+  {
+    Bot f;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+}
+''');
+  }
 
-        void test1() {
-          void takesF(f(int x)) => null;
-          takesF((dynamic y) => 3);
-        }
+  void test_functionTypingAndSubtyping_dynamic() {
+    checkFile('''
+class A {}
 
-        void test2() {
-          int x;
-          int f/*<T>*/(/*=T*/ t, callback(/*=T*/ x)) { return 3; }
-          f(x, (y) => 3);
-        }
-     ''');
-    });
+typedef dynamic Top(dynamic x);     // Top of the lattice
+typedef dynamic Left(A x);          // Left branch
+typedef A Right(dynamic x);         // Right branch
+typedef A Bottom(A x);              // Bottom of the lattice
 
-    test('dynamic - known functions', () {
-      // Our lattice should look like this:
-      //
-      //
-      //           Bot -> Top
-      //          /        \
-      //      A -> Top    Bot -> A
-      //       /     \      /
-      // Top -> Top   A -> A
-      //         \      /
-      //         Top -> A
-      //
-      // Note that downcasts of known functions are promoted to
-      // static type errors, since they cannot succeed.
-      // This makes some of what look like downcasts turn into
-      // type errors below.
-      checkFile('''
-        class A {}
+void main() {
+  Top top;
+  Left left;
+  Right right;
+  Bottom bot;
+  {
+    Top f;
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Left f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/top;
+    f = left;
+    f = /*warning:DOWN_CAST_COMPOSITE*/right;
+    f = bot;
+  }
+  {
+    Right f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/top;
+    f = /*warning:DOWN_CAST_COMPOSITE*/left;
+    f = right;
+    f = bot;
+  }
+  {
+    Bottom f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/top;
+    f = /*warning:DOWN_CAST_COMPOSITE*/left;
+    f = /*warning:DOWN_CAST_COMPOSITE*/right;
+    f = bot;
+  }
+}
+''');
+  }
 
-        typedef dynamic BotTop(dynamic x);
-        typedef dynamic ATop(A x);
-        typedef A BotA(dynamic x);
-        typedef A AA(A x);
-        typedef A TopA(Object x);
-        typedef dynamic TopTop(Object x);
+  void test_functionTypingAndSubtyping_dynamic_knownFunctions() {
+    // Our lattice should look like this:
+    //
+    //
+    //           Bot -> Top
+    //          /        \
+    //      A -> Top    Bot -> A
+    //       /     \      /
+    // Top -> Top   A -> A
+    //         \      /
+    //         Top -> A
+    //
+    // Note that downcasts of known functions are promoted to
+    // static type errors, since they can never succeed.
+    // As a result, several assignments below that look like downcasts are
+    // reported as static type errors instead. (A standalone sketch of this
+    // promotion follows the test.)
+    checkFile('''
+class A {}
 
-        dynamic aTop(A x) => x;
-        A aa(A x) => x;
-        dynamic topTop(dynamic x) => x;
-        A topA(dynamic x) => /*info:DYNAMIC_CAST*/x;
-        void apply/*<T>*/(/*=T*/ f0, /*=T*/ f1, /*=T*/ f2,
-                          /*=T*/ f3, /*=T*/ f4, /*=T*/ f5) {}
-        void main() {
-          BotTop botTop;
-          BotA botA;
-          {
-            BotTop f;
-            f = topA;
-            f = topTop;
-            f = aa;
-            f = aTop;
-            f = botA;
-            f = botTop;
-            apply/*<BotTop>*/(
-                topA,
-                topTop,
-                aa,
-                aTop,
-                botA,
-                botTop
-                              );
-            apply/*<BotTop>*/(
-                (dynamic x) => new A(),
-                (dynamic x) => (x as Object),
-                (A x) => x,
-                (A x) => null,
-                botA,
-                botTop
-                              );
-          }
-          {
-            ATop f;
-            f = topA;
-            f = topTop;
-            f = aa;
-            f = aTop;
-            f = /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/botA;
-            f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
-            apply/*<ATop>*/(
-                topA,
-                topTop,
-                aa,
-                aTop,
-                /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                            );
-            apply/*<ATop>*/(
-                (dynamic x) => new A(),
-                (dynamic x) => (x as Object),
-                (A x) => x,
-                (A x) => null,
-                /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                            );
-          }
-          {
-            BotA f;
-            f = topA;
-            f = /*severe:STATIC_TYPE_ERROR*/topTop;
-            f = aa;
-            f = /*severe:STATIC_TYPE_ERROR*/aTop;
-            f = botA;
-            f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
-            apply/*<BotA>*/(
-                topA,
-                /*severe:STATIC_TYPE_ERROR*/topTop,
-                aa,
-                /*severe:STATIC_TYPE_ERROR*/aTop,
-                botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                            );
-            apply/*<BotA>*/(
-                (dynamic x) => new A(),
-                /*severe:STATIC_TYPE_ERROR*/(dynamic x) => (x as Object),
-                (A x) => x,
-                /*severe:STATIC_TYPE_ERROR*/(A x) => (x as Object),
-                botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                            );
-          }
-          {
-            AA f;
-            f = topA;
-            f = /*severe:STATIC_TYPE_ERROR*/topTop;
-            f = aa;
-            f = /*severe:STATIC_TYPE_ERROR*/aTop; // known function
-            f = /*warning:DOWN_CAST_COMPOSITE*/botA;
-            f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
-            apply/*<AA>*/(
-                topA,
-                /*severe:STATIC_TYPE_ERROR*/topTop,
-                aa,
-                /*severe:STATIC_TYPE_ERROR*/aTop, // known function
-                /*warning:DOWN_CAST_COMPOSITE*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                          );
-            apply/*<AA>*/(
-                (dynamic x) => new A(),
-                /*severe:STATIC_TYPE_ERROR*/(dynamic x) => (x as Object),
-                (A x) => x,
-                /*severe:STATIC_TYPE_ERROR*/(A x) => (x as Object), // known function
-                /*warning:DOWN_CAST_COMPOSITE*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                          );
-          }
-          {
-            TopTop f;
-            f = topA;
-            f = topTop;
-            f = /*severe:STATIC_TYPE_ERROR*/aa;
-            f = /*severe:STATIC_TYPE_ERROR*/aTop; // known function
-            f = /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/botA;
-            f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
-            apply/*<TopTop>*/(
-                topA,
-                topTop,
-                /*severe:STATIC_TYPE_ERROR*/aa,
-                /*severe:STATIC_TYPE_ERROR*/aTop, // known function
-                /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                              );
-            apply/*<TopTop>*/(
-                (dynamic x) => new A(),
-                (dynamic x) => (x as Object),
-                /*severe:STATIC_TYPE_ERROR*/(A x) => x,
-                /*severe:STATIC_TYPE_ERROR*/(A x) => (x as Object), // known function
-                /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                              );
-          }
-          {
-            TopA f;
-            f = topA;
-            f = /*severe:STATIC_TYPE_ERROR*/topTop; // known function
-            f = /*severe:STATIC_TYPE_ERROR*/aa; // known function
-            f = /*severe:STATIC_TYPE_ERROR*/aTop; // known function
-            f = /*warning:DOWN_CAST_COMPOSITE*/botA;
-            f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
-            apply/*<TopA>*/(
-                topA,
-                /*severe:STATIC_TYPE_ERROR*/topTop, // known function
-                /*severe:STATIC_TYPE_ERROR*/aa, // known function
-                /*severe:STATIC_TYPE_ERROR*/aTop, // known function
-                /*warning:DOWN_CAST_COMPOSITE*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                            );
-            apply/*<TopA>*/(
-                (dynamic x) => new A(),
-                /*severe:STATIC_TYPE_ERROR*/(dynamic x) => (x as Object), // known function
-                /*severe:STATIC_TYPE_ERROR*/(A x) => x, // known function
-                /*severe:STATIC_TYPE_ERROR*/(A x) => (x as Object), // known function
-                /*warning:DOWN_CAST_COMPOSITE*/botA,
-                /*warning:DOWN_CAST_COMPOSITE*/botTop
-                            );
-          }
-        }
-     ''');
-    });
-
-    test('function literal variance', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      typedef T Function2<S, T>(S z);
-
-      A top(B x) => x;
-      B left(B x) => x;
-      A right(A x) => x;
-      B bot(A x) => x as B;
-
-      void main() {
-        {
-          Function2<B, A> f;
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<B, B> f;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-        {
-          Function2<A, A> f;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<A, B> f;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-      }
-   ''');
-    });
-
-    test('function variable variance', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      typedef T Function2<S, T>(S z);
-
-      void main() {
-        {
-          Function2<B, A> top;
-          Function2<B, B> left;
-          Function2<A, A> right;
-          Function2<A, B> bot;
-
-          top = right;
-          top = bot;
-          top = top;
-          top = left;
-
-          left = /*warning:DOWN_CAST_COMPOSITE*/top;
-          left = left;
-          left = /*warning:DOWN_CAST_COMPOSITE*/right; // Should we reject this?
-          left = bot;
-
-          right = /*warning:DOWN_CAST_COMPOSITE*/top;
-          right = /*warning:DOWN_CAST_COMPOSITE*/left; // Should we reject this?
-          right = right;
-          right = bot;
-
-          bot = /*warning:DOWN_CAST_COMPOSITE*/top;
-          bot = /*warning:DOWN_CAST_COMPOSITE*/left;
-          bot = /*warning:DOWN_CAST_COMPOSITE*/right;
-          bot = bot;
-        }
-      }
-   ''');
-    });
-
-    test('static method variance', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      class C {
-        static A top(B x) => x;
-        static B left(B x) => x;
-        static A right(A x) => x;
-        static B bot(A x) => x as B;
-      }
-
-      typedef T Function2<S, T>(S z);
-
-      void main() {
-        {
-          Function2<B, A> f;
-          f = C.top;
-          f = C.left;
-          f = C.right;
-          f = C.bot;
-        }
-        {
-          Function2<B, B> f;
-          f = /*severe:STATIC_TYPE_ERROR*/C.top;
-          f = C.left;
-          f = /*severe:STATIC_TYPE_ERROR*/C.right;
-          f = C.bot;
-        }
-        {
-          Function2<A, A> f;
-          f = /*severe:STATIC_TYPE_ERROR*/C.top;
-          f = /*severe:STATIC_TYPE_ERROR*/C.left;
-          f = C.right;
-          f = C.bot;
-        }
-        {
-          Function2<A, B> f;
-          f = /*severe:STATIC_TYPE_ERROR*/C.top;
-          f = /*severe:STATIC_TYPE_ERROR*/C.left;
-          f = /*severe:STATIC_TYPE_ERROR*/C.right;
-          f = C.bot;
-        }
-      }
-   ''');
-    });
-
-    test('instance method variance', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      class C {
-        A top(B x) => x;
-        B left(B x) => x;
-        A right(A x) => x;
-        B bot(A x) => x as B;
-      }
-
-      typedef T Function2<S, T>(S z);
-
-      void main() {
-        C c = new C();
-        {
-          Function2<B, A> f;
-          f = c.top;
-          f = c.left;
-          f = c.right;
-          f = c.bot;
-        }
-        {
-          Function2<B, B> f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.top;
-          f = c.left;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.right;
-          f = c.bot;
-        }
-        {
-          Function2<A, A> f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.top;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.left;
-          f = c.right;
-          f = c.bot;
-        }
-        {
-          Function2<A, B> f;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.top;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.left;
-          f = /*warning:DOWN_CAST_COMPOSITE*/c.right;
-          f = c.bot;
-        }
-      }
-   ''');
-    });
-
-    test('higher order function literals 1', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      typedef T Function2<S, T>(S z);
-
-      typedef A BToA(B x);  // Top of the base lattice
-      typedef B AToB(A x);  // Bot of the base lattice
-
-      BToA top(AToB f) => f;
-      AToB left(AToB f) => f;
-      BToA right(BToA f) => f;
-      AToB bot_(BToA f) => /*warning:DOWN_CAST_COMPOSITE*/f;
-      AToB bot(BToA f) => f as AToB;
-
-      void main() {
-        {
-          Function2<AToB, BToA> f; // Top
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<AToB, AToB> f; // Left
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-        {
-          Function2<BToA, BToA> f; // Right
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<BToA, AToB> f; // Bot
-          f = bot;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-        }
-      }
-   ''');
-    });
-
-    test('higher order function literals 2', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      typedef T Function2<S, T>(S z);
-
-      typedef A BToA(B x);  // Top of the base lattice
-      typedef B AToB(A x);  // Bot of the base lattice
-
-      Function2<B, A> top(AToB f) => f;
-      Function2<A, B> left(AToB f) => f;
-      Function2<B, A> right(BToA f) => f;
-      Function2<A, B> bot_(BToA f) => /*warning:DOWN_CAST_COMPOSITE*/f;
-      Function2<A, B> bot(BToA f) => f as Function2<A, B>;
-
-      void main() {
-        {
-          Function2<AToB, BToA> f; // Top
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<AToB, AToB> f; // Left
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-        {
-          Function2<BToA, BToA> f; // Right
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<BToA, AToB> f; // Bot
-          f = bot;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-        }
-      }
-   ''');
-    });
-
-    test('higher order function literals 3', () {
-      checkFile('''
-
-      class A {}
-      class B extends A {}
-
-      typedef T Function2<S, T>(S z);
-
-      typedef A BToA(B x);  // Top of the base lattice
-      typedef B AToB(A x);  // Bot of the base lattice
-
-      BToA top(Function2<A, B> f) => f;
-      AToB left(Function2<A, B> f) => f;
-      BToA right(Function2<B, A> f) => f;
-      AToB bot_(Function2<B, A> f) => /*warning:DOWN_CAST_COMPOSITE*/f;
-      AToB bot(Function2<B, A> f) => f as AToB;
-
-      void main() {
-        {
-          Function2<AToB, BToA> f; // Top
-          f = top;
-          f = left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<AToB, AToB> f; // Left
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = left;
-          f = /*severe:STATIC_TYPE_ERROR*/right;
-          f = bot;
-        }
-        {
-          Function2<BToA, BToA> f; // Right
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = right;
-          f = bot;
-        }
-        {
-          Function2<BToA, AToB> f; // Bot
-          f = bot;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-          f = /*severe:STATIC_TYPE_ERROR*/top;
-          f = /*severe:STATIC_TYPE_ERROR*/left;
-        }
-      }
-   ''');
-    });
-
-    test('higher order function variables', () {
-      checkFile('''
-
-    class A {}
-    class B extends A {}
-
-    typedef T Function2<S, T>(S z);
+typedef dynamic BotTop(dynamic x);
+typedef dynamic ATop(A x);
+typedef A BotA(dynamic x);
+typedef A AA(A x);
+typedef A TopA(Object x);
+typedef dynamic TopTop(Object x);
 
-    void main() {
-      {
-        Function2<Function2<A, B>, Function2<B, A>> top;
-        Function2<Function2<B, A>, Function2<B, A>> right;
-        Function2<Function2<A, B>, Function2<A, B>> left;
-        Function2<Function2<B, A>, Function2<A, B>> bot;
+dynamic aTop(A x) => x;
+A aa(A x) => x;
+dynamic topTop(dynamic x) => x;
+A topA(dynamic x) => /*info:DYNAMIC_CAST*/x;
+void apply/*<T>*/(/*=T*/ f0, /*=T*/ f1, /*=T*/ f2,
+                  /*=T*/ f3, /*=T*/ f4, /*=T*/ f5) {}
+void main() {
+  BotTop botTop;
+  BotA botA;
+  {
+    BotTop f;
+    f = topA;
+    f = topTop;
+    f = aa;
+    f = aTop;
+    f = botA;
+    f = botTop;
+    apply/*<BotTop>*/(
+        topA,
+        topTop,
+        aa,
+        aTop,
+        botA,
+        botTop
+                      );
+    apply/*<BotTop>*/(
+        (dynamic x) => new A(),
+        (dynamic x) => (x as Object),
+        (A x) => x,
+        (A x) => null,
+        botA,
+        botTop
+                      );
+  }
+  {
+    ATop f;
+    f = topA;
+    f = topTop;
+    f = aa;
+    f = aTop;
+    f = /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/botA;
+    f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
+    apply/*<ATop>*/(
+        topA,
+        topTop,
+        aa,
+        aTop,
+        /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                    );
+    apply/*<ATop>*/(
+        (dynamic x) => new A(),
+        (dynamic x) => (x as Object),
+        (A x) => x,
+        (A x) => null,
+        /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                    );
+  }
+  {
+    BotA f;
+    f = topA;
+    f = /*error:STATIC_TYPE_ERROR*/topTop;
+    f = aa;
+    f = /*error:STATIC_TYPE_ERROR*/aTop;
+    f = botA;
+    f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
+    apply/*<BotA>*/(
+        topA,
+        /*error:STATIC_TYPE_ERROR*/topTop,
+        aa,
+        /*error:STATIC_TYPE_ERROR*/aTop,
+        botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                    );
+    apply/*<BotA>*/(
+        (dynamic x) => new A(),
+        /*error:STATIC_TYPE_ERROR*/(dynamic x) => (x as Object),
+        (A x) => x,
+        /*error:STATIC_TYPE_ERROR*/(A x) => (x as Object),
+        botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                    );
+  }
+  {
+    AA f;
+    f = topA;
+    f = /*error:STATIC_TYPE_ERROR*/topTop;
+    f = aa;
+    f = /*error:STATIC_TYPE_ERROR*/aTop; // known function
+    f = /*warning:DOWN_CAST_COMPOSITE*/botA;
+    f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
+    apply/*<AA>*/(
+        topA,
+        /*error:STATIC_TYPE_ERROR*/topTop,
+        aa,
+        /*error:STATIC_TYPE_ERROR*/aTop, // known function
+        /*warning:DOWN_CAST_COMPOSITE*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                  );
+    apply/*<AA>*/(
+        (dynamic x) => new A(),
+        /*error:STATIC_TYPE_ERROR*/(dynamic x) => (x as Object),
+        (A x) => x,
+        /*error:STATIC_TYPE_ERROR*/(A x) => (x as Object), // known function
+        /*warning:DOWN_CAST_COMPOSITE*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                  );
+  }
+  {
+    TopTop f;
+    f = topA;
+    f = topTop;
+    f = /*error:STATIC_TYPE_ERROR*/aa;
+    f = /*error:STATIC_TYPE_ERROR*/aTop; // known function
+    f = /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/botA;
+    f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
+    apply/*<TopTop>*/(
+        topA,
+        topTop,
+        /*error:STATIC_TYPE_ERROR*/aa,
+        /*error:STATIC_TYPE_ERROR*/aTop, // known function
+        /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                      );
+    apply/*<TopTop>*/(
+        (dynamic x) => new A(),
+        (dynamic x) => (x as Object),
+        /*error:STATIC_TYPE_ERROR*/(A x) => x,
+        /*error:STATIC_TYPE_ERROR*/(A x) => (x as Object), // known function
+        /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                      );
+  }
+  {
+    TopA f;
+    f = topA;
+    f = /*error:STATIC_TYPE_ERROR*/topTop; // known function
+    f = /*error:STATIC_TYPE_ERROR*/aa; // known function
+    f = /*error:STATIC_TYPE_ERROR*/aTop; // known function
+    f = /*warning:DOWN_CAST_COMPOSITE*/botA;
+    f = /*warning:DOWN_CAST_COMPOSITE*/botTop;
+    apply/*<TopA>*/(
+        topA,
+        /*error:STATIC_TYPE_ERROR*/topTop, // known function
+        /*error:STATIC_TYPE_ERROR*/aa, // known function
+        /*error:STATIC_TYPE_ERROR*/aTop, // known function
+        /*warning:DOWN_CAST_COMPOSITE*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                    );
+    apply/*<TopA>*/(
+        (dynamic x) => new A(),
+        /*error:STATIC_TYPE_ERROR*/(dynamic x) => (x as Object), // known function
+        /*error:STATIC_TYPE_ERROR*/(A x) => x, // known function
+        /*error:STATIC_TYPE_ERROR*/(A x) => (x as Object), // known function
+        /*warning:DOWN_CAST_COMPOSITE*/botA,
+        /*warning:DOWN_CAST_COMPOSITE*/botTop
+                    );
+  }
+}
+''');
+  }
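A minimal standalone sketch of the promotion described in the comment at the top of this test, reusing the test's own `A`, `AA`, and `aTop` names; the local `g` is added purely for illustration, and the diagnostics named in the comments are the ones the test expects rather than re-verified analyzer output.

    class A {}

    typedef A AA(A x);              // A -> A
    dynamic aTop(A x) => x;         // A -> Top, a statically known declaration

    void main() {
      AA f;
      // Assigning the known declaration directly is expected to be
      // error:STATIC_TYPE_ERROR in the test above: the exact function is
      // known, so the downcast from `dynamic Function(A)` to `A Function(A)`
      // cannot succeed.
      f = aTop;

      // Routed through a local, only the static type is known, so by analogy
      // with the test's variable assignments the same shape is expected to be
      // warning:DOWN_CAST_COMPOSITE instead.
      var g = aTop;
      f = g;
    }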
 
-        top = right;
-        top = bot;
-        top = top;
-        top = left;
+  void test_functionTypingAndSubtyping_dynamicFunctions_closuresAreNotFuzzy() {
+    // Regression test for
+    // https://github.com/dart-lang/sdk/issues/26118
+    // https://github.com/dart-lang/sdk/issues/26156
+    checkFile('''
+void takesF(void f(int x)) {}
 
-        left = /*warning:DOWN_CAST_COMPOSITE*/top;
-        left = left;
-        left =
-            /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/right;
-        left = bot;
+typedef void TakesInt(int x);
 
-        right = /*warning:DOWN_CAST_COMPOSITE*/top;
-        right =
-            /*warning:DOWN_CAST_COMPOSITE should be severe:STATIC_TYPE_ERROR*/left;
-        right = right;
-        right = bot;
+void update(_) {}
+void updateOpt([_]) {}
+void updateOptNum([num x]) {}
 
-        bot = /*warning:DOWN_CAST_COMPOSITE*/top;
-        bot = /*warning:DOWN_CAST_COMPOSITE*/left;
-        bot = /*warning:DOWN_CAST_COMPOSITE*/right;
-        bot = bot;
-      }
+class A {
+  TakesInt f;
+  A(TakesInt g) {
+    f = update;
+    f = updateOpt;
+    f = updateOptNum;
+  }
+  TakesInt g(bool a, bool b) {
+    if (a) {
+      return update;
+    } else if (b) {
+      return updateOpt;
+    } else {
+      return updateOptNum;
     }
-   ''');
-    });
+  }
+}
 
-    test('named and optional parameters', () {
-      checkFile('''
+void test0() {
+  takesF(update);
+  takesF(updateOpt);
+  takesF(updateOptNum);
+  TakesInt f;
+  f = update;
+  f = updateOpt;
+  f = updateOptNum;
+  new A(update);
+  new A(updateOpt);
+  new A(updateOptNum);
+}
 
-      class A {}
+void test1() {
+  void takesF(f(int x)) => null;
+  takesF((dynamic y) => 3);
+}
 
-      typedef A FR(A x);
-      typedef A FO([A x]);
-      typedef A FN({A x});
-      typedef A FRR(A x, A y);
-      typedef A FRO(A x, [A y]);
-      typedef A FRN(A x, {A n});
-      typedef A FOO([A x, A y]);
-      typedef A FNN({A x, A y});
-      typedef A FNNN({A z, A y, A x});
+void test2() {
+  int x;
+  int f/*<T>*/(/*=T*/ t, callback(/*=T*/ x)) { return 3; }
+  f(x, (y) => 3);
+}
+''');
+  }
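A small self-contained version of the accepted pattern the regression test above exercises, reusing its `takesF`, `TakesInt`, and `update` names; the call through `g` and the `print` body are illustrative additions, and the point is only that the test leaves these shapes unannotated (no downcast markers), not a claim about current analyzer behavior.

    typedef void TakesInt(int x);

    void takesF(void f(int x)) {}

    void update(_) {}                  // parameter is implicitly dynamic

    void main() {
      // Each of these appears without a downcast annotation in the test above.
      takesF(update);
      takesF((dynamic y) => print(y));
      TakesInt g = update;
      g(3);
    }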
 
-      void main() {
-         FR r;
-         FO o;
-         FN n;
-         FRR rr;
-         FRO ro;
-         FRN rn;
-         FOO oo;
-         FNN nn;
-         FNNN nnn;
-
-         r = r;
-         r = o;
-         r = /*warning:INVALID_ASSIGNMENT*/n;
-         r = /*warning:INVALID_ASSIGNMENT*/rr;
-         r = ro;
-         r = rn;
-         r = oo;
-         r = /*warning:INVALID_ASSIGNMENT*/nn;
-         r = /*warning:INVALID_ASSIGNMENT*/nnn;
-
-         o = /*warning:DOWN_CAST_COMPOSITE*/r;
-         o = o;
-         o = /*warning:INVALID_ASSIGNMENT*/n;
-         o = /*warning:INVALID_ASSIGNMENT*/rr;
-         o = /*warning:INVALID_ASSIGNMENT*/ro;
-         o = /*warning:INVALID_ASSIGNMENT*/rn;
-         o = oo;
-         o = /*warning:INVALID_ASSIGNMENT*/nn;
-         o = /*warning:INVALID_ASSIGNMENT*/nnn;
-
-         n = /*warning:INVALID_ASSIGNMENT*/r;
-         n = /*warning:INVALID_ASSIGNMENT*/o;
-         n = n;
-         n = /*warning:INVALID_ASSIGNMENT*/rr;
-         n = /*warning:INVALID_ASSIGNMENT*/ro;
-         n = /*warning:INVALID_ASSIGNMENT*/rn;
-         n = /*warning:INVALID_ASSIGNMENT*/oo;
-         n = nn;
-         n = nnn;
-
-         rr = /*warning:INVALID_ASSIGNMENT*/r;
-         rr = /*warning:INVALID_ASSIGNMENT*/o;
-         rr = /*warning:INVALID_ASSIGNMENT*/n;
-         rr = rr;
-         rr = ro;
-         rr = /*warning:INVALID_ASSIGNMENT*/rn;
-         rr = oo;
-         rr = /*warning:INVALID_ASSIGNMENT*/nn;
-         rr = /*warning:INVALID_ASSIGNMENT*/nnn;
-
-         ro = /*warning:DOWN_CAST_COMPOSITE*/r;
-         ro = /*warning:INVALID_ASSIGNMENT*/o;
-         ro = /*warning:INVALID_ASSIGNMENT*/n;
-         ro = /*warning:DOWN_CAST_COMPOSITE*/rr;
-         ro = ro;
-         ro = /*warning:INVALID_ASSIGNMENT*/rn;
-         ro = oo;
-         ro = /*warning:INVALID_ASSIGNMENT*/nn;
-         ro = /*warning:INVALID_ASSIGNMENT*/nnn;
-
-         rn = /*warning:DOWN_CAST_COMPOSITE*/r;
-         rn = /*warning:INVALID_ASSIGNMENT*/o;
-         rn = /*warning:INVALID_ASSIGNMENT*/n;
-         rn = /*warning:INVALID_ASSIGNMENT*/rr;
-         rn = /*warning:INVALID_ASSIGNMENT*/ro;
-         rn = rn;
-         rn = /*warning:INVALID_ASSIGNMENT*/oo;
-         rn = /*warning:INVALID_ASSIGNMENT*/nn;
-         rn = /*warning:INVALID_ASSIGNMENT*/nnn;
-
-         oo = /*warning:DOWN_CAST_COMPOSITE*/r;
-         oo = /*warning:DOWN_CAST_COMPOSITE*/o;
-         oo = /*warning:INVALID_ASSIGNMENT*/n;
-         oo = /*warning:DOWN_CAST_COMPOSITE*/rr;
-         oo = /*warning:DOWN_CAST_COMPOSITE*/ro;
-         oo = /*warning:INVALID_ASSIGNMENT*/rn;
-         oo = oo;
-         oo = /*warning:INVALID_ASSIGNMENT*/nn;
-         oo = /*warning:INVALID_ASSIGNMENT*/nnn;
-
-         nn = /*warning:INVALID_ASSIGNMENT*/r;
-         nn = /*warning:INVALID_ASSIGNMENT*/o;
-         nn = /*warning:DOWN_CAST_COMPOSITE*/n;
-         nn = /*warning:INVALID_ASSIGNMENT*/rr;
-         nn = /*warning:INVALID_ASSIGNMENT*/ro;
-         nn = /*warning:INVALID_ASSIGNMENT*/rn;
-         nn = /*warning:INVALID_ASSIGNMENT*/oo;
-         nn = nn;
-         nn = nnn;
-
-         nnn = /*warning:INVALID_ASSIGNMENT*/r;
-         nnn = /*warning:INVALID_ASSIGNMENT*/o;
-         nnn = /*warning:DOWN_CAST_COMPOSITE*/n;
-         nnn = /*warning:INVALID_ASSIGNMENT*/rr;
-         nnn = /*warning:INVALID_ASSIGNMENT*/ro;
-         nnn = /*warning:INVALID_ASSIGNMENT*/rn;
-         nnn = /*warning:INVALID_ASSIGNMENT*/oo;
-         nnn = /*warning:DOWN_CAST_COMPOSITE*/nn;
-         nnn = nnn;
-      }
-   ''');
-    });
-
-    test('Function subtyping: objects with call methods', () {
-      checkFile('''
-
-      typedef int I2I(int x);
-      typedef num N2N(num x);
-      class A {
-         int call(int x) => x;
-      }
-      class B {
-         num call(num x) => x;
-      }
-      int i2i(int x) => x;
-      num n2n(num x) => x;
-      void main() {
-         {
-           I2I f;
-           f = new A();
-           f = /*warning:INVALID_ASSIGNMENT*/new B();
-           f = i2i;
-           f = /*severe:STATIC_TYPE_ERROR*/n2n;
-           f = /*warning:DOWN_CAST_COMPOSITE*/i2i as Object;
-           f = /*warning:DOWN_CAST_COMPOSITE*/n2n as Function;
-         }
-         {
-           N2N f;
-           f = /*warning:INVALID_ASSIGNMENT*/new A();
-           f = new B();
-           f = /*severe:STATIC_TYPE_ERROR*/i2i;
-           f = n2n;
-           f = /*warning:DOWN_CAST_COMPOSITE*/i2i as Object;
-           f = /*warning:DOWN_CAST_COMPOSITE*/n2n as Function;
-         }
-         {
-           A f;
-           f = new A();
-           f = /*warning:INVALID_ASSIGNMENT*/new B();
-           f = /*warning:INVALID_ASSIGNMENT*/i2i;
-           f = /*warning:INVALID_ASSIGNMENT*/n2n;
-           f = /*info:DOWN_CAST_IMPLICIT*/i2i as Object;
-           f = /*info:DOWN_CAST_IMPLICIT*/n2n as Function;
-         }
-         {
-           B f;
-           f = /*warning:INVALID_ASSIGNMENT*/new A();
-           f = new B();
-           f = /*warning:INVALID_ASSIGNMENT*/i2i;
-           f = /*warning:INVALID_ASSIGNMENT*/n2n;
-           f = /*info:DOWN_CAST_IMPLICIT*/i2i as Object;
-           f = /*info:DOWN_CAST_IMPLICIT*/n2n as Function;
-         }
-         {
-           Function f;
-           f = new A();
-           f = new B();
-           f = i2i;
-           f = n2n;
-           f = /*info:DOWN_CAST_IMPLICIT*/i2i as Object;
-           f = (n2n as Function);
-         }
-      }
-   ''');
-    });
-
-    test('void', () {
-      checkFile('''
-
-      class A {
-        void bar() => null;
-        void foo() => bar(); // allowed
-      }
-   ''');
-    });
-
-    test('uninferred closure', () {
-      checkFile('''
-        typedef num Num2Num(num x);
-        void main() {
-          Num2Num g = /*info:INFERRED_TYPE_CLOSURE,severe:STATIC_TYPE_ERROR*/(int x) { return x; };
-          print(g(42));
-        }
-      ''');
-    });
-
-    test('subtype of universal type', () {
-      checkFile('''
-        void main() {
-          nonGenericFn(x) => null;
-          {
-            /*=R*/ f/*<P, R>*/(/*=P*/ p) => null;
-            /*=T*/ g/*<S, T>*/(/*=S*/ s) => null;
-
-            var local = f;
-            local = g; // valid
-
-            // Non-generic function cannot subtype a generic one.
-            local = /*warning:INVALID_ASSIGNMENT*/(x) => null;
-            local = /*warning:INVALID_ASSIGNMENT*/nonGenericFn;
-          }
-          {
-            Iterable/*<R>*/ f/*<P, R>*/(List/*<P>*/ p) => null;
-            List/*<T>*/ g/*<S, T>*/(Iterable/*<S>*/ s) => null;
-
-            var local = f;
-            local = g; // valid
-
-            var local2 = g;
-            local = local2;
-            local2 = /*severe:STATIC_TYPE_ERROR*/f;
-            local2 = /*warning:DOWN_CAST_COMPOSITE*/local;
-
-            // Non-generic function cannot subtype a generic one.
-            local = /*warning:INVALID_ASSIGNMENT*/(x) => null;
-            local = /*warning:INVALID_ASSIGNMENT*/nonGenericFn;
-          }
-        }
-      ''');
-    });
-  });
-
-  test('Relaxed casts', () {
+  void test_functionTypingAndSubtyping_functionLiteralVariance() {
     checkFile('''
+class A {}
+class B extends A {}
 
-      class A {}
+typedef T Function2<S, T>(S z);
 
-      class L<T> {}
-      class M<T> extends L<T> {}
-      //     L<dynamic|Object>
-      //    /              \
-      // M<dynamic|Object>  L<A>
-      //    \              /
-      //          M<A>
-      // In normal Dart, there are additional edges
-      //  from M<A> to M<dynamic>
-      //  from L<A> to M<dynamic>
-      //  from L<A> to L<dynamic>
-      void main() {
-        L lOfDs;
-        L<Object> lOfOs;
-        L<A> lOfAs;
+A top(B x) => x;
+B left(B x) => x;
+A right(A x) => x;
+B bot(A x) => x as B;
 
-        M mOfDs;
-        M<Object> mOfOs;
-        M<A> mOfAs;
+void main() {
+  {
+    Function2<B, A> f;
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<B, B> f;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+  {
+    Function2<A, A> f;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<A, B> f;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+}
+''');
+  }
 
-        {
-          lOfDs = mOfDs;
-          lOfDs = mOfOs;
-          lOfDs = mOfAs;
-          lOfDs = lOfDs;
-          lOfDs = lOfOs;
-          lOfDs = lOfAs;
-          lOfDs = new L(); // Reset type propagation.
-        }
-        {
-          lOfOs = mOfDs;
-          lOfOs = mOfOs;
-          lOfOs = mOfAs;
-          lOfOs = lOfDs;
-          lOfOs = lOfOs;
-          lOfOs = lOfAs;
-          lOfOs = new L<Object>(); // Reset type propagation.
-        }
-        {
-          lOfAs = /*warning:DOWN_CAST_COMPOSITE*/mOfDs;
-          lOfAs = /*warning:INVALID_ASSIGNMENT*/mOfOs;
-          lOfAs = mOfAs;
-          lOfAs = /*warning:DOWN_CAST_COMPOSITE*/lOfDs;
-          lOfAs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
-          lOfAs = lOfAs;
-          lOfAs = new L<A>(); // Reset type propagation.
-        }
-        {
-          mOfDs = mOfDs;
-          mOfDs = mOfOs;
-          mOfDs = mOfAs;
-          mOfDs = /*info:DOWN_CAST_IMPLICIT*/lOfDs;
-          mOfDs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
-          mOfDs = /*warning:DOWN_CAST_COMPOSITE*/lOfAs;
-          mOfDs = new M(); // Reset type propagation.
-        }
-        {
-          mOfOs = mOfDs;
-          mOfOs = mOfOs;
-          mOfOs = mOfAs;
-          mOfOs = /*info:DOWN_CAST_IMPLICIT*/lOfDs;
-          mOfOs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
-          mOfOs = /*warning:INVALID_ASSIGNMENT*/lOfAs;
-          mOfOs = new M<Object>(); // Reset type propagation.
-        }
-        {
-          mOfAs = /*warning:DOWN_CAST_COMPOSITE*/mOfDs;
-          mOfAs = /*info:DOWN_CAST_IMPLICIT*/mOfOs;
-          mOfAs = mOfAs;
-          mOfAs = /*warning:DOWN_CAST_COMPOSITE*/lOfDs;
-          mOfAs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
-          mOfAs = /*info:DOWN_CAST_IMPLICIT*/lOfAs;
-        }
-      }
-   ''');
-  });
-
-  test('Type checking literals', () {
+  void test_functionTypingAndSubtyping_functionVariableVariance() {
     checkFile('''
-          test() {
-            num n = 3;
-            int i = 3;
-            String s = "hello";
-            {
-               List<int> l = <int>[i];
-               l = <int>[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/s];
-               l = <int>[/*info:DOWN_CAST_IMPLICIT*/n];
-               l = <int>[i, /*info:DOWN_CAST_IMPLICIT*/n, /*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/s];
-            }
-            {
-               List l = /*info:INFERRED_TYPE_LITERAL*/[i];
-               l = /*info:INFERRED_TYPE_LITERAL*/[s];
-               l = /*info:INFERRED_TYPE_LITERAL*/[n];
-               l = /*info:INFERRED_TYPE_LITERAL*/[i, n, s];
-            }
-            {
-               Map<String, int> m = <String, int>{s: i};
-               m = <String, int>{s: /*warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/s};
-               m = <String, int>{s: /*info:DOWN_CAST_IMPLICIT*/n};
-               m = <String, int>{s: i,
-                                 s: /*info:DOWN_CAST_IMPLICIT*/n,
-                                 s: /*warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/s};
-            }
-           // TODO(leafp): We can't currently test for key errors since the
-           // error marker binds to the entire entry.
-            {
-               Map m = /*info:INFERRED_TYPE_LITERAL*/{s: i};
-               m = /*info:INFERRED_TYPE_LITERAL*/{s: s};
-               m = /*info:INFERRED_TYPE_LITERAL*/{s: n};
-               m = /*info:INFERRED_TYPE_LITERAL*/
-                   {s: i,
-                    s: n,
-                    s: s};
-               m = /*info:INFERRED_TYPE_LITERAL*/
-                   {i: s,
-                    n: s,
-                    s: s};
-            }
-          }
-   ''');
-  });
+class A {}
+class B extends A {}
 
-  test('casts in constant contexts', () {
+typedef T Function2<S, T>(S z);
+
+void main() {
+  {
+    Function2<B, A> top;
+    Function2<B, B> left;
+    Function2<A, A> right;
+    Function2<A, B> bot;
+
+    top = right;
+    top = bot;
+    top = top;
+    top = left;
+
+    left = /*warning:DOWN_CAST_COMPOSITE*/top;
+    left = left;
+    left = /*warning:DOWN_CAST_COMPOSITE*/right; // Should we reject this?
+    left = bot;
+
+    right = /*warning:DOWN_CAST_COMPOSITE*/top;
+    right = /*warning:DOWN_CAST_COMPOSITE*/left; // Should we reject this?
+    right = right;
+    right = bot;
+
+    bot = /*warning:DOWN_CAST_COMPOSITE*/top;
+    bot = /*warning:DOWN_CAST_COMPOSITE*/left;
+    bot = /*warning:DOWN_CAST_COMPOSITE*/right;
+    bot = bot;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_higherOrderFunctionLiteral1() {
     checkFile('''
-          class A {
-            static const num n = 3.0;
-            // The severe error is from constant evaluation where we know the
-            // concrete type.
-            static const int /*severe:VARIABLE_TYPE_MISMATCH*/i = /*info:ASSIGNMENT_CAST*/n;
-            final int fi;
-            const A(num a) : this.fi = /*info:DOWN_CAST_IMPLICIT*/a;
-          }
-          class B extends A {
-            const B(Object a) : super(/*info:DOWN_CAST_IMPLICIT*/a);
-          }
-          void foo(Object o) {
-            var a = const A(/*info:DOWN_CAST_IMPLICIT, severe:CONST_WITH_NON_CONSTANT_ARGUMENT, severe:INVALID_CONSTANT*/o);
-          }
-     ''');
-  });
+class A {}
+class B extends A {}
 
-  test('casts in conditionals', () {
+typedef T Function2<S, T>(S z);
+
+typedef A BToA(B x);  // Top of the base lattice
+typedef B AToB(A x);  // Bot of the base lattice
+
+BToA top(AToB f) => f;
+AToB left(AToB f) => f;
+BToA right(BToA f) => f;
+AToB bot_(BToA f) => /*warning:DOWN_CAST_COMPOSITE*/f;
+AToB bot(BToA f) => f as AToB;
+
+void main() {
+  {
+    Function2<AToB, BToA> f; // Top
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<AToB, AToB> f; // Left
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+  {
+    Function2<BToA, BToA> f; // Right
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<BToA, AToB> f; // Bot
+    f = bot;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_higherOrderFunctionLiteral2() {
     checkFile('''
-          main() {
-            bool b = true;
-            num x = b ? 1 : 2.3;
-            int y = /*info:ASSIGNMENT_CAST*/b ? 1 : 2.3;
-            String z = !b ? "hello" : null;
-            z = b ? null : "hello";
-          }
-      ''');
-  });
+class A {}
+class B extends A {}
 
-  // This is a regression test for https://github.com/dart-lang/sdk/issues/25071
-  test('unbound redirecting constructor', () {
+typedef T Function2<S, T>(S z);
+
+typedef A BToA(B x);  // Top of the base lattice
+typedef B AToB(A x);  // Bot of the base lattice
+
+Function2<B, A> top(AToB f) => f;
+Function2<A, B> left(AToB f) => f;
+Function2<B, A> right(BToA f) => f;
+Function2<A, B> bot_(BToA f) => /*warning:DOWN_CAST_COMPOSITE*/f;
+Function2<A, B> bot(BToA f) => f as Function2<A, B>;
+
+void main() {
+  {
+    Function2<AToB, BToA> f; // Top
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<AToB, AToB> f; // Left
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+  {
+    Function2<BToA, BToA> f; // Right
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<BToA, AToB> f; // Bot
+    f = bot;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_higherOrderFunctionLiteral3() {
     checkFile('''
-      class Foo {
-        Foo() : /*severe:REDIRECT_GENERATIVE_TO_MISSING_CONSTRUCTOR*/this.init();
-      }
-       ''');
-  });
+class A {}
+class B extends A {}
 
-  test('redirecting constructor', () {
+typedef T Function2<S, T>(S z);
+
+typedef A BToA(B x);  // Top of the base lattice
+typedef B AToB(A x);  // Bot of the base lattice
+
+BToA top(Function2<A, B> f) => f;
+AToB left(Function2<A, B> f) => f;
+BToA right(Function2<B, A> f) => f;
+AToB bot_(Function2<B, A> f) => /*warning:DOWN_CAST_COMPOSITE*/f;
+AToB bot(Function2<B, A> f) => f as AToB;
+
+void main() {
+  {
+    Function2<AToB, BToA> f; // Top
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<AToB, AToB> f; // Left
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = left;
+    f = /*error:STATIC_TYPE_ERROR*/right;
+    f = bot;
+  }
+  {
+    Function2<BToA, BToA> f; // Right
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = right;
+    f = bot;
+  }
+  {
+    Function2<BToA, AToB> f; // Bot
+    f = bot;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+    f = /*error:STATIC_TYPE_ERROR*/top;
+    f = /*error:STATIC_TYPE_ERROR*/left;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_higherOrderFunctionVariables() {
     checkFile('''
-          class A {
-            A(A x) {}
-            A.two() : this(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
-          }
-       ''');
-  });
+class A {}
+class B extends A {}
 
-  test('super constructor', () {
+typedef T Function2<S, T>(S z);
+
+void main() {
+  {
+    Function2<Function2<A, B>, Function2<B, A>> top;
+    Function2<Function2<B, A>, Function2<B, A>> right;
+    Function2<Function2<A, B>, Function2<A, B>> left;
+    Function2<Function2<B, A>, Function2<A, B>> bot;
+
+    top = right;
+    top = bot;
+    top = top;
+    top = left;
+
+    left = /*warning:DOWN_CAST_COMPOSITE*/top;
+    left = left;
+    left =
+        /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/right;
+    left = bot;
+
+    right = /*warning:DOWN_CAST_COMPOSITE*/top;
+    right =
+        /*warning:DOWN_CAST_COMPOSITE should be error:STATIC_TYPE_ERROR*/left;
+    right = right;
+    right = bot;
+
+    bot = /*warning:DOWN_CAST_COMPOSITE*/top;
+    bot = /*warning:DOWN_CAST_COMPOSITE*/left;
+    bot = /*warning:DOWN_CAST_COMPOSITE*/right;
+    bot = bot;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_instanceMethodVariance() {
     checkFile('''
-          class A { A(A x) {} }
-          class B extends A {
-            B() : super(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
-          }
-       ''');
-  });
+class A {}
+class B extends A {}
 
-  test('factory constructor downcast', () {
+class C {
+  A top(B x) => x;
+  B left(B x) => x;
+  A right(A x) => x;
+  B bot(A x) => x as B;
+}
+
+typedef T Function2<S, T>(S z);
+
+void main() {
+  C c = new C();
+  {
+    Function2<B, A> f;
+    f = c.top;
+    f = c.left;
+    f = c.right;
+    f = c.bot;
+  }
+  {
+    Function2<B, B> f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.top;
+    f = c.left;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.right;
+    f = c.bot;
+  }
+  {
+    Function2<A, A> f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.top;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.left;
+    f = c.right;
+    f = c.bot;
+  }
+  {
+    Function2<A, B> f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.top;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.left;
+    f = /*warning:DOWN_CAST_COMPOSITE*/c.right;
+    f = c.bot;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_intAndObject() {
+    checkFile('''
+typedef Object Top(int x);      // Top of the lattice
+typedef int Left(int x);        // Left branch
+typedef int Left2(int x);       // Left branch
+typedef Object Right(Object x); // Right branch
+typedef int Bot(Object x);      // Bottom of the lattice
+
+Object globalTop(int x) => x;
+int globalLeft(int x) => x;
+Object globalRight(Object x) => x;
+int bot_(Object x) => /*info:DOWN_CAST_IMPLICIT*/x;
+int globalBot(Object x) => x as int;
+
+void main() {
+  // Note: use locals so that only the static type is known, not the specific
+  // function declaration. (The checker can issue better errors for known
+  // declarations; a standalone sketch of the difference follows this test.)
+  var top = globalTop;
+  var left = globalLeft;
+  var right = globalRight;
+  var bot = globalBot;
+
+  { // Check typedef equality
+    Left f = left;
+    Left2 g = f;
+  }
+  {
+    Top f;
+    f = top;
+    f = left;
+    f = right;
+    f = bot;
+  }
+  {
+    Left f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/top;
+    f = left;
+    f = /*warning:DOWN_CAST_COMPOSITE*/right; // Should we reject this?
+    f = bot;
+  }
+  {
+    Right f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/top;
+    f = /*warning:DOWN_CAST_COMPOSITE*/left; // Should we reject this?
+    f = right;
+    f = bot;
+  }
+  {
+    Bot f;
+    f = /*warning:DOWN_CAST_COMPOSITE*/top;
+    f = /*warning:DOWN_CAST_COMPOSITE*/left;
+    f = /*warning:DOWN_CAST_COMPOSITE*/right;
+    f = bot;
+  }
+}
+''');
+  }
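A compressed illustration of the note at the top of this test, contrasting a statically known declaration with a local of the same type; it reuses the test's `Left`, `globalTop`, and `top` names, and the diagnostics cited in the comments come from this test and the analogous classes test earlier in the diff.

    typedef int Left(int x);            // Left branch of the lattice

    Object globalTop(int x) => x;       // Top of the lattice, a known declaration

    void main() {
      Left f;

      // With the declaration itself the checker can be precise: the analogous
      // assignment in the classes test is marked error:STATIC_TYPE_ERROR.
      f = globalTop;

      // Through a local only the type `Object Function(int)` is known, so the
      // same assignment is marked warning:DOWN_CAST_COMPOSITE in this test.
      var top = globalTop;
      f = top;
    }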
+
+  void test_functionTypingAndSubtyping_namedAndOptionalParameters() {
+    checkFile('''
+class A {}
+
+typedef A FR(A x);
+typedef A FO([A x]);
+typedef A FN({A x});
+typedef A FRR(A x, A y);
+typedef A FRO(A x, [A y]);
+typedef A FRN(A x, {A n});
+typedef A FOO([A x, A y]);
+typedef A FNN({A x, A y});
+typedef A FNNN({A z, A y, A x});
+
+void main() {
+   FR r;
+   FO o;
+   FN n;
+   FRR rr;
+   FRO ro;
+   FRN rn;
+   FOO oo;
+   FNN nn;
+   FNNN nnn;
+
+   r = r;
+   r = o;
+   r = /*error:INVALID_ASSIGNMENT*/n;
+   r = /*error:INVALID_ASSIGNMENT*/rr;
+   r = ro;
+   r = rn;
+   r = oo;
+   r = /*error:INVALID_ASSIGNMENT*/nn;
+   r = /*error:INVALID_ASSIGNMENT*/nnn;
+
+   o = /*warning:DOWN_CAST_COMPOSITE*/r;
+   o = o;
+   o = /*error:INVALID_ASSIGNMENT*/n;
+   o = /*error:INVALID_ASSIGNMENT*/rr;
+   o = /*error:INVALID_ASSIGNMENT*/ro;
+   o = /*error:INVALID_ASSIGNMENT*/rn;
+   o = oo;
+   o = /*error:INVALID_ASSIGNMENT*/nn;
+   o = /*error:INVALID_ASSIGNMENT*/nnn;
+
+   n = /*error:INVALID_ASSIGNMENT*/r;
+   n = /*error:INVALID_ASSIGNMENT*/o;
+   n = n;
+   n = /*error:INVALID_ASSIGNMENT*/rr;
+   n = /*error:INVALID_ASSIGNMENT*/ro;
+   n = /*error:INVALID_ASSIGNMENT*/rn;
+   n = /*error:INVALID_ASSIGNMENT*/oo;
+   n = nn;
+   n = nnn;
+
+   rr = /*error:INVALID_ASSIGNMENT*/r;
+   rr = /*error:INVALID_ASSIGNMENT*/o;
+   rr = /*error:INVALID_ASSIGNMENT*/n;
+   rr = rr;
+   rr = ro;
+   rr = /*error:INVALID_ASSIGNMENT*/rn;
+   rr = oo;
+   rr = /*error:INVALID_ASSIGNMENT*/nn;
+   rr = /*error:INVALID_ASSIGNMENT*/nnn;
+
+   ro = /*warning:DOWN_CAST_COMPOSITE*/r;
+   ro = /*error:INVALID_ASSIGNMENT*/o;
+   ro = /*error:INVALID_ASSIGNMENT*/n;
+   ro = /*warning:DOWN_CAST_COMPOSITE*/rr;
+   ro = ro;
+   ro = /*error:INVALID_ASSIGNMENT*/rn;
+   ro = oo;
+   ro = /*error:INVALID_ASSIGNMENT*/nn;
+   ro = /*error:INVALID_ASSIGNMENT*/nnn;
+
+   rn = /*warning:DOWN_CAST_COMPOSITE*/r;
+   rn = /*error:INVALID_ASSIGNMENT*/o;
+   rn = /*error:INVALID_ASSIGNMENT*/n;
+   rn = /*error:INVALID_ASSIGNMENT*/rr;
+   rn = /*error:INVALID_ASSIGNMENT*/ro;
+   rn = rn;
+   rn = /*error:INVALID_ASSIGNMENT*/oo;
+   rn = /*error:INVALID_ASSIGNMENT*/nn;
+   rn = /*error:INVALID_ASSIGNMENT*/nnn;
+
+   oo = /*warning:DOWN_CAST_COMPOSITE*/r;
+   oo = /*warning:DOWN_CAST_COMPOSITE*/o;
+   oo = /*error:INVALID_ASSIGNMENT*/n;
+   oo = /*warning:DOWN_CAST_COMPOSITE*/rr;
+   oo = /*warning:DOWN_CAST_COMPOSITE*/ro;
+   oo = /*error:INVALID_ASSIGNMENT*/rn;
+   oo = oo;
+   oo = /*error:INVALID_ASSIGNMENT*/nn;
+   oo = /*error:INVALID_ASSIGNMENT*/nnn;
+
+   nn = /*error:INVALID_ASSIGNMENT*/r;
+   nn = /*error:INVALID_ASSIGNMENT*/o;
+   nn = /*warning:DOWN_CAST_COMPOSITE*/n;
+   nn = /*error:INVALID_ASSIGNMENT*/rr;
+   nn = /*error:INVALID_ASSIGNMENT*/ro;
+   nn = /*error:INVALID_ASSIGNMENT*/rn;
+   nn = /*error:INVALID_ASSIGNMENT*/oo;
+   nn = nn;
+   nn = nnn;
+
+   nnn = /*error:INVALID_ASSIGNMENT*/r;
+   nnn = /*error:INVALID_ASSIGNMENT*/o;
+   nnn = /*warning:DOWN_CAST_COMPOSITE*/n;
+   nnn = /*error:INVALID_ASSIGNMENT*/rr;
+   nnn = /*error:INVALID_ASSIGNMENT*/ro;
+   nnn = /*error:INVALID_ASSIGNMENT*/rn;
+   nnn = /*error:INVALID_ASSIGNMENT*/oo;
+   nnn = /*warning:DOWN_CAST_COMPOSITE*/nn;
+   nnn = nnn;
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_objectsWithCallMethods() {
+    checkFile('''
+typedef int I2I(int x);
+typedef num N2N(num x);
+class A {
+   int call(int x) => x;
+}
+class B {
+   num call(num x) => x;
+}
+int i2i(int x) => x;
+num n2n(num x) => x;
+void main() {
+   {
+     I2I f;
+     f = new A();
+     f = /*error:INVALID_ASSIGNMENT*/new B();
+     f = i2i;
+     f = /*error:STATIC_TYPE_ERROR*/n2n;
+     f = /*warning:DOWN_CAST_COMPOSITE*/i2i as Object;
+     f = /*warning:DOWN_CAST_COMPOSITE*/n2n as Function;
+   }
+   {
+     N2N f;
+     f = /*error:INVALID_ASSIGNMENT*/new A();
+     f = new B();
+     f = /*error:STATIC_TYPE_ERROR*/i2i;
+     f = n2n;
+     f = /*warning:DOWN_CAST_COMPOSITE*/i2i as Object;
+     f = /*warning:DOWN_CAST_COMPOSITE*/n2n as Function;
+   }
+   {
+     A f;
+     f = new A();
+     f = /*error:INVALID_ASSIGNMENT*/new B();
+     f = /*error:INVALID_ASSIGNMENT*/i2i;
+     f = /*error:INVALID_ASSIGNMENT*/n2n;
+     f = /*info:DOWN_CAST_IMPLICIT*/i2i as Object;
+     f = /*info:DOWN_CAST_IMPLICIT*/n2n as Function;
+   }
+   {
+     B f;
+     f = /*error:INVALID_ASSIGNMENT*/new A();
+     f = new B();
+     f = /*error:INVALID_ASSIGNMENT*/i2i;
+     f = /*error:INVALID_ASSIGNMENT*/n2n;
+     f = /*info:DOWN_CAST_IMPLICIT*/i2i as Object;
+     f = /*info:DOWN_CAST_IMPLICIT*/n2n as Function;
+   }
+   {
+     Function f;
+     f = new A();
+     f = new B();
+     f = i2i;
+     f = n2n;
+     f = /*info:DOWN_CAST_IMPLICIT*/i2i as Object;
+     f = (n2n as Function);
+   }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_staticMethodVariance() {
+    checkFile('''
+class A {}
+class B extends A {}
+
+class C {
+  static A top(B x) => x;
+  static B left(B x) => x;
+  static A right(A x) => x;
+  static B bot(A x) => x as B;
+}
+
+typedef T Function2<S, T>(S z);
+
+void main() {
+  {
+    Function2<B, A> f;
+    f = C.top;
+    f = C.left;
+    f = C.right;
+    f = C.bot;
+  }
+  {
+    Function2<B, B> f;
+    f = /*error:STATIC_TYPE_ERROR*/C.top;
+    f = C.left;
+    f = /*error:STATIC_TYPE_ERROR*/C.right;
+    f = C.bot;
+  }
+  {
+    Function2<A, A> f;
+    f = /*error:STATIC_TYPE_ERROR*/C.top;
+    f = /*error:STATIC_TYPE_ERROR*/C.left;
+    f = C.right;
+    f = C.bot;
+  }
+  {
+    Function2<A, B> f;
+    f = /*error:STATIC_TYPE_ERROR*/C.top;
+    f = /*error:STATIC_TYPE_ERROR*/C.left;
+    f = /*error:STATIC_TYPE_ERROR*/C.right;
+    f = C.bot;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_subtypeOfUniversalType() {
+    checkFile('''
+void main() {
+  nonGenericFn(x) => null;
+  {
+    /*=R*/ f/*<P, R>*/(/*=P*/ p) => null;
+    /*=T*/ g/*<S, T>*/(/*=S*/ s) => null;
+
+    var local = f;
+    local = g; // valid
+
+    // Non-generic function cannot subtype a generic one.
+    local = /*error:INVALID_ASSIGNMENT*/(x) => null;
+    local = /*error:INVALID_ASSIGNMENT*/nonGenericFn;
+  }
+  {
+    Iterable/*<R>*/ f/*<P, R>*/(List/*<P>*/ p) => null;
+    List/*<T>*/ g/*<S, T>*/(Iterable/*<S>*/ s) => null;
+
+    var local = f;
+    local = g; // valid
+
+    var local2 = g;
+    local = local2;
+    local2 = /*error:STATIC_TYPE_ERROR*/f;
+    local2 = /*warning:DOWN_CAST_COMPOSITE*/local;
+
+    // Non-generic function cannot subtype a generic one.
+    local = /*error:INVALID_ASSIGNMENT*/(x) => null;
+    local = /*error:INVALID_ASSIGNMENT*/nonGenericFn;
+  }
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_uninferredClosure() {
+    checkFile('''
+typedef num Num2Num(num x);
+void main() {
+  Num2Num g = /*info:INFERRED_TYPE_CLOSURE,error:STATIC_TYPE_ERROR*/(int x) { return x; };
+  print(g(42));
+}
+''');
+  }
+
+  void test_functionTypingAndSubtyping_void() {
+    checkFile('''
+class A {
+  void bar() => null;
+  void foo() => bar(); // allowed
+}
+''');
+  }
+
+  void test_genericClassMethodOverride() {
+    checkFile('''
+class A {}
+class B extends A {}
+
+class Base<T extends B> {
+  T foo() => null;
+}
+
+class Derived<S extends A> extends Base<B> {
+  /*error:INVALID_METHOD_OVERRIDE*/S
+      /*error:INVALID_METHOD_OVERRIDE_RETURN_TYPE*/foo() => null;
+}
+
+class Derived2<S extends B> extends Base<B> {
+  S foo() => null;
+}
+''');
+  }
+
+  void test_genericFunctionWrongNumberOfArguments() {
     checkFile(r'''
-        class Animal {
-          Animal();
-          factory Animal.cat() => new Cat();
-        }
+/*=T*/ foo/*<T>*/(/*=T*/ x, /*=T*/ y) => x;
+/*=T*/ bar/*<T>*/({/*=T*/ x, /*=T*/ y}) => x;
 
-        class Cat extends Animal {}
+main() {
+  String x;
+  // resolving these shouldn't crash.
+  foo/*error:EXTRA_POSITIONAL_ARGUMENTS*/(1, 2, 3);
+  x = foo/*error:EXTRA_POSITIONAL_ARGUMENTS*/('1', '2', '3');
+  foo/*error:NOT_ENOUGH_REQUIRED_ARGUMENTS*/(1);
+  x = foo/*error:NOT_ENOUGH_REQUIRED_ARGUMENTS*/('1');
+  x = /*info:DYNAMIC_CAST*/foo/*error:EXTRA_POSITIONAL_ARGUMENTS*/(1, 2, 3);
+  x = /*info:DYNAMIC_CAST*/foo/*error:NOT_ENOUGH_REQUIRED_ARGUMENTS*/(1);
 
-        void main() {
-          Cat c = /*info:ASSIGNMENT_CAST*/new Animal.cat();
-          c = /*severe:STATIC_TYPE_ERROR*/new Animal();
-        }''');
-  });
+  // named arguments
+  bar(y: 1, x: 2, /*error:UNDEFINED_NAMED_PARAMETER*/z: 3);
+  x = bar(/*error:UNDEFINED_NAMED_PARAMETER*/z: '1', x: '2', y: '3');
+  bar(y: 1);
+  x = bar(x: '1', /*error:UNDEFINED_NAMED_PARAMETER*/z: 42);
+  x = /*info:DYNAMIC_CAST*/bar(y: 1, x: 2, /*error:UNDEFINED_NAMED_PARAMETER*/z: 3);
+  x = /*info:DYNAMIC_CAST*/bar(x: 1);
+}
+''');
+  }
 
-  test('field/field override', () {
+  void test_genericMethodOverride() {
     checkFile('''
-          class A {}
-          class B extends A {}
-          class C extends B {}
+class Future<T> {
+  /*=S*/ then/*<S>*/(/*=S*/ onValue(T t)) => null;
+}
 
-          class Base {
-            B f1;
-            B f2;
-            B f3;
-            B f4;
-          }
+class DerivedFuture<T> extends Future<T> {
+  /*=S*/ then/*<S>*/(/*=S*/ onValue(T t)) => null;
+}
 
-          class Child extends Base {
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/A f1; // invalid for getter
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/C f2; // invalid for setter
-            /*severe:INVALID_FIELD_OVERRIDE*/var f3;
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/dynamic f4;
-          }
+class DerivedFuture2<A> extends Future<A> {
+  /*=B*/ then/*<B>*/(/*=B*/ onValue(A a)) => null;
+}
 
-          class Child2 implements Base {
-            /*severe:INVALID_METHOD_OVERRIDE*/A f1; // invalid for getter
-            /*severe:INVALID_METHOD_OVERRIDE*/C f2; // invalid for setter
-            var f3;
-            /*severe:INVALID_METHOD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/dynamic f4;
-          }
-       ''');
-  });
+class DerivedFuture3<T> extends Future<T> {
+  /*=S*/ then/*<S>*/(Object onValue(T t)) => null;
+}
 
-  test('private override', () {
+class DerivedFuture4<A> extends Future<A> {
+  /*=B*/ then/*<B>*/(Object onValue(A a)) => null;
+}
+''');
+  }
+
+  void test_getterGetterOverride() {
+    checkFile('''
+class A {}
+class B extends A {}
+class C extends B {}
+
+abstract class Base {
+  B get f1;
+  B get f2;
+  B get f3;
+  B get f4;
+}
+
+class Child extends Base {
+  /*error:INVALID_METHOD_OVERRIDE*/A get f1 => null;
+  C get f2 => null;
+  get f3 => null;
+  /*error:INVALID_METHOD_OVERRIDE*/dynamic get f4 => null;
+}
+''');
+  }
+
+  void test_getterOverride_fuzzyArrows() {
+    checkFile('''
+typedef void ToVoid<T>(T x);
+
+class F {
+  ToVoid<dynamic> get f => null;
+  ToVoid<int> get g => null;
+}
+
+class G extends F {
+  ToVoid<int> get f => null;
+  /*error:INVALID_METHOD_OVERRIDE*/ToVoid<dynamic> get g => null;
+}
+
+class H implements F {
+  ToVoid<int> get f => null;
+  /*error:INVALID_METHOD_OVERRIDE*/ToVoid<dynamic> get g => null;
+}
+''');
+  }
+
+  void test_ifForDoWhileStatementsUseBooleanConversion() {
+    checkFile('''
+main() {
+  dynamic dyn = 42;
+  Object obj = 42;
+  int i = 42;
+  bool b = false;
+
+  if (b) {}
+  if (/*info:DYNAMIC_CAST*/dyn) {}
+  if (/*info:DOWN_CAST_IMPLICIT*/obj) {}
+  if (/*error:NON_BOOL_CONDITION*/i) {}
+
+  while (b) {}
+  while (/*info:DYNAMIC_CAST*/dyn) {}
+  while (/*info:DOWN_CAST_IMPLICIT*/obj) {}
+  while (/*error:NON_BOOL_CONDITION*/i) {}
+
+  do {} while (b);
+  do {} while (/*info:DYNAMIC_CAST*/dyn);
+  do {} while (/*info:DOWN_CAST_IMPLICIT*/obj);
+  do {} while (/*error:NON_BOOL_CONDITION*/i);
+
+  for (;b;) {}
+  for (;/*info:DYNAMIC_CAST*/dyn;) {}
+  for (;/*info:DOWN_CAST_IMPLICIT*/obj;) {}
+  for (;/*error:NON_BOOL_CONDITION*/i;) {}
+}
+''');
+  }
+
+  void test_implicitCasts() {
+    addFile('num n; int i = /*info:ASSIGNMENT_CAST*/n;');
+    check();
+    // TODO(jmesserly): should not be emitting the hint as well as the error.
+    // It is a "strong mode hint" however, so it will not be user visible.
     addFile(
-        '''
-          import 'main.dart' as main;
+        'num n; int i = /*info:ASSIGNMENT_CAST,error:INVALID_ASSIGNMENT*/n;');
+    check(implicitCasts: false);
+  }
 
-          class Base {
-            var f1;
-            var _f2;
-            var _f3;
-            get _f4 => null;
-
-            int _m1() => null;
-          }
-
-          class GrandChild extends main.Child {
-            /*severe:INVALID_FIELD_OVERRIDE*/var _f2;
-            /*severe:INVALID_FIELD_OVERRIDE*/var _f3;
-            var _f4;
-
-            /*severe:INVALID_METHOD_OVERRIDE*/String
-                /*warning:INVALID_METHOD_OVERRIDE_RETURN_TYPE*/_m1() => null;
-          }
-    ''',
-        name: '/helper.dart');
-    checkFile('''
-          import 'helper.dart' as helper;
-
-          class Child extends helper.Base {
-            /*severe:INVALID_FIELD_OVERRIDE*/var f1;
-            var _f2;
-            var _f4;
-
-            String _m1() => null;
-          }
+  void test_implicitDynamic_field() {
+    addFile(r'''
+class C {
+  var /*error:IMPLICIT_DYNAMIC_FIELD*/x0;
+  var /*error:IMPLICIT_DYNAMIC_FIELD*/x1 = (<dynamic>[])[0];
+  var /*error:IMPLICIT_DYNAMIC_FIELD*/x2,
+      x3 = 42,
+      /*error:IMPLICIT_DYNAMIC_FIELD*/x4;
+  dynamic y0;
+  dynamic y1 = (<dynamic>[])[0];
+}
     ''');
-  });
+    check(implicitDynamic: false);
+  }
 
-  test('getter/getter override', () {
-    checkFile('''
-          class A {}
-          class B extends A {}
-          class C extends B {}
+  void test_implicitDynamic_function() {
+    addFile(r'''
+/*=T*/ a/*<T>*/(/*=T*/ t) => t;
+/*=T*/ b/*<T>*/() => null;
 
-          abstract class Base {
-            B get f1;
-            B get f2;
-            B get f3;
-            B get f4;
-          }
+void main/*<S>*/() {
+  dynamic d;
+  int i;
+  /*error:IMPLICIT_DYNAMIC_FUNCTION*/a(d);
+  a(42);
+  /*error:IMPLICIT_DYNAMIC_FUNCTION*/b();
+  d = /*error:IMPLICIT_DYNAMIC_FUNCTION*/b();
+  i = b();
 
-          class Child extends Base {
-            /*severe:INVALID_METHOD_OVERRIDE*/A get f1 => null;
-            C get f2 => null;
-            get f3 => null;
-            /*severe:INVALID_METHOD_OVERRIDE*/dynamic get f4 => null;
-          }
-       ''');
-  });
+  void f/*<T>*/(/*=T*/ t) {};
+  /*=T*/ g/*<T>*/() => null;
 
-  test('field/getter override', () {
-    checkFile('''
-          class A {}
-          class B extends A {}
-          class C extends B {}
+  /*error:IMPLICIT_DYNAMIC_FUNCTION*/f(d);
+  f(42);
+  /*error:IMPLICIT_DYNAMIC_FUNCTION*/g();
+  d = /*error:IMPLICIT_DYNAMIC_FUNCTION*/g();
+  i = g();
 
-          abstract class Base {
-            B f1;
-            B f2;
-            B f3;
-            B f4;
-          }
-
-          class Child extends Base {
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/A get f1 => null;
-            /*severe:INVALID_FIELD_OVERRIDE*/C get f2 => null;
-            /*severe:INVALID_FIELD_OVERRIDE*/get f3 => null;
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/dynamic get f4 => null;
-          }
-
-          class /*warning:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_FOUR*/Child2 implements Base {
-            /*severe:INVALID_METHOD_OVERRIDE*/A get f1 => null;
-            C get f2 => null;
-            get f3 => null;
-            /*severe:INVALID_METHOD_OVERRIDE*/dynamic get f4 => null;
-          }
-       ''');
-  });
-
-  test('setter/setter override', () {
-    checkFile('''
-          class A {}
-          class B extends A {}
-          class C extends B {}
-
-          abstract class Base {
-            void set f1(B value);
-            void set f2(B value);
-            void set f3(B value);
-            void set f4(B value);
-            void set f5(B value);
-          }
-
-          class Child extends Base {
-            void set f1(A value) {}
-            /*severe:INVALID_METHOD_OVERRIDE*/void set f2(C value) {}
-            void set f3(value) {}
-            /*severe:INVALID_METHOD_OVERRIDE*/void set f4(dynamic value) {}
-            set f5(B value) {}
-          }
-       ''');
-  });
-
-  test('field/setter override', () {
-    checkFile('''
-          class A {}
-          class B extends A {}
-          class C extends B {}
-
-          class Base {
-            B f1;
-            B f2;
-            B f3;
-            B f4;
-            B f5;
-          }
-
-          class Child extends Base {
-            /*severe:INVALID_FIELD_OVERRIDE*/B get f1 => null;
-            /*severe:INVALID_FIELD_OVERRIDE*/B get f2 => null;
-            /*severe:INVALID_FIELD_OVERRIDE*/B get f3 => null;
-            /*severe:INVALID_FIELD_OVERRIDE*/B get f4 => null;
-            /*severe:INVALID_FIELD_OVERRIDE*/B get f5 => null;
-
-            /*severe:INVALID_FIELD_OVERRIDE*/void set f1(A value) {}
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/void set f2(C value) {}
-            /*severe:INVALID_FIELD_OVERRIDE*/void set f3(value) {}
-            /*severe:INVALID_FIELD_OVERRIDE,severe:INVALID_METHOD_OVERRIDE*/void set f4(dynamic value) {}
-            /*severe:INVALID_FIELD_OVERRIDE*/set f5(B value) {}
-          }
-
-          class Child2 implements Base {
-            B get f1 => null;
-            B get f2 => null;
-            B get f3 => null;
-            B get f4 => null;
-            B get f5 => null;
-
-            void set f1(A value) {}
-            /*severe:INVALID_METHOD_OVERRIDE*/void set f2(C value) {}
-            void set f3(value) {}
-            /*severe:INVALID_METHOD_OVERRIDE*/void set f4(dynamic value) {}
-            set f5(B value) {}
-          }
-       ''');
-  });
-
-  test('method override', () {
-    checkFile('''
-          class A {}
-          class B extends A {}
-          class C extends B {}
-
-          class Base {
-            B m1(B a) => null;
-            B m2(B a) => null;
-            B m3(B a) => null;
-            B m4(B a) => null;
-            B m5(B a) => null;
-            B m6(B a) => null;
-          }
-
-          class Child extends Base {
-            /*severe:INVALID_METHOD_OVERRIDE*/A m1(A value) => null;
-            /*severe:INVALID_METHOD_OVERRIDE*/C m2(C value) => null;
-            /*severe:INVALID_METHOD_OVERRIDE*/A m3(C value) => null;
-            C m4(A value) => null;
-            m5(value) => null;
-            /*severe:INVALID_METHOD_OVERRIDE*/dynamic m6(dynamic value) => null;
-          }
-       ''');
-  });
-
-  test('method override, fuzzy arrows', () {
-    checkFile('''
-      abstract class A {
-        bool operator ==(Object object);
-      }
-
-      class B implements A {}
-
-
-      class F {
-        void f(x) {}
-        void g(int x) {}
-      }
-
-      class G extends F {
-        /*severe:INVALID_METHOD_OVERRIDE*/void f(int x) {}
-        void g(dynamic x) {}
-      }
-
-      class H implements F {
-        /*severe:INVALID_METHOD_OVERRIDE*/void f(int x) {}
-        void g(dynamic x) {}
-      }
-
-      ''');
-  });
-
-  test('getter override, fuzzy arrows', () {
-    checkFile('''
-      typedef void ToVoid<T>(T x);
-      class F {
-        ToVoid<dynamic> get f => null;
-        ToVoid<int> get g => null;
-      }
-
-      class G extends F {
-        ToVoid<int> get f => null;
-        /*severe:INVALID_METHOD_OVERRIDE*/ToVoid<dynamic> get g => null;
-      }
-
-      class H implements F {
-        ToVoid<int> get f => null;
-        /*severe:INVALID_METHOD_OVERRIDE*/ToVoid<dynamic> get g => null;
-      }
-       ''');
-  });
-
-  test('setter override, fuzzy arrows', () {
-    checkFile('''
-      typedef void ToVoid<T>(T x);
-      class F {
-        void set f(ToVoid<dynamic> x) {}
-        void set g(ToVoid<int> x) {}
-        void set h(dynamic x) {}
-        void set i(int x) {}
-     }
-
-      class G extends F {
-        /*severe:INVALID_METHOD_OVERRIDE*/void set f(ToVoid<int> x) {}
-        void set g(ToVoid<dynamic> x) {}
-        void set h(int x) {}
-        /*severe:INVALID_METHOD_OVERRIDE*/void set i(dynamic x) {}
-      }
-
-      class H implements F {
-        /*severe:INVALID_METHOD_OVERRIDE*/void set f(ToVoid<int> x) {}
-        void set g(ToVoid<dynamic> x) {}
-        void set h(int x) {}
-        /*severe:INVALID_METHOD_OVERRIDE*/void set i(dynamic x) {}
-      }
-       ''');
-  });
-
-  test('field override, fuzzy arrows', () {
-    checkFile('''
-      typedef void ToVoid<T>(T x);
-      class F {
-        final ToVoid<dynamic> f = null;
-        final ToVoid<int> g = null;
-      }
-
-      class G extends F {
-        /*severe:INVALID_FIELD_OVERRIDE*/final ToVoid<int> f = null;
-        /*severe:INVALID_FIELD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE*/final ToVoid<dynamic> g = null;
-      }
-
-      class H implements F {
-        final ToVoid<int> f = null;
-        /*severe:INVALID_METHOD_OVERRIDE*/final ToVoid<dynamic> g = null;
-      }
-       ''');
-  });
-
-  test('generic class method override', () {
-    checkFile('''
-          class A {}
-          class B extends A {}
-
-          class Base<T extends B> {
-            T foo() => null;
-          }
-
-          class Derived<S extends A> extends Base<B> {
-            /*severe:INVALID_METHOD_OVERRIDE*/S
-                /*warning:INVALID_METHOD_OVERRIDE_RETURN_TYPE*/foo() => null;
-          }
-
-          class Derived2<S extends B> extends Base<B> {
-            S foo() => null;
-          }
-       ''');
-  });
-
-  test('generic method override', () {
-    checkFile('''
-          class Future<T> {
-            /*=S*/ then/*<S>*/(/*=S*/ onValue(T t)) => null;
-          }
-
-          class DerivedFuture<T> extends Future<T> {
-            /*=S*/ then/*<S>*/(/*=S*/ onValue(T t)) => null;
-          }
-
-          class DerivedFuture2<A> extends Future<A> {
-            /*=B*/ then/*<B>*/(/*=B*/ onValue(A a)) => null;
-          }
-
-          class DerivedFuture3<T> extends Future<T> {
-            /*=S*/ then/*<S>*/(Object onValue(T t)) => null;
-          }
-
-          class DerivedFuture4<A> extends Future<A> {
-            /*=B*/ then/*<B>*/(Object onValue(A a)) => null;
-          }
-      ''');
-  });
-
-  test('generic function wrong number of arguments', () {
-    checkFile(r'''
-          /*=T*/ foo/*<T>*/(/*=T*/ x, /*=T*/ y) => x;
-          /*=T*/ bar/*<T>*/({/*=T*/ x, /*=T*/ y}) => x;
-
-          main() {
-            String x;
-            // resolving these shouldn't crash.
-            foo/*warning:EXTRA_POSITIONAL_ARGUMENTS*/(1, 2, 3);
-            x = foo/*warning:EXTRA_POSITIONAL_ARGUMENTS*/('1', '2', '3');
-            foo/*warning:NOT_ENOUGH_REQUIRED_ARGUMENTS*/(1);
-            x = foo/*warning:NOT_ENOUGH_REQUIRED_ARGUMENTS*/('1');
-            x = /*info:DYNAMIC_CAST*/foo/*warning:EXTRA_POSITIONAL_ARGUMENTS*/(1, 2, 3);
-            x = /*info:DYNAMIC_CAST*/foo/*warning:NOT_ENOUGH_REQUIRED_ARGUMENTS*/(1);
-
-            // named arguments
-            bar(y: 1, x: 2, /*warning:UNDEFINED_NAMED_PARAMETER*/z: 3);
-            x = bar(/*warning:UNDEFINED_NAMED_PARAMETER*/z: '1', x: '2', y: '3');
-            bar(y: 1);
-            x = bar(x: '1', /*warning:UNDEFINED_NAMED_PARAMETER*/z: 42);
-            x = /*info:DYNAMIC_CAST*/bar(y: 1, x: 2, /*warning:UNDEFINED_NAMED_PARAMETER*/z: 3);
-            x = /*info:DYNAMIC_CAST*/bar(x: 1);
-          }
-      ''');
-  });
-
-  test('type promotion from dynamic', () {
-    checkFile(r'''
-          f() {
-            dynamic x;
-            if (x is int) {
-              int y = x;
-              String z = /*warning:INVALID_ASSIGNMENT*/x;
-            }
-          }
-          g() {
-            Object x;
-            if (x is int) {
-              int y = x;
-              String z = /*warning:INVALID_ASSIGNMENT*/x;
-            }
-          }
+  /*error:IMPLICIT_DYNAMIC_INVOKE*/(/*<T>*/(/*=T*/ t) => t)(d);
+  (/*<T>*/(/*=T*/ t) => t)(42);
+  (/*<T>*/() => null as dynamic/*=T*/)/*<int>*/();
+}
     ''');
-  });
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_listLiteral() {
+    addFile(r'''
 
-  test('unary operators', () {
+var l0 = /*error:IMPLICIT_DYNAMIC_LIST_LITERAL*/[];
+List l1 = /*error:IMPLICIT_DYNAMIC_LIST_LITERAL*/[];
+List<dynamic> l2 = /*error:IMPLICIT_DYNAMIC_LIST_LITERAL*/[];
+dynamic d = 42;
+var l3 = /*error:IMPLICIT_DYNAMIC_LIST_LITERAL*/[d, d];
+
+var l4 = <dynamic>[];
+var l5 = <int>[];
+List<int> l6 = /*info:INFERRED_TYPE_LITERAL*/[];
+var l7 = /*info:INFERRED_TYPE_LITERAL*/[42];
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_mapLiteral() {
+    addFile(r'''
+var m0 = /*error:IMPLICIT_DYNAMIC_MAP_LITERAL*/{};
+Map m1 = /*error:IMPLICIT_DYNAMIC_MAP_LITERAL*/{};
+Map<dynamic, dynamic> m2 = /*error:IMPLICIT_DYNAMIC_MAP_LITERAL*/{};
+dynamic d = 42;
+var m3 = /*error:IMPLICIT_DYNAMIC_MAP_LITERAL*/{d: d};
+var m4 = /*info:INFERRED_TYPE_LITERAL,error:IMPLICIT_DYNAMIC_MAP_LITERAL*/{'x': d, 'y': d};
+var m5 = /*info:INFERRED_TYPE_LITERAL,error:IMPLICIT_DYNAMIC_MAP_LITERAL*/{d: 'x'};
+
+var m6 = <dynamic, dynamic>{};
+var m7 = <String, String>{};
+Map<String, String> m8 = /*info:INFERRED_TYPE_LITERAL*/{};
+var m9 = /*info:INFERRED_TYPE_LITERAL*/{'hi': 'there'};
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_method() {
+    addFile(r'''
+class C {
+  /*=T*/ m/*<T>*/(/*=T*/ s) => s;
+  /*=T*/ n/*<T>*/() => null;
+}
+class D<E> {
+  /*=T*/ m/*<T>*/(/*=T*/ s) => s;
+  /*=T*/ n/*<T>*/() => null;
+}
+void f() {
+  dynamic d;
+  int i;
+  new C()./*error:IMPLICIT_DYNAMIC_METHOD*/m(d);
+  new C().m(42);
+  new C()./*error:IMPLICIT_DYNAMIC_METHOD*/n();
+  d = new C()./*error:IMPLICIT_DYNAMIC_METHOD*/n();
+  i = new C().n();
+
+  new D<int>()./*error:IMPLICIT_DYNAMIC_METHOD*/m(d);
+  new D<int>().m(42);
+  new D<int>()./*error:IMPLICIT_DYNAMIC_METHOD*/n();
+  d = new D<int>()./*error:IMPLICIT_DYNAMIC_METHOD*/n();
+  i = new D<int>().n();
+}
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_parameter() {
+    addFile(r'''
+const dynamic DYNAMIC_VALUE = 42;
+
+// simple formal
+void f0(/*error:IMPLICIT_DYNAMIC_PARAMETER*/x) {}
+void f1(dynamic x) {}
+
+// default formal
+void df0([/*error:IMPLICIT_DYNAMIC_PARAMETER*/x = DYNAMIC_VALUE]) {}
+void df1([dynamic x = DYNAMIC_VALUE]) {}
+
+// https://github.com/dart-lang/sdk/issues/25794
+void df2([/*error:IMPLICIT_DYNAMIC_PARAMETER*/x = 42]) {}
+
+// default formal (named)
+void nf0({/*error:IMPLICIT_DYNAMIC_PARAMETER*/x: DYNAMIC_VALUE}) {}
+void nf1({dynamic x: DYNAMIC_VALUE}) {}
+
+// https://github.com/dart-lang/sdk/issues/25794
+void nf2({/*error:IMPLICIT_DYNAMIC_PARAMETER*/x: 42}) {}
+
+// field formal
+class C {
+  var /*error:IMPLICIT_DYNAMIC_FIELD*/x;
+  C(this.x);
+}
+
+// function typed formal
+void ftf0(void x(/*error:IMPLICIT_DYNAMIC_PARAMETER*/y)) {}
+void ftf1(void x(int y)) {}
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_return() {
+    addFile(r'''
+// function
+/*error:IMPLICIT_DYNAMIC_RETURN*/f0() {}
+dynamic f1() { return 42; }
+
+// nested function
+void main() {
+  /*error:IMPLICIT_DYNAMIC_RETURN*/g0() {}
+  dynamic g1() { return 42; }
+}
+
+// methods
+class B {
+  int m1() => 42;
+}
+class C extends B {
+  /*error:IMPLICIT_DYNAMIC_RETURN*/m0() => 123;
+  m1() => 123;
+  dynamic m2() => 'hi';
+}
+
+// accessors
+set x(int value) {}
+/*error:IMPLICIT_DYNAMIC_RETURN*/get y0 => 42;
+dynamic get y1 => 42;
+
+// function typed formals
+void ftf0(/*error:IMPLICIT_DYNAMIC_RETURN*/f(int x)) {}
+void ftf1(dynamic f(int x)) {}
+
+// function expressions
+var fe0 = (int x) => x as dynamic;
+var fe1 = (int x) => x;
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_type() {
+    addFile(r'''
+class C<T> {}
+class M1<T extends /*error:IMPLICIT_DYNAMIC_TYPE*/List> {}
+class M2<T> {}
+class I<T> {}
+class D<T, S> extends /*error:IMPLICIT_DYNAMIC_TYPE*/C
+    with M1, /*error:IMPLICIT_DYNAMIC_TYPE*/M2
+    implements /*error:IMPLICIT_DYNAMIC_TYPE*/I {}
+
+C f(D d) {
+  D x = new /*error:IMPLICIT_DYNAMIC_TYPE*/D();
+  D<int, dynamic> y = /*info:INFERRED_TYPE_ALLOCATION*/new /*error:IMPLICIT_DYNAMIC_TYPE*/D();
+  D<dynamic, int> z = /*info:INFERRED_TYPE_ALLOCATION*/new /*error:IMPLICIT_DYNAMIC_TYPE*/D();
+  return new /*error:IMPLICIT_DYNAMIC_TYPE*/C();
+}
+
+class A<T extends num> {}
+class N1<T extends List<int>> {}
+class N2<T extends Object> {}
+class J<T extends Object> {}
+class B<T extends Object> extends A with N1, N2 implements J {}
+A g(B b) {
+  B y = /*info:INFERRED_TYPE_ALLOCATION*/new B();
+  return /*info:INFERRED_TYPE_ALLOCATION*/new A();
+}
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_implicitDynamic_variable() {
+    addFile(r'''
+var /*error:IMPLICIT_DYNAMIC_VARIABLE*/x0;
+var /*error:IMPLICIT_DYNAMIC_VARIABLE*/x1 = (<dynamic>[])[0];
+var /*error:IMPLICIT_DYNAMIC_VARIABLE*/x2,
+    x3 = 42,
+    /*error:IMPLICIT_DYNAMIC_VARIABLE*/x4;
+dynamic y0;
+dynamic y1 = (<dynamic>[])[0];
+    ''');
+    check(implicitDynamic: false);
+  }
+
+  void test_invalidOverrides_baseClassOverrideToChildInterface() {
     checkFile('''
-      class A {
-        A operator ~() => null;
-        A operator +(int x) => null;
-        A operator -(int x) => null;
-        A operator -() => null;
-      }
+class A {}
+class B {}
 
-      foo() => new A();
+abstract class I {
+    m(A a);
+}
 
-      test() {
-        A a = new A();
-        var c = foo();
-        dynamic d;
+class Base {
+    m(B a) {}
+}
 
-        ~a;
-        (/*info:DYNAMIC_INVOKE*/~d);
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base implements I {}
+''');
+  }
 
-        !/*warning:NON_BOOL_NEGATION_EXPRESSION*/a;
-        !/*info:DYNAMIC_CAST*/d;
-
-        -a;
-        (/*info:DYNAMIC_INVOKE*/-d);
-
-        ++a;
-        --a;
-        (/*info:DYNAMIC_INVOKE*/++d);
-        (/*info:DYNAMIC_INVOKE*/--d);
-
-        a++;
-        a--;
-        (/*info:DYNAMIC_INVOKE*/d++);
-        (/*info:DYNAMIC_INVOKE*/d--);
-      }''');
-  });
-
-  test('binary and index operators', () {
+  void test_invalidOverrides_childOverride() {
     checkFile('''
-          class A {
-            A operator *(B b) => null;
-            A operator /(B b) => null;
-            A operator ~/(B b) => null;
-            A operator %(B b) => null;
-            A operator +(B b) => null;
-            A operator -(B b) => null;
-            A operator <<(B b) => null;
-            A operator >>(B b) => null;
-            A operator &(B b) => null;
-            A operator ^(B b) => null;
-            A operator |(B b) => null;
-            A operator[](B b) => null;
-          }
+class A {}
+class B {}
 
-          class B {
-            A operator -(B b) => null;
-          }
+class Base {
+    A f;
+}
 
-          foo() => new A();
+class T1 extends Base {
+  /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, error:INVALID_FIELD_OVERRIDE, error:INVALID_METHOD_OVERRIDE*/B get
+      /*error:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f => null;
+}
 
-          test() {
-            A a = new A();
-            B b = new B();
-            var c = foo();
-            a = a * b;
-            a = a * /*info:DYNAMIC_CAST*/c;
-            a = a / b;
-            a = a ~/ b;
-            a = a % b;
-            a = a + b;
-            a = a + /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/a;
-            a = a - b;
-            b = /*warning:INVALID_ASSIGNMENT*/b - b;
-            a = a << b;
-            a = a >> b;
-            a = a & b;
-            a = a ^ b;
-            a = a | b;
-            c = (/*info:DYNAMIC_INVOKE*/c + b);
+class T2 extends Base {
+  /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, error:INVALID_FIELD_OVERRIDE, error:INVALID_METHOD_OVERRIDE*/set f(
+      /*error:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/B b) => null;
+}
 
-            String x = 'hello';
-            int y = 42;
-            x = x + x;
-            x = x + /*info:DYNAMIC_CAST*/c;
-            x = x + /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/y;
+class T3 extends Base {
+  /*error:INVALID_FIELD_OVERRIDE, error:INVALID_METHOD_OVERRIDE*/final B
+      /*warning:FINAL_NOT_INITIALIZED, error:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f;
+}
+class T4 extends Base {
+  // two: one for the getter, one for the setter.
+  /*error:INVALID_FIELD_OVERRIDE, error:INVALID_METHOD_OVERRIDE, error:INVALID_METHOD_OVERRIDE*/B
+      /*error:INVALID_GETTER_OVERRIDE_RETURN_TYPE, error:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/f;
+}
 
-            bool p = true;
-            p = p && p;
-            p = p && /*info:DYNAMIC_CAST*/c;
-            p = (/*info:DYNAMIC_CAST*/c) && p;
-            p = (/*info:DYNAMIC_CAST*/c) && /*info:DYNAMIC_CAST*/c;
-            p = /*warning:NON_BOOL_OPERAND*/y && p;
-            p = c == y;
+class /*error:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/T5 implements Base {
+  /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, error:INVALID_METHOD_OVERRIDE*/B get
+      /*error:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f => null;
+}
 
-            a = a[b];
-            a = a[/*info:DYNAMIC_CAST*/c];
-            c = (/*info:DYNAMIC_INVOKE*/c[b]);
-            a[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/y];
-          }
-       ''');
-  });
+class /*error:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/T6 implements Base {
+  /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, error:INVALID_METHOD_OVERRIDE*/set f(
+      /*error:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/B b) => null;
+}
 
-  test('null coalescing operator', () {
+class /*error:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/T7 implements Base {
+  /*error:INVALID_METHOD_OVERRIDE*/final B
+      /*error:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f = null;
+}
+class T8 implements Base {
+  // two: one for the getter, one for the setter.
+  /*error:INVALID_METHOD_OVERRIDE, error:INVALID_METHOD_OVERRIDE*/B
+      /*error:INVALID_GETTER_OVERRIDE_RETURN_TYPE, error:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/f;
+}
+''');
+  }
+
+  void test_invalidOverrides_childOverride2() {
     checkFile('''
-          class A {}
-          class C<T> {}
-          main() {
-            A a, b;
-            a ??= new A();
-            b = b ?? new A();
+class A {}
+class B {}
 
-            // downwards inference
-            C<int> c, d;
-            c ??= /*info:INFERRED_TYPE_ALLOCATION*/new C();
-            d = d ?? /*info:INFERRED_TYPE_ALLOCATION*/new C();
-          }
-       ''');
-  });
+class Base {
+    m(A a) {}
+}
 
-  test('compound assignments', () {
+class Test extends Base {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+        /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
+
+  void test_invalidOverrides_classOverrideOfInterface() {
     checkFile('''
-          class A {
-            A operator *(B b) => null;
-            A operator /(B b) => null;
-            A operator ~/(B b) => null;
-            A operator %(B b) => null;
-            A operator +(B b) => null;
-            A operator -(B b) => null;
-            A operator <<(B b) => null;
-            A operator >>(B b) => null;
-            A operator &(B b) => null;
-            A operator ^(B b) => null;
-            A operator |(B b) => null;
-            D operator [](B index) => null;
-            void operator []=(B index, D value) => null;
-          }
+class A {}
+class B {}
 
-          class B {
-            A operator -(B b) => null;
-          }
+abstract class I {
+    m(A a);
+}
 
-          class D {
-            D operator +(D d) => null;
-          }
+class T1 implements I {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
 
-          foo() => new A();
-
-          test() {
-            int x = 0;
-            x += 5;
-            /*severe:STATIC_TYPE_ERROR*/x += 3.14;
-
-            double y = 0.0;
-            y += 5;
-            y += 3.14;
-
-            num z = 0;
-            z += 5;
-            z += 3.14;
-
-            x = /*info:DOWN_CAST_IMPLICIT*/x + z;
-            x += /*info:DOWN_CAST_IMPLICIT*/z;
-            y = y + z;
-            y += z;
-
-            dynamic w = 42;
-            x += /*info:DYNAMIC_CAST*/w;
-            y += /*info:DYNAMIC_CAST*/w;
-            z += /*info:DYNAMIC_CAST*/w;
-
-            A a = new A();
-            B b = new B();
-            var c = foo();
-            a = a * b;
-            a *= b;
-            a *= /*info:DYNAMIC_CAST*/c;
-            a /= b;
-            a ~/= b;
-            a %= b;
-            a += b;
-            a += /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/a;
-            a -= b;
-            /*severe:STATIC_TYPE_ERROR*/b -= /*warning:INVALID_ASSIGNMENT*/b;
-            a <<= b;
-            a >>= b;
-            a &= b;
-            a ^= b;
-            a |= b;
-            /*info:DYNAMIC_INVOKE*/c += b;
-
-            var d = new D();
-            a[b] += d;
-            a[/*info:DYNAMIC_CAST*/c] += d;
-            a[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/z] += d;
-            a[b] += /*info:DYNAMIC_CAST*/c;
-            a[b] += /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/z;
-            /*info:DYNAMIC_INVOKE,info:DYNAMIC_INVOKE*/c[b] += d;
-          }
-       ''');
-  });
-
-  test('super call placement', () {
+  void test_invalidOverrides_doubleOverride() {
     checkFile('''
-          class Base {
-            var x;
-            Base() : x = print('Base.1') { print('Base.2'); }
-          }
+class A {}
+class B {}
 
-          class Derived extends Base {
-            var y, z;
-            Derived()
-                : y = print('Derived.1'),
-                  /*severe:INVALID_SUPER_INVOCATION*/super(),
-                  z = print('Derived.2') {
-              print('Derived.3');
-            }
-          }
+class Grandparent {
+    m(A a) {}
+}
+class Parent extends Grandparent {
+    m(A a) {}
+}
 
-          class Valid extends Base {
-            var y, z;
-            Valid()
-                : y = print('Valid.1'),
-                  z = print('Valid.2'),
-                  super() {
-              print('Valid.3');
-            }
-          }
+class Test extends Parent {
+    // Reported only once
+    /*error:INVALID_METHOD_OVERRIDE*/m(
+        /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
 
-          class AlsoValid extends Base {
-            AlsoValid() : super();
-          }
-
-          main() => new Derived();
-       ''');
-  });
-
-  test('for loop variable', () {
+  void test_invalidOverrides_doubleOverride2() {
     checkFile('''
-          foo() {
-            for (int i = 0; i < 10; i++) {
-              i = /*warning:INVALID_ASSIGNMENT*/"hi";
-            }
-          }
-          bar() {
-            for (var i = 0; i < 10; i++) {
-              int j = i + 1;
-            }
-          }
-        ''');
-  });
+class A {}
+class B {}
 
-  test('loadLibrary', () {
-    addFile('''library lib1;''', name: '/lib1.dart');
-    checkFile(r'''
-        import 'lib1.dart' deferred as lib1;
-        import 'dart:async' show Future;
-        main() {
-          Future f = lib1.loadLibrary();
-        }''');
-  });
+class Grandparent {
+    m(A a) {}
+}
+class Parent extends Grandparent {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
 
-  group('invalid overrides', () {
-    test('child override', () {
-      checkFile('''
-            class A {}
-            class B {}
+class Test extends Parent {
+    m(B a) {}
+}
+''');
+  }
 
-            class Base {
-                A f;
-            }
+  void test_invalidOverrides_grandChildOverride() {
+    checkFile('''
+class A {}
+class B {}
 
-            class T1 extends Base {
-              /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, severe:INVALID_FIELD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE*/B get
-                  /*warning:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f => null;
-            }
+class Grandparent {
+    m(A a) {}
+    int x;
+}
+class Parent extends Grandparent {
+}
 
-            class T2 extends Base {
-              /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, severe:INVALID_FIELD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE*/set f(
-                  /*warning:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/B b) => null;
-            }
+class Test extends Parent {
+    /*error:INVALID_METHOD_OVERRIDE*/m(
+          /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+    /*error:INVALID_FIELD_OVERRIDE*/int x;
+}
+''');
+  }
 
-            class T3 extends Base {
-              /*severe:INVALID_FIELD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE*/final B
-                  /*warning:FINAL_NOT_INITIALIZED, warning:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f;
-            }
-            class T4 extends Base {
-              // two: one for the getter one for the setter.
-              /*severe:INVALID_FIELD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE*/B
-                  /*warning:INVALID_GETTER_OVERRIDE_RETURN_TYPE, warning:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/f;
-            }
+  void test_invalidOverrides_mixinOverrideOfInterface() {
+    checkFile('''
+class A {}
+class B {}
 
-            class /*warning:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/T5 implements Base {
-              /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, severe:INVALID_METHOD_OVERRIDE*/B get
-                  /*warning:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f => null;
-            }
+abstract class I {
+    m(A a);
+}
 
-            class /*warning:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/T6 implements Base {
-              /*warning:MISMATCHED_GETTER_AND_SETTER_TYPES_FROM_SUPERTYPE, severe:INVALID_METHOD_OVERRIDE*/set f(
-                  /*warning:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/B b) => null;
-            }
+class M {
+    m(B a) {}
+}
 
-            class /*warning:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/T7 implements Base {
-              /*severe:INVALID_METHOD_OVERRIDE*/final B
-                  /*warning:INVALID_GETTER_OVERRIDE_RETURN_TYPE*/f = null;
-            }
-            class T8 implements Base {
-              // two: one for the getter one for the setter.
-              /*severe:INVALID_METHOD_OVERRIDE, severe:INVALID_METHOD_OVERRIDE*/B
-                  /*warning:INVALID_GETTER_OVERRIDE_RETURN_TYPE, warning:INVALID_SETTER_OVERRIDE_NORMAL_PARAM_TYPE*/f;
-            }
-         ''');
-    });
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    extends Object with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M
+    implements I {}
+''');
+  }
 
-    test('child override 2', () {
-      checkFile('''
-            class A {}
-            class B {}
+  void test_invalidOverrides_mixinOverrideToBase() {
+    checkFile('''
+class A {}
+class B {}
 
-            class Base {
-                m(A a) {}
-            }
+class Base {
+    m(A a) {}
+    int x;
+}
 
-            class Test extends Base {
-              /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-            }
-         ''');
-    });
-    test('grandchild override', () {
-      checkFile('''
-            class A {}
-            class B {}
+class M1 {
+    m(B a) {}
+}
 
-            class Grandparent {
-                m(A a) {}
-                int x;
-            }
-            class Parent extends Grandparent {
-            }
+class M2 {
+    int x;
+}
 
-            class Test extends Parent {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                      /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-                /*severe:INVALID_FIELD_OVERRIDE*/int x;
-            }
-         ''');
-    });
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
+    with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M1 {}
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T2 extends Base
+    with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M1, /*error:INVALID_FIELD_OVERRIDE*/M2 {}
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T3 extends Base
+    with /*error:INVALID_FIELD_OVERRIDE*/M2, /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M1 {}
+''');
+  }
 
-    test('double override', () {
-      checkFile('''
-            class A {}
-            class B {}
+  void test_invalidOverrides_mixinOverrideToMixin() {
+    checkFile('''
+class A {}
+class B {}
 
-            class Grandparent {
-                m(A a) {}
-            }
-            class Parent extends Grandparent {
-                m(A a) {}
-            }
+class Base {
+}
 
-            class Test extends Parent {
-                // Reported only once
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-            }
-         ''');
-    });
+class M1 {
+    m(B a) {}
+    int x;
+}
 
-    test('double override 2', () {
-      checkFile('''
-            class A {}
-            class B {}
+class M2 {
+    m(A a) {}
+    int x;
+}
 
-            class Grandparent {
-                m(A a) {}
-            }
-            class Parent extends Grandparent {
-              /*severe:INVALID_METHOD_OVERRIDE*/m(
-                  /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-            }
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
+    with M1,
+    /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN,error:INVALID_FIELD_OVERRIDE*/M2 {}
+''');
+  }
 
-            class Test extends Parent {
-                m(B a) {}
-            }
-         ''');
-    });
-
-    test('mixin override to base', () {
-      checkFile('''
-            class A {}
-            class B {}
-
-            class Base {
-                m(A a) {}
-                int x;
-            }
-
-            class M1 {
-                m(B a) {}
-            }
-
-            class M2 {
-                int x;
-            }
-
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
-                with /*severe:INVALID_METHOD_OVERRIDE*/M1 {}
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T2 extends Base
-                with /*severe:INVALID_METHOD_OVERRIDE*/M1, /*severe:INVALID_FIELD_OVERRIDE*/M2 {}
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T3 extends Base
-                with /*severe:INVALID_FIELD_OVERRIDE*/M2, /*severe:INVALID_METHOD_OVERRIDE*/M1 {}
-         ''');
-    });
-
-    test('mixin override to mixin', () {
-      checkFile('''
-            class A {}
-            class B {}
-
-            class Base {
-            }
-
-            class M1 {
-                m(B a) {}
-                int x;
-            }
-
-            class M2 {
-                m(A a) {}
-                int x;
-            }
-
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
-                with M1,
-                /*severe:INVALID_METHOD_OVERRIDE,severe:INVALID_FIELD_OVERRIDE*/M2 {}
-         ''');
-    });
-
+  void test_invalidOverrides_noDuplicateMixinOverride() {
     // This is a regression test for a bug in an earlier implementation where
     // names were hiding errors if the first mixin override looked correct,
     // but subsequent ones did not.
-    test('no duplicate mixin override', () {
-      checkFile('''
-            class A {}
-            class B {}
+    checkFile('''
+class A {}
+class B {}
 
-            class Base {
-                m(A a) {}
-            }
+class Base {
+    m(A a) {}
+}
 
-            class M1 {
-                m(A a) {}
-            }
+class M1 {
+    m(A a) {}
+}
 
-            class M2 {
-                m(B a) {}
-            }
+class M2 {
+    m(B a) {}
+}
 
-            class M3 {
-                m(B a) {}
-            }
+class M3 {
+    m(B a) {}
+}
 
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
-                with M1, /*severe:INVALID_METHOD_OVERRIDE*/M2, M3 {}
-         ''');
-    });
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
+    with M1, /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M2, M3 {}
+''');
+  }
 
-    test('class override of interface', () {
-      checkFile('''
-            class A {}
-            class B {}
-
-            abstract class I {
-                m(A a);
-            }
-
-            class T1 implements I {
-              /*severe:INVALID_METHOD_OVERRIDE*/m(
-                  /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-            }
-         ''');
-    });
-
-    test('base class override to child interface', () {
-      checkFile('''
-            class A {}
-            class B {}
-
-            abstract class I {
-                m(A a);
-            }
-
-            class Base {
-                m(B a) {}
-            }
-
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                /*severe:INVALID_METHOD_OVERRIDE*/extends Base implements I {}
-         ''');
-    });
-
-    test('mixin override of interface', () {
-      checkFile('''
-            class A {}
-            class B {}
-
-            abstract class I {
-                m(A a);
-            }
-
-            class M {
-                m(B a) {}
-            }
-
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                extends Object with /*severe:INVALID_METHOD_OVERRIDE*/M
-                implements I {}
-         ''');
-    });
-
+  void
+      test_invalidOverrides_noErrorsIfSubclassCorrectlyOverrideBaseAndInterface() {
     // This is a case where it is incorrect to say that the base class
     // incorrectly overrides the interface.
-    test('no errors if subclass correctly overrides base and interface', () {
-      checkFile('''
-            class A {}
-            class B {}
-
-            class Base {
-                m(A a) {}
-            }
-
-            class I1 {
-                m(B a) {}
-            }
-
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                /*severe:INVALID_METHOD_OVERRIDE*/extends Base
-                implements I1 {}
-
-            class T2 extends Base implements I1 {
-                m(a) {}
-            }
-
-            class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T3
-                extends Object with /*severe:INVALID_METHOD_OVERRIDE*/Base
-                implements I1 {}
-
-            class T4 extends Object with Base implements I1 {
-                m(a) {}
-            }
-         ''');
-    });
-  });
-
-  group('class override of grand interface', () {
-    test('interface of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 implements I1 {}
-
-              class T1 implements I2 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-           ''');
-    });
-    test('superclass of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 extends I1 {}
-
-              class T1 implements I2 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-           ''');
-    });
-    test('mixin of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class M1 {
-                  m(A a);
-              }
-              abstract class I2 extends Object with M1 {}
-
-              class T1 implements I2 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-           ''');
-    });
-    test('interface of abstract superclass', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class Base implements I1 {}
-
-              class T1 extends Base {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-           ''');
-    });
-    test('interface of concrete superclass', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class /*warning:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/Base
-                  implements I1 {}
-
-              class T1 extends Base {
-                  // not reported technically because if the class is concrete,
-                  // it should implement all its interfaces and hence it is
-                  // sufficient to check overrides against it.
-                  m(/*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-           ''');
-    });
-  });
-
-  group('mixin override of grand interface', () {
-    test('interface of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 implements I1 {}
-
-              class M {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  extends Object with /*severe:INVALID_METHOD_OVERRIDE*/M
-                  implements I2 {}
-           ''');
-    });
-    test('superclass of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 extends I1 {}
-
-              class M {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  extends Object with /*severe:INVALID_METHOD_OVERRIDE*/M
-                  implements I2 {}
-           ''');
-    });
-    test('mixin of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class M1 {
-                  m(A a);
-              }
-              abstract class I2 extends Object with M1 {}
-
-              class M {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  extends Object with /*severe:INVALID_METHOD_OVERRIDE*/M
-                  implements I2 {}
-           ''');
-    });
-    test('interface of abstract superclass', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class Base implements I1 {}
-
-              class M {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
-                  with /*severe:INVALID_METHOD_OVERRIDE*/M {}
-           ''');
-    });
-    test('interface of concrete superclass', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class /*warning:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/Base
-                  implements I1 {}
-
-              class M {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
-                  with M {}
-           ''');
-    });
-  });
-
-  group('superclass override of grand interface', () {
-    test('interface of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 implements I1 {}
-
-              class Base {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Base implements I2 {}
-           ''');
-    });
-    test('superclass of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 extends I1 {}
-
-              class Base {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Base
-                  implements I2 {}
-           ''');
-    });
-    test('mixin of interface of child', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class M1 {
-                  m(A a);
-              }
-              abstract class I2 extends Object with M1 {}
-
-              class Base {
-                  m(B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Base
-                  implements I2 {}
-           ''');
-    });
-    test('interface of abstract superclass', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              abstract class Base implements I1 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-
-              class T1 extends Base {
-                  // we consider the base class incomplete because it is
-                  // abstract, so we report the error here too.
-                  // TODO(sigmund): consider tracking overrides in a fine-grain
-                  // manner, then this and the double-overrides would not be
-                  // reported.
-                  /*severe:INVALID_METHOD_OVERRIDE*/m(B a) {}
-              }
-           ''');
-    });
-    test('interface of concrete superclass', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class Base implements I1 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-
-              class T1 extends Base {
-                  m(B a) {}
-              }
-           ''');
-    });
-  });
-
-  group('no duplicate reports from overriding interfaces', () {
-    test('type overrides same method in multiple interfaces', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-              abstract class I2 implements I1 {
-                  m(A a);
-              }
-
-              class Base {}
-
-              class T1 implements I2 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-           ''');
-    });
-
-    test('type and base type override same method in interface', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class Base {
-                  m(B a) {}
-              }
-
-              // Note: no error reported in `extends Base` to avoid duplicating
-              // the error in T1.
-              class T1 extends Base implements I1 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-
-              // If there is no error in the class, we do report the error at
-              // the base class:
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T2
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Base
-                  implements I1 {}
-           ''');
-    });
-
-    test('type and mixin override same method in interface', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class M {
-                  m(B a) {}
-              }
-
-              class T1 extends Object with M implements I1 {
-                /*severe:INVALID_METHOD_OVERRIDE*/m(
-                    /*warning:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
-              }
-
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T2
-                  extends Object with /*severe:INVALID_METHOD_OVERRIDE*/M
-                  implements I1 {}
-           ''');
-    });
-
-    test('two grand types override same method in interface', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class Grandparent {
-                  m(B a) {}
-              }
-
-              class Parent1 extends Grandparent {
-                  m(B a) {}
-              }
-              class Parent2 extends Grandparent {}
-
-              // Note: otherwise both errors would be reported on this line
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Parent1
-                  implements I1 {}
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T2
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Parent2
-                  implements I1 {}
-           ''');
-    });
-
-    test('two mixins override same method in interface', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class M1 {
-                  m(B a) {}
-              }
-
-              class M2 {
-                  m(B a) {}
-              }
-
-              // Here we want to report both, because the error location is
-              // different.
-              // TODO(sigmund): should we merge these as well?
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Object
-                  with /*severe:INVALID_METHOD_OVERRIDE*/M1,
-                  /*severe:INVALID_METHOD_OVERRIDE*/M2
-                  implements I1 {}
-           ''');
-    });
-
-    test('base type and mixin override same method in interface', () {
-      checkFile('''
-              class A {}
-              class B {}
-
-              abstract class I1 {
-                  m(A a);
-              }
-
-              class Base {
-                  m(B a) {}
-              }
-
-              class M {
-                  m(B a) {}
-              }
-
-              // Here we want to report both, because the error location is
-              // different.
-              // TODO(sigmund): should we merge these as well?
-              class /*warning:INCONSISTENT_METHOD_INHERITANCE*/T1
-                  /*severe:INVALID_METHOD_OVERRIDE*/extends Base
-                  with /*severe:INVALID_METHOD_OVERRIDE*/M
-                  implements I1 {}
-           ''');
-    });
-  });
-
-  test('invalid runtime checks', () {
     checkFile('''
-          typedef int I2I(int x);
-          typedef int D2I(x);
-          typedef int II2I(int x, int y);
-          typedef int DI2I(x, int y);
-          typedef int ID2I(int x, y);
-          typedef int DD2I(x, y);
+class A {}
+class B {}
 
-          typedef I2D(int x);
-          typedef D2D(x);
-          typedef II2D(int x, int y);
-          typedef DI2D(x, int y);
-          typedef ID2D(int x, y);
-          typedef DD2D(x, y);
+class Base {
+    m(A a) {}
+}
 
-          int foo(int x) => x;
-          int bar(int x, int y) => x + y;
+class I1 {
+    m(B a) {}
+}
 
-          void main() {
-            bool b;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/foo is I2I;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/foo is D2I;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/foo is I2D;
-            b = foo is D2D;
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base
+    implements I1 {}
 
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is II2I;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is DI2I;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is ID2I;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is II2D;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is DD2I;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is DI2D;
-            b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is ID2D;
-            b = bar is DD2D;
+class T2 extends Base implements I1 {
+    m(a) {}
+}
 
-            // For as, the validity of checks is deferred to runtime.
-            Function f;
-            f = foo as I2I;
-            f = foo as D2I;
-            f = foo as I2D;
-            f = foo as D2D;
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T3
+    extends Object with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/Base
+    implements I1 {}
 
-            f = bar as II2I;
-            f = bar as DI2I;
-            f = bar as ID2I;
-            f = bar as II2D;
-            f = bar as DD2I;
-            f = bar as DI2D;
-            f = bar as ID2D;
-            f = bar as DD2D;
-          }
-      ''');
-  });
+class T4 extends Object with Base implements I1 {
+    m(a) {}
+}
+''');
+  }
 
-  group('function modifiers', () {
-    test('async', () {
-      checkFile('''
-        import 'dart:async';
-        import 'dart:math' show Random;
+  void test_invalidRuntimeChecks() {
+    checkFile('''
+typedef int I2I(int x);
+typedef int D2I(x);
+typedef int II2I(int x, int y);
+typedef int DI2I(x, int y);
+typedef int ID2I(int x, y);
+typedef int DD2I(x, y);
 
-        dynamic x;
+typedef I2D(int x);
+typedef D2D(x);
+typedef II2D(int x, int y);
+typedef DI2D(x, int y);
+typedef ID2D(int x, y);
+typedef DD2D(x, y);
 
-        foo1() async => x;
-        Future foo2() async => x;
-        Future<int> foo3() async => /*info:DYNAMIC_CAST*/x;
-        Future<int> foo4() async => new Future<int>.value(/*info:DYNAMIC_CAST*/x);
-        Future<int> foo5() async =>
-            /*warning:RETURN_OF_INVALID_TYPE*/new Future<String>.value(/*info:DYNAMIC_CAST*/x);
+int foo(int x) => x;
+int bar(int x, int y) => x + y;
 
-        bar1() async { return x; }
-        Future bar2() async { return x; }
-        Future<int> bar3() async { return /*info:DYNAMIC_CAST*/x; }
-        Future<int> bar4() async { return new Future<int>.value(/*info:DYNAMIC_CAST*/x); }
-        Future<int> bar5() async {
-          return /*warning:RETURN_OF_INVALID_TYPE*/new Future<String>.value(/*info:DYNAMIC_CAST*/x);
-        }
+void main() {
+  bool b;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/foo is I2I;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/foo is D2I;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/foo is I2D;
+  b = foo is D2D;
 
-        int y;
-        Future<int> z;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is II2I;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is DI2I;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is ID2I;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is II2D;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is DD2I;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is DI2D;
+  b = /*info:NON_GROUND_TYPE_CHECK_INFO*/bar is ID2D;
+  b = bar is DD2D;
 
-        baz() async {
-          int a = /*info:DYNAMIC_CAST*/await x;
-          int b = await y;
-          int c = await z;
-          String d = /*warning:INVALID_ASSIGNMENT*/await z;
-        }
+  // For as, the validity of checks is deferred to runtime.
+  Function f;
+  f = foo as I2I;
+  f = foo as D2I;
+  f = foo as I2D;
+  f = foo as D2D;
 
-        Future<bool> get issue_264 async {
-          await 42;
-          if (new Random().nextBool()) {
-            return true;
-          } else {
-            return new Future<bool>.value(false);
-          }
-        }
-    ''');
-    });
+  f = bar as II2I;
+  f = bar as DI2I;
+  f = bar as ID2I;
+  f = bar as II2D;
+  f = bar as DD2I;
+  f = bar as DI2D;
+  f = bar as ID2D;
+  f = bar as DD2D;
+}
+''');
+  }
 
-    test('async*', () {
-      checkFile('''
-        import 'dart:async';
+  void test_leastUpperBounds() {
+    checkFile('''
+typedef T Returns<T>();
 
-        dynamic x;
+// Regression test for https://github.com/dart-lang/sdk/issues/26094
+class A<S extends Returns<S>, T extends Returns<T>> {
+  int test(bool b) {
+    S s;
+    T t;
+    if (b) {
+      return /*error:RETURN_OF_INVALID_TYPE*/b ? s : t;
+    } else {
+      return /*error:RETURN_OF_INVALID_TYPE*/s ?? t;
+    }
+  }
+}
 
-        bar1() async* { yield x; }
-        Stream bar2() async* { yield x; }
-        Stream<int> bar3() async* { yield /*info:DYNAMIC_CAST*/x; }
-        Stream<int> bar4() async* { yield /*warning:YIELD_OF_INVALID_TYPE*/new Stream<int>(); }
+class B<S, T extends S> {
+  T t;
+  S s;
+  int test(bool b) {
+    return /*error:RETURN_OF_INVALID_TYPE*/b ? t : s;
+  }
+}
 
-        baz1() async* { yield* /*info:DYNAMIC_CAST*/x; }
-        Stream baz2() async* { yield* /*info:DYNAMIC_CAST*/x; }
-        Stream<int> baz3() async* { yield* /*warning:DOWN_CAST_COMPOSITE*/x; }
-        Stream<int> baz4() async* { yield* new Stream<int>(); }
-        Stream<int> baz5() async* { yield* /*info:INFERRED_TYPE_ALLOCATION*/new Stream(); }
-    ''');
-    });
+class C {
+  // Check that the least upper bound of two types with the same
+  // class but different type arguments produces the pointwise
+  // least upper bound of the type arguments.
+  int test1(bool b) {
+    List<int> li;
+    List<double> ld;
+    return /*error:RETURN_OF_INVALID_TYPE*/b ? li : ld;
+  }
+  // TODO(leafp): This case isn't handled yet.  This test checks
+  // the case where two related classes are instantiated with related
+  // but different types.
+  Iterable<num> test2(bool b) {
+    List<int> li;
+    Iterable<double> id;
+    int x =
+        /*info:ASSIGNMENT_CAST should be error:INVALID_ASSIGNMENT*/
+        b ? li : id;
+    return /*warning:DOWN_CAST_COMPOSITE should be pass*/b ? li : id;
+  }
+}
+''');
+  }
 
-    test('sync*', () {
-      checkFile('''
-        dynamic x;
+  void test_loadLibrary() {
+    addFile('''library lib1;''', name: '/lib1.dart');
+    checkFile(r'''
+import 'lib1.dart' deferred as lib1;
+import 'dart:async' show Future;
+main() {
+  Future f = lib1.loadLibrary();
+}''');
+  }
 
-        bar1() sync* { yield x; }
-        Iterable bar2() sync* { yield x; }
-        Iterable<int> bar3() sync* { yield /*info:DYNAMIC_CAST*/x; }
-        Iterable<int> bar4() sync* { yield /*warning:YIELD_OF_INVALID_TYPE*/bar3(); }
+  void test_methodOverride() {
+    checkFile('''
+class A {}
+class B extends A {}
+class C extends B {}
 
-        baz1() sync* { yield* /*info:DYNAMIC_CAST*/x; }
-        Iterable baz2() sync* { yield* /*info:DYNAMIC_CAST*/x; }
-        Iterable<int> baz3() sync* { yield* /*warning:DOWN_CAST_COMPOSITE*/x; }
-        Iterable<int> baz4() sync* { yield* bar3(); }
-        Iterable<int> baz5() sync* { yield* /*info:INFERRED_TYPE_ALLOCATION*/new List(); }
-    ''');
-    });
-  });
+class Base {
+  B m1(B a) => null;
+  B m2(B a) => null;
+  B m3(B a) => null;
+  B m4(B a) => null;
+  B m5(B a) => null;
+  B m6(B a) => null;
+}
+
+class Child extends Base {
+  /*error:INVALID_METHOD_OVERRIDE*/A m1(A value) => null;
+  /*error:INVALID_METHOD_OVERRIDE*/C m2(C value) => null;
+  /*error:INVALID_METHOD_OVERRIDE*/A m3(C value) => null;
+  C m4(A value) => null;
+  m5(value) => null;
+  /*error:INVALID_METHOD_OVERRIDE*/dynamic m6(dynamic value) => null;
+}
+''');
+  }
+
+  void test_methodOverride_fuzzyArrows() {
+    checkFile('''
+abstract class A {
+  bool operator ==(Object object);
+}
+
+class B implements A {}
+
+class F {
+  void f(x) {}
+  void g(int x) {}
+}
+
+class G extends F {
+  /*error:INVALID_METHOD_OVERRIDE*/void f(int x) {}
+  void g(dynamic x) {}
+}
+
+class H implements F {
+  /*error:INVALID_METHOD_OVERRIDE*/void f(int x) {}
+  void g(dynamic x) {}
+}
+''');
+  }
+
+  void test_mixinOverrideOfGrandInterface_interfaceOfAbstractSuperclass() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class Base implements I1 {}
+
+class M {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
+    with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M {}
+''');
+  }
+
+  void test_mixinOverrideOfGrandInterface_interfaceOfConcreteSuperclass() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class /*error:NON_ABSTRACT_CLASS_INHERITS_ABSTRACT_MEMBER_ONE*/Base
+    implements I1 {}
+
+class M {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Base
+    with M {}
+''');
+  }
+
+  void test_mixinOverrideOfGrandInterface_interfaceOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 implements I1 {}
+
+class M {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    extends Object with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M
+    implements I2 {}
+''');
+  }
+
+  void test_mixinOverrideOfGrandInterface_mixinOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class M1 {
+    m(A a);
+}
+abstract class I2 extends Object with M1 {}
+
+class M {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    extends Object with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M
+    implements I2 {}
+''');
+  }
+
+  void test_mixinOverrideOfGrandInterface_superclassOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 extends I1 {}
+
+class M {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    extends Object with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M
+    implements I2 {}
+''');
+  }
+
+  void
+      test_noDuplicateReportsFromOverridingInterfaces_baseTypeAndMixinOverrideSameMethodInInterface() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class Base {
+    m(B a) {}
+}
+
+class M {
+    m(B a) {}
+}
+
+// Here we want to report both, because the error location is
+// different.
+// TODO(sigmund): should we merge these as well?
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base
+    with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M
+    implements I1 {}
+''');
+  }
+
+  void
+      test_noDuplicateReportsFromOverridingInterfaces_twoGrandTypesOverrideSameMethodInInterface() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class Grandparent {
+    m(B a) {}
+}
+
+class Parent1 extends Grandparent {
+    m(B a) {}
+}
+class Parent2 extends Grandparent {}
+
+// Note: otherwise both errors would be reported on this line
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Parent1
+    implements I1 {}
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T2
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Parent2
+    implements I1 {}
+''');
+  }
+
+  void
+      test_noDuplicateReportsFromOverridingInterfaces_twoMixinsOverrideSameMethodInInterface() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class M1 {
+    m(B a) {}
+}
+
+class M2 {
+    m(B a) {}
+}
+
+// Here we want to report both, because the error location is
+// different.
+// TODO(sigmund): should we merge these as well?
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1 extends Object
+    with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M1,
+    /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M2
+    implements I1 {}
+''');
+  }
+
+  void
+      test_noDuplicateReportsFromOverridingInterfaces_typeAndBaseTypeOverrideSameMethodInInterface() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class Base {
+    m(B a) {}
+}
+
+// Note: no error reported in `extends Base` to avoid duplicating
+// the error in T1.
+class T1 extends Base implements I1 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+
+// If there is no error in the class, we do report the error at
+// the base class:
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T2
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base
+    implements I1 {}
+''');
+  }
+
+  void
+      test_noDuplicateReportsFromOverridingInterfaces_typeAndMixinOverrideSameMethodInInterface() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class M {
+    m(B a) {}
+}
+
+class T1 extends Object with M implements I1 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T2
+    extends Object with /*error:INVALID_METHOD_OVERRIDE_FROM_MIXIN*/M
+    implements I1 {}
+''');
+  }
+
+  void
+      test_noDuplicateReportsFromOverridingInterfaces_typeOverridesSomeMethodInMultipleInterfaces() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 implements I1 {
+    m(A a);
+}
+
+class Base {}
+
+class T1 implements I2 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+''');
+  }
+
+  void test_nullCoalescingOperator() {
+    checkFile('''
+class A {}
+class C<T> {}
+main() {
+  A a, b;
+  a ??= new A();
+  b = b ?? new A();
+
+  // downwards inference
+  C<int> c, d;
+  c ??= /*info:INFERRED_TYPE_ALLOCATION*/new C();
+  d = d ?? /*info:INFERRED_TYPE_ALLOCATION*/new C();
+}
+''');
+  }
+
+  void test_privateOverride() {
+    addFile(
+        '''
+import 'main.dart' as main;
+
+class Base {
+  var f1;
+  var _f2;
+  var _f3;
+  get _f4 => null;
+
+  int _m1() => null;
+}
+
+class GrandChild extends main.Child {
+  /*error:INVALID_FIELD_OVERRIDE*/var _f2;
+  /*error:INVALID_FIELD_OVERRIDE*/var _f3;
+  var _f4;
+
+  /*error:INVALID_METHOD_OVERRIDE*/String
+      /*error:INVALID_METHOD_OVERRIDE_RETURN_TYPE*/_m1() => null;
+}
+''',
+        name: '/helper.dart');
+    checkFile('''
+import 'helper.dart' as helper;
+
+class Child extends helper.Base {
+  /*error:INVALID_FIELD_OVERRIDE*/var f1;
+  var _f2;
+  var _f4;
+
+  String _m1() => null;
+}
+''');
+  }
+
+  void test_redirectingConstructor() {
+    checkFile('''
+class A {
+  A(A x) {}
+  A.two() : this(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+}
+''');
+  }
+
+  void test_relaxedCasts() {
+    checkFile('''
+class A {}
+
+class L<T> {}
+class M<T> extends L<T> {}
+//     L<dynamic|Object>
+//    /              \
+// M<dynamic|Object>  L<A>
+//    \              /
+//          M<A>
+// In normal Dart, there are additional edges
+//  from M<A> to M<dynamic>
+//  from L<A> to M<dynamic>
+//  from L<A> to L<dynamic>
+void main() {
+  L lOfDs;
+  L<Object> lOfOs;
+  L<A> lOfAs;
+
+  M mOfDs;
+  M<Object> mOfOs;
+  M<A> mOfAs;
+
+  {
+    lOfDs = mOfDs;
+    lOfDs = mOfOs;
+    lOfDs = mOfAs;
+    lOfDs = lOfDs;
+    lOfDs = lOfOs;
+    lOfDs = lOfAs;
+    lOfDs = new L(); // Reset type propagation.
+  }
+  {
+    lOfOs = mOfDs;
+    lOfOs = mOfOs;
+    lOfOs = mOfAs;
+    lOfOs = lOfDs;
+    lOfOs = lOfOs;
+    lOfOs = lOfAs;
+    lOfOs = new L<Object>(); // Reset type propagation.
+  }
+  {
+    lOfAs = /*warning:DOWN_CAST_COMPOSITE*/mOfDs;
+    lOfAs = /*error:INVALID_ASSIGNMENT*/mOfOs;
+    lOfAs = mOfAs;
+    lOfAs = /*warning:DOWN_CAST_COMPOSITE*/lOfDs;
+    lOfAs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
+    lOfAs = lOfAs;
+    lOfAs = new L<A>(); // Reset type propagation.
+  }
+  {
+    mOfDs = mOfDs;
+    mOfDs = mOfOs;
+    mOfDs = mOfAs;
+    mOfDs = /*info:DOWN_CAST_IMPLICIT*/lOfDs;
+    mOfDs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
+    mOfDs = /*warning:DOWN_CAST_COMPOSITE*/lOfAs;
+    mOfDs = new M(); // Reset type propagation.
+  }
+  {
+    mOfOs = mOfDs;
+    mOfOs = mOfOs;
+    mOfOs = mOfAs;
+    mOfOs = /*info:DOWN_CAST_IMPLICIT*/lOfDs;
+    mOfOs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
+    mOfOs = /*error:INVALID_ASSIGNMENT*/lOfAs;
+    mOfOs = new M<Object>(); // Reset type propagation.
+  }
+  {
+    mOfAs = /*warning:DOWN_CAST_COMPOSITE*/mOfDs;
+    mOfAs = /*info:DOWN_CAST_IMPLICIT*/mOfOs;
+    mOfAs = mOfAs;
+    mOfAs = /*warning:DOWN_CAST_COMPOSITE*/lOfDs;
+    mOfAs = /*info:DOWN_CAST_IMPLICIT*/lOfOs;
+    mOfAs = /*info:DOWN_CAST_IMPLICIT*/lOfAs;
+  }
+}
+''');
+  }
+
+  void test_setterOverride_fuzzyArrows() {
+    checkFile('''
+typedef void ToVoid<T>(T x);
+class F {
+  void set f(ToVoid<dynamic> x) {}
+  void set g(ToVoid<int> x) {}
+  void set h(dynamic x) {}
+  void set i(int x) {}
+}
+
+class G extends F {
+  /*error:INVALID_METHOD_OVERRIDE*/void set f(ToVoid<int> x) {}
+  void set g(ToVoid<dynamic> x) {}
+  void set h(int x) {}
+  /*error:INVALID_METHOD_OVERRIDE*/void set i(dynamic x) {}
+}
+
+class H implements F {
+  /*error:INVALID_METHOD_OVERRIDE*/void set f(ToVoid<int> x) {}
+  void set g(ToVoid<dynamic> x) {}
+  void set h(int x) {}
+  /*error:INVALID_METHOD_OVERRIDE*/void set i(dynamic x) {}
+}
+ ''');
+  }
+
+  void test_setterReturnTypes() {
+    checkFile('''
+void voidFn() => null;
+class A {
+  set a(y) => 4;
+  set b(y) => voidFn();
+  void set c(y) => /*error:RETURN_OF_INVALID_TYPE*/4;
+  void set d(y) => voidFn();
+  /*warning:NON_VOID_RETURN_FOR_SETTER*/int set e(y) => 4;
+  /*warning:NON_VOID_RETURN_FOR_SETTER*/int set f(y) =>
+      /*error:RETURN_OF_INVALID_TYPE*/voidFn();
+  set g(y) {return /*error:RETURN_OF_INVALID_TYPE*/4;}
+  void set h(y) {return /*error:RETURN_OF_INVALID_TYPE*/4;}
+  /*warning:NON_VOID_RETURN_FOR_SETTER*/int set i(y) {return 4;}
+}
+''');
+  }
+
+  void test_setterSetterOverride() {
+    checkFile('''
+class A {}
+class B extends A {}
+class C extends B {}
+
+abstract class Base {
+  void set f1(B value);
+  void set f2(B value);
+  void set f3(B value);
+  void set f4(B value);
+  void set f5(B value);
+}
+
+class Child extends Base {
+  void set f1(A value) {}
+  /*error:INVALID_METHOD_OVERRIDE*/void set f2(C value) {}
+  void set f3(value) {}
+  /*error:INVALID_METHOD_OVERRIDE*/void set f4(dynamic value) {}
+  set f5(B value) {}
+}
+''');
+  }
+
+  void test_superCallPlacement() {
+    checkFile('''
+class Base {
+  var x;
+  Base() : x = print('Base.1') { print('Base.2'); }
+}
+
+class Derived extends Base {
+  var y, z;
+  Derived()
+      : y = print('Derived.1'),
+        /*error:INVALID_SUPER_INVOCATION*/super(),
+        z = print('Derived.2') {
+    print('Derived.3');
+  }
+}
+
+class Valid extends Base {
+  var y, z;
+  Valid()
+      : y = print('Valid.1'),
+        z = print('Valid.2'),
+        super() {
+    print('Valid.3');
+  }
+}
+
+class AlsoValid extends Base {
+  AlsoValid() : super();
+}
+
+main() => new Derived();
+''');
+  }
+
+  void test_superclassOverrideOfGrandInterface_interfaceOfAbstractSuperclass() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+abstract class Base implements I1 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+
+class T1 extends Base {
+    // We consider the base class incomplete because it is
+    // abstract, so we report the error here too.
+    // TODO(sigmund): consider tracking overrides in a fine-grained
+    // manner; then this and the double-overrides would not be
+    // reported.
+    /*error:INVALID_METHOD_OVERRIDE*/m(B a) {}
+}
+''');
+  }
+
+  void test_superclassOverrideOfGrandInterface_interfaceOfConcreteSuperclass() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+
+class Base implements I1 {
+  /*error:INVALID_METHOD_OVERRIDE*/m(
+      /*error:INVALID_METHOD_OVERRIDE_NORMAL_PARAM_TYPE*/B a) {}
+}
+
+class T1 extends Base {
+    m(B a) {}
+}
+''');
+  }
+
+  void test_superclassOverrideOfGrandInterface_interfaceOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 implements I1 {}
+
+class Base {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base implements I2 {}
+''');
+  }
+
+  void test_superclassOverrideOfGrandInterface_mixinOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class M1 {
+    m(A a);
+}
+abstract class I2 extends Object with M1 {}
+
+class Base {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base
+    implements I2 {}
+''');
+  }
+
+  void test_superclassOverrideOfGrandInterface_superclassOfInterfaceOfChild() {
+    checkFile('''
+class A {}
+class B {}
+
+abstract class I1 {
+    m(A a);
+}
+abstract class I2 extends I1 {}
+
+class Base {
+    m(B a) {}
+}
+
+class /*error:INCONSISTENT_METHOD_INHERITANCE*/T1
+    /*error:INVALID_METHOD_OVERRIDE_FROM_BASE*/extends Base
+    implements I2 {}
+''');
+  }
+
+  void test_superConstructor() {
+    checkFile('''
+class A { A(A x) {} }
+class B extends A {
+  B() : super(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+}
+''');
+  }
+
+  void test_ternaryOperator() {
+    checkFile('''
+abstract class Comparable<T> {
+  int compareTo(T other);
+  static int compare(Comparable a, Comparable b) => a.compareTo(b);
+}
+typedef int Comparator<T>(T a, T b);
+
+typedef bool _Predicate<T>(T value);
+
+class SplayTreeMap<K, V> {
+  Comparator<K> _comparator;
+  _Predicate _validKey;
+
+  // The warning on assigning to _comparator is legitimate. Since K has
+  // no bound, all we know is that it's Object. _comparator's function
+  // type is effectively:              (Object, Object) -> int
+  // We are assigning it a fn of type: (Comparable, Comparable) -> int
+  // There's no telling if that will work. For example, consider:
+  //
+  //     new SplayTreeMap<Uri>();
+  //
+  // This would end up calling .compareTo() on a Uri, which doesn't
+  // define that since it doesn't implement Comparable.
+  SplayTreeMap([int compare(K key1, K key2),
+                bool isValidKey(potentialKey)])
+    : _comparator = /*warning:DOWN_CAST_COMPOSITE*/(compare == null) ? Comparable.compare : compare,
+      _validKey = (isValidKey != null) ? isValidKey : ((v) => true) {
+    _Predicate<Object> v = (isValidKey != null)
+        ? isValidKey : (/*info:INFERRED_TYPE_CLOSURE*/(_) => true);
+
+    v = (isValidKey != null)
+         ? v : (/*info:INFERRED_TYPE_CLOSURE*/(_) => true);
+  }
+}
+void main() {
+  Object obj = 42;
+  dynamic dyn = 42;
+  int i = 42;
+
+  // Check the boolean conversion of the condition.
+  print(/*error:NON_BOOL_CONDITION*/i ? false : true);
+  print((/*info:DOWN_CAST_IMPLICIT*/obj) ? false : true);
+  print((/*info:DYNAMIC_CAST*/dyn) ? false : true);
+}
+''');
+  }
+
+  void test_typeCheckingLiterals() {
+    checkFile('''
+test() {
+  num n = 3;
+  int i = 3;
+  String s = "hello";
+  {
+     List<int> l = <int>[i];
+     l = <int>[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/s];
+     l = <int>[/*info:DOWN_CAST_IMPLICIT*/n];
+     l = <int>[i, /*info:DOWN_CAST_IMPLICIT*/n, /*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/s];
+  }
+  {
+     List l = /*info:INFERRED_TYPE_LITERAL*/[i];
+     l = /*info:INFERRED_TYPE_LITERAL*/[s];
+     l = /*info:INFERRED_TYPE_LITERAL*/[n];
+     l = /*info:INFERRED_TYPE_LITERAL*/[i, n, s];
+  }
+  {
+     Map<String, int> m = <String, int>{s: i};
+     m = <String, int>{s: /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/s};
+     m = <String, int>{s: /*info:DOWN_CAST_IMPLICIT*/n};
+     m = <String, int>{s: i,
+                       s: /*info:DOWN_CAST_IMPLICIT*/n,
+                       s: /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/s};
+  }
+ // TODO(leafp): We can't currently test for key errors since the
+ // error marker binds to the entire entry.
+  {
+     Map m = /*info:INFERRED_TYPE_LITERAL*/{s: i};
+     m = /*info:INFERRED_TYPE_LITERAL*/{s: s};
+     m = /*info:INFERRED_TYPE_LITERAL*/{s: n};
+     m = /*info:INFERRED_TYPE_LITERAL*/
+         {s: i,
+          s: n,
+          s: s};
+     m = /*info:INFERRED_TYPE_LITERAL*/
+         {i: s,
+          n: s,
+          s: s};
+  }
+}
+''');
+  }
+
+  void test_typePromotionFromDynamic() {
+    checkFile(r'''
+f() {
+  dynamic x;
+  if (x is int) {
+    int y = x;
+    String z = /*error:INVALID_ASSIGNMENT*/x;
+  }
+}
+g() {
+  Object x;
+  if (x is int) {
+    int y = x;
+    String z = /*error:INVALID_ASSIGNMENT*/x;
+  }
+}
+''');
+  }
+
+  void test_typeSubtyping_assigningClass() {
+    checkFile('''
+class A {}
+class B extends A {}
+
+void main() {
+   dynamic y;
+   Object o;
+   int i = 0;
+   double d = 0.0;
+   num n;
+   A a;
+   B b;
+   y = a;
+   o = a;
+   i = /*error:INVALID_ASSIGNMENT*/a;
+   d = /*error:INVALID_ASSIGNMENT*/a;
+   n = /*error:INVALID_ASSIGNMENT*/a;
+   a = a;
+   b = /*info:DOWN_CAST_IMPLICIT*/a;
+}
+''');
+  }
+
+  void test_typeSubtyping_assigningSubclass() {
+    checkFile('''
+class A {}
+class B extends A {}
+class C extends A {}
+
+void main() {
+   dynamic y;
+   Object o;
+   int i = 0;
+   double d = 0.0;
+   num n;
+   A a;
+   B b;
+   C c;
+   y = b;
+   o = b;
+   i = /*error:INVALID_ASSIGNMENT*/b;
+   d = /*error:INVALID_ASSIGNMENT*/b;
+   n = /*error:INVALID_ASSIGNMENT*/b;
+   a = b;
+   b = b;
+   c = /*error:INVALID_ASSIGNMENT*/b;
+}
+''');
+  }
+
+  void test_typeSubtyping_dynamicDowncasts() {
+    checkFile('''
+class A {}
+class B extends A {}
+
+void main() {
+   dynamic y;
+   Object o;
+   int i = 0;
+   double d = 0.0;
+   num n;
+   A a;
+   B b;
+   o = y;
+   i = /*info:DYNAMIC_CAST*/y;
+   d = /*info:DYNAMIC_CAST*/y;
+   n = /*info:DYNAMIC_CAST*/y;
+   a = /*info:DYNAMIC_CAST*/y;
+   b = /*info:DYNAMIC_CAST*/y;
+}
+''');
+  }
+
+  void test_typeSubtyping_dynamicIsTop() {
+    checkFile('''
+class A {}
+class B extends A {}
+
+void main() {
+   dynamic y;
+   Object o;
+   int i = 0;
+   double d = 0.0;
+   num n;
+   A a;
+   B b;
+   y = o;
+   y = i;
+   y = d;
+   y = n;
+   y = a;
+   y = b;
+}
+''');
+  }
+
+  void test_typeSubtyping_interfaces() {
+    checkFile('''
+class A {}
+class B extends A {}
+class C extends A {}
+class D extends B implements C {}
+
+void main() {
+   A top;
+   B left;
+   C right;
+   D bot;
+   {
+     top = top;
+     top = left;
+     top = right;
+     top = bot;
+   }
+   {
+     left = /*info:DOWN_CAST_IMPLICIT*/top;
+     left = left;
+     left = /*error:INVALID_ASSIGNMENT*/right;
+     left = bot;
+   }
+   {
+     right = /*info:DOWN_CAST_IMPLICIT*/top;
+     right = /*error:INVALID_ASSIGNMENT*/left;
+     right = right;
+     right = bot;
+   }
+   {
+     bot = /*info:DOWN_CAST_IMPLICIT*/top;
+     bot = /*info:DOWN_CAST_IMPLICIT*/left;
+     bot = /*info:DOWN_CAST_IMPLICIT*/right;
+     bot = bot;
+   }
+}
+''');
+  }
+
+  void test_unaryOperators() {
+    checkFile('''
+class A {
+  A operator ~() => null;
+  A operator +(int x) => null;
+  A operator -(int x) => null;
+  A operator -() => null;
+}
+
+foo() => new A();
+
+test() {
+  A a = new A();
+  var c = foo();
+  dynamic d;
+
+  ~a;
+  (/*info:DYNAMIC_INVOKE*/~d);
+
+  !/*error:NON_BOOL_NEGATION_EXPRESSION*/a;
+  !/*info:DYNAMIC_CAST*/d;
+
+  -a;
+  (/*info:DYNAMIC_INVOKE*/-d);
+
+  ++a;
+  --a;
+  (/*info:DYNAMIC_INVOKE*/++d);
+  (/*info:DYNAMIC_INVOKE*/--d);
+
+  a++;
+  a--;
+  (/*info:DYNAMIC_INVOKE*/d++);
+  (/*info:DYNAMIC_INVOKE*/d--);
+}''');
+  }
+
+  void test_unboundRedirectingConstructor() {
+    // This is a regression test for https://github.com/dart-lang/sdk/issues/25071
+    checkFile('''
+class Foo {
+  Foo() : /*error:REDIRECT_GENERATIVE_TO_MISSING_CONSTRUCTOR*/this.init();
+}
+ ''');
+  }
+
+  void test_unboundTypeName() {
+    checkFile('''
+void main() {
+   /*error:UNDEFINED_CLASS*/AToB y;
+}
+''');
+  }
+
+  void test_unboundVariable() {
+    checkFile('''
+void main() {
+   dynamic y = /*error:UNDEFINED_IDENTIFIER*/unboundVariable;
+}
+''');
+  }
+
+  void test_voidSubtyping() {
+    // Regression test for https://github.com/dart-lang/sdk/issues/25069
+    checkFile('''
+typedef int Foo();
+void foo() {}
+void main () {
+  Foo x = /*error:INVALID_ASSIGNMENT,info:USE_OF_VOID_RESULT*/foo();
+}
+''');
+  }
 }
diff --git a/pkg/analyzer/test/src/task/strong/inferred_type_test.dart b/pkg/analyzer/test/src/task/strong/inferred_type_test.dart
index a686740..0446efa 100644
--- a/pkg/analyzer/test/src/task/strong/inferred_type_test.dart
+++ b/pkg/analyzer/test/src/task/strong/inferred_type_test.dart
@@ -269,8 +269,8 @@
 
   f = /*info:INFERRED_TYPE_CLOSURE*/(x) => 'hello';
 
-  foo(/*info:INFERRED_TYPE_CLOSURE,info:INFERRED_TYPE_CLOSURE*/(x) { return null; });
-  foo(/*info:INFERRED_TYPE_CLOSURE,info:INFERRED_TYPE_CLOSURE*/(x) { throw "not implemented"; });
+  foo(/*info:INFERRED_TYPE_CLOSURE*/(x) { return null; });
+  foo(/*info:INFERRED_TYPE_CLOSURE*/(x) { throw "not implemented"; });
 }
 ''');
 
@@ -318,7 +318,7 @@
 main() {
   String f() => null;
   var g = f;
-  g = /*info:INFERRED_TYPE_CLOSURE*/() { return /*warning:RETURN_OF_INVALID_TYPE*/1; };
+  g = /*info:INFERRED_TYPE_CLOSURE*/() { return /*error:RETURN_OF_INVALID_TYPE*/1; };
 }
 ''');
     var f = mainUnit.functions[0].localVariables[0];
@@ -340,7 +340,7 @@
 import 'dart:math' show Random;
 test2() {
   List<num> o;
-  var y = o.map(/*info:INFERRED_TYPE_CLOSURE,info:INFERRED_TYPE_CLOSURE*/(x) {
+  var y = o.map(/*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*/(x) {
     if (new Random().nextBool()) {
       return x.toInt() + 1;
     } else {
@@ -449,6 +449,29 @@
     expect(f.type.toString(), '() → Iterable<num>');
   }
 
+  void test_bottom() {
+    // When a type is inferred from the expression `null`, the inferred type is
+    // `dynamic`, but the inferred type of the initializer is `bottom`.
+    // TODO(paulberry): Is this intentional/desirable?
+    var mainUnit = checkFile('''
+var v = null;
+''');
+    var v = mainUnit.topLevelVariables[0];
+    expect(v.type.toString(), 'dynamic');
+    expect(v.initializer.type.toString(), '() → <bottom>');
+  }
+
+  void test_bottom_inClosure() {
+    // When a closure's return type is inferred from the expression `null`, the
+    // inferred type is `dynamic`.
+    var mainUnit = checkFile('''
+var v = () => null;
+''');
+    var v = mainUnit.topLevelVariables[0];
+    expect(v.type.toString(), '() → dynamic');
+    expect(v.initializer.type.toString(), '() → () → dynamic');
+  }
+
   void test_canInferAlsoFromStaticAndInstanceFieldsFlagOn() {
     addFile(
         '''
@@ -479,6 +502,32 @@
 ''');
   }
 
+  void test_circularReference_viaClosures() {
+    var mainUnit = checkFile('''
+var x = () => y;
+var y = () => x;
+''');
+    var x = mainUnit.topLevelVariables[0];
+    var y = mainUnit.topLevelVariables[1];
+    expect(x.name, 'x');
+    expect(y.name, 'y');
+    expect(x.type.toString(), 'dynamic');
+    expect(y.type.toString(), 'dynamic');
+  }
+
+  void test_circularReference_viaClosures_initializerTypes() {
+    var mainUnit = checkFile('''
+var x = () => y;
+var y = () => x;
+''');
+    var x = mainUnit.topLevelVariables[0];
+    var y = mainUnit.topLevelVariables[1];
+    expect(x.name, 'x');
+    expect(y.name, 'y');
+    expect(x.initializer.returnType.toString(), '() → dynamic');
+    expect(y.initializer.returnType.toString(), '() → dynamic');
+  }
+
   void test_conflictsCanHappen() {
     checkFile('''
 class I1 {
@@ -497,12 +546,12 @@
 }
 
 class C1 implements A, B {
-  /*severe:INVALID_METHOD_OVERRIDE*/get a => null;
+  /*error:INVALID_METHOD_OVERRIDE*/get a => null;
 }
 
 // Still ambiguous
 class C2 implements B, A {
-  /*severe:INVALID_METHOD_OVERRIDE*/get a => null;
+  /*error:INVALID_METHOD_OVERRIDE*/get a => null;
 }
 ''');
   }
@@ -534,7 +583,7 @@
 }
 
 class C2 implements A, B {
-  /*severe:INVALID_METHOD_OVERRIDE*/get a => null;
+  /*error:INVALID_METHOD_OVERRIDE*/get a => null;
 }
 ''');
   }
@@ -546,7 +595,7 @@
 }
 
 class B implements A {
-  /*severe:INVALID_METHOD_OVERRIDE*/dynamic get x => 3;
+  /*error:INVALID_METHOD_OVERRIDE*/dynamic get x => 3;
 }
 
 foo() {
@@ -570,11 +619,11 @@
 
 test() {
   x = "hi";
-  y = /*warning:INVALID_ASSIGNMENT*/"hi";
+  y = /*error:INVALID_ASSIGNMENT*/"hi";
   A.x = "hi";
-  A.y = /*warning:INVALID_ASSIGNMENT*/"hi";
+  A.y = /*error:INVALID_ASSIGNMENT*/"hi";
   new A().x2 = "hi";
-  new A().y2 = /*warning:INVALID_ASSIGNMENT*/"hi";
+  new A().y2 = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -637,7 +686,7 @@
     checkFile('''
 void main() {
   List<int> l;
-  l = /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
+  l = /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
   l = (l = /*info:INFERRED_TYPE_LITERAL*/[1]);
 }
 ''');
@@ -698,30 +747,30 @@
 void main() {
   new F0(/*info:INFERRED_TYPE_LITERAL*/[]);
   new F0(/*info:INFERRED_TYPE_LITERAL*/[3]);
-  new F0(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  new F0(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello",
+  new F0(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  new F0(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello",
                                       3]);
 
   new F1(a: /*info:INFERRED_TYPE_LITERAL*/[]);
   new F1(a: /*info:INFERRED_TYPE_LITERAL*/[3]);
-  new F1(a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  new F1(a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  new F1(a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  new F1(a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   new F2(/*info:INFERRED_TYPE_LITERAL*/[]);
   new F2(/*info:INFERRED_TYPE_LITERAL*/[3]);
-  new F2(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  new F2(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  new F2(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  new F2(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   new F3(/*info:INFERRED_TYPE_LITERAL*/[]);
   new F3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[3]]);
-  new F3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
-  new F3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
+  new F3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
+  new F3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
                    /*info:INFERRED_TYPE_LITERAL*/[3]]);
 
   new F4(a: /*info:INFERRED_TYPE_LITERAL*/[]);
   new F4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[3]]);
-  new F4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
-  new F4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
+  new F4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
+  new F4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
                       /*info:INFERRED_TYPE_LITERAL*/[3]]);
 }
 ''');
@@ -737,28 +786,28 @@
 void main() {
   f0(/*info:INFERRED_TYPE_LITERAL*/[]);
   f0(/*info:INFERRED_TYPE_LITERAL*/[3]);
-  f0(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  f0(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  f0(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  f0(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   f1(a: /*info:INFERRED_TYPE_LITERAL*/[]);
   f1(a: /*info:INFERRED_TYPE_LITERAL*/[3]);
-  f1(a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  f1(a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  f1(a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  f1(a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   f2(/*info:INFERRED_TYPE_LITERAL*/[]);
   f2(/*info:INFERRED_TYPE_LITERAL*/[3]);
-  f2(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  f2(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  f2(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  f2(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   f3(/*info:INFERRED_TYPE_LITERAL*/[]);
   f3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[3]]);
-  f3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
-  f3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"], /*info:INFERRED_TYPE_LITERAL*/[3]]);
+  f3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
+  f3(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"], /*info:INFERRED_TYPE_LITERAL*/[3]]);
 
   f4(a: /*info:INFERRED_TYPE_LITERAL*/[]);
   f4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[3]]);
-  f4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
-  f4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"], /*info:INFERRED_TYPE_LITERAL*/[3]]);
+  f4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
+  f4(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"], /*info:INFERRED_TYPE_LITERAL*/[3]]);
 }
 ''');
   }
@@ -771,29 +820,29 @@
   {
     Function2<int, String> l0 = /*info:INFERRED_TYPE_CLOSURE*/(int x) => null;
     Function2<int, String> l1 = (int x) => "hello";
-    Function2<int, String> l2 = /*warning:INVALID_ASSIGNMENT*/(String x) => "hello";
-    Function2<int, String> l3 = /*warning:INVALID_ASSIGNMENT*/(int x) => 3;
-    Function2<int, String> l4 = /*info:INFERRED_TYPE_CLOSURE*/(int x) {return /*warning:RETURN_OF_INVALID_TYPE*/3;};
+    Function2<int, String> l2 = /*error:INVALID_ASSIGNMENT*/(String x) => "hello";
+    Function2<int, String> l3 = /*error:INVALID_ASSIGNMENT*/(int x) => 3;
+    Function2<int, String> l4 = /*info:INFERRED_TYPE_CLOSURE*/(int x) {return /*error:RETURN_OF_INVALID_TYPE*/3;};
   }
   {
-    Function2<int, String> l0 = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*/(x) => null;
+    Function2<int, String> l0 = /*info:INFERRED_TYPE_CLOSURE*/(x) => null;
     Function2<int, String> l1 = /*info:INFERRED_TYPE_CLOSURE*/(x) => "hello";
-    Function2<int, String> l2 = /*info:INFERRED_TYPE_CLOSURE, warning:INVALID_ASSIGNMENT*/(x) => 3;
-    Function2<int, String> l3 = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*/(x) {return /*warning:RETURN_OF_INVALID_TYPE*/3;};
-    Function2<int, String> l4 = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*/(x) {return /*warning:RETURN_OF_INVALID_TYPE*/x;};
+    Function2<int, String> l2 = /*info:INFERRED_TYPE_CLOSURE, error:INVALID_ASSIGNMENT*/(x) => 3;
+    Function2<int, String> l3 = /*info:INFERRED_TYPE_CLOSURE*/(x) {return /*error:RETURN_OF_INVALID_TYPE*/3;};
+    Function2<int, String> l4 = /*info:INFERRED_TYPE_CLOSURE*/(x) {return /*error:RETURN_OF_INVALID_TYPE*/x;};
   }
   {
     Function2<int, List<String>> l0 = /*info:INFERRED_TYPE_CLOSURE*/(int x) => null;
     Function2<int, List<String>> l1 = (int x) => /*info:INFERRED_TYPE_LITERAL*/["hello"];
-    Function2<int, List<String>> l2 = /*warning:INVALID_ASSIGNMENT*/(String x) => /*info:INFERRED_TYPE_LITERAL*/["hello"];
-    Function2<int, List<String>> l3 = (int x) => /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];
-    Function2<int, List<String>> l4 = /*info:INFERRED_TYPE_CLOSURE*/(int x) {return /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];};
+    Function2<int, List<String>> l2 = /*error:INVALID_ASSIGNMENT*/(String x) => /*info:INFERRED_TYPE_LITERAL*/["hello"];
+    Function2<int, List<String>> l3 = (int x) => /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];
+    Function2<int, List<String>> l4 = /*info:INFERRED_TYPE_CLOSURE*/(int x) {return /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];};
   }
   {
     Function2<int, int> l0 = /*info:INFERRED_TYPE_CLOSURE*/(x) => x;
     Function2<int, int> l1 = /*info:INFERRED_TYPE_CLOSURE*/(x) => x+1;
-    Function2<int, String> l2 = /*info:INFERRED_TYPE_CLOSURE, warning:INVALID_ASSIGNMENT*/(x) => x;
-    Function2<int, String> l3 = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*/(x) => /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/x.substring(3);
+    Function2<int, String> l2 = /*info:INFERRED_TYPE_CLOSURE, error:INVALID_ASSIGNMENT*/(x) => x;
+    Function2<int, String> l3 = /*info:INFERRED_TYPE_CLOSURE*/(x) => /*info:DYNAMIC_CAST, info:DYNAMIC_INVOKE*/x.substring(3);
     Function2<String, String> l4 = /*info:INFERRED_TYPE_CLOSURE*/(x) => x.substring(3);
   }
 }
@@ -842,30 +891,30 @@
 void main() {
   new F0<int>(/*info:INFERRED_TYPE_LITERAL*/[]);
   new F0<int>(/*info:INFERRED_TYPE_LITERAL*/[3]);
-  new F0<int>(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  new F0<int>(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello",
+  new F0<int>(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  new F0<int>(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello",
                                       3]);
 
   new F1<int>(a: /*info:INFERRED_TYPE_LITERAL*/[]);
   new F1<int>(a: /*info:INFERRED_TYPE_LITERAL*/[3]);
-  new F1<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  new F1<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  new F1<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  new F1<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   new F2<int>(/*info:INFERRED_TYPE_LITERAL*/[]);
   new F2<int>(/*info:INFERRED_TYPE_LITERAL*/[3]);
-  new F2<int>(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
-  new F2<int>(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
+  new F2<int>(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]);
+  new F2<int>(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3]);
 
   new F3<int>(/*info:INFERRED_TYPE_LITERAL*/[]);
   new F3<int>(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[3]]);
-  new F3<int>(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
-  new F3<int>(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
+  new F3<int>(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
+  new F3<int>(/*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
                    /*info:INFERRED_TYPE_LITERAL*/[3]]);
 
   new F4<int>(a: /*info:INFERRED_TYPE_LITERAL*/[]);
   new F4<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[3]]);
-  new F4<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
-  new F4<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
+  new F4<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"]]);
+  new F4<int>(a: /*info:INFERRED_TYPE_LITERAL*/[/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
                       /*info:INFERRED_TYPE_LITERAL*/[3]]);
 
   new F3(/*info:INFERRED_TYPE_LITERAL*/[]);
@@ -891,27 +940,27 @@
     var v = f;
     v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(int x) => null;
     v = /*<T>*/(int x) => "hello";
-    v = /*warning:INVALID_ASSIGNMENT*//*<T>*/(String x) => "hello";
-    v = /*warning:INVALID_ASSIGNMENT*//*<T>*/(int x) => 3;
-    v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(int x) {return /*warning:RETURN_OF_INVALID_TYPE*/3;};
+    v = /*error:INVALID_ASSIGNMENT*//*<T>*/(String x) => "hello";
+    v = /*error:INVALID_ASSIGNMENT*//*<T>*/(int x) => 3;
+    v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(int x) {return /*error:RETURN_OF_INVALID_TYPE*/3;};
   }
   {
     String f/*<S>*/(int x) => null;
     var v = f;
     v = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) => null;
     v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) => "hello";
-    v = /*info:INFERRED_TYPE_CLOSURE, warning:INVALID_ASSIGNMENT*//*<T>*/(x) => 3;
-    v = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) {return /*warning:RETURN_OF_INVALID_TYPE*/3;};
-    v = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) {return /*warning:RETURN_OF_INVALID_TYPE*/x;};
+    v = /*info:INFERRED_TYPE_CLOSURE, error:INVALID_ASSIGNMENT*//*<T>*/(x) => 3;
+    v = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) {return /*error:RETURN_OF_INVALID_TYPE*/3;};
+    v = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) {return /*error:RETURN_OF_INVALID_TYPE*/x;};
   }
   {
     List<String> f/*<S>*/(int x) => null;
     var v = f;
     v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(int x) => null;
     v = /*<T>*/(int x) => /*info:INFERRED_TYPE_LITERAL*/["hello"];
-    v = /*warning:INVALID_ASSIGNMENT*//*<T>*/(String x) => /*info:INFERRED_TYPE_LITERAL*/["hello"];
-    v = /*<T>*/(int x) => /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];
-    v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(int x) {return /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];};
+    v = /*error:INVALID_ASSIGNMENT*//*<T>*/(String x) => /*info:INFERRED_TYPE_LITERAL*/["hello"];
+    v = /*<T>*/(int x) => /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];
+    v = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(int x) {return /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3];};
   }
   {
     int int2int/*<S>*/(int x) => null;
@@ -921,7 +970,7 @@
     x = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) => x;
     x = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) => x+1;
     var y = int2String;
-    y = /*info:INFERRED_TYPE_CLOSURE, warning:INVALID_ASSIGNMENT*//*<T>*/(x) => x;
+    y = /*info:INFERRED_TYPE_CLOSURE, error:INVALID_ASSIGNMENT*//*<T>*/(x) => x;
     y = /*info:INFERRED_TYPE_CLOSURE, info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) => /*info:DYNAMIC_INVOKE, info:DYNAMIC_CAST*/x.substring(3);
     var z = string2String;
     z = /*info:INFERRED_TYPE_CLOSURE*//*<T>*/(x) => x.substring(3);
@@ -969,77 +1018,77 @@
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new A.named(3, "hello");
     A<int, String> a2 = new A<int, String>(3, "hello");
     A<int, String> a3 = new A<int, String>.named(3, "hello");
-    A<int, String> a4 = /*severe:STATIC_TYPE_ERROR*/new A<int, dynamic>(3, "hello");
-    A<int, String> a5 = /*severe:STATIC_TYPE_ERROR*/new A<dynamic, dynamic>.named(3, "hello");
+    A<int, String> a4 = /*error:STATIC_TYPE_ERROR*/new A<int, dynamic>(3, "hello");
+    A<int, String> a5 = /*error:STATIC_TYPE_ERROR*/new A<dynamic, dynamic>.named(3, "hello");
   }
   {
     A<int, String> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new A(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello",
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello",
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new A.named(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello",
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello",
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
   }
   {
     A<int, String> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new B("hello", 3);
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new B.named("hello", 3);
     A<int, String> a2 = new B<String, int>("hello", 3);
     A<int, String> a3 = new B<String, int>.named("hello", 3);
-    A<int, String> a4 = /*severe:STATIC_TYPE_ERROR*/new B<String, dynamic>("hello", 3);
-    A<int, String> a5 = /*severe:STATIC_TYPE_ERROR*/new B<dynamic, dynamic>.named("hello", 3);
+    A<int, String> a4 = /*error:STATIC_TYPE_ERROR*/new B<String, dynamic>("hello", 3);
+    A<int, String> a5 = /*error:STATIC_TYPE_ERROR*/new B<dynamic, dynamic>.named("hello", 3);
   }
   {
     A<int, String> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new B(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3,
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3,
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new B.named(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3,
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3,
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
   }
   {
     A<int, int> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new C(3);
     A<int, int> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new C.named(3);
     A<int, int> a2 = new C<int>(3);
     A<int, int> a3 = new C<int>.named(3);
-    A<int, int> a4 = /*severe:STATIC_TYPE_ERROR*/new C<dynamic>(3);
-    A<int, int> a5 = /*severe:STATIC_TYPE_ERROR*/new C<dynamic>.named(3);
+    A<int, int> a4 = /*error:STATIC_TYPE_ERROR*/new C<dynamic>(3);
+    A<int, int> a5 = /*error:STATIC_TYPE_ERROR*/new C<dynamic>.named(3);
   }
   {
     A<int, int> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new C(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
     A<int, int> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new C.named(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
   }
   {
     A<int, String> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new D("hello");
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new D.named("hello");
     A<int, String> a2 = new D<int, String>("hello");
     A<int, String> a3 = new D<String, String>.named("hello");
-    A<int, String> a4 = /*severe:STATIC_TYPE_ERROR*/new D<num, dynamic>("hello");
-    A<int, String> a5 = /*severe:STATIC_TYPE_ERROR*/new D<dynamic, dynamic>.named("hello");
+    A<int, String> a4 = /*error:STATIC_TYPE_ERROR*/new D<num, dynamic>("hello");
+    A<int, String> a5 = /*error:STATIC_TYPE_ERROR*/new D<dynamic, dynamic>.named("hello");
   }
   {
     A<int, String> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new D(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new D.named(
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
   }
   { // Currently we only allow variable constraints.  Test that we reject.
-    A<C<int>, String> a0 = /*severe:STATIC_TYPE_ERROR*/new E("hello");
+    A<C<int>, String> a0 = /*error:STATIC_TYPE_ERROR*/new E("hello");
   }
   { // Check named and optional arguments
     A<int, String> a0 = /*info:INFERRED_TYPE_ALLOCATION*/new F(3, "hello",
         a: /*info:INFERRED_TYPE_LITERAL*/[3],
         b: /*info:INFERRED_TYPE_LITERAL*/["hello"]);
     A<int, String> a1 = /*info:INFERRED_TYPE_ALLOCATION*/new F(3, "hello",
-        a: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
-        b: /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3]);
+        a: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"],
+        b: /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/3]);
     A<int, String> a2 = /*info:INFERRED_TYPE_ALLOCATION*/new F.named(3, "hello", 3, "hello");
     A<int, String> a3 = /*info:INFERRED_TYPE_ALLOCATION*/new F.named(3, "hello");
     A<int, String> a4 = /*info:INFERRED_TYPE_ALLOCATION*/new F.named(3, "hello",
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello", /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello", /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/3);
     A<int, String> a5 = /*info:INFERRED_TYPE_ALLOCATION*/new F.named(3, "hello",
-        /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
+        /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hello");
   }
 }
   ''');
@@ -1048,15 +1097,15 @@
   void test_downwardsInferenceOnListLiterals_inferDownwards() {
     checkFile('''
 void foo([List<String> list1 = /*info:INFERRED_TYPE_LITERAL*/const [],
-          List<String> list2 = /*info:INFERRED_TYPE_LITERAL*/const [/*severe:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE,warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/42]]) {
+          List<String> list2 = /*info:INFERRED_TYPE_LITERAL*/const [/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE,error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/42]]) {
 }
 
 void main() {
   {
     List<int> l0 = /*info:INFERRED_TYPE_LITERAL*/[];
     List<int> l1 = /*info:INFERRED_TYPE_LITERAL*/[3];
-    List<int> l2 = /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
-    List<int> l3 = /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
+    List<int> l2 = /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
+    List<int> l3 = /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
   }
   {
     List<dynamic> l0 = [];
@@ -1065,22 +1114,22 @@
     List<dynamic> l3 = /*info:INFERRED_TYPE_LITERAL*/["hello", 3];
   }
   {
-    List<int> l0 = /*severe:STATIC_TYPE_ERROR*/<num>[];
-    List<int> l1 = /*severe:STATIC_TYPE_ERROR*/<num>[3];
-    List<int> l2 = /*severe:STATIC_TYPE_ERROR*/<num>[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
-    List<int> l3 = /*severe:STATIC_TYPE_ERROR*/<num>[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
+    List<int> l0 = /*error:STATIC_TYPE_ERROR*/<num>[];
+    List<int> l1 = /*error:STATIC_TYPE_ERROR*/<num>[3];
+    List<int> l2 = /*error:STATIC_TYPE_ERROR*/<num>[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
+    List<int> l3 = /*error:STATIC_TYPE_ERROR*/<num>[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
   }
   {
     Iterable<int> i0 = /*info:INFERRED_TYPE_LITERAL*/[];
     Iterable<int> i1 = /*info:INFERRED_TYPE_LITERAL*/[3];
-    Iterable<int> i2 = /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
-    Iterable<int> i3 = /*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
+    Iterable<int> i2 = /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
+    Iterable<int> i3 = /*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
   }
   {
     const List<int> c0 = /*info:INFERRED_TYPE_LITERAL*/const [];
     const List<int> c1 = /*info:INFERRED_TYPE_LITERAL*/const [3];
-    const List<int> c2 = /*info:INFERRED_TYPE_LITERAL*/const [/*severe:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE,warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
-    const List<int> c3 = /*info:INFERRED_TYPE_LITERAL*/const [/*severe:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE,warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
+    const List<int> c2 = /*info:INFERRED_TYPE_LITERAL*/const [/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE,error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello"];
+    const List<int> c3 = /*info:INFERRED_TYPE_LITERAL*/const [/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE,error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/"hello", 3];
   }
 }
 ''');
@@ -1149,9 +1198,8 @@
     checkFile('''
 void foo([Map<int, String> m1 = /*info:INFERRED_TYPE_LITERAL*/const {1: "hello"},
     Map<int, String> m2 = /*info:INFERRED_TYPE_LITERAL*/const {
-      // The warning is the type error, and the severe is the compile time
-      // error from const evaluation.
-      /*severe:MAP_KEY_TYPE_NOT_ASSIGNABLE,warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
+      // One error is from type checking and the other is from const evaluation.
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE,error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
           "world"
     }]) {
 }
@@ -1160,15 +1208,15 @@
     Map<int, String> l0 = /*info:INFERRED_TYPE_LITERAL*/{};
     Map<int, String> l1 = /*info:INFERRED_TYPE_LITERAL*/{3: "hello"};
     Map<int, String> l2 = /*info:INFERRED_TYPE_LITERAL*/{
-      /*warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello": "hello"
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello": "hello"
     };
     Map<int, String> l3 = /*info:INFERRED_TYPE_LITERAL*/{
-      3: /*warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
+      3: /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
     };
     Map<int, String> l4 = /*info:INFERRED_TYPE_LITERAL*/{
       3: "hello",
-      /*warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
-          /*warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
+          /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
     };
   }
   {
@@ -1183,44 +1231,44 @@
     Map<dynamic, String> l1 = /*info:INFERRED_TYPE_LITERAL*/{3: "hello"};
     Map<dynamic, String> l2 = /*info:INFERRED_TYPE_LITERAL*/{"hello": "hello"};
     Map<dynamic, String> l3 = /*info:INFERRED_TYPE_LITERAL*/{
-      3: /*warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
+      3: /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
     };
     Map<dynamic, String> l4 = /*info:INFERRED_TYPE_LITERAL*/{
       3: "hello",
-      "hello": /*warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
+      "hello": /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
     };
   }
   {
     Map<int, dynamic> l0 = /*info:INFERRED_TYPE_LITERAL*/{};
     Map<int, dynamic> l1 = /*info:INFERRED_TYPE_LITERAL*/{3: "hello"};
     Map<int, dynamic> l2 = /*info:INFERRED_TYPE_LITERAL*/{
-      /*warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello": "hello"
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello": "hello"
     };
     Map<int, dynamic> l3 = /*info:INFERRED_TYPE_LITERAL*/{3: 3};
     Map<int, dynamic> l4 = /*info:INFERRED_TYPE_LITERAL*/{
       3:"hello",
-      /*warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello": 3
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello": 3
     };
   }
   {
-    Map<int, String> l0 = /*severe:STATIC_TYPE_ERROR*/<num, dynamic>{};
-    Map<int, String> l1 = /*severe:STATIC_TYPE_ERROR*/<num, dynamic>{3: "hello"};
-    Map<int, String> l3 = /*severe:STATIC_TYPE_ERROR*/<num, dynamic>{3: 3};
+    Map<int, String> l0 = /*error:STATIC_TYPE_ERROR*/<num, dynamic>{};
+    Map<int, String> l1 = /*error:STATIC_TYPE_ERROR*/<num, dynamic>{3: "hello"};
+    Map<int, String> l3 = /*error:STATIC_TYPE_ERROR*/<num, dynamic>{3: 3};
   }
   {
     const Map<int, String> l0 = /*info:INFERRED_TYPE_LITERAL*/const {};
     const Map<int, String> l1 = /*info:INFERRED_TYPE_LITERAL*/const {3: "hello"};
     const Map<int, String> l2 = /*info:INFERRED_TYPE_LITERAL*/const {
-      /*severe:MAP_KEY_TYPE_NOT_ASSIGNABLE,warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE,error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
           "hello"
     };
     const Map<int, String> l3 = /*info:INFERRED_TYPE_LITERAL*/const {
-      3: /*severe:MAP_VALUE_TYPE_NOT_ASSIGNABLE,warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
+      3: /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE,error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
     };
     const Map<int, String> l4 = /*info:INFERRED_TYPE_LITERAL*/const {
       3:"hello",
-      /*severe:MAP_KEY_TYPE_NOT_ASSIGNABLE,warning:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
-          /*severe:MAP_VALUE_TYPE_NOT_ASSIGNABLE,warning:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
+      /*error:MAP_KEY_TYPE_NOT_ASSIGNABLE,error:MAP_KEY_TYPE_NOT_ASSIGNABLE*/"hello":
+          /*error:MAP_VALUE_TYPE_NOT_ASSIGNABLE,error:MAP_VALUE_TYPE_NOT_ASSIGNABLE*/3
     };
   }
 }
@@ -1232,15 +1280,15 @@
 import 'dart:async';
 Stream<List<int>> foo() async* {
   yield /*info:INFERRED_TYPE_LITERAL*/[];
-  yield /*warning:YIELD_OF_INVALID_TYPE*/new Stream();
-  yield* /*warning:YIELD_OF_INVALID_TYPE*/[];
+  yield /*error:YIELD_OF_INVALID_TYPE*/new Stream();
+  yield* /*error:YIELD_OF_INVALID_TYPE*/[];
   yield* /*info:INFERRED_TYPE_ALLOCATION*/new Stream();
 }
 
 Iterable<Map<int, int>> bar() sync* {
   yield /*info:INFERRED_TYPE_LITERAL*/{};
-  yield /*warning:YIELD_OF_INVALID_TYPE*/new List();
-  yield* /*warning:YIELD_OF_INVALID_TYPE*/{};
+  yield /*error:YIELD_OF_INVALID_TYPE*/new List();
+  yield* /*error:YIELD_OF_INVALID_TYPE*/{};
   yield* /*info:INFERRED_TYPE_ALLOCATION*/new List();
 }
   ''');
@@ -1268,6 +1316,14 @@
     expect(x.type.toString(), 'int');
   }
 
+  void test_futureThen() {
+    checkFile('''
+import 'dart:async';
+Future f;
+Future<int> t1 = f.then((_) => new Future<int>.value(42));
+''');
+  }
+
   void test_genericMethods_basicDownwardInference() {
     checkFile(r'''
 /*=T*/ f/*<S, T>*/(/*=S*/ s) => null;
@@ -1291,7 +1347,7 @@
   new Foo<String>().method("str");
   new Foo().method("str");
 
-  new Foo<String>().method(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/42);
+  new Foo<String>().method(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/42);
 }
 ''');
   }
@@ -1325,8 +1381,8 @@
   // Types other than int and double are not accepted.
   printInt(
       /*info:DOWN_CAST_IMPLICIT*/min(
-          /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hi",
-          /*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"there"));
+          /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"hi",
+          /*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/"there"));
 }
 ''');
   }
@@ -1337,10 +1393,10 @@
 /*=T*/ m/*<T>*/(/*=T*/ x) => x;
 }
 class D extends C {
-/*severe:INVALID_METHOD_OVERRIDE*/m(x) => x;
+/*error:INVALID_METHOD_OVERRIDE*/m(x) => x;
 }
 main() {
-  int y = /*info:DYNAMIC_CAST*/new D()./*warning:WRONG_NUMBER_OF_TYPE_ARGUMENTS*/m/*<int>*/(42);
+  int y = /*info:DYNAMIC_CAST*/new D()./*error:WRONG_NUMBER_OF_TYPE_ARGUMENTS*/m/*<int>*/(42);
   print(y);
 }
 ''');
@@ -1351,7 +1407,7 @@
 /*=T*/ f/*<T>*/(List/*<T>*/ s) => null;
 main() {
   String x = f(/*info:INFERRED_TYPE_LITERAL*/['hi']);
-  String y = f(/*info:INFERRED_TYPE_LITERAL*/[/*warning:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/42]);
+  String y = f(/*info:INFERRED_TYPE_LITERAL*/[/*error:LIST_ELEMENT_TYPE_NOT_ASSIGNABLE*/42]);
 }
 ''');
   }
@@ -1456,11 +1512,11 @@
 takeIIO(math.max);
 takeDDO(math.max);
 
-takeOOI(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
-takeIDI(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
-takeDID(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
-takeOON(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
-takeOOO(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
+takeOOI(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
+takeIDI(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
+takeDID(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
+takeOON(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
+takeOOO(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/math.max);
 
 // Also test SimpleIdentifier
 takeIII(min);
@@ -1473,11 +1529,11 @@
 takeIIO(min);
 takeDDO(min);
 
-takeOOI(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
-takeIDI(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
-takeDID(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
-takeOON(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
-takeOOO(/*severe:STATIC_TYPE_ERROR,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
+takeOOI(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
+takeIDI(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
+takeDID(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
+takeOON(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
+takeOOO(/*error:STATIC_TYPE_ERROR,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/min);
 
 // Also PropertyAccess
 takeIII(new C().m);
@@ -1499,14 +1555,14 @@
 //
 // That's legal because we're loosening parameter types.
 //
-takeOON(/*warning:DOWN_CAST_COMPOSITE,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
-takeOOO(/*warning:DOWN_CAST_COMPOSITE,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
+takeOON(/*warning:DOWN_CAST_COMPOSITE,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
+takeOOO(/*warning:DOWN_CAST_COMPOSITE,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
 
 // Note: this is a warning because a downcast of a method tear-off could work
 // in "normal" Dart, due to bivariance.
-takeOOI(/*warning:DOWN_CAST_COMPOSITE,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
-takeIDI(/*warning:DOWN_CAST_COMPOSITE,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
-takeDID(/*warning:DOWN_CAST_COMPOSITE,warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
+takeOOI(/*warning:DOWN_CAST_COMPOSITE,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
+takeIDI(/*warning:DOWN_CAST_COMPOSITE,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
+takeDID(/*warning:DOWN_CAST_COMPOSITE,error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/new C().m);
 }
 
 void takeIII(int fn(int a, int b)) {}
@@ -1548,10 +1604,10 @@
     checkFile('''
 import 'dart:_foreign_helper' show JS;
 main() {
-  String x = /*warning:INVALID_ASSIGNMENT*/JS('int', '42');
+  String x = /*error:INVALID_ASSIGNMENT*/JS('int', '42');
   var y = JS('String', '"hello"');
   y = "world";
-  y = /*warning:INVALID_ASSIGNMENT*/42;
+  y = /*error:INVALID_ASSIGNMENT*/42;
 }
 ''');
   }
@@ -1844,13 +1900,13 @@
   int i;
 
   s = /*info:DYNAMIC_CAST*/new B().x;
-  s = /*warning:INVALID_ASSIGNMENT*/new B().y;
+  s = /*error:INVALID_ASSIGNMENT*/new B().y;
   s = new B().z;
-  s = /*warning:INVALID_ASSIGNMENT*/new B().w;
+  s = /*error:INVALID_ASSIGNMENT*/new B().w;
 
   i = /*info:DYNAMIC_CAST*/new B().x;
   i = new B().y;
-  i = /*warning:INVALID_ASSIGNMENT*/new B().z;
+  i = /*error:INVALID_ASSIGNMENT*/new B().z;
   i = new B().w;
 }
 ''');
@@ -2011,8 +2067,8 @@
 class B extends A { B(ignore); }
 var a = new A();
 // Note: it doesn't matter that some of these refer to 'x'.
-var b = new B(/*warning:UNDEFINED_IDENTIFIER*/x);  // allocations
-var c1 = [/*warning:UNDEFINED_IDENTIFIER*/x];      // list literals
+var b = new B(/*error:UNDEFINED_IDENTIFIER*/x);  // allocations
+var c1 = [/*error:UNDEFINED_IDENTIFIER*/x];      // list literals
 var c2 = const [];
 var d = <dynamic, dynamic>{'a': 'b'};     // map literals
 var e = new A()..x = 3; // cascades
@@ -2021,32 +2077,32 @@
                         // connected component.
 var g = -3;
 var h = new A() + 3;
-var i = /*warning:UNDEFINED_OPERATOR*/- new A();
+var i = /*error:UNDEFINED_OPERATOR*/- new A();
 var j = null as B;
 
 test1() {
-  a = /*warning:INVALID_ASSIGNMENT*/"hi";
+  a = /*error:INVALID_ASSIGNMENT*/"hi";
   a = new B(3);
-  b = /*warning:INVALID_ASSIGNMENT*/"hi";
+  b = /*error:INVALID_ASSIGNMENT*/"hi";
   b = new B(3);
   c1 = [];
-  c1 = /*warning:INVALID_ASSIGNMENT*/{};
+  c1 = /*error:INVALID_ASSIGNMENT*/{};
   c2 = [];
-  c2 = /*warning:INVALID_ASSIGNMENT*/{};
+  c2 = /*error:INVALID_ASSIGNMENT*/{};
   d = {};
-  d = /*warning:INVALID_ASSIGNMENT*/3;
+  d = /*error:INVALID_ASSIGNMENT*/3;
   e = new A();
-  e = /*warning:INVALID_ASSIGNMENT*/{};
+  e = /*error:INVALID_ASSIGNMENT*/{};
   f = 3;
-  f = /*warning:INVALID_ASSIGNMENT*/false;
+  f = /*error:INVALID_ASSIGNMENT*/false;
   g = 1;
-  g = /*warning:INVALID_ASSIGNMENT*/false;
-  h = /*warning:INVALID_ASSIGNMENT*/false;
+  g = /*error:INVALID_ASSIGNMENT*/false;
+  h = /*error:INVALID_ASSIGNMENT*/false;
   h = new B('b');
   i = false;
   j = new B('b');
-  j = /*warning:INVALID_ASSIGNMENT*/false;
-  j = /*warning:INVALID_ASSIGNMENT*/[];
+  j = /*error:INVALID_ASSIGNMENT*/false;
+  j = /*error:INVALID_ASSIGNMENT*/[];
 }
 ''');
   }
@@ -2079,7 +2135,7 @@
 }
 
 foo() {
-  String y = /*warning:INVALID_ASSIGNMENT*/new B().x;
+  String y = /*error:INVALID_ASSIGNMENT*/new B().x;
   int z = new B().x;
 }
 ''');
@@ -2134,8 +2190,8 @@
 var y = x;
 
 test1() {
-  x = /*warning:INVALID_ASSIGNMENT*/"hi";
-  y = /*warning:INVALID_ASSIGNMENT*/"hi";
+  x = /*error:INVALID_ASSIGNMENT*/"hi";
+  y = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -2151,8 +2207,8 @@
 class B { static var y = A.x; }
 
 test1() {
-  A.x = /*warning:INVALID_ASSIGNMENT*/"hi";
-  B.y = /*warning:INVALID_ASSIGNMENT*/"hi";
+  A.x = /*error:INVALID_ASSIGNMENT*/"hi";
+  B.y = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -2296,7 +2352,7 @@
     checkFile('''
 class Foo {
   var x = 1;
-  Foo([this.x = /*warning:INVALID_ASSIGNMENT*/"1"]);
+  Foo([this.x = /*error:INVALID_ASSIGNMENT*/"1"]);
 }''');
   }
 
@@ -2551,6 +2607,19 @@
     expect(x.type.toString(), 'Map<String, () → int>');
   }
 
+  void test_inferredType_opAssignToProperty() {
+    var mainUnit = checkFile('''
+class C {
+  num n;
+}
+C f() => null;
+var x = (f().n *= null);
+''');
+    var x = mainUnit.topLevelVariables[0];
+    expect(x.name, 'x');
+    expect(x.type.toString(), 'num');
+  }
+
   void test_inferredType_opAssignToProperty_prefixedIdentifier() {
     var mainUnit = checkFile('''
 class C {
@@ -2578,19 +2647,6 @@
     expect(x.type.toString(), 'num');
   }
 
-  void test_inferredType_opAssignToProperty() {
-    var mainUnit = checkFile('''
-class C {
-  num n;
-}
-C f() => null;
-var x = (f().n *= null);
-''');
-    var x = mainUnit.topLevelVariables[0];
-    expect(x.name, 'x');
-    expect(x.type.toString(), 'num');
-  }
-
   void test_inferredType_opAssignToProperty_viaInterface() {
     var mainUnit = checkFile('''
 class I {
@@ -2605,6 +2661,42 @@
     expect(x.type.toString(), 'num');
   }
 
+  void test_inferredType_viaClosure_multipleLevelsOfNesting() {
+    var mainUnit = checkFile('''
+class C {
+  static final f = (bool b) => (int i) => /*info:INFERRED_TYPE_LITERAL*/{i: b};
+}
+''');
+    var f = mainUnit.getType('C').fields[0];
+    expect(f.type.toString(), '(bool) → (int) → Map<int, bool>');
+  }
+
+  void test_inferredType_viaClosure_typeDependsOnArgs() {
+    var mainUnit = checkFile('''
+class C {
+  static final f = (bool b) => b;
+}
+''');
+    var f = mainUnit.getType('C').fields[0];
+    expect(f.type.toString(), '(bool) → bool');
+  }
+
+  void test_inferredType_viaClosure_typeIndependentOfArgs_field() {
+    var mainUnit = checkFile('''
+class C {
+  static final f = (bool b) => 1;
+}
+''');
+    var f = mainUnit.getType('C').fields[0];
+    expect(f.type.toString(), '(bool) → int');
+  }
+
+  void test_inferredType_viaClosure_typeIndependentOfArgs_topLevel() {
+    var mainUnit = checkFile('final f = (bool b) => 1;');
+    var f = mainUnit.topLevelVariables[0];
+    expect(f.type.toString(), '(bool) → int');
+  }
+
   void test_inferStaticsTransitively() {
     addFile(
         '''
@@ -2701,11 +2793,11 @@
 }
 
 class B extends A {
-  /*severe:INVALID_FIELD_OVERRIDE*/get x => 3;
+  /*error:INVALID_FIELD_OVERRIDE*/get x => 3;
 }
 
 foo() {
-  String y = /*warning:INVALID_ASSIGNMENT*/new B().x;
+  String y = /*error:INVALID_ASSIGNMENT*/new B().x;
   int z = new B().x;
 }
 ''');
@@ -2722,7 +2814,7 @@
 }
 
 foo() {
-  String y = /*warning:INVALID_ASSIGNMENT*/new B().x;
+  String y = /*error:INVALID_ASSIGNMENT*/new B().x;
   int z = new B().x;
 }
 ''');
@@ -2733,7 +2825,7 @@
     checkFile('''
 test1() {
   int x = 3;
-  x = /*warning:INVALID_ASSIGNMENT*/"hi";
+  x = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -2742,7 +2834,7 @@
     checkFile('''
 test2() {
   var x = 3;
-  x = /*warning:INVALID_ASSIGNMENT*/"hi";
+  x = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -2754,13 +2846,13 @@
 
   test1() {
     var a = x;
-    a = /*warning:INVALID_ASSIGNMENT*/"hi";
+    a = /*error:INVALID_ASSIGNMENT*/"hi";
     a = 3;
     var b = y;
-    b = /*warning:INVALID_ASSIGNMENT*/"hi";
+    b = /*error:INVALID_ASSIGNMENT*/"hi";
     b = 4;
     var c = z;
-    c = /*warning:INVALID_ASSIGNMENT*/"hi";
+    c = /*error:INVALID_ASSIGNMENT*/"hi";
     c = 4;
   }
 
@@ -2776,13 +2868,13 @@
 
 test1() {
   var a = x;
-  a = /*warning:INVALID_ASSIGNMENT*/"hi";
+  a = /*error:INVALID_ASSIGNMENT*/"hi";
   a = 3;
   var b = y;
-  b = /*warning:INVALID_ASSIGNMENT*/"hi";
+  b = /*error:INVALID_ASSIGNMENT*/"hi";
   b = 4;
   var c = z;
-  c = /*warning:INVALID_ASSIGNMENT*/"hi";
+  c = /*error:INVALID_ASSIGNMENT*/"hi";
   c = 4;
 }
 
@@ -2809,7 +2901,7 @@
 }
 foo() {
   int y = new C().x;
-  String z = /*warning:INVALID_ASSIGNMENT*/new C().x;
+  String z = /*error:INVALID_ASSIGNMENT*/new C().x;
 }
 ''');
   }
@@ -2823,11 +2915,11 @@
 
 class B implements A<int> {
   get x => 3;
-  get w => /*warning:RETURN_OF_INVALID_TYPE*/"hello";
+  get w => /*error:RETURN_OF_INVALID_TYPE*/"hello";
 }
 
 foo() {
-  String y = /*warning:INVALID_ASSIGNMENT*/new B().x;
+  String y = /*error:INVALID_ASSIGNMENT*/new B().x;
   int z = new B().x;
 }
 ''');
@@ -2841,11 +2933,11 @@
 
 class B<E> extends A<E> {
   E y;
-  /*severe:INVALID_FIELD_OVERRIDE*/get x => y;
+  /*error:INVALID_FIELD_OVERRIDE*/get x => y;
 }
 
 foo() {
-  int y = /*warning:INVALID_ASSIGNMENT*/new B<String>().x;
+  int y = /*error:INVALID_ASSIGNMENT*/new B<String>().x;
   String z = new B<String>().x;
 }
 ''');
@@ -2874,7 +2966,7 @@
 }
 
 foo () {
-  int y = /*warning:INVALID_ASSIGNMENT*/new B().m(null, null);
+  int y = /*error:INVALID_ASSIGNMENT*/new B().m(null, null);
   String z = new B().m(null, null);
 }
 ''');
@@ -2887,7 +2979,7 @@
 }
 
 class B implements A<int> {
-  /*severe:INVALID_METHOD_OVERRIDE*/dynamic get x => 3;
+  /*error:INVALID_METHOD_OVERRIDE*/dynamic get x => 3;
 }
 
 foo() {
@@ -2930,7 +3022,7 @@
 }
 
 foo () {
-  int y = /*warning:INVALID_ASSIGNMENT*/new B<String>().m(null, null).value;
+  int y = /*error:INVALID_ASSIGNMENT*/new B<String>().m(null, null).value;
   String z = new B<String>().m(null, null).value;
 }
 ''');
@@ -2945,7 +3037,7 @@
 class Bar<T extends Iterable<String>> {
   void foo(T t) {
     for (var i in t) {
-      int x = /*warning:INVALID_ASSIGNMENT*/i;
+      int x = /*error:INVALID_ASSIGNMENT*/i;
     }
   }
 }
@@ -2953,7 +3045,7 @@
 class Baz<T, E extends Iterable<T>, S extends E> {
   void foo(S t) {
     for (var i in t) {
-      int x = /*warning:INVALID_ASSIGNMENT*/i;
+      int x = /*error:INVALID_ASSIGNMENT*/i;
       T y = i;
     }
   }
@@ -2962,7 +3054,7 @@
 test() {
   var list = <Foo>[];
   for (var x in list) {
-    String y = /*warning:INVALID_ASSIGNMENT*/x;
+    String y = /*error:INVALID_ASSIGNMENT*/x;
   }
 
   for (dynamic x in list) {
@@ -2971,7 +3063,7 @@
     String y = /*info:DYNAMIC_CAST,info:INVALID_ASSIGNMENT*/x;
   }
 
-  for (String x in /*warning:FOR_IN_OF_INVALID_ELEMENT_TYPE*/list) {
+  for (String x in /*error:FOR_IN_OF_INVALID_ELEMENT_TYPE*/list) {
     String y = x;
   }
 
@@ -2992,7 +3084,7 @@
 
   var map = <String, Foo>{};
   // Error: map must be an Iterable.
-  for (var x in /*warning:FOR_IN_OF_INVALID_TYPE*/map) {
+  for (var x in /*error:FOR_IN_OF_INVALID_TYPE*/map) {
     String y = /*info:DYNAMIC_CAST*/x;
   }
 
@@ -3039,7 +3131,7 @@
 import 'a.dart';
 import 'b.dart';
 main() {
-  new A().x = /*warning:INVALID_ASSIGNMENT*/'foo';
+  new A().x = /*error:INVALID_ASSIGNMENT*/'foo';
   new B().x = 'foo';
 }
 ''');
@@ -3150,14 +3242,14 @@
     checkFile(r'''
 test1() {
   var x = /*info:INFERRED_TYPE_LITERAL*/[1, 2, 3];
-  x.add(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
-  x.add(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0);
+  x.add(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
+  x.add(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0);
   x.add(4);
   List<num> y = x;
 }
 test2() {
   var x = /*info:INFERRED_TYPE_LITERAL*/[1, 2.0, 3];
-  x.add(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
+  x.add(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
   x.add(4.0);
   List<int> y = /*info:ASSIGNMENT_CAST*/x;
 }
@@ -3168,14 +3260,14 @@
     checkFile(r'''
 var x1 = /*info:INFERRED_TYPE_LITERAL*/[1, 2, 3];
 test1() {
-  x1.add(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
-  x1.add(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0);
+  x1.add(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
+  x1.add(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0);
   x1.add(4);
   List<num> y = x1;
 }
 var x2 = /*info:INFERRED_TYPE_LITERAL*/[1, 2.0, 3];
 test2() {
-  x2.add(/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
+  x2.add(/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi');
   x2.add(4.0);
   List<int> y = /*info:ASSIGNMENT_CAST*/x2;
 }
@@ -3209,18 +3301,18 @@
 test1() {
   var x = /*info:INFERRED_TYPE_LITERAL*/{ 1: 'x', 2: 'y' };
   x[3] = 'z';
-  x[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
-  x[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0] = 'u';
-  x[3] = /*warning:INVALID_ASSIGNMENT*/42;
+  x[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
+  x[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0] = 'u';
+  x[3] = /*error:INVALID_ASSIGNMENT*/42;
   Map<num, String> y = x;
 }
 
 test2() {
   var x = /*info:INFERRED_TYPE_LITERAL*/{ 1: 'x', 2: 'y', 3.0: new RegExp('.') };
   x[3] = 'z';
-  x[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
+  x[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
   x[4.0] = 'u';
-  x[3] = /*warning:INVALID_ASSIGNMENT*/42;
+  x[3] = /*error:INVALID_ASSIGNMENT*/42;
   Pattern p = null;
   x[2] = p;
   Map<int, String> y = /*info:ASSIGNMENT_CAST*/x;
@@ -3233,18 +3325,18 @@
 var x1 = /*info:INFERRED_TYPE_LITERAL*/{ 1: 'x', 2: 'y' };
 test1() {
   x1[3] = 'z';
-  x1[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
-  x1[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0] = 'u';
-  x1[3] = /*warning:INVALID_ASSIGNMENT*/42;
+  x1[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
+  x1[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/4.0] = 'u';
+  x1[3] = /*error:INVALID_ASSIGNMENT*/42;
   Map<num, String> y = x1;
 }
 
 var x2 = /*info:INFERRED_TYPE_LITERAL*/{ 1: 'x', 2: 'y', 3.0: new RegExp('.') };
 test2() {
   x2[3] = 'z';
-  x2[/*warning:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
+  x2[/*error:ARGUMENT_TYPE_NOT_ASSIGNABLE*/'hi'] = 'w';
   x2[4.0] = 'u';
-  x2[3] = /*warning:INVALID_ASSIGNMENT*/42;
+  x2[3] = /*error:INVALID_ASSIGNMENT*/42;
   Pattern p = null;
   x2[2] = p;
   Map<int, String> y = /*info:ASSIGNMENT_CAST*/x2;
@@ -3274,6 +3366,54 @@
     expect(x.type.toString(), 'Map<dynamic, dynamic>');
   }
 
+  void test_methodCall_withTypeArguments_instanceMethod() {
+    var mainUnit = checkFile('''
+class C {
+  D/*<T>*/ f/*<T>*/() => null;
+}
+class D<T> {}
+var f = new C().f/*<int>*/();
+''');
+    var v = mainUnit.topLevelVariables[0];
+    expect(v.type.toString(), 'D<int>');
+  }
+
+  void test_methodCall_withTypeArguments_instanceMethod_identifierSequence() {
+    var mainUnit = checkFile('''
+class C {
+  D/*<T>*/ f/*<T>*/() => null;
+}
+class D<T> {}
+C c;
+var f = c.f/*<int>*/();
+''');
+    var v = mainUnit.topLevelVariables[1];
+    expect(v.name, 'f');
+    expect(v.type.toString(), 'D<int>');
+  }
+
+  void test_methodCall_withTypeArguments_staticMethod() {
+    var mainUnit = checkFile('''
+class C {
+  static D/*<T>*/ f/*<T>*/() => null;
+}
+class D<T> {}
+var f = C.f/*<int>*/();
+''');
+    var v = mainUnit.topLevelVariables[0];
+    expect(v.type.toString(), 'D<int>');
+  }
+
+  void test_methodCall_withTypeArguments_topLevelFunction() {
+    var mainUnit = checkFile('''
+D/*<T>*/ f/*<T>*/() => null;
+class D<T> {}
+var g = f/*<int>*/();
+''');
+    var v = mainUnit.topLevelVariables[0];
+    expect(v.type.toString(), 'D<int>');
+  }
+
   void test_noErrorWhenDeclaredTypeIsNumAndAssignedNull() {
     checkFile('''
 test1() {
@@ -3302,8 +3442,8 @@
   h = 'hello';
   (/*info:DYNAMIC_INVOKE*/h.foo());
 
-  foo(/*info:INFERRED_TYPE_CLOSURE,info:INFERRED_TYPE_CLOSURE*/(x) => null);
-  foo(/*info:INFERRED_TYPE_CLOSURE,info:INFERRED_TYPE_CLOSURE*/(x) => throw "not implemented");
+  foo(/*info:INFERRED_TYPE_CLOSURE*/(x) => null);
+  foo(/*info:INFERRED_TYPE_CLOSURE*/(x) => throw "not implemented");
 }
 ''');
   }
@@ -3346,10 +3486,10 @@
 
 test5() {
   var a1 = new A();
-  a1.x = /*warning:INVALID_ASSIGNMENT*/"hi";
+  a1.x = /*error:INVALID_ASSIGNMENT*/"hi";
 
   A a2 = new A();
-  a2.x = /*warning:INVALID_ASSIGNMENT*/"hi";
+  a2.x = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -3502,7 +3642,7 @@
     checkFile('''
 import 'a.dart';
 test() {
-  x = /*warning:INVALID_ASSIGNMENT*/"hi";
+  x = /*error:INVALID_ASSIGNMENT*/"hi";
 }
 ''');
   }
@@ -3560,7 +3700,7 @@
     // Check that type inference dependencies are properly checked when a top
     // level variable appears at the beginning of a string of identifiers
     // separated by '.'.
-    var mainUnit = checkFile('''
+    checkFile('''
 final a = /*info:DYNAMIC_INVOKE*/c.i;
 final c = new C(a);
 class C {
@@ -3583,15 +3723,15 @@
   ///   * all expected failures are listed in the source code using comments
   ///     immediately in front of the AST node that should contain the error.
   ///
-  ///   * errors are formatted as a token `level:Type`, where `level` is the
-  ///     logging level were the error would be reported at, and `Type` is the
-  ///     concrete subclass of [StaticInfo] that denotes the error.
+  ///   * errors are formatted as a token `severity:ErrorCode`, where
+  ///     `severity` is the ErrorSeverity the error would be reported at, and
+  ///     `ErrorCode` is the error code's name.
   ///
   /// For example, to check that an assignment produces a type error, you can
   /// create a file like:
   ///
   ///     addFile('''
-  ///       String x = /*severe:STATIC_TYPE_ERROR*/3;
+  ///       String x = /*error:STATIC_TYPE_ERROR*/3;
   ///     ''');
   ///     check();
   ///
diff --git a/pkg/analyzer/test/src/task/strong/strong_test_helper.dart b/pkg/analyzer/test/src/task/strong/strong_test_helper.dart
index 6e5b83b..82829eb 100644
--- a/pkg/analyzer/test/src/task/strong/strong_test_helper.dart
+++ b/pkg/analyzer/test/src/task/strong/strong_test_helper.dart
@@ -11,11 +11,11 @@
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/file_system/file_system.dart';
 import 'package:analyzer/file_system/memory_file_system.dart';
+import 'package:analyzer/source/error_processor.dart';
 import 'package:analyzer/src/dart/ast/token.dart';
 import 'package:analyzer/src/generated/engine.dart';
 import 'package:analyzer/src/generated/error.dart';
 import 'package:analyzer/src/generated/source.dart';
-import 'package:logging/logging.dart';
 import 'package:source_span/source_span.dart';
 import 'package:unittest/unittest.dart';
 
@@ -29,15 +29,15 @@
 ///   * all expected failures are listed in the source code using comments
 ///     immediately in front of the AST node that should contain the error.
 ///
-///   * errors are formatted as a token `level:Type`, where `level` is the
-///     logging level were the error would be reported at, and `Type` is the
-///     concrete subclass of [StaticInfo] that denotes the error.
+///   * errors are formatted as a token `severity:ErrorCode`, where
+///     `severity` is the ErrorSeverity the error would be reported at, and
+///     `ErrorCode` is the error code's name.
 ///
 /// For example, to check that an assignment produces a type error, you can
 /// create a file like:
 ///
 ///     addFile('''
-///       String x = /*severe:STATIC_TYPE_ERROR*/3;
+///       String x = /*error:STATIC_TYPE_ERROR*/3;
 ///     ''');
 ///     check();
 ///
@@ -54,7 +54,7 @@
 /// the file text.
 ///
 /// Returns the main resolved library. This can be used for further checks.
-CompilationUnit check() {
+CompilationUnit check({bool implicitCasts: true, bool implicitDynamic: true}) {
   _checkCalled = true;
 
   expect(files.getFile('/main.dart').exists, true,
@@ -66,14 +66,18 @@
   AnalysisOptionsImpl options = context.analysisOptions as AnalysisOptionsImpl;
   options.strongMode = true;
   options.strongModeHints = true;
+  options.implicitCasts = implicitCasts;
+  options.implicitDynamic = implicitDynamic;
+  var mockSdk = new MockSdk();
+  mockSdk.context.analysisOptions.strongMode = true;
   context.sourceFactory =
-      new SourceFactory([new DartUriResolver(new MockSdk()), uriResolver]);
+      new SourceFactory([new DartUriResolver(mockSdk), uriResolver]);
 
   // Run the checker on /main.dart.
   Source mainSource = uriResolver.resolveAbsolute(new Uri.file('/main.dart'));
   var initialLibrary = context.resolveCompilationUnit2(mainSource, mainSource);
 
-  var collector = new _ErrorCollector();
+  var collector = new _ErrorCollector(context);
 
   // Extract expectations from the comments in the test files, and
   // check that all errors we emit are included in the expected map.
@@ -99,7 +103,7 @@
           e.errorCode != HintCode.UNUSED_IMPORT &&
           e.errorCode != HintCode.UNUSED_LOCAL_VARIABLE &&
           e.errorCode != TodoCode.TODO));
-      _expectErrors(resolved, errors);
+      _expectErrors(context, resolved, errors);
     }
   }
 
@@ -128,14 +132,6 @@
   });
 }
 
-Level _actualErrorLevel(AnalysisError actual) {
-  return const <ErrorSeverity, Level>{
-    ErrorSeverity.ERROR: Level.SEVERE,
-    ErrorSeverity.WARNING: Level.WARNING,
-    ErrorSeverity.INFO: Level.INFO
-  }[actual.errorCode.errorSeverity];
-}
-
 SourceSpanWithContext _createSpanHelper(
     LineInfo lineInfo, int start, Source source, String content,
     {int end}) {
@@ -172,7 +168,14 @@
   }
 }
 
-void _expectErrors(CompilationUnit unit, List<AnalysisError> actualErrors) {
+ErrorSeverity _errorSeverity(AnalysisContext context, AnalysisError error) {
+  // Attempt to process severity in a similar way to analyzer_cli and server.
+  return ErrorProcessor.getProcessor(context, error)?.severity ??
+      error.errorCode.errorSeverity;
+}
+
+void _expectErrors(AnalysisContext context, CompilationUnit unit,
+    List<AnalysisError> actualErrors) {
   var expectedErrors = _findExpectedErrors(unit.beginToken);
 
   // Sort both lists: by offset, then level, then name.
@@ -180,7 +183,7 @@
     int delta = x.offset.compareTo(y.offset);
     if (delta != 0) return delta;
 
-    delta = x.errorCode.errorSeverity.compareTo(y.errorCode.errorSeverity);
+    delta = _errorSeverity(context, x).compareTo(_errorSeverity(context, y));
     if (delta != 0) return delta;
 
     return _errorCodeName(x.errorCode).compareTo(_errorCodeName(y.errorCode));
@@ -189,7 +192,7 @@
     int delta = x.offset.compareTo(y.offset);
     if (delta != 0) return delta;
 
-    delta = x.level.compareTo(y.level);
+    delta = x.severity.compareTo(y.severity);
     if (delta != 0) return delta;
 
     return x.typeName.compareTo(y.typeName);
@@ -202,7 +205,7 @@
   for (var expected in expectedErrors) {
     AnalysisError actual = expected._removeMatchingActual(actualErrors);
     if (actual != null) {
-      if (_actualErrorLevel(actual) != expected.level ||
+      if (_errorSeverity(context, actual) != expected.severity ||
           _errorCodeName(actual.errorCode) != expected.typeName) {
         different[expected] = actual;
       }
@@ -215,14 +218,14 @@
   List<AnalysisError> unexpected = actualErrors;
 
   if (unreported.isNotEmpty || unexpected.isNotEmpty || different.isNotEmpty) {
-    _reportFailure(unit, unreported, unexpected, different);
+    _reportFailure(context, unit, unreported, unexpected, different);
   }
 }
 
 List<_ErrorExpectation> _findExpectedErrors(Token beginToken) {
   var expectedErrors = <_ErrorExpectation>[];
 
-  // Collect expectations like "severe:STATIC_TYPE_ERROR" from comment tokens.
+  // Collect expectations like "error:STATIC_TYPE_ERROR" from comment tokens.
   for (Token t = beginToken; t.type != TokenType.EOF; t = t.next) {
     for (CommentToken c = t.precedingComments; c != null; c = c.next) {
       if (c.type == TokenType.MULTI_LINE_COMMENT) {
@@ -269,6 +272,7 @@
 }
 
 void _reportFailure(
+    AnalysisContext context,
     CompilationUnit unit,
     List<_ErrorExpectation> unreported,
     List<AnalysisError> unexpected,
@@ -283,7 +287,7 @@
     var span = _createSpanHelper(
         unit.lineInfo, offset, unit.element.source, sourceCode,
         end: offset + length);
-    var levelName = _actualErrorLevel(error).name.toLowerCase();
+    var levelName = _errorSeverity(context, error).displayName;
     return '@$offset $levelName:${_errorCodeName(error.errorCode)}\n' +
         span.message(error.message);
   }
@@ -292,8 +296,8 @@
     int offset = error.offset;
     var span = _createSpanHelper(
         unit.lineInfo, offset, unit.element.source, sourceCode);
-    var levelName = error.level.toString().toLowerCase();
-    return '@$offset $levelName:${error.typeName}\n' + span.message('');
+    var severity = error.severity.displayName;
+    return '@$offset $severity:${error.typeName}\n' + span.message('');
   }
 
   var message = new StringBuffer();
@@ -319,15 +323,16 @@
 }
 
 class _ErrorCollector implements AnalysisErrorListener {
+  final AnalysisContext _context;
   List<AnalysisError> errors;
   final bool hints;
 
-  _ErrorCollector({this.hints: true});
+  _ErrorCollector(this._context, {this.hints: true});
 
   void onError(AnalysisError error) {
     // Unless DDC hints are requested, filter them out.
     var HINT = ErrorSeverity.INFO.ordinal;
-    if (hints || error.errorCode.errorSeverity.ordinal > HINT) {
+    if (hints || _errorSeverity(_context, error).ordinal > HINT) {
       errors.add(error);
     }
   }
@@ -336,13 +341,12 @@
 /// Describes an expected message that should be produced by the checker.
 class _ErrorExpectation {
   final int offset;
-  final Level level;
+  final ErrorSeverity severity;
   final String typeName;
 
-  _ErrorExpectation(this.offset, this.level, this.typeName);
+  _ErrorExpectation(this.offset, this.severity, this.typeName);
 
-  String toString() =>
-      '@$offset ${level.toString().toLowerCase()}: [$typeName]';
+  String toString() => '@$offset ${severity.displayName}: [$typeName]';
 
   AnalysisError _removeMatchingActual(List<AnalysisError> actualErrors) {
     for (var actual in actualErrors) {
@@ -373,10 +377,10 @@
     var name = tokens[0].toUpperCase();
     var typeName = tokens[1];
 
-    var level =
-        Level.LEVELS.firstWhere((l) => l.name == name, orElse: () => null);
+    var level = ErrorSeverity.values
+        .firstWhere((l) => l.name == name, orElse: () => null);
     expect(level, isNotNull,
-        reason: 'invalid level in error descriptor: `${tokens[0]}`');
+        reason: 'invalid severity in error descriptor: `${tokens[0]}`');
     expect(typeName, isNotNull,
         reason: 'invalid type in error descriptor: ${tokens[1]}');
     return new _ErrorExpectation(offset, level, typeName);
diff --git a/pkg/analyzer/tool/summary/build_sdk_summaries.dart b/pkg/analyzer/tool/summary/build_sdk_summaries.dart
index 1bbd4da..bf6d8b5 100644
--- a/pkg/analyzer/tool/summary/build_sdk_summaries.dart
+++ b/pkg/analyzer/tool/summary/build_sdk_summaries.dart
@@ -1,16 +1,8 @@
 import 'dart:io';
 
-import 'package:analyzer/dart/ast/ast.dart';
-import 'package:analyzer/dart/element/element.dart';
-import 'package:analyzer/src/generated/engine.dart';
-import 'package:analyzer/src/generated/java_io.dart';
-import 'package:analyzer/src/generated/sdk.dart';
 import 'package:analyzer/src/generated/sdk_io.dart';
-import 'package:analyzer/src/generated/source.dart';
 import 'package:analyzer/src/summary/flat_buffers.dart' as fb;
-import 'package:analyzer/src/summary/index_unit.dart';
-import 'package:analyzer/src/summary/summarize_elements.dart';
-import 'package:path/path.dart';
+import 'package:analyzer/src/summary/summary_file_builder.dart';
 
 main(List<String> args) {
   if (args.length < 1) {
@@ -37,7 +29,7 @@
     // Prepare results.
     //
     String sdkPath = args.length > 2 ? args[2] : null;
-    _Output output = _buildMultipleOutputs(sdkPath, includeSpec);
+    SummaryOutput output = _buildMultipleOutputs(sdkPath, includeSpec);
     if (output == null) {
       exitCode = 1;
       return;
@@ -57,43 +49,32 @@
     //
     // Prepare results.
     //
-    _Output output = _buildMultipleOutputs(sdkPath, true);
+    SummaryOutput output = _buildMultipleOutputs(sdkPath, true);
     if (output == null) {
       exitCode = 1;
       return;
     }
+
     //
     // Write results.
     //
-    fb.Builder builder = new fb.Builder();
-    fb.Offset specSumOffset = builder.writeListUint8(output.spec.sum);
-    fb.Offset specIndexOffset = builder.writeListUint8(output.spec.index);
-    fb.Offset strongSumOffset = builder.writeListUint8(output.strong.sum);
-    fb.Offset strongIndexOffset = builder.writeListUint8(output.strong.index);
-    builder.startTable();
-    builder.addOffset(_FIELD_SPEC_SUM, specSumOffset);
-    builder.addOffset(_FIELD_SPEC_INDEX, specIndexOffset);
-    builder.addOffset(_FIELD_STRONG_SUM, strongSumOffset);
-    builder.addOffset(_FIELD_STRONG_INDEX, strongIndexOffset);
-    fb.Offset offset = builder.endTable();
-    new File(outputPath)
-        .writeAsBytesSync(builder.finish(offset), mode: FileMode.WRITE_ONLY);
+    output.write(outputPath);
   } else if (command == 'extract-spec-sum' && args.length == 3) {
     String inputPath = args[1];
     String outputPath = args[2];
-    _extractSingleOutput(inputPath, _FIELD_SPEC_SUM, outputPath);
+    _extractSingleOutput(inputPath, FIELD_SPEC_SUM, outputPath);
   } else if (command == 'extract-spec-index' && args.length == 3) {
     String inputPath = args[1];
     String outputPath = args[2];
-    _extractSingleOutput(inputPath, _FIELD_SPEC_INDEX, outputPath);
+    _extractSingleOutput(inputPath, FIELD_SPEC_INDEX, outputPath);
   } else if (command == 'extract-strong-sum' && args.length == 3) {
     String inputPath = args[1];
     String outputPath = args[2];
-    _extractSingleOutput(inputPath, _FIELD_STRONG_SUM, outputPath);
+    _extractSingleOutput(inputPath, FIELD_STRONG_SUM, outputPath);
   } else if (command == 'extract-strong-index' && args.length == 3) {
     String inputPath = args[1];
     String outputPath = args[2];
-    _extractSingleOutput(inputPath, _FIELD_STRONG_INDEX, outputPath);
+    _extractSingleOutput(inputPath, FIELD_STRONG_INDEX, outputPath);
   } else {
     _printUsage();
     exitCode = 1;
@@ -106,12 +87,7 @@
  */
 const BINARY_NAME = "build_sdk_summaries";
 
-const int _FIELD_SPEC_INDEX = 1;
-const int _FIELD_SPEC_SUM = 0;
-const int _FIELD_STRONG_INDEX = 3;
-const int _FIELD_STRONG_SUM = 2;
-
-_Output _buildMultipleOutputs(String sdkPath, bool includeSpec) {
+SummaryOutput _buildMultipleOutputs(String sdkPath, bool includeSpec) {
   //
   // Validate the SDK path.
   //
@@ -124,13 +100,23 @@
   } else {
     sdkPath = DirectoryBasedDartSdk.defaultSdkDirectory.getAbsolutePath();
   }
+
   //
   // Build spec and strong outputs.
   //
-  _BuilderOutput spec =
-      includeSpec ? new _Builder(sdkPath, false).build() : null;
-  _BuilderOutput strong = new _Builder(sdkPath, true).build();
-  return new _Output(spec, strong);
+  BuilderOutput spec = includeSpec ? _buildOutput(sdkPath, false) : null;
+  BuilderOutput strong = _buildOutput(sdkPath, true);
+  return new SummaryOutput(spec, strong);
+}
+
+BuilderOutput _buildOutput(String sdkPath, bool strongMode) {
+  String modeName = strongMode ? 'strong' : 'spec';
+  print('Generating $modeName mode summary and index.');
+  Stopwatch sw = new Stopwatch()..start();
+  SummaryBuildConfig config = new SummaryBuildConfig(strongMode: strongMode);
+  BuilderOutput output = new SummaryBuilder.forSdk(sdkPath, config).build();
+  print('\tDone in ${sw.elapsedMilliseconds} ms.');
+  return output;
 }
 
 /**
@@ -168,106 +154,3 @@
   print('  extract-strong-index input_file output_file');
   print('    Extract the strong-mode index file.');
 }
-
-class _Builder {
-  final String sdkPath;
-  final bool strongMode;
-
-  AnalysisContext context;
-  final Set<Source> processedSources = new Set<Source>();
-
-  final PackageBundleAssembler bundleAssembler = new PackageBundleAssembler();
-  final PackageIndexAssembler indexAssembler = new PackageIndexAssembler();
-
-  _Builder(this.sdkPath, this.strongMode);
-
-  /**
-   * Build a strong or spec mode summary for the Dart SDK at [sdkPath].
-   */
-  _BuilderOutput build() {
-    String modeName = strongMode ? 'strong' : 'spec';
-    print('Generating $modeName mode summary and index.');
-    Stopwatch sw = new Stopwatch()..start();
-    //
-    // Prepare SDK.
-    //
-    DirectoryBasedDartSdk sdk =
-        new DirectoryBasedDartSdk(new JavaFile(sdkPath), strongMode);
-    sdk.useSummary = false;
-    sdk.analysisOptions = new AnalysisOptionsImpl()..strongMode = strongMode;
-    context = sdk.context;
-    //
-    // Prepare 'dart:' URIs to serialize.
-    //
-    Set<String> uriSet =
-        sdk.sdkLibraries.map((SdkLibrary library) => library.shortName).toSet();
-    if (!strongMode) {
-      uriSet.add('dart:html/nativewrappers.dart');
-    }
-    uriSet.add('dart:html_common/html_common_dart2js.dart');
-    //
-    // Serialize each SDK library.
-    //
-    for (String uri in uriSet) {
-      Source libSource = sdk.mapDartUri(uri);
-      _serializeLibrary(libSource);
-    }
-    //
-    // Assemble the output.
-    //
-    List<int> sumBytes = bundleAssembler.assemble().toBuffer();
-    List<int> indexBytes = indexAssembler.assemble().toBuffer();
-    print('\tDone in ${sw.elapsedMilliseconds} ms.');
-    return new _BuilderOutput(sumBytes, indexBytes);
-  }
-
-  /**
-   * Serialize the library with the given [source] and all its direct or
-   * indirect imports and exports.
-   */
-  void _serializeLibrary(Source source) {
-    if (!processedSources.add(source)) {
-      return;
-    }
-    LibraryElement element = context.computeLibraryElement(source);
-    bundleAssembler.serializeLibraryElement(element);
-    element.importedLibraries.forEach((e) => _serializeLibrary(e.source));
-    element.exportedLibraries.forEach((e) => _serializeLibrary(e.source));
-    // Index every unit of the library.
-    for (CompilationUnitElement unitElement in element.units) {
-      Source unitSource = unitElement.source;
-      CompilationUnit unit =
-          context.resolveCompilationUnit2(unitSource, source);
-      indexAssembler.indexUnit(unit);
-    }
-  }
-}
-
-class _BuilderOutput {
-  final List<int> sum;
-  final List<int> index;
-
-  _BuilderOutput(this.sum, this.index);
-
-  void writeMultiple(String outputDirectoryPath, String modeName) {
-    // Write summary.
-    {
-      String outputPath = join(outputDirectoryPath, '$modeName.sum');
-      File file = new File(outputPath);
-      file.writeAsBytesSync(sum, mode: FileMode.WRITE_ONLY);
-    }
-    // Write index.
-    {
-      String outputPath = join(outputDirectoryPath, '$modeName.index');
-      File file = new File(outputPath);
-      file.writeAsBytesSync(index, mode: FileMode.WRITE_ONLY);
-    }
-  }
-}
-
-class _Output {
-  final _BuilderOutput spec;
-  final _BuilderOutput strong;
-
-  _Output(this.spec, this.strong);
-}
diff --git a/pkg/analyzer/tool/summary/dump_inferred_types.dart b/pkg/analyzer/tool/summary/dump_inferred_types.dart
index 1ff2c6e..676ecd6 100644
--- a/pkg/analyzer/tool/summary/dump_inferred_types.dart
+++ b/pkg/analyzer/tool/summary/dump_inferred_types.dart
@@ -3,7 +3,6 @@
 // BSD-style license that can be found in the LICENSE file.
 
 import 'dart:convert';
-import 'dart:io';
 
 import 'package:analyzer/dart/element/element.dart';
 import 'package:analyzer/dart/element/type.dart';
diff --git a/pkg/analyzer/tool/summary/generate.dart b/pkg/analyzer/tool/summary/generate.dart
index 4f9018e..869b38d 100644
--- a/pkg/analyzer/tool/summary/generate.dart
+++ b/pkg/analyzer/tool/summary/generate.dart
@@ -544,9 +544,7 @@
     out('class $builderName extends Object with $mixinName '
         'implements ${idlPrefix(name)} {');
     indent(() {
-      out('bool _finished = false;');
       // Generate fields.
-      out();
       for (idlModel.FieldDeclaration field in cls.fields) {
         String fieldName = field.name;
         idlModel.FieldType type = field.type;
@@ -572,7 +570,6 @@
           out('void set $fieldName($typeStr _value) {');
           indent(() {
             String stateFieldName = '_' + fieldName;
-            out('assert(!_finished);');
             // Validate that int(s) are non-negative.
             if (fieldType.typeName == 'int') {
               if (!fieldType.isList) {
@@ -637,8 +634,6 @@
       out();
       out('fb.Offset finish(fb.Builder fbBuilder) {');
       indent(() {
-        out('assert(!_finished);');
-        out('_finished = true;');
         // Write objects and remember Offset(s).
         for (idlModel.FieldDeclaration field in cls.fields) {
           idlModel.FieldType fieldType = field.type;
diff --git a/pkg/analyzer/tool/task_dependency_graph/generate.dart b/pkg/analyzer/tool/task_dependency_graph/generate.dart
index 2b904bb..fc88511 100644
--- a/pkg/analyzer/tool/task_dependency_graph/generate.dart
+++ b/pkg/analyzer/tool/task_dependency_graph/generate.dart
@@ -162,7 +162,7 @@
     List<UriResolver> uriResolvers = [
       new DartUriResolver(sdk),
       new PackageUriResolver(<JavaFile>[packagesDir]),
-      new FileUriResolver()
+      new ResourceUriResolver(PhysicalResourceProvider.INSTANCE)
     ];
     context.sourceFactory = new SourceFactory(uriResolvers);
     Source dartDartSource =
diff --git a/pkg/analyzer/tool/task_dependency_graph/tasks.dot b/pkg/analyzer/tool/task_dependency_graph/tasks.dot
index 402efa5..11040b1 100644
--- a/pkg/analyzer/tool/task_dependency_graph/tasks.dot
+++ b/pkg/analyzer/tool/task_dependency_graph/tasks.dot
@@ -25,6 +25,7 @@
   BuildLibraryElementTask -> BUILD_LIBRARY_ERRORS
   BuildLibraryElementTask -> IS_LAUNCHABLE
   BuildLibraryElementTask -> LIBRARY_ELEMENT1
+  BuildLibraryElementTask -> REFERENCED_NAMES
   BuildPublicNamespaceTask -> LIBRARY_ELEMENT3
   BuildSourceExportClosureTask -> EXPORT_SOURCE_CLOSURE
   BuildTypeProviderTask -> TYPE_PROVIDER
@@ -99,6 +100,8 @@
   GenerateLintsTask -> LINTS
   HINTS -> LibraryUnitErrorsTask
   HINTS [shape=box]
+  IGNORE_INFO -> DartErrorsTask
+  IGNORE_INFO [shape=box]
   IMPORTED_LIBRARIES -> BuildDirectiveElementsTask
   IMPORTED_LIBRARIES -> ReadyLibraryElement2Task
   IMPORTED_LIBRARIES -> ReadyLibraryElement5Task
@@ -189,7 +192,6 @@
   MODIFICATION_TIME -> VerifyUnitTask
   MODIFICATION_TIME [shape=box]
   PARSED_UNIT -> BuildCompilationUnitElementTask
-  PARSED_UNIT -> DartErrorsTask
   PARSED_UNIT [shape=box]
   PARSE_ERRORS -> dartErrorsForSource
   PARSE_ERRORS [shape=box]
@@ -299,7 +301,6 @@
   ResolveInstanceFieldsInUnitTask -> CREATED_RESOLVED_UNIT10
   ResolveInstanceFieldsInUnitTask -> RESOLVED_UNIT10
   ResolveLibraryReferencesTask -> LIBRARY_ELEMENT9
-  ResolveLibraryReferencesTask -> REFERENCED_NAMES
   ResolveLibraryTask -> LIBRARY_ELEMENT
   ResolveLibraryTypeNamesTask -> LIBRARY_ELEMENT6
   ResolveTopLevelLibraryTypeBoundsTask -> LIBRARY_ELEMENT5
@@ -322,6 +323,7 @@
   SOURCE_KIND [shape=box]
   STRONG_MODE_ERRORS -> LibraryUnitErrorsTask
   STRONG_MODE_ERRORS [shape=box]
+  ScanDartTask -> IGNORE_INFO
   ScanDartTask -> LINE_INFO
   ScanDartTask -> SCAN_ERRORS
   ScanDartTask -> TOKEN_STREAM
diff --git a/pkg/analyzer2dart/bin/analyzer2dart.dart b/pkg/analyzer2dart/bin/analyzer2dart.dart
deleted file mode 100644
index 9f372c1..0000000
--- a/pkg/analyzer2dart/bin/analyzer2dart.dart
+++ /dev/null
@@ -1,62 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/** The entry point for the command-line version analyzer2dart. */
-library analyzer2dart.cmdline;
-
-import 'dart:io';
-
-import 'package:analyzer/file_system/physical_file_system.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/sdk_io.dart';
-import 'package:analyzer/src/generated/source_io.dart';
-import 'package:compiler/src/source_file_provider.dart';
-
-import '../lib/src/closed_world.dart';
-import '../lib/src/driver.dart';
-import '../lib/src/converted_world.dart';
-import '../lib/src/dart_backend.dart';
-
-void main(List<String> args) {
-  // TODO(paulberry): hacky
-  String path = args[0];
-
-  PhysicalResourceProvider provider = PhysicalResourceProvider.INSTANCE;
-  DartSdk sdk = DirectoryBasedDartSdk.defaultSdk;
-  // TODO(johnniwinther): Support user specified output Uri.
-  // TODO(johnniwinther): Integrate messaging.
-  RandomAccessFileOutputProvider outputProvider =
-      new RandomAccessFileOutputProvider(
-          Uri.base.resolve('out.dart'),
-          Uri.base.resolve('out.dart.map'),
-          onInfo: (message) => print(message),
-          onFailure: (message) {
-            print(message);
-            exit(1);
-          });
-
-  Driver analyzer2Dart = new Driver(provider, sdk, outputProvider);
-
-  // Tell the analysis server about the root
-  Source source = analyzer2Dart.setRoot(path);
-
-  // Get the library element associated with the source.
-  FunctionElement entryPointElement = analyzer2Dart.resolveEntryPoint(source);
-
-  // TODO(brianwilkerson,paulberry,johnniwinther): Perform tree-growing by
-  // visiting the ast and feeding the dependencies into a work queue (enqueuer).
-  ClosedWorld world = analyzer2Dart.computeWorld(entryPointElement);
-
-  // TODO(brianwilkerson,paulberry,johnniwinther): Convert the ast into cps by
-  // visiting the ast and invoking the ir builder.
-  // TODO(johnniwinther): Convert the analyzer element model into the dart2js
-  // element model to fit the needs of the cps encoding above.
-  ConvertedWorld convertedWorld = convertWorld(world);
-
-  // TODO(johnniwinther): Feed the cps ir into the new dart2dart backend to
-  // generate dart file(s).
-  compileToDart(analyzer2Dart, convertedWorld);
-}
-
diff --git a/pkg/analyzer2dart/lib/src/closed_world.dart b/pkg/analyzer2dart/lib/src/closed_world.dart
deleted file mode 100644
index 8f91658..0000000
--- a/pkg/analyzer2dart/lib/src/closed_world.dart
+++ /dev/null
@@ -1,57 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.closedWorld;
-
-import 'dart:collection';
-
-import 'package:analyzer/analyzer.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/resolver.dart';
-
-/**
- * Container for the elements and AST nodes which have been determined by
- * tree shaking to be reachable by the program being compiled.
- */
-class ClosedWorld {
-  /// The core types of this world.
-  final TypeProvider typeProvider;
-
-  /// Returns the main function of this closed world compilation.
-  final FunctionElement mainFunction;
-
-  // TODO(paulberry): is it a problem to hold on to all the AST's for the
-  // duration of tree shaking & CPS generation?
-
-  /**
-   * Methods, toplevel functions, etc. that are reachable.
-   */
-  Map<ExecutableElement, Declaration> executableElements =
-      new HashMap<ExecutableElement, Declaration>();
-
-  /**
-   * Fields that are reachable.
-   */
-  Map<FieldElement, VariableDeclaration> fields =
-      new HashMap<FieldElement, VariableDeclaration>();
-
-  /**
-   * Top-level variables that are reachable.
-   */
-  // TODO(johnniwinther): Is there value in splitting fields and top-level
-  // variables?
-  Map<TopLevelVariableElement, VariableDeclaration> variables =
-      new HashMap<TopLevelVariableElement, VariableDeclaration>();
-
-  /**
-   * Classes that are instantiated from reachable code.
-   *
-   * TODO(paulberry): Also keep track of classes that are reachable but not
-   * instantiated (because they are extended or mixed in)
-   */
-  Map<ClassElement, ClassDeclaration> instantiatedClasses =
-      new HashMap<ClassElement, ClassDeclaration>();
-
-  ClosedWorld(this.typeProvider, this.mainFunction);
-}
diff --git a/pkg/analyzer2dart/lib/src/converted_world.dart b/pkg/analyzer2dart/lib/src/converted_world.dart
deleted file mode 100644
index 600487a..0000000
--- a/pkg/analyzer2dart/lib/src/converted_world.dart
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.convertedWorld;
-
-import 'dart:collection';
-
-import 'package:analyzer/analyzer.dart';
-import 'package:compiler/src/dart_types.dart' as dart2js;
-import 'package:compiler/src/elements/elements.dart' as dart2js;
-import 'package:analyzer/src/generated/element.dart' as analyzer;
-import 'package:compiler/src/cps_ir/cps_ir_nodes.dart' as ir;
-
-import 'closed_world.dart';
-import 'element_converter.dart';
-import 'cps_generator.dart';
-import 'util.dart';
-
-/// A [ClosedWorld] converted to the dart2js element model.
-abstract class ConvertedWorld {
-  Iterable<dart2js.LibraryElement> get libraries;
-  Iterable<dart2js.AstElement> get resolvedElements;
-  Iterable<dart2js.ClassElement> get instantiatedClasses;
-  dart2js.FunctionElement get mainFunction;
-  ir.Node getIr(dart2js.Element element);
-  dart2js.DartTypes get dartTypes;
-}
-
-class _ConvertedWorldImpl implements ConvertedWorld {
-  final dart2js.FunctionElement mainFunction;
-  Map<dart2js.AstElement, ir.Node> executableElements =
-      new HashMap<dart2js.AstElement, ir.Node>();
-  final List<dart2js.ClassElement> instantiatedClasses =
-      <dart2js.ClassElement>[];
-
-  _ConvertedWorldImpl(this.mainFunction);
-
-  // TODO(johnniwinther): Add all used libraries and all SDK libraries to the
-  // set of libraries in the converted world.
-  Iterable<dart2js.LibraryElement> get libraries => [mainFunction.library];
-
-  Iterable<dart2js.AstElement> get resolvedElements => executableElements.keys;
-
-  ir.Node getIr(dart2js.Element element) => executableElements[element];
-
-  final dart2js.DartTypes dartTypes = new _DartTypes();
-}
-
-ConvertedWorld convertWorld(ClosedWorld closedWorld) {
-  ElementConverter converter = new ElementConverter();
-  _ConvertedWorldImpl convertedWorld = new _ConvertedWorldImpl(
-      converter.convertElement(closedWorld.mainFunction));
-
-  void convert(analyzer.Element analyzerElement, AstNode node) {
-    // Skip conversion of SDK sources since we don't generate code for them
-    // anyway.
-    if (analyzerElement.source.isInSystemLibrary) return;
-
-    dart2js.AstElement dart2jsElement =
-        converter.convertElement(analyzerElement);
-    CpsElementVisitor visitor = new CpsElementVisitor(converter, node);
-    ir.Node cpsNode = analyzerElement.accept(visitor);
-    convertedWorld.executableElements[dart2jsElement] = cpsNode;
-    if (cpsNode == null && !analyzerElement.isSynthetic) {
-      String message =
-         'No CPS node generated for $analyzerElement (${node.runtimeType}).';
-      reportSourceMessage(analyzerElement.source, node, message);
-      throw new UnimplementedError(message);
-    }
-  }
-
-  void convertClass(analyzer.ClassElement analyzerElement, _) {
-    // Skip conversion of SDK sources since we don't generate code for them
-    // anyway.
-    if (analyzerElement.source.isInSystemLibrary) return;
-    convertedWorld.instantiatedClasses.add(
-        converter.convertElement(analyzerElement));
-  }
-
-  closedWorld.executableElements.forEach(convert);
-  closedWorld.variables.forEach(convert);
-  closedWorld.fields.forEach(convert);
-  closedWorld.instantiatedClasses.forEach(convertClass);
-
-  return convertedWorld;
-}
-
-// TODO(johnniwinther): Implement [coreTypes] using [TypeProvider].
-class _DartTypes implements dart2js.DartTypes {
-  @override
-  get coreTypes => throw new UnsupportedError("coreTypes");
-
-  @override
-  bool isSubtype(dart2js.DartType t, dart2js.DartType s) {
-    throw new UnsupportedError("isSubtype");
-  }
-}
\ No newline at end of file
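
Aside: the `convertWorld` function deleted above follows a simple loop: skip SDK sources, convert each reachable element, and treat a missing IR node for a non-synthetic element as a hard error. A minimal sketch of that loop, with toy stand-ins (`SrcElem`, strings as "IR") instead of the analyzer and dart2js models:

class SrcElem {
  final String name;
  final bool inSdk;
  final bool isSynthetic;
  SrcElem(this.name, {this.inSdk = false, this.isSynthetic = false});
}

Map<String, String> convertWorld(Map<SrcElem, String?> reachable) {
  final Map<String, String> ir = <String, String>{};
  reachable.forEach((SrcElem element, String? node) {
    if (element.inSdk) return; // No code is generated for SDK sources.
    if (node == null && !element.isSynthetic) {
      throw new UnimplementedError('No IR generated for ${element.name}.');
    }
    if (node != null) ir[element.name] = node;
  });
  return ir;
}

void main() {
  final Map<SrcElem, String?> world = <SrcElem, String?>{
    new SrcElem('main'): 'FunctionDefinition(main)',
    new SrcElem('print', inSdk: true): null, // skipped: SDK source
  };
  print(convertWorld(world)); // {main: FunctionDefinition(main)}
}
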
diff --git a/pkg/analyzer2dart/lib/src/cps_generator.dart b/pkg/analyzer2dart/lib/src/cps_generator.dart
deleted file mode 100644
index 1e5bb6d..0000000
--- a/pkg/analyzer2dart/lib/src/cps_generator.dart
+++ /dev/null
@@ -1,589 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.cps_generator;
-
-import 'package:analyzer/analyzer.dart';
-
-import 'package:compiler/src/dart_types.dart' as dart2js;
-import 'package:compiler/src/elements/elements.dart' as dart2js;
-import 'package:analyzer/src/generated/source.dart';
-import 'package:analyzer/src/generated/element.dart' as analyzer;
-
-import 'package:compiler/src/constant_system_dart.dart'
-    show DART_CONSTANT_SYSTEM;
-import 'package:compiler/src/cps_ir/cps_ir_nodes.dart' as ir;
-import 'package:compiler/src/cps_ir/cps_ir_builder.dart';
-import 'package:compiler/src/universe/universe.dart';
-
-import 'semantic_visitor.dart';
-import 'element_converter.dart';
-import 'util.dart';
-import 'identifier_semantics.dart';
-
-/// Visitor that converts the AST node of an analyzer element into a CPS ir
-/// node.
-class CpsElementVisitor extends analyzer.SimpleElementVisitor<ir.Node> {
-  final ElementConverter converter;
-  final AstNode node;
-
-  CpsElementVisitor(this.converter, this.node);
-
-  @override
-  ir.FunctionDefinition visitFunctionElement(analyzer.FunctionElement element) {
-    CpsGeneratingVisitor visitor = new CpsGeneratingVisitor(converter, element);
-    FunctionDeclaration functionDeclaration = node;
-    return visitor.handleFunctionDeclaration(
-        element, functionDeclaration.functionExpression.body);
-  }
-
-  @override
-  ir.FunctionDefinition visitMethodElement(analyzer.MethodElement element) {
-    CpsGeneratingVisitor visitor = new CpsGeneratingVisitor(converter, element);
-    MethodDeclaration methodDeclaration = node;
-    return visitor.handleFunctionDeclaration(element, methodDeclaration.body);
-  }
-
-  @override
-  ir.FieldDefinition visitTopLevelVariableElement(
-      analyzer.TopLevelVariableElement element) {
-    CpsGeneratingVisitor visitor = new CpsGeneratingVisitor(converter, element);
-    VariableDeclaration variableDeclaration = node;
-    return visitor.handleFieldDeclaration(element, variableDeclaration);
-  }
-
-  @override
-  ir.RootNode visitConstructorElement(analyzer.ConstructorElement element) {
-    CpsGeneratingVisitor visitor = new CpsGeneratingVisitor(converter, element);
-    if (!element.isFactory) {
-      ConstructorDeclaration constructorDeclaration = node;
-      FunctionBody body;
-      if (constructorDeclaration != null) {
-        body = constructorDeclaration.body;
-      } else {
-        assert(element.isSynthetic);
-      }
-      return visitor.handleConstructorDeclaration(element, body);
-    }
-    // TODO(johnniwinther): Support factory constructors.
-    return null;
-  }
-}
-
-/// Visitor that converts analyzer AST nodes into CPS ir nodes.
-class CpsGeneratingVisitor extends SemanticVisitor<ir.Node>
-    with IrBuilderMixin<AstNode> {
-  /// Promote the type of [irBuilder] to [DartIrBuilder].
-  /// The JS backend requires closure conversion, which we do not support yet.
-  DartIrBuilder get irBuilder => super.irBuilder;
-  final analyzer.Element element;
-  final ElementConverter converter;
-
-  CpsGeneratingVisitor(this.converter, this.element);
-
-  Source get currentSource => element.source;
-
-  analyzer.LibraryElement get currentLibrary => element.library;
-
-  ir.Node visit(AstNode node) => node.accept(this);
-
-  ir.ConstructorDefinition handleConstructorDeclaration(
-      analyzer.ConstructorElement constructor, FunctionBody body) {
-    dart2js.ConstructorElement element = converter.convertElement(constructor);
-    return withBuilder(
-        new DartIrBuilder(DART_CONSTANT_SYSTEM,
-                          element,
-                          // TODO(johnniwinther): Support closure variables.
-                          new Set<dart2js.Local>()),
-        () {
-      irBuilder.buildFunctionHeader(
-          constructor.parameters.map(converter.convertElement));
-      // Visit the body directly to avoid processing the signature as
-      // expressions.
-      // `build` is called (rather than `visit`) so that `body == null` is
-      // tolerated for synthesized constructors.
-      build(body);
-      return irBuilder.makeConstructorDefinition(const [], const []);
-    });
-  }
-
-  ir.FieldDefinition handleFieldDeclaration(
-      analyzer.PropertyInducingElement field, VariableDeclaration node) {
-    dart2js.FieldElement element = converter.convertElement(field);
-    return withBuilder(
-        new DartIrBuilder(DART_CONSTANT_SYSTEM,
-                          element,
-                          // TODO(johnniwinther): Support closure variables.
-                          new Set<dart2js.Local>()),
-        () {
-      irBuilder.buildFieldInitializerHeader();
-      ir.Primitive initializer = build(node.initializer);
-      return irBuilder.makeFieldDefinition(initializer);
-    });
-  }
-
-  ir.FunctionDefinition handleFunctionDeclaration(
-      analyzer.ExecutableElement function, FunctionBody body) {
-    dart2js.FunctionElement element = converter.convertElement(function);
-    return withBuilder(
-        new DartIrBuilder(DART_CONSTANT_SYSTEM,
-                          element,
-                          // TODO(johnniwinther): Support closure variables.
-                          new Set<dart2js.Local>()),
-        () {
-      irBuilder.buildFunctionHeader(
-          function.parameters.map(converter.convertElement));
-      // Visit the body directly to avoid processing the signature as
-      // expressions.
-      visit(body);
-      return irBuilder.makeFunctionDefinition(const []);
-    });
-  }
-
-  @override
-  ir.Primitive visitFunctionExpression(FunctionExpression node) {
-    return irBuilder.buildFunctionExpression(
-        handleFunctionDeclaration(node.element, node.body));
-  }
-
-  @override
-  ir.FunctionDefinition visitFunctionDeclaration(FunctionDeclaration node) {
-    return handleFunctionDeclaration(
-        node.element, node.functionExpression.body);
-  }
-
-  @override
-  visitFunctionDeclarationStatement(FunctionDeclarationStatement node) {
-    FunctionDeclaration functionDeclaration = node.functionDeclaration;
-    analyzer.FunctionElement function = functionDeclaration.element;
-    dart2js.FunctionElement element = converter.convertElement(function);
-    ir.FunctionDefinition definition = handleFunctionDeclaration(
-        function, functionDeclaration.functionExpression.body);
-    irBuilder.declareLocalFunction(element, definition);
-  }
-
-  List<ir.Primitive> visitArguments(ArgumentList argumentList) {
-    List<ir.Primitive> arguments = <ir.Primitive>[];
-    for (Expression argument in argumentList.arguments) {
-      ir.Primitive value = build(argument);
-      if (value == null) {
-        giveUp(argument,
-            'Unsupported argument: $argument (${argument.runtimeType}).');
-      }
-      arguments.add(value);
-    }
-    return arguments;
-  }
-
-  @override
-  ir.Node visitMethodInvocation(MethodInvocation node) {
-    // Overridden to avoid eager visits of the receiver and arguments.
-    return handleMethodInvocation(node);
-  }
-
-  @override
-  ir.Primitive visitDynamicInvocation(MethodInvocation node,
-                                      AccessSemantics semantics) {
-    // TODO(johnniwinther): Handle implicit `this`.
-    ir.Primitive receiver = build(semantics.target);
-    List<ir.Primitive> arguments = visitArguments(node.argumentList);
-    return irBuilder.buildDynamicInvocation(
-        receiver,
-        createSelectorFromMethodInvocation(
-            node.argumentList, node.methodName.name),
-        arguments);
-  }
-
-  @override
-  ir.Primitive visitStaticMethodInvocation(MethodInvocation node,
-                                           AccessSemantics semantics) {
-    analyzer.Element staticElement = semantics.element;
-    dart2js.Element element = converter.convertElement(staticElement);
-    List<ir.Primitive> arguments = visitArguments(node.argumentList);
-    return irBuilder.buildStaticFunctionInvocation(
-        element,
-        createCallStructureFromMethodInvocation(node.argumentList),
-        arguments);
-  }
-
-  @override
-  ir.Node visitLocalFunctionAccess(AstNode node, AccessSemantics semantics) {
-    return handleLocalAccess(node, semantics);
-  }
-
-  ir.Primitive handleLocalInvocation(MethodInvocation node,
-                                     AccessSemantics semantics) {
-    analyzer.Element staticElement = semantics.element;
-    dart2js.Element element = converter.convertElement(staticElement);
-    List<ir.Definition> arguments = visitArguments(node.argumentList);
-    CallStructure callStructure = createCallStructureFromMethodInvocation(
-        node.argumentList);
-    if (semantics.kind == AccessKind.LOCAL_FUNCTION) {
-      return irBuilder.buildLocalFunctionInvocation(
-          element, callStructure, arguments);
-    } else {
-      return irBuilder.buildLocalVariableInvocation(
-        element, callStructure, arguments);
-    }
-  }
-
-  @override
-  ir.Node visitLocalVariableInvocation(MethodInvocation node,
-                                       AccessSemantics semantics) {
-    return handleLocalInvocation(node, semantics);
-  }
-
-  @override
-  ir.Primitive visitLocalFunctionInvocation(MethodInvocation node,
-                                            AccessSemantics semantics) {
-    return handleLocalInvocation(node, semantics);
-  }
-
-  @override
-  ir.Primitive visitFunctionExpressionInvocation(
-      FunctionExpressionInvocation node) {
-    ir.Primitive target = build(node.function);
-    List<ir.Definition> arguments = visitArguments(node.argumentList);
-    return irBuilder.buildCallInvocation(
-        target,
-        createCallStructureFromMethodInvocation(node.argumentList),
-        arguments);
-  }
-
-  @override
-  ir.Primitive visitInstanceCreationExpression(
-      InstanceCreationExpression node) {
-    analyzer.Element staticElement = node.staticElement;
-    if (staticElement != null) {
-      dart2js.Element element = converter.convertElement(staticElement);
-      dart2js.DartType type = converter.convertType(node.staticType);
-      List<ir.Primitive> arguments = visitArguments(node.argumentList);
-      return irBuilder.buildConstructorInvocation(
-          element,
-          createCallStructureFromMethodInvocation(node.argumentList),
-          type,
-          arguments);
-    }
-    return giveUp(node, "Unresolved constructor invocation.");
-  }
-
-  @override
-  ir.Constant visitNullLiteral(NullLiteral node) {
-    return irBuilder.buildNullConstant();
-  }
-
-  @override
-  ir.Constant visitBooleanLiteral(BooleanLiteral node) {
-    return irBuilder.buildBooleanConstant(node.value);
-  }
-
-  @override
-  ir.Constant visitDoubleLiteral(DoubleLiteral node) {
-    return irBuilder.buildDoubleConstant(node.value);
-  }
-
-  @override
-  ir.Constant visitIntegerLiteral(IntegerLiteral node) {
-    return irBuilder.buildIntegerConstant(node.value);
-  }
-
-  @override
-  visitAdjacentStrings(AdjacentStrings node) {
-    String value = node.stringValue;
-    if (value != null) {
-      return irBuilder.buildStringConstant(value);
-    }
-    giveUp(node, "Non constant adjacent strings.");
-  }
-
-  @override
-  ir.Constant visitSimpleStringLiteral(SimpleStringLiteral node) {
-    return irBuilder.buildStringConstant(node.value);
-  }
-
-  @override
-  visitStringInterpolation(StringInterpolation node) {
-    giveUp(node, "String interpolation.");
-  }
-
-  @override
-  visitReturnStatement(ReturnStatement node) {
-    irBuilder.buildReturn(build(node.expression));
-  }
-
-  @override
-  ir.Node visitPropertyAccess(PropertyAccess node) {
-    // Overridden to avoid eager visits of the receiver.
-    return handlePropertyAccess(node);
-  }
-
-  @override
-  ir.Node visitLocalVariableAccess(AstNode node, AccessSemantics semantics) {
-    return handleLocalAccess(node, semantics);
-  }
-
-  @override
-  ir.Node visitParameterAccess(AstNode node, AccessSemantics semantics) {
-    return handleLocalAccess(node, semantics);
-  }
-
-  @override
-  visitVariableDeclaration(VariableDeclaration node) {
-    // TODO(johnniwinther): Handle constant local variables.
-    ir.Node initialValue = build(node.initializer);
-    irBuilder.declareLocalVariable(
-        converter.convertElement(node.element),
-        initialValue: initialValue);
-  }
-
-  dart2js.Element getLocal(AstNode node, AccessSemantics semantics) {
-    analyzer.Element element = semantics.element;
-    dart2js.Element target = converter.convertElement(element);
-    assert(invariant(node, target.isLocal, '$target expected to be local.'));
-    return target;
-  }
-
-  ir.Primitive handleLocalAccess(AstNode node, AccessSemantics semantics) {
-    dart2js.Element local = getLocal(node, semantics);
-    if (semantics.kind == AccessKind.LOCAL_FUNCTION) {
-      return irBuilder.buildLocalFunctionGet(local);
-    } else {
-      return irBuilder.buildLocalVariableGet(local);
-    }
-  }
-
-  ir.Primitive handleLocalAssignment(AssignmentExpression node,
-                                     AccessSemantics semantics) {
-    if (node.operator.lexeme != '=') {
-      return giveUp(node, 'Assignment operator: ${node.operator.lexeme}');
-    }
-    return irBuilder.buildLocalVariableSet(
-        getLocal(node, semantics),
-        build(node.rightHandSide));
-  }
-
-  @override
-  ir.Node visitAssignmentExpression(AssignmentExpression node) {
-    // Avoid eager visiting of left and right hand side.
-    return handleAssignmentExpression(node);
-  }
-
-  @override
-  ir.Node visitLocalVariableAssignment(AssignmentExpression node,
-                                       AccessSemantics semantics) {
-    return handleLocalAssignment(node, semantics);
-  }
-
-  @override
-  ir.Node visitParameterAssignment(AssignmentExpression node,
-                                   AccessSemantics semantics) {
-    return handleLocalAssignment(node, semantics);
-  }
-
-  @override
-  ir.Node visitStaticFieldAssignment(AssignmentExpression node,
-                                     AccessSemantics semantics) {
-    if (node.operator.lexeme != '=') {
-      return giveUp(node, 'Assignment operator: ${node.operator.lexeme}');
-    }
-    analyzer.Element element = semantics.element;
-    dart2js.Element target = converter.convertElement(element);
-    // TODO(johnniwinther): Selector information should be computed in the
-    // [TreeShaker] and shared with the [CpsGeneratingVisitor].
-    assert(invariant(node, target.isTopLevel || target.isStatic,
-                     '$target expected to be top-level or static.'));
-    return irBuilder.buildStaticFieldSet(target, build(node.rightHandSide));
-  }
-
-  @override
-  ir.Node visitDynamicAccess(AstNode node, AccessSemantics semantics) {
-    // TODO(johnniwinther): Handle implicit `this`.
-    ir.Primitive receiver = build(semantics.target);
-    return irBuilder.buildDynamicGet(receiver,
-        new Selector.getter(semantics.identifier.name,
-                            converter.convertElement(element.library)));
-  }
-
-  @override
-  ir.Node visitStaticFieldAccess(AstNode node, AccessSemantics semantics) {
-    analyzer.Element element = semantics.element;
-    dart2js.Element target = converter.convertElement(element);
-    // TODO(johnniwinther): Selector information should be computed in the
-    // [TreeShaker] and shared with the [CpsGeneratingVisitor].
-    assert(invariant(node, target.isTopLevel || target.isStatic,
-                     '$target expected to be top-level or static.'));
-    return irBuilder.buildStaticFieldLazyGet(target, null);
-  }
-
-  ir.Primitive handleBinaryExpression(BinaryExpression node,
-                                      String op) {
-    ir.Primitive left = build(node.leftOperand);
-    ir.Primitive right = build(node.rightOperand);
-    Selector selector = new Selector.binaryOperator(op);
-    return irBuilder.buildDynamicInvocation(
-        left, selector, <ir.Primitive>[right]);
-  }
-
-  ir.Node handleLazyOperator(BinaryExpression node, {bool isLazyOr: false}) {
-    return irBuilder.buildLogicalOperator(
-        build(node.leftOperand),
-        subbuild(node.rightOperand),
-        isLazyOr: isLazyOr);
-  }
-
-  @override
-  ir.Node visitBinaryExpression(BinaryExpression node) {
-    // TODO(johnniwinther,paulberry,brianwilkerson): The operator should be
-    // available through an enum.
-    String op = node.operator.lexeme;
-    switch (op) {
-    case '||':
-    case '&&':
-      return handleLazyOperator(node, isLazyOr: op == '||');
-    case '!=':
-      return irBuilder.buildNegation(handleBinaryExpression(node, '=='));
-    default:
-      return handleBinaryExpression(node, op);
-    }
-  }
-
-  @override
-  ir.Node visitConditionalExpression(ConditionalExpression node) {
-    return irBuilder.buildConditional(
-        build(node.condition),
-        subbuild(node.thenExpression),
-        subbuild(node.elseExpression));
-  }
-
-  @override
-  visitIfStatement(IfStatement node) {
-    irBuilder.buildIf(
-        build(node.condition),
-        subbuild(node.thenStatement),
-        subbuild(node.elseStatement));
-  }
-
-  @override
-  visitBlock(Block node) {
-    irBuilder.buildBlock(node.statements, build);
-  }
-
-  @override
-  ir.Node visitListLiteral(ListLiteral node) {
-    dart2js.InterfaceType type = converter.convertType(node.staticType);
-    // TODO(johnniwinther): Use `build` instead of `(e) => build(e)` when issue
-    // 18630 has been resolved.
-    Iterable<ir.Primitive> values = node.elements.map((e) => build(e));
-    return irBuilder.buildListLiteral(type, values);
-  }
-
-  @override
-  ir.Node visitMapLiteral(MapLiteral node) {
-    dart2js.InterfaceType type = converter.convertType(node.staticType);
-    return irBuilder.buildMapLiteral(
-        type,
-        node.entries.map((e) => e.key),
-        node.entries.map((e) => e.value),
-        build);
-  }
-
-  @override
-  visitForStatement(ForStatement node) {
-    // TODO(johnniwinther): Support `for` as a jump target.
-    List<dart2js.LocalElement> loopVariables = <dart2js.LocalElement>[];
-    SubbuildFunction buildInitializer;
-    if (node.variables != null) {
-      buildInitializer = subbuild(node.variables);
-      for (VariableDeclaration variable in node.variables.variables) {
-        loopVariables.add(converter.convertElement(variable.element));
-      }
-    } else {
-      buildInitializer = subbuild(node.initialization);
-    }
-    irBuilder.buildFor(buildInitializer: buildInitializer,
-                       buildCondition: subbuild(node.condition),
-                       buildBody: subbuild(node.body),
-                       buildUpdate: subbuildSequence(node.updaters),
-                       loopVariables: loopVariables);
-  }
-
-  @override
-  visitWhileStatement(WhileStatement node) {
-    // TODO(johnniwinther): Support `while` as a jump target.
-    irBuilder.buildWhile(buildCondition: subbuild(node.condition),
-                         buildBody: subbuild(node.body));
-  }
-
-  @override
-  visitDeclaredIdentifier(DeclaredIdentifier node) {
-    giveUp(node, "Unexpected node: DeclaredIdentifier");
-  }
-
-  @override
-  visitForEachStatement(ForEachStatement node) {
-    SubbuildFunction buildVariableDeclaration;
-    dart2js.Element variableElement;
-    Selector variableSelector;
-    if (node.identifier != null) {
-       AccessSemantics accessSemantics =
-           node.identifier.accept(ACCESS_SEMANTICS_VISITOR);
-       if (accessSemantics.kind == AccessKind.DYNAMIC) {
-         variableSelector = new Selector.setter(
-             node.identifier.name, converter.convertElement(currentLibrary));
-       } else if (accessSemantics.element != null) {
-         variableElement = converter.convertElement(accessSemantics.element);
-         variableSelector = new Selector.setter(
-             variableElement.name,
-             converter.convertElement(accessSemantics.element.library));
-       } else {
-         giveUp(node, 'For-in of unresolved variable: $accessSemantics');
-       }
-    } else {
-      assert(invariant(
-          node, node.loopVariable != null, "Loop variable expected"));
-      variableElement = converter.convertElement(node.loopVariable.element);
-      buildVariableDeclaration = (IrBuilder builder) {
-        builder.declareLocalVariable(variableElement);
-      };
-    }
-    // TODO(johnniwinther): Support `for-in` as a jump target.
-    irBuilder.buildForIn(
-        buildExpression: subbuild(node.iterable),
-        buildVariableDeclaration: buildVariableDeclaration,
-        variableElement: variableElement,
-        variableSelector: variableSelector,
-        buildBody: subbuild(node.body));
-  }
-
-  @override
-  ir.Primitive visitIsExpression(IsExpression node) {
-    return irBuilder.buildTypeOperator(
-        visit(node.expression),
-        converter.convertType(node.type.type),
-        isTypeTest: true,
-        isNotCheck: node.notOperator != null);
-  }
-
-  @override
-  ir.Primitive visitAsExpression(AsExpression node) {
-    return irBuilder.buildTypeOperator(
-        visit(node.expression),
-        converter.convertType(node.type.type),
-        isTypeTest: false);
-  }
-
-  @override
-  visitTryStatement(TryStatement node) {
-    List<CatchClauseInfo> catchClauseInfos = <CatchClauseInfo>[];
-    for (CatchClause catchClause in node.catchClauses) {
-      catchClauseInfos.add(new CatchClauseInfo(
-          exceptionVariable: converter.convertElement(
-              catchClause.exceptionParameter.staticElement),
-          buildCatchBlock: subbuild(catchClause.body)));
-    }
-    irBuilder.buildTry(
-        tryStatementInfo: new TryStatementInfo(),
-        buildTryBlock: subbuild(node.body),
-        catchClauseInfos: catchClauseInfos);
-  }
-}
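
Aside: one detail of the generator deleted above that is easy to miss is that `visitBinaryExpression` does not treat operators uniformly. Logical `||`/`&&` are built as lazy operators, `!=` is lowered to a negated `==`, and everything else becomes a dynamic invocation of the operator selector. A tiny sketch of that dispatch, using strings as a toy IR instead of the real cps_ir nodes:

/// Lowers `left op right` into a toy string IR, mirroring the dispatch in
/// visitBinaryExpression above (names and IR shape are illustrative only).
String lowerBinary(String left, String op, String right) {
  switch (op) {
    case '||':
    case '&&':
      return 'lazy($op, $left, $right)';
    case '!=':
      return 'not(${lowerBinary(left, '==', right)})';
    default:
      return 'invokeDynamic($left, operator$op, [$right])';
  }
}

void main() {
  print(lowerBinary('a', '&&', 'b')); // lazy(&&, a, b)
  print(lowerBinary('a', '!=', 'b')); // not(invokeDynamic(a, operator==, [b]))
  print(lowerBinary('a', '+', 'b'));  // invokeDynamic(a, operator+, [b])
}
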
diff --git a/pkg/analyzer2dart/lib/src/dart_backend.dart b/pkg/analyzer2dart/lib/src/dart_backend.dart
deleted file mode 100644
index 317fd2c..0000000
--- a/pkg/analyzer2dart/lib/src/dart_backend.dart
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.dart_backend;
-
-import 'package:compiler/src/constant_system_dart.dart';
-import 'package:compiler/src/constants/constant_system.dart';
-import 'package:compiler/src/dart_backend/dart_backend.dart';
-import 'package:compiler/src/dart2jslib.dart';
-import 'package:compiler/src/dart_types.dart';
-import 'package:compiler/src/elements/elements.dart';
-
-import 'driver.dart';
-import 'converted_world.dart';
-
-void compileToDart(Driver driver, ConvertedWorld convertedWorld) {
-  DiagnosticListener listener = new Listener();
-  DartOutputter outputter = new DartOutputter(listener, driver.outputProvider);
-  ElementAstCreationContext context = new _ElementAstCreationContext(
-      listener, convertedWorld.dartTypes);
-  outputter.assembleProgram(
-    libraries: convertedWorld.libraries,
-    instantiatedClasses: convertedWorld.instantiatedClasses,
-    resolvedElements: convertedWorld.resolvedElements,
-    mainFunction: convertedWorld.mainFunction,
-    computeElementAst: (Element element) {
-      return DartBackend.createElementAst(
-          context,
-          element,
-          convertedWorld.getIr(element));
-    },
-    shouldOutput: (Element element) => !element.isSynthesized,
-    isSafeToRemoveTypeDeclarations: (_) => false);
-}
-
-class _ElementAstCreationContext implements ElementAstCreationContext {
-  final Listener listener;
-
-  @override
-  final DartTypes dartTypes;
-
-  _ElementAstCreationContext(this.listener, this.dartTypes);
-
-  @override
-  ConstantSystem get constantSystem => DART_CONSTANT_SYSTEM;
-
-  @override
-  InternalErrorFunction get internalError => listener.internalError;
-
-  @override
-  void traceCompilation(String name) {
-    // Do nothing.
-  }
-
-  @override
-  void traceGraph(String title, irObject) {
-    // Do nothing.
-  }
-}
-
-class Listener implements DiagnosticListener {
-
-  @override
-  void internalError(Spannable spannable, message) {
-    throw new UnimplementedError(message);
-  }
-
-  @override
-  void log(message) {
-    // TODO: implement log
-  }
-
-  @override
-  void reportError(Spannable node,
-                   MessageKind errorCode,
-                   [Map arguments = const {}]) {
-    // TODO: implement reportError
-  }
-
-  @override
-  void reportHint(Spannable node,
-                  MessageKind errorCode,
-                  [Map arguments = const {}]) {
-    // TODO: implement reportHint
-  }
-
-  @override
-  void reportInfo(Spannable node,
-                  MessageKind errorCode,
-                  [Map arguments = const {}]) {
-    // TODO: implement reportInfo
-  }
-
-  @override
-  void reportWarning(Spannable node,
-                     MessageKind errorCode,
-                     [Map arguments = const {}]) {
-    // TODO: implement reportWarning
-  }
-
-  @override
-  spanFromSpannable(Spannable node) {
-    // TODO: implement spanFromSpannable
-  }
-
-  @override
-  withCurrentElement(element, f()) {
-    // TODO: implement withCurrentElement
-  }
-}
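
Aside: the `Listener` deleted above is deliberately lopsided: only `internalError` does anything (it throws), while all the reporting hooks are no-ops. A minimal sketch of that shape, with a hypothetical `Reporter` interface standing in for dart2js's `DiagnosticListener`:

/// Hypothetical reporter interface (illustrative, not the dart2js API).
abstract class Reporter {
  void internalError(String message);
  void reportWarning(String message);
}

/// Only internal errors are fatal; everything else is silently dropped.
class SilentReporter implements Reporter {
  @override
  void internalError(String message) => throw new UnimplementedError(message);

  @override
  void reportWarning(String message) {
    // Intentionally a no-op, mirroring the reporting stubs above.
  }
}

void main() {
  final Reporter reporter = new SilentReporter();
  reporter.reportWarning('unused import'); // silently ignored
  // reporter.internalError('boom');       // would throw UnimplementedError
}
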
diff --git a/pkg/analyzer2dart/lib/src/driver.dart b/pkg/analyzer2dart/lib/src/driver.dart
deleted file mode 100644
index 9707356..0000000
--- a/pkg/analyzer2dart/lib/src/driver.dart
+++ /dev/null
@@ -1,77 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.driver;
-
-import 'package:analyzer/file_system/file_system.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/engine.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source_io.dart';
-
-import 'package:compiler/compiler.dart';
-
-import 'closed_world.dart';
-import 'tree_shaker.dart';
-
-/**
- * Top level driver for Analyzer2Dart.
- */
-class Driver {
-  final ResourceProvider resourceProvider;
-  final AnalysisContext context;
-  final CompilerOutputProvider outputProvider;
-
-  Driver(this.resourceProvider, DartSdk sdk, this.outputProvider)
-      : context = AnalysisEngine.instance.createAnalysisContext() {
-    // Set up the source factory.
-    // TODO(paulberry): do we want to use ExplicitPackageUriResolver?
-    List<UriResolver> uriResolvers = [
-        new FileUriResolver(),
-        new DartUriResolver(sdk) /* ,
-        new PackageUriResolver(packagesDirectories) */
-    ];
-    context.sourceFactory = new SourceFactory(uriResolvers);
-  }
-
-  /**
-   * Compute the closed world that is reachable from an entry point.
-   */
-  ClosedWorld computeWorld(FunctionElement entryPointElement) {
-    InternalAnalysisContext analysisContext = context;
-    TreeShaker treeShaker =
-        new TreeShaker(analysisContext.typeProvider, entryPointElement);
-    return treeShaker.shake();
-  }
-
-  /**
-   * Given a source, resolve it and return its entry point.
-   */
-  FunctionElement resolveEntryPoint(Source source) {
-    // Get the library element associated with the source.
-    LibraryElement libraryElement = context.computeLibraryElement(source);
-
-    // Get the resolved AST for main
-    FunctionElement entryPointElement = libraryElement.entryPoint;
-    if (entryPointElement == null) {
-      throw new Exception('No main()!');
-    }
-    return entryPointElement;
-  }
-
-  /**
-   * Add the given file as the root of analysis, and return the corresponding
-   * source.
-   */
-  Source setRoot(String path) {
-    File file = resourceProvider.getResource(path);
-    Source source = file.createSource();
-    // add the Source
-    ChangeSet changeSet = new ChangeSet();
-    changeSet.addedSource(source);
-    context.applyChanges(changeSet);
-    // return the Source
-    return source;
-  }
-}
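
Aside: the `Driver` deleted above is used in three steps: register a root source, resolve its entry point (failing if there is no `main`), and compute the reachable world from that entry point. A self-contained sketch of that flow with hypothetical stand-ins, not the real analyzer types:

class MiniSource {
  final String path;
  MiniSource(this.path);
}

class MiniEntryPoint {
  final String name;
  MiniEntryPoint(this.name);
}

MiniSource setRoot(String path) => new MiniSource(path);

MiniEntryPoint resolveEntryPoint(MiniSource source) {
  // A real driver resolves the library and throws if it has no `main`.
  return new MiniEntryPoint('main');
}

List<String> computeWorld(MiniEntryPoint entry) {
  // A real driver runs the tree shaker from the entry point.
  return <String>[entry.name, 'greet'];
}

void main() {
  final List<String> world =
      computeWorld(resolveEntryPoint(setRoot('app/main.dart')));
  print(world); // [main, greet]
}
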
diff --git a/pkg/analyzer2dart/lib/src/element_converter.dart b/pkg/analyzer2dart/lib/src/element_converter.dart
deleted file mode 100644
index 666124e..0000000
--- a/pkg/analyzer2dart/lib/src/element_converter.dart
+++ /dev/null
@@ -1,171 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Conversion of elements between the analyzer element model and the dart2js
-/// element model.
-
-library analyzer2dart.element_converter;
-
-import 'package:compiler/src/elements/elements.dart' as dart2js;
-import 'package:compiler/src/util/util.dart' as util;
-import 'package:compiler/src/dart_types.dart' as dart2js;
-import 'package:analyzer/src/generated/element.dart' as analyzer;
-import 'package:analyzer/src/generated/utilities_dart.dart';
-
-part 'modely.dart';
-
-class ElementConverter {
-  /// Map from analyzer elements to their equivalent dart2js elements.
-  Map<analyzer.Element, dart2js.Element> conversionMap =
-      <analyzer.Element, dart2js.Element>{};
-
-  /// Map from dart2js elements to their equivalent analyzer elements.
-  Map<dart2js.Element, analyzer.Element> inversionMap =
-      <dart2js.Element, analyzer.Element>{};
-
-  ElementConverterVisitor visitor;
-
-  ElementConverter() {
-    visitor = new ElementConverterVisitor(this);
-  }
-
-  dart2js.Element convertElement(analyzer.Element input) {
-    return conversionMap.putIfAbsent(input, () {
-      dart2js.Element output = convertElementInternal(input);
-      inversionMap[output] = input;
-      return output;
-    });
-  }
-
-  dart2js.FunctionType convertFunctionType(analyzer.FunctionType input) {
-    dart2js.DartType returnType = convertType(input.returnType);
-    List<dart2js.DartType> requiredParameterTypes =
-        input.normalParameterTypes.map(convertType).toList();
-    List<dart2js.DartType> positionalParameterTypes =
-            input.optionalParameterTypes.map(convertType).toList();
-    List<String> namedParameters =
-        input.namedParameterTypes.keys.toList()..sort();
-    List<dart2js.DartType> namedParameterTypes =
-        namedParameters.map((String name) {
-      return convertType(input.namedParameterTypes[name]);
-    }).toList();
-    return new dart2js.FunctionType.synthesized(
-        returnType,
-        requiredParameterTypes,
-        positionalParameterTypes,
-        namedParameters,
-        namedParameterTypes);
-  }
-
-  dart2js.DartType convertType(analyzer.DartType input) {
-    if (input.isVoid) {
-      return const dart2js.VoidType();
-    } else if (input.isDynamic) {
-      return const dart2js.DynamicType();
-    } else if (input is analyzer.TypeParameterType) {
-      return new dart2js.TypeVariableType(convertElement(input.element));
-    } else if (input is analyzer.InterfaceType) {
-      List<dart2js.DartType> typeArguments =
-          input.typeArguments.map(convertType).toList();
-      return new dart2js.InterfaceType(
-          convertElement(input.element), typeArguments);
-    } else if (input is analyzer.FunctionType) {
-      if (input.element is analyzer.FunctionTypeAliasElement) {
-        List<dart2js.DartType> typeArguments =
-            input.typeArguments.map(convertType).toList();
-        return new dart2js.ResolvedTypedefType(
-            convertElement(input.element),
-            typeArguments,
-            convertFunctionType(input));
-      } else {
-        assert(input.typeArguments.isEmpty);
-        return convertFunctionType(input);
-      }
-    }
-    throw new UnsupportedError(
-        "Conversion of $input (${input.runtimeType}) is not supported.");
-  }
-
-  analyzer.Element invertElement(dart2js.Element input) {
-    return inversionMap[input];
-  }
-
-  dart2js.Element convertElementInternal(analyzer.Element input) {
-    dart2js.Element output = input.accept(visitor);
-    if (output != null) return output;
-    throw new UnsupportedError(
-        "Conversion of $input (${input.runtimeType}) is not supported.");
-  }
-}
-
-/// Visitor that converts analyzer elements to dart2js elements.
-class ElementConverterVisitor
-    extends analyzer.SimpleElementVisitor<dart2js.Element> {
-  final ElementConverter converter;
-
-  ElementConverterVisitor(this.converter);
-
-  @override
-  dart2js.LibraryElement visitLibraryElement(analyzer.LibraryElement input) {
-    return new LibraryElementY(converter, input);
-  }
-
-  @override
-  dart2js.FunctionElement visitFunctionElement(analyzer.FunctionElement input) {
-    if (input.isStatic) {
-      return new TopLevelFunctionElementY(converter, input);
-    } else {
-      return new LocalFunctionElementY(converter, input);
-    }
-  }
-
-  @override
-  dart2js.ParameterElement visitParameterElement(
-      analyzer.ParameterElement input) {
-    return new ParameterElementY(converter, input);
-  }
-
-  @override
-  dart2js.ClassElement visitClassElement(analyzer.ClassElement input) {
-    return new ClassElementY(converter, input);
-  }
-
-  @override
-  dart2js.TypedefElement visitFunctionTypeAliasElement(
-      analyzer.FunctionTypeAliasElement input) {
-    return new TypedefElementY(converter, input);
-  }
-
-  @override
-  dart2js.FieldElement visitTopLevelVariableElement(
-      analyzer.TopLevelVariableElement input) {
-    return new TopLevelVariableElementY(converter, input);
-  }
-
-  @override
-  dart2js.Element visitPropertyAccessorElement(
-      analyzer.PropertyAccessorElement input) {
-    if (input.isSynthetic) {
-      return input.variable.accept(this);
-    }
-    return null;
-  }
-
-  @override
-  dart2js.Element visitLocalVariableElement(
-      analyzer.LocalVariableElement input) {
-    return new LocalVariableElementY(converter, input);
-  }
-
-  @override
-  dart2js.ConstructorElement visitConstructorElement(
-      analyzer.ConstructorElement input) {
-    return new ConstructorElementY(converter, input);
-  }
-
-  @override
-  dart2js.MethodElement visitMethodElement(analyzer.MethodElement input) {
-    return new InstanceMethodElementY(converter, input);
-  }
-}
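
Aside: the core of the `ElementConverter` deleted above is a memoized, bidirectional mapping: `convertElement` caches results via `putIfAbsent` and records the inverse as it goes. A minimal sketch of that caching scheme, with hypothetical `AElem`/`BElem` element stand-ins:

import 'dart:collection';

class AElem {
  final String name;
  AElem(this.name);
}

class BElem {
  final String name;
  BElem(this.name);
}

class MiniConverter {
  final Map<AElem, BElem> conversionMap = new HashMap<AElem, BElem>();
  final Map<BElem, AElem> inversionMap = new HashMap<BElem, AElem>();

  /// Converts [input], reusing and recording the result on first conversion.
  BElem convert(AElem input) {
    return conversionMap.putIfAbsent(input, () {
      final BElem output = new BElem(input.name);
      inversionMap[output] = input;
      return output;
    });
  }

  AElem? invert(BElem output) => inversionMap[output];
}

void main() {
  final MiniConverter converter = new MiniConverter();
  final AElem a = new AElem('foo');
  final BElem b1 = converter.convert(a);
  final BElem b2 = converter.convert(a);
  print(identical(b1, b2));           // true: conversion is cached
  print(converter.invert(b1)?.name);  // foo
}
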
diff --git a/pkg/analyzer2dart/lib/src/identifier_semantics.dart b/pkg/analyzer2dart/lib/src/identifier_semantics.dart
deleted file mode 100644
index a61cfa0..0000000
--- a/pkg/analyzer2dart/lib/src/identifier_semantics.dart
+++ /dev/null
@@ -1,516 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/**
- * Code for classifying the semantics of identifiers appearing in a Dart file.
- */
-library analyzer2dart.identifierSemantics;
-
-import 'package:analyzer/analyzer.dart';
-import 'package:analyzer/src/generated/element.dart';
-
-// TODO(johnniwinther,paulberry): This should be a constant.
-final AccessSemanticsVisitor ACCESS_SEMANTICS_VISITOR =
-    new AccessSemanticsVisitor();
-
-/**
- * Enum representing the different kinds of destinations which a property
- * access or method or function invocation might refer to.
- */
-class AccessKind {
-  /**
-   * The destination of the access is an instance method, property, or field
-   * of a class, and thus must be determined dynamically.
-   */
-  static const AccessKind DYNAMIC = const AccessKind._('DYNAMIC');
-
-  /**
-   * The destination of the access is a function that is defined locally within
-   * an enclosing function or method.
-   */
-  static const AccessKind LOCAL_FUNCTION = const AccessKind._('LOCAL_FUNCTION');
-
-  /**
-   * The destination of the access is a variable that is defined locally within
-   * an enclosing function or method.
-   */
-  static const AccessKind LOCAL_VARIABLE = const AccessKind._('LOCAL_VARIABLE');
-
-  /**
-   * The destination of the access is a variable that is defined as a parameter
-   * to an enclosing function or method.
-   */
-  static const AccessKind PARAMETER = const AccessKind._('PARAMETER');
-
-  /**
-   * The destination of the access is a field that is defined statically within
-   * a class, or a top level variable within a library.
-   */
-  static const AccessKind STATIC_FIELD = const AccessKind._('STATIC_FIELD');
-
-  /**
-   * The destination of the access is a method that is defined statically
-   * within a class, or at top level within a library.
-   */
-  static const AccessKind STATIC_METHOD = const AccessKind._('STATIC_METHOD');
-
-  /**
-   * The destination of the access is a property getter/setter that is defined
-   * statically within a class, or at top level within a library.
-   */
-  static const AccessKind STATIC_PROPERTY =
-      const AccessKind._('STATIC_PROPERTY');
-
-  /**
-   * The destination of the access is a toplevel class, function typedef, mixin
-   * application, or the built-in type "dynamic".
-   */
-  static const AccessKind TOPLEVEL_TYPE = const AccessKind._('TOPLEVEL_TYPE');
-
-  /**
-   * The destination of the access is a type parameter of the enclosing class.
-   */
-  static const AccessKind TYPE_PARAMETER = const AccessKind._('TYPE_PARAMETER');
-
-  final String name;
-
-  const AccessKind._(this.name);
-
-  String toString() => name;
-}
-
-/**
- * Data structure used to classify the semantics of a property access or method
- * or function invocation.
- */
-// TODO(paulberry,johnniwinther): Support index operations in AccessSemantics.
-class AccessSemantics {
-  /**
-   * The kind of access.
-   */
-  final AccessKind kind;
-
-  /**
-   * The identifier being used to access the property, method, or function.
-   */
-  final SimpleIdentifier identifier;
-
-  /**
-   * The element being accessed, if statically known.  This will be null if
-   * [kind] is DYNAMIC or if the element is undefined (e.g. an attempt to
-   * access a non-existent static method in a class).
-   */
-  final Element element;
-
-  /**
-   * The class containing the element being accessed, if this is a static
-   * reference to an element in a class.  This will be null if [kind] is
-   * DYNAMIC, LOCAL_FUNCTION, LOCAL_VARIABLE, PARAMETER, TOPLEVEL_TYPE, or
-   * TYPE_PARAMETER, or if the element being accessed is defined at toplevel
-   * within a library.
-   *
-   * Note: it is possible for [classElement] to be non-null and for [element]
-   * to be null; for example this occurs if the element being accessed is a
-   * non-existent static method or field inside an existing class.
-   */
-  final ClassElement classElement;
-
-  // TODO(paulberry): would it also be useful to store the libraryElement?
-
-  /**
-   * When [kind] is DYNAMIC, the expression whose runtime type determines the
-   * class in which [identifier] should be looked up.  Null if the expression
-   * is implicit "this".
-   *
-   * When [kind] is not DYNAMIC, this field is always null.
-   */
-  final Expression target;
-
-  /**
-   * True if this is an invocation of a method, or a call on a property.
-   */
-  final bool isInvoke;
-
-  AccessSemantics.dynamic(this.identifier, this.target, {this.isInvoke: false})
-      : kind = AccessKind.DYNAMIC,
-        element = null,
-        classElement = null;
-
-  AccessSemantics.localFunction(this.identifier, this.element, {this.isInvoke:
-      false})
-      : kind = AccessKind.LOCAL_FUNCTION,
-        classElement = null,
-        target = null;
-
-  AccessSemantics.localVariable(this.identifier, this.element, {this.isInvoke:
-      false})
-      : kind = AccessKind.LOCAL_VARIABLE,
-        classElement = null,
-        target = null;
-
-  AccessSemantics.parameter(this.identifier, this.element, {this.isInvoke:
-      false})
-      : kind = AccessKind.PARAMETER,
-        classElement = null,
-        target = null;
-
-  AccessSemantics.staticField(this.identifier, this.element, this.classElement,
-      {this.isInvoke: false})
-      : kind = AccessKind.STATIC_FIELD,
-        target = null;
-
-  AccessSemantics.staticMethod(this.identifier, this.element, this.classElement,
-      {this.isInvoke: false})
-      : kind = AccessKind.STATIC_METHOD,
-        target = null;
-
-  AccessSemantics.staticProperty(this.identifier, this.element,
-      this.classElement, {this.isInvoke: false})
-      : kind = AccessKind.STATIC_PROPERTY,
-        target = null;
-
-  AccessSemantics.toplevelType(this.identifier, this.element, {this.isInvoke:
-      false})
-      : kind = AccessKind.TOPLEVEL_TYPE,
-        classElement = null,
-        target = null;
-
-  AccessSemantics.typeParameter(this.identifier, this.element, {this.isInvoke:
-      false})
-      : kind = AccessKind.TYPE_PARAMETER,
-        classElement = null,
-        target = null;
-
-  /**
-   * True if this is a read access to a property, or a method tear-off.  Note
-   * that both [isRead] and [isWrite] will be true in the case of a
-   * read-modify-write operation (e.g. "+=").
-   */
-  bool get isRead => !isInvoke && identifier.inGetterContext();
-
-  /**
-   * True if this is a write access to a property, or an (erroneous) attempt to
-   * write to a method.  Note that both [isRead] and [isWrite] will be true in
-   * the case of a read-modify-write operation (e.g. "+=").
-   */
-  bool get isWrite => identifier.inSetterContext();
-
-  String toString() {
-    StringBuffer sb = new StringBuffer();
-    sb.write('AccessSemantics[');
-    sb.write('kind=$kind,');
-    if (isRead && isWrite) {
-      assert(!isInvoke);
-      sb.write('read/write,');
-    } else if (isRead) {
-      sb.write('read,');
-    } else if (isWrite) {
-      sb.write('write,');
-    } else if (isInvoke) {
-      sb.write('call,');
-    }
-    if (element != null) {
-      sb.write('element=');
-      if (classElement != null) {
-        sb.write('${classElement.name}.');
-      }
-      sb.write('${element}');
-    } else {
-      if (target == null) {
-        sb.write('target=this.$identifier');
-      } else {
-        sb.write('target=$target.$identifier');
-      }
-    }
-    sb.write(']');
-    return sb.toString();
-  }
-}
-
-// TODO(johnniwinther,paulberry): This should extend a non-recursive visitor.
-class AccessSemanticsVisitor extends RecursiveAstVisitor<AccessSemantics> {
-  /**
-   * Return the semantics for [node].
-   */
-  @override
-  AccessSemantics visitMethodInvocation(MethodInvocation node) {
-    Expression target = node.realTarget;
-    Element staticElement = node.methodName.staticElement;
-    if (target == null) {
-      if (staticElement is FunctionElement) {
-        if (staticElement.enclosingElement is CompilationUnitElement) {
-          return new AccessSemantics.staticMethod(
-              node.methodName,
-              staticElement,
-              null,
-              isInvoke: true);
-        } else {
-          return new AccessSemantics.localFunction(
-              node.methodName,
-              staticElement,
-              isInvoke: true);
-        }
-      } else if (staticElement is MethodElement && staticElement.isStatic) {
-        return new AccessSemantics.staticMethod(
-            node.methodName,
-            staticElement,
-            staticElement.enclosingElement,
-            isInvoke: true);
-      } else if (staticElement is PropertyAccessorElement) {
-        if (staticElement.isSynthetic) {
-          if (staticElement.enclosingElement is CompilationUnitElement) {
-            return new AccessSemantics.staticField(
-                node.methodName,
-                staticElement.variable,
-                null,
-                isInvoke: true);
-          } else if (staticElement.isStatic) {
-            return new AccessSemantics.staticField(
-                node.methodName,
-                staticElement.variable,
-                staticElement.enclosingElement,
-                isInvoke: true);
-          }
-        } else {
-          if (staticElement.enclosingElement is CompilationUnitElement) {
-            return new AccessSemantics.staticProperty(
-                node.methodName,
-                staticElement,
-                null,
-                isInvoke: true);
-          } else if (staticElement.isStatic) {
-            return new AccessSemantics.staticProperty(
-                node.methodName,
-                staticElement,
-                staticElement.enclosingElement,
-                isInvoke: true);
-          }
-        }
-      } else if (staticElement is LocalVariableElement) {
-        return new AccessSemantics.localVariable(
-            node.methodName,
-            staticElement,
-            isInvoke: true);
-      } else if (staticElement is ParameterElement) {
-        return new AccessSemantics.parameter(
-            node.methodName,
-            staticElement,
-            isInvoke: true);
-      } else if (staticElement is TypeParameterElement) {
-        return new AccessSemantics.typeParameter(
-            node.methodName,
-            staticElement,
-            isInvoke: true);
-      } else if (staticElement is ClassElement ||
-          staticElement is FunctionTypeAliasElement ||
-          staticElement is DynamicElementImpl) {
-        return new AccessSemantics.toplevelType(
-            node.methodName,
-            staticElement,
-            isInvoke: true);
-      }
-    } else if (target is Identifier) {
-      Element targetStaticElement = target.staticElement;
-      if (targetStaticElement is PrefixElement) {
-        if (staticElement == null) {
-          return new AccessSemantics.dynamic(
-              node.methodName,
-              null,
-              isInvoke: true);
-        } else if (staticElement is PropertyAccessorElement) {
-          if (staticElement.isSynthetic) {
-            return new AccessSemantics.staticField(
-                node.methodName,
-                staticElement.variable,
-                null,
-                isInvoke: true);
-          } else {
-            return new AccessSemantics.staticProperty(
-                node.methodName,
-                staticElement,
-                null,
-                isInvoke: true);
-          }
-        } else if (staticElement is TypeParameterElement) {
-          return new AccessSemantics.typeParameter(
-              node.methodName,
-              staticElement,
-              isInvoke: true);
-        } else if (staticElement is ClassElement ||
-            staticElement is FunctionTypeAliasElement) {
-          return new AccessSemantics.toplevelType(
-              node.methodName,
-              staticElement,
-              isInvoke: true);
-        } else {
-          return new AccessSemantics.staticMethod(
-              node.methodName,
-              staticElement,
-              null,
-              isInvoke: true);
-        }
-      } else if (targetStaticElement is ClassElement) {
-        if (staticElement is PropertyAccessorElement) {
-          if (staticElement.isSynthetic) {
-            return new AccessSemantics.staticField(
-                node.methodName,
-                staticElement.variable,
-                targetStaticElement,
-                isInvoke: true);
-          } else {
-            return new AccessSemantics.staticProperty(
-                node.methodName,
-                staticElement,
-                targetStaticElement,
-                isInvoke: true);
-          }
-        } else {
-          return new AccessSemantics.staticMethod(
-              node.methodName,
-              staticElement,
-              targetStaticElement,
-              isInvoke: true);
-        }
-      }
-    }
-    return new AccessSemantics.dynamic(node.methodName, target, isInvoke: true);
-  }
-
-  /**
-   * Return the access semantics for [node].
-   */
-  @override
-  AccessSemantics visitPrefixedIdentifier(PrefixedIdentifier node) {
-    return _classifyPrefixed(node.prefix, node.identifier);
-  }
-
-  /**
-   * Return the access semantics for [node].
-   */
-  @override
-  AccessSemantics visitPropertyAccess(PropertyAccess node) {
-    if (node.target is Identifier) {
-      return _classifyPrefixed(node.target, node.propertyName);
-    } else {
-      return new AccessSemantics.dynamic(node.propertyName, node.realTarget);
-    }
-  }
-
-  /**
-   * Return the access semantics for [node].
-   *
-   * Note: if [node] is the right hand side of a [PropertyAccess] or
-   * [PrefixedIdentifier], or the method name of a [MethodInvocation], the return
-   * value is null, since the semantics are determined by the parent.  In
-   * practice these cases should never arise because the parent will visit the
-   * parent node before visiting this one.
-   */
-  @override
-  AccessSemantics visitSimpleIdentifier(SimpleIdentifier node) {
-    AstNode parent = node.parent;
-    if (node.inDeclarationContext()) {
-      // This identifier is a declaration, not a use.
-      return null;
-    }
-    if (parent is TypeName) {
-      // TODO(paulberry): do we need to handle this case?
-      return null;
-    }
-    if ((parent is PropertyAccess && parent.propertyName == node) ||
-        (parent is PrefixedIdentifier && parent.identifier == node) ||
-        (parent is MethodInvocation && parent.methodName == node)) {
-      // The access semantics are determined by the parent.
-      return null;
-    }
-    // TODO(paulberry): handle PrefixElement.
-    Element staticElement = node.staticElement;
-    if (staticElement is PropertyAccessorElement) {
-      if (staticElement.isSynthetic) {
-        if (staticElement.enclosingElement is CompilationUnitElement) {
-          return new AccessSemantics.staticField(
-              node,
-              staticElement.variable,
-              null);
-        } else if (staticElement.isStatic) {
-          return new AccessSemantics.staticField(
-              node,
-              staticElement.variable,
-              staticElement.enclosingElement);
-        }
-      } else {
-        if (staticElement.enclosingElement is CompilationUnitElement) {
-          return new AccessSemantics.staticProperty(node, staticElement, null);
-        } else if (staticElement.isStatic) {
-          return new AccessSemantics.staticProperty(
-              node,
-              staticElement,
-              staticElement.enclosingElement);
-        }
-      }
-    } else if (staticElement is LocalVariableElement) {
-      return new AccessSemantics.localVariable(node, staticElement);
-    } else if (staticElement is ParameterElement) {
-      return new AccessSemantics.parameter(node, staticElement);
-    } else if (staticElement is FunctionElement) {
-      if (staticElement.enclosingElement is CompilationUnitElement) {
-        return new AccessSemantics.staticMethod(node, staticElement, null);
-      } else {
-        return new AccessSemantics.localFunction(node, staticElement);
-      }
-    } else if (staticElement is MethodElement && staticElement.isStatic) {
-      return new AccessSemantics.staticMethod(
-          node,
-          staticElement,
-          staticElement.enclosingElement);
-    } else if (staticElement is TypeParameterElement) {
-      return new AccessSemantics.typeParameter(node, staticElement);
-    } else if (staticElement is ClassElement ||
-        staticElement is FunctionTypeAliasElement ||
-        staticElement is DynamicElementImpl) {
-      return new AccessSemantics.toplevelType(node, staticElement);
-    }
-    return new AccessSemantics.dynamic(node, null);
-  }
-
-  /**
-   * Helper function for classifying an expression of type
-   * Identifier.SimpleIdentifier.
-   */
-  AccessSemantics _classifyPrefixed(Identifier lhs, SimpleIdentifier rhs) {
-    Element lhsElement = lhs.staticElement;
-    Element rhsElement = rhs.staticElement;
-    if (lhsElement is PrefixElement) {
-      if (rhsElement is PropertyAccessorElement) {
-        if (rhsElement.isSynthetic) {
-          return new AccessSemantics.staticField(
-              rhs,
-              rhsElement.variable,
-              null);
-        } else {
-          return new AccessSemantics.staticProperty(rhs, rhsElement, null);
-        }
-      } else if (rhsElement is FunctionElement) {
-        return new AccessSemantics.staticMethod(rhs, rhsElement, null);
-      } else if (rhsElement is ClassElement ||
-          rhsElement is FunctionTypeAliasElement) {
-        return new AccessSemantics.toplevelType(rhs, rhsElement);
-      } else {
-        return new AccessSemantics.dynamic(rhs, null);
-      }
-    } else if (lhsElement is ClassElement) {
-      if (rhsElement is PropertyAccessorElement && rhsElement.isSynthetic) {
-        return new AccessSemantics.staticField(
-            rhs,
-            rhsElement.variable,
-            lhsElement);
-      } else if (rhsElement is MethodElement) {
-        return new AccessSemantics.staticMethod(rhs, rhsElement, lhsElement);
-      } else {
-        return new AccessSemantics.staticProperty(rhs, rhsElement, lhsElement);
-      }
-    } else {
-      return new AccessSemantics.dynamic(rhs, lhs);
-    }
-  }
-}
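
Aside: the visitor deleted above boils down to a classification function: given whatever the resolver knows about an identifier, pick an access kind, and fall back to a dynamic access when nothing is statically known. A toy sketch of that decision, with illustrative `Mini*` stand-ins covering far fewer cases than the real AccessKind:

enum MiniAccessKind {
  dynamicAccess,
  localVariable,
  parameter,
  staticField,
  staticMethod,
}

abstract class MiniElement {}

class MiniLocalVariable implements MiniElement {}

class MiniParameter implements MiniElement {}

class MiniField implements MiniElement {
  final bool isStatic;
  MiniField(this.isStatic);
}

class MiniMethod implements MiniElement {
  final bool isStatic;
  MiniMethod(this.isStatic);
}

MiniAccessKind classify(MiniElement? element) {
  if (element is MiniLocalVariable) return MiniAccessKind.localVariable;
  if (element is MiniParameter) return MiniAccessKind.parameter;
  if (element is MiniField && element.isStatic) return MiniAccessKind.staticField;
  if (element is MiniMethod && element.isStatic) return MiniAccessKind.staticMethod;
  return MiniAccessKind.dynamicAccess; // instance members and unresolved names
}

void main() {
  print(classify(new MiniParameter()));   // MiniAccessKind.parameter
  print(classify(new MiniField(true)));   // MiniAccessKind.staticField
  print(classify(new MiniMethod(false))); // MiniAccessKind.dynamicAccess
  print(classify(null));                  // MiniAccessKind.dynamicAccess
}
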
diff --git a/pkg/analyzer2dart/lib/src/modely.dart b/pkg/analyzer2dart/lib/src/modely.dart
deleted file mode 100644
index e865675..0000000
--- a/pkg/analyzer2dart/lib/src/modely.dart
+++ /dev/null
@@ -1,914 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-part of analyzer2dart.element_converter;
-
-
-/// Base [dart2js.Element] implementation for converted analyzer elements.
-class ElementY extends dart2js.Element {
-  final ElementConverter converter;
-  final analyzer.Element element;
-
-  @override
-  String get name => element.name;
-
-  ElementY(this.converter, this.element);
-
-  @override
-  dart2js.LibraryElement get implementationLibrary => library;
-
-  @override
-  dart2js.Element get origin => this;
-
-  @override
-  dart2js.Element get patch => null;
-
-  @override
-  dart2js.Element get declaration => this;
-
-  @override
-  dart2js.Element get implementation => this;
-
-  @override
-  bool get isPatch => false;
-
-  @override
-  bool get isPatched => false;
-
-  @override
-  bool get isDeclaration => true;
-
-  @override
-  bool get isImplementation => false;
-
-  @override
-  dart2js.LibraryElement get library {
-    return converter.convertElement(element.library);
-  }
-
-  @override
-  bool get isLocal => false;
-
-  @override
-  bool get isSynthesized => false;
-
-  unsupported(String method) {
-    throw new UnsupportedError(
-        "'$method' is unsupported on $this ($runtimeType)");
-  }
-
-
-  @override
-  bool get isFinal => unsupported('isFinal');
-
-  @override
-  bool get isStatic => unsupported('isStatic');
-
-  @override
-  bool isForeign(_) => unsupported('isForeign');
-
-  @override
-  bool get impliesType => unsupported('impliesType');
-
-  @override
-  bool get isOperator => unsupported('isOperator');
-
-  @override
-  get position => unsupported('position');
-
-  @override
-  computeType(_) => unsupported('computeType');
-
-  @override
-  get enclosingElement => unsupported('enclosingElement');
-
-  @override
-  accept(_, __) => unsupported('accept');
-
-  @override
-  void addMetadata(_) => unsupported('addMetadata');
-
-  @override
-  get analyzableElement => unsupported('analyzableElement');
-
-  @override
-  asFunctionElement() => unsupported('asFunctionElement');
-
-  @override
-  buildScope() => unsupported('buildScope');
-
-  @override
-  get compilationUnit => unsupported('compilationUnit');
-
-  @override
-  get contextClass => unsupported('contextClass');
-
-  @override
-  void diagnose(context, listener) => unsupported('diagnose');
-
-  @override
-  get enclosingClass => unsupported('enclosingClass');
-
-  @override
-  get enclosingClassOrCompilationUnit {
-    return unsupported('enclosingClassOrCompilationUnit');
-  }
-
-  @override
-  String get fixedBackendName => unsupported('fixedBackendName');
-
-  @override
-  bool get hasFixedBackendName => unsupported('hasFixedBackendName');
-
-  @override
-  bool get isAbstract => unsupported('isAbstract');
-
-  @override
-  bool get isAssignable => unsupported('isAssignable');
-
-  @override
-  bool get isClassMember => unsupported('isClassMember');
-
-  @override
-  bool get isClosure => unsupported('isClosure');
-
-  @override
-  bool get isConst => unsupported('isConst');
-
-  @override
-  bool get isDeferredLoaderGetter => unsupported('isDeferredLoaderGetter');
-
-  @override
-  bool get isFactoryConstructor => unsupported('isFactoryConstructor');
-
-  @override
-  bool get isInjected => unsupported('isInjected');
-
-  @override
-  bool get isInstanceMember => unsupported('isInstanceMember');
-
-  @override
-  bool get isMixinApplication => unsupported('isMixinApplication');
-
-  @override
-  bool get isNative => unsupported('isNative');
-
-  @override
-  bool get isTopLevel => unsupported('isTopLevel');
-
-  @override
-  get kind => unsupported('kind');
-
-  @override
-  get metadata => unsupported('metadata');
-
-  @override
-  get outermostEnclosingMemberOrTopLevel {
-    return unsupported('outermostEnclosingMemberOrTopLevel');
-  }
-
-  @override
-  void setNative(String name) => unsupported('setNative');
-
-  String toString() => '$kind($name)';
-}
-
-abstract class AnalyzableElementY
-    implements ElementY, dart2js.AnalyzableElement {
-  @override
-  bool get hasTreeElements => unsupported('hasTreeElements');
-
-  @override
-  get treeElements => unsupported('treeElements');
-}
-
-abstract class AstElementY implements ElementY, dart2js.AstElement {
-  @override
-  bool get hasNode => unsupported('hasNode');
-
-  @override
-  get node => unsupported('node');
-
-  @override
-  bool get hasResolvedAst => unsupported('hasResolvedAst');
-
-  @override
-  get resolvedAst => unsupported('resolvedAst');
-}
-
-class LibraryElementY extends ElementY with AnalyzableElementY
-    implements dart2js.LibraryElement {
-  analyzer.LibraryElement get element => super.element;
-
-  @override
-  dart2js.ElementKind get kind => dart2js.ElementKind.LIBRARY;
-
-  // TODO(johnniwinther): Ensure the correct semantics of this.
-  @override
-  bool get isInternalLibrary => isPlatformLibrary && element.isPrivate;
-
-  // TODO(johnniwinther): Ensure the correct semantics of this.
-  @override
-  bool get isPlatformLibrary => element.isInSdk;
-
-  @override
-  bool get isDartCore => element.isDartCore;
-
-  LibraryElementY(ElementConverter converter, analyzer.LibraryElement element)
-      : super(converter, element);
-
-  @override
-  void addCompilationUnit(_) => unsupported('addCompilationUnit');
-
-  @override
-  void addImport(element, import, listener) => unsupported('addImport');
-
-  @override
-  void addMember(element, listener) => unsupported('addMember');
-
-  @override
-  void addTag(tag, listener) => unsupported('addTag');
-
-  @override
-  void addToScope(element, listener) => unsupported('addToScope');
-
-  @override
-  bool get canUseNative => unsupported('canUseNative');
-
-  @override
-  Uri get canonicalUri => unsupported('canonicalUri');
-
-  @override
-  int compareTo(other) => unsupported('compareTo');
-
-  @override
-  get compilationUnits => unsupported('compilationUnits');
-
-  @override
-  get entryCompilationUnit => unsupported('entryCompilationUnit');
-
-  @override
-  get exports => unsupported('exports');
-
-  @override
-  bool get exportsHandled => unsupported('exportsHandled');
-
-  @override
-  find(String elementName) => unsupported('find');
-
-  @override
-  findExported(String elementName) => unsupported('findExported');
-
-  @override
-  findLocal(String elementName) => unsupported('findLocal');
-
-  @override
-  void forEachExport(_) => unsupported('forEachExport');
-
-  @override
-  void forEachLocalMember(_) => unsupported('forEachLocalMember');
-
-  @override
-  getImportsFor(element) => unsupported('getImportsFor');
-
-  @override
-  getLibraryFromTag(tag) => unsupported('getLibraryFromTag');
-
-  @override
-  String getLibraryName() => unsupported('getLibraryName');
-
-  @override
-  String getLibraryOrScriptName() => unsupported('getLibraryOrScriptName');
-
-  @override
-  getNonPrivateElementsInScope() => unsupported('getNonPrivateElementsInScope');
-
-  @override
-  bool hasLibraryName() => unsupported('hasLibraryName');
-
-  @override
-  bool get isPackageLibrary => unsupported('isPackageLibrary');
-
-  @override
-  get libraryTag => unsupported('libraryTag');
-
-  @override
-  void set libraryTag(value) => unsupported('libraryTag');
-
-  @override
-  localLookup(elementName) => unsupported('localLookup');
-
-  @override
-  void recordResolvedTag(tag, library) => unsupported('recordResolvedTag');
-
-  @override
-  void setExports(exportedElements) => unsupported('setExports');
-
-  @override
-  get tags => unsupported('tags');
-}
-
-abstract class TopLevelElementMixin implements ElementY {
-  @override
-  bool get isClassMember => false;
-
-  @override
-  bool get isInstanceMember => false;
-
-  @override
-  bool get isTopLevel => true;
-
-  // TODO(johnniwinther): Ensure the correct semantics of this.
-  @override
-  bool get isFactoryConstructor => false;
-
-  @override
-  bool get isStatic {
-    // Semantic difference: Analyzer considers top-level and static class
-    // members to be static, dart2js only considers static class members to be
-    // static.
-    return false;
-  }
-
-  // TODO(johnniwinther): Ensure the correct semantics of this.
-  @override
-  bool get isAbstract => false;
-
-  @override
-  dart2js.ClassElement get enclosingClass => null;
-}
-
-abstract class FunctionElementMixin
-    implements ElementY, dart2js.FunctionElement {
-  analyzer.ExecutableElement get element;
-
-  // TODO(johnniwinther): Ensure the correct semantics of this.
-  @override
-  bool get isExternal => false;
-
-  @override
-  bool get isConst => false;
-
-  @override
-  get abstractField => unsupported('abstractField');
-
-  @override
-  computeSignature(_) => unsupported('computeSignature');
-
-  @override
-  get memberContext => unsupported('memberContext');
-
-  @override
-  get functionSignature => unsupported('functionSignature');
-
-  @override
-  bool get hasFunctionSignature => unsupported('hasFunctionSignature');
-
-  @override
-  get asyncMarker => unsupported('asyncMarker');
-
-  @override
-  List<dart2js.ParameterElement> get parameters {
-    return element.parameters.map(converter.convertElement).toList();
-  }
-
-  @override
-  dart2js.FunctionType get type => converter.convertType(element.type);
-}
-
-class TopLevelFunctionElementY extends ElementY
-    with AnalyzableElementY,
-         AstElementY,
-         TopLevelElementMixin,
-         FunctionElementMixin,
-         MemberElementMixin
-    implements dart2js.MethodElement {
-  analyzer.FunctionElement get element => super.element;
-
-  @override
-  dart2js.ElementKind get kind => dart2js.ElementKind.FUNCTION;
-
-  TopLevelFunctionElementY(ElementConverter converter,
-                           analyzer.FunctionElement element)
-      : super(converter, element);
-
-  @override
-  get nestedClosures => unsupported('nestedClosures');
-}
-
-class LocalFunctionElementY extends ElementY
-    with AnalyzableElementY,
-         AstElementY,
-         LocalElementMixin,
-         FunctionElementMixin
-    implements dart2js.LocalFunctionElement {
-  analyzer.FunctionElement get element => super.element;
-
-  @override
-  dart2js.ElementKind get kind => dart2js.ElementKind.FUNCTION;
-
-  @override
-  bool get isAbstract => false;
-
-  @override
-  bool get isConst => false;
-
-  LocalFunctionElementY(ElementConverter converter,
-                        analyzer.FunctionElement element)
-      : super(converter, element);
-}
-
-class ParameterElementY extends ElementY
-    with AnalyzableElementY, AstElementY, VariableElementMixin
-    implements dart2js.ParameterElement {
-
-  analyzer.ParameterElement get element => super.element;
-
-  @override
-  dart2js.ElementKind get kind => dart2js.ElementKind.PARAMETER;
-
-  @override
-  dart2js.DartType get type => converter.convertType(element.type);
-
-  @override
-  bool get isLocal => true;
-
-  @override
-  bool get isStatic => false;
-
-  @override
-  bool get isConst => false;
-
-  @override
-  bool get isNamed => element.parameterKind == ParameterKind.NAMED;
-
-  @override
-  bool get isOptional => element.parameterKind.isOptional;
-
-  ParameterElementY(ElementConverter converter,
-                    analyzer.ParameterElement element)
-      : super(converter, element) {
-    assert(!element.isInitializingFormal);
-  }
-
-  @override
-  get executableContext => unsupported('executableContext');
-
-  @override
-  get functionDeclaration => unsupported('functionDeclaration');
-
-  @override
-  get functionSignature => unsupported('functionSignature');
-}
-
-class TypeDeclarationElementY extends ElementY
-    with AnalyzableElementY, AstElementY
-    implements dart2js.TypeDeclarationElement {
-
-  TypeDeclarationElementY(ElementConverter converter,
-                          analyzer.Element element)
-      : super(converter, element);
-
-  @override
-  void ensureResolved(compiler) => unsupported('ensureResolved');
-
-  @override
-  bool get isResolved => unsupported('isResolved');
-
-  @override
-  get rawType => null; // unsupported('rawType');
-
-  @override
-  int get resolutionState => unsupported('resolutionState');
-
-  @override
-  get thisType => unsupported('thisType');
-
-  @override
-  get typeVariables => unsupported('typeVariables');
-}
-
-class ClassElementY extends TypeDeclarationElementY
-    implements dart2js.ClassElement {
-
-  analyzer.ClassElement get element => super.element;
-
-  dart2js.ElementKind get kind => dart2js.ElementKind.CLASS;
-
-  @override
-  bool get isObject => element.type.isObject;
-
-  // TODO(johnniwinther): Ensure the correct semantics.
-  // TODO(paulberry,brianwilkerson): [ClassElement.isTypedef] should probably
-  // be renamed to [ClassElement.isNamedMixinApplication].
-  @override
-  bool get isMixinApplication => element.isTypedef;
-
-  @override
-  bool get isUnnamedMixinApplication => false;
-
-  @override
-  bool get isEnumClass => element.isEnum;
-
-  @override
-  bool get isAbstract => element.isAbstract;
-
-  // TODO(johnniwinther): Semantic difference: Dart2js points to unnamed
-  // mixin applications, analyzer points to the type in the extends clause or
-  // Object if omitted.
-  @override
-  dart2js.DartType get supertype {
-    return element.supertype != null
-        ? converter.convertType(element.supertype)
-        : null;
-  }
-
-  @override
-  util.Link<dart2js.DartType> get interfaces {
-    // TODO(johnniwinther): Support interfaces.
-    return const util.Link<dart2js.DartType>();
-  }
-
-  // TODO(johnniwinther): Support generic classes.
-  @override
-  List<dart2js.DartType> get typeVariables => const [];
-
-  @override
-  bool get isStatic => false;
-
-  @override
-  bool get isTopLevel => true;
-
-  @override
-  dart2js.ClassElement get enclosingClass => this;
-
-  ClassElementY(ElementConverter converter, analyzer.ClassElement element)
-      : super(converter, element);
-
-  @override
-  void addBackendMember(element) => unsupported('addBackendMember');
-
-  @override
-  void addMember(element, listener) => unsupported('addMember');
-
-  @override
-  void addToScope(element, listener) => unsupported('addToScope');
-
-  @override
-  get allSupertypes => unsupported('allSupertypes');
-
-  @override
-  get allSupertypesAndSelf => unsupported('allSupertypesAndSelf');
-
-  @override
-  asInstanceOf(cls) => unsupported('asInstanceOf');
-
-  @override
-  get callType => unsupported('callType');
-
-  @override
-  computeTypeParameters(compiler) => unsupported('computeTypeParameters');
-
-  @override
-  get constructors => unsupported('constructors');
-
-  @override
-  void forEachBackendMember(f) => unsupported('forEachBackendMember');
-
-  @override
-  void forEachClassMember(f) => unsupported('forEachClassMember');
-
-  @override
-  void forEachInstanceField(f, {includeSuperAndInjectedMembers: false}) {
-    unsupported('forEachInstanceField');
-  }
-
-  @override
-  void forEachInterfaceMember(f) => unsupported('forEachInterfaceMember');
-
-  @override
-  void forEachLocalMember(f) => unsupported('forEachLocalMember');
-
-  @override
-  void forEachMember(f,
-                     {includeBackendMembers: false,
-                      includeSuperAndInjectedMembers: false}) {
-    unsupported('forEachMember');
-  }
-
-  @override
-  void forEachStaticField(f) => unsupported('forEachStaticField');
-
-  @override
-  bool get hasBackendMembers => unsupported('hasBackendMembers');
-
-  @override
-  bool get hasConstructor => unsupported('hasConstructor');
-
-  @override
-  bool hasFieldShadowedBy(fieldMember) => unsupported('hasFieldShadowedBy');
-
-  @override
-  bool get hasIncompleteHierarchy => unsupported('hasIncompleteHierarchy');
-
-  @override
-  bool get hasLocalScopeMembers => unsupported('hasLocalScopeMembers');
-
-  @override
-  int get hierarchyDepth => unsupported('hierarchyDepth');
-
-  @override
-  int get id => unsupported('id');
-
-  @override
-  bool implementsFunction(compiler) => unsupported('implementsFunction');
-
-  @override
-  bool implementsInterface(intrface) => unsupported('implementsInterface');
-
-  @override
-  bool get isProxy => unsupported('isProxy');
-
-  @override
-  bool isSubclassOf(cls) => unsupported('isSubclassOf');
-
-  @override
-  localLookup(String elementName) => unsupported('localLookup');
-
-  @override
-  lookupBackendMember(String memberName) => unsupported('lookupBackendMember');
-
-  @override
-  lookupClassMember(name) => unsupported('lookupClassMember');
-
-  @override
-  lookupConstructor(selector, [noMatch]) => unsupported('lookupConstructor');
-
-  @override
-  lookupInterfaceMember(name) => unsupported('lookupInterfaceMember');
-
-  @override
-  lookupLocalMember(String memberName) => unsupported('lookupLocalMember');
-
-  @override
-  lookupMember(String memberName) => unsupported('lookupMember');
-
-  @override
-  lookupByName(dart2js.Name memberName) => unsupported('lookupByName');
-
-  @override
-  lookupSuperMember(String memberName) => unsupported('lookupSuperMember');
-
-  @override
-  lookupSuperMemberInLibrary(memberName, library) {
-    unsupported('lookupSuperMemberInLibrary');
-  }
-
-  @override
-  lookupSuperByName(dart2js.Name memberName) =>
-      unsupported('lookupSuperByName');
-
-  @override
-  String get nativeTagInfo => unsupported('nativeTagInfo');
-
-  @override
-  void reverseBackendMembers() => unsupported('reverseBackendMembers');
-
-  @override
-  dart2js.ClassElement get superclass => unsupported('superclass');
-
-  @override
-  int get supertypeLoadState => unsupported('supertypeLoadState');
-
-  @override
-  dart2js.ConstructorElement lookupDefaultConstructor() =>
-      unsupported('lookupDefaultConstructor');
-}
-
-class TypedefElementY extends TypeDeclarationElementY
-    implements dart2js.TypedefElement {
-
-  analyzer.FunctionTypeAliasElement get element => super.element;
-
-  dart2js.ElementKind get kind => dart2js.ElementKind.TYPEDEF;
-
-  TypedefElementY(ElementConverter converter,
-                  analyzer.FunctionTypeAliasElement element)
-      : super(converter, element);
-
-  @override
-  dart2js.DartType get alias => unsupported('alias');
-
-  @override
-  void checkCyclicReference(compiler) => unsupported('checkCyclicReference');
-
-  @override
-  get functionSignature => unsupported('functionSignature');
-}
-
-abstract class VariableElementMixin
-    implements ElementY, dart2js.VariableElement {
-  @override
-  get initializer => unsupported('initializer');
-
-  @override
-  get memberContext => unsupported('memberContext');
-
-  @override
-  get constant => unsupported('constant');
-}
-
-class TopLevelVariableElementY extends ElementY
-    with AnalyzableElementY,
-         AstElementY,
-         TopLevelElementMixin,
-         VariableElementMixin,
-         MemberElementMixin
-    implements dart2js.FieldElement {
-
-  analyzer.TopLevelVariableElement get element => super.element;
-
-  dart2js.ElementKind get kind => dart2js.ElementKind.FIELD;
-
-  @override
-  dart2js.DartType get type => converter.convertType(element.type);
-
-  @override
-  bool get isFinal => element.isFinal;
-
-  @override
-  bool get isConst => element.isConst;
-
-  TopLevelVariableElementY(ElementConverter converter,
-                           analyzer.TopLevelVariableElement element)
-      : super(converter, element);
-
-  @override
-  get nestedClosures => unsupported('nestedClosures');
-}
-
-abstract class LocalElementMixin implements ElementY, dart2js.LocalElement {
-
-  @override
-  bool get isLocal => true;
-
-  @override
-  bool get isInstanceMember => false;
-
-  @override
-  bool get isStatic => false;
-
-  @override
-  bool get isTopLevel => false;
-
-  @override
-  get executableContext => unsupported('executableContext');
-
-  // TODO(johnniwinther): Ensure the correct semantics of this.
-  @override
-  bool get isFactoryConstructor => false;
-}
-
-class LocalVariableElementY extends ElementY
-    with AnalyzableElementY,
-         AstElementY,
-         LocalElementMixin,
-         VariableElementMixin
-    implements dart2js.LocalVariableElement {
-
-  analyzer.LocalVariableElement get element => super.element;
-
-  dart2js.ElementKind get kind => dart2js.ElementKind.VARIABLE;
-
-  @override
-  bool get isConst => element.isConst;
-
-  LocalVariableElementY(ElementConverter converter,
-                        analyzer.LocalVariableElement element)
-      : super(converter, element);
-
-  @override
-  dart2js.DartType get type => unsupported('type');
-}
-
-abstract class ClassMemberMixin implements ElementY {
-  analyzer.ClassMemberElement get element;
-
-  @override
-  dart2js.ClassElement get contextClass => enclosingClass;
-
-  @override
-  dart2js.ClassElement get enclosingClass {
-    return converter.convertElement(element.enclosingElement);
-  }
-
-  @override
-  bool get isClassMember => true;
-
-  @override
-  bool get isTopLevel => false;
-}
-
-class ConstructorElementY extends ElementY
-    with AnalyzableElementY,
-         AstElementY,
-         FunctionElementMixin,
-         ClassMemberMixin,
-         MemberElementMixin
-    implements dart2js.ConstructorElement {
-
-  analyzer.ConstructorElement get element => super.element;
-
-  // TODO(johnniwinther): Support redirecting/factory constructors.
-  @override
-  dart2js.ElementKind get kind => dart2js.ElementKind.GENERATIVE_CONSTRUCTOR;
-
-  // TODO(johnniwinther): Support factory constructors.
-  @override
-  bool get isFactoryConstructor => false;
-
-  // TODO(johnniwinther): Support redirecting factory constructors.
-  @override
-  bool get isRedirectingFactory => false;
-
-  // TODO(johnniwinther): Support redirecting generative constructors.
-  @override
-  bool get isRedirectingGenerative => false;
-
-  @override
-  bool get isStatic => false;
-
-  @override
-  bool get isSynthesized => element.isSynthetic;
-
-  ConstructorElementY(ElementConverter converter,
-                      analyzer.ConstructorElement element)
-      : super(converter, element);
-
-  @override
-  computeEffectiveTargetType(_) => unsupported('computeEffectiveTargetType');
-
-  @override
-  get definingConstructor => unsupported('definingConstructor');
-
-  @override
-  get effectiveTarget => unsupported('effectiveTarget');
-
-  @override
-  get immediateRedirectionTarget => unsupported('immediateRedirectionTarget');
-
-  @override
-  get nestedClosures => unsupported('nestedClosures');
-
-  @override
-  get constantConstructor => unsupported('constantConstructor');
-
-  @override
-  get isFromEnvironmentConstructor {
-    unsupported('isFromEnvironmentConstructor');
-  }
-
-  @override
-  bool get isCyclicRedirection => effectiveTarget.isRedirectingFactory;
-
-  // TODO(johnniwinther): implement redirectionDeferredPrefix
-  @override
-  dart2js.PrefixElement get redirectionDeferredPrefix => null;
-}
-
-class InstanceMethodElementY extends ElementY
-    with AnalyzableElementY,
-         AstElementY,
-         FunctionElementMixin,
-         ClassMemberMixin,
-         MemberElementMixin
-    implements dart2js.MethodElement {
-
-  analyzer.MethodElement get element => super.element;
-
-  @override
-  dart2js.ElementKind get kind => dart2js.ElementKind.FUNCTION;
-
-  @override
-  bool get isStatic => element.isStatic;
-
-  @override
-  bool get isAbstract => element.isAbstract;
-
-  @override
-  bool get isFactoryConstructor => false;
-
-  @override
-  bool get isInstanceMember => true;
-
-  InstanceMethodElementY(ElementConverter converter,
-                         analyzer.MethodElement element)
-      : super(converter, element);
-
-  @override
-  get nestedClosures => unsupported('nestedClosures');
-}
-
-abstract class MemberElementMixin implements dart2js.MemberElement {
-  dart2js.Name get memberName => new dart2js.Name(name, library);
-}
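
The modely.dart file removed above is built around a single pattern: a base adapter (ElementY) satisfies the very wide dart2js.Element interface by routing every member to unsupported(...), and small subclasses and mixins override only the members analyzer2dart actually exercised. Below is a minimal, stand-alone Dart sketch of that throwing-adapter pattern; the interface and class names are invented for illustration and are not part of analyzer or dart2js.

// A deliberately wide interface, standing in for dart2js.Element.
abstract class WideInterface {
  String get name;
  bool get isStatic;
  int get arity;
}

// Base adapter: every member throws until a subclass overrides it,
// mirroring ElementY.unsupported.
class ThrowingAdapter implements WideInterface {
  dynamic unsupported(String member) =>
      throw new UnsupportedError("'$member' is unsupported on $runtimeType");

  String get name => unsupported('name');
  bool get isStatic => unsupported('isStatic');
  int get arity => unsupported('arity');
}

// Concrete subclass: overrides only what its callers need.
class NamedOnly extends ThrowingAdapter {
  final String _name;
  NamedOnly(this._name);

  String get name => _name;
}

void main() {
  WideInterface element = new NamedOnly('foo');
  print(element.name); // foo
  // element.isStatic would throw UnsupportedError, like ElementY does.
}
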
diff --git a/pkg/analyzer2dart/lib/src/semantic_visitor.dart b/pkg/analyzer2dart/lib/src/semantic_visitor.dart
deleted file mode 100644
index b5edd6a..0000000
--- a/pkg/analyzer2dart/lib/src/semantic_visitor.dart
+++ /dev/null
@@ -1,271 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.semantic_visitor;
-
-import 'package:analyzer/analyzer.dart';
-import 'package:analyzer/src/generated/source.dart';
-
-import 'util.dart';
-import 'identifier_semantics.dart';
-
-/// An AST visitor that uses the [AccessSemantics] of invocations and accesses
-/// to dispatch to fine-grained visitor methods.
-abstract class SemanticVisitor<R> extends RecursiveAstVisitor<R> {
-
-  Source get currentSource;
-
-  void reportMessage(AstNode node, String message) {
-    reportSourceMessage(currentSource, node, message);
-  }
-
-  giveUp(AstNode node, String message) {
-    reportMessage(node, message);
-    throw new UnimplementedError(message);
-  }
-
-  bool invariant(AstNode node, condition, String message) {
-    if (condition is Function) {
-      condition = condition();
-    }
-    if (!condition) {
-      reportMessage(node, message);
-      return false;
-    }
-    return true;
-  }
-
-  R visitDynamicInvocation(MethodInvocation node,
-                           AccessSemantics semantics) {
-    return giveUp(node, 'visitDynamicInvocation of $semantics');
-  }
-
-  R visitLocalFunctionInvocation(MethodInvocation node,
-                                 AccessSemantics semantics) {
-    return giveUp(node, 'visitLocalFunctionInvocation of $semantics');
-  }
-
-  R visitLocalVariableInvocation(MethodInvocation node,
-                                 AccessSemantics semantics) {
-    return giveUp(node, 'visitLocalVariableInvocation of $semantics');
-  }
-
-  R visitParameterInvocation(MethodInvocation node,
-                             AccessSemantics semantics) {
-    return giveUp(node, 'visitParameterInvocation of $semantics');
-  }
-
-  R visitStaticFieldInvocation(MethodInvocation node,
-                               AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticFieldInvocation of $semantics');
-  }
-
-  R visitStaticMethodInvocation(MethodInvocation node,
-                                AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticMethodInvocation of $semantics');
-  }
-
-  R visitStaticPropertyInvocation(MethodInvocation node,
-                                  AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticPropertyInvocation of $semantics');
-  }
-
-  @override
-  R visitMethodInvocation(MethodInvocation node) {
-    if (node.target != null) {
-      node.target.accept(this);
-    }
-    node.argumentList.accept(this);
-    return handleMethodInvocation(node);
-  }
-
-  R handleMethodInvocation(MethodInvocation node) {
-    AccessSemantics semantics = node.accept(ACCESS_SEMANTICS_VISITOR);
-    switch (semantics.kind) {
-      case AccessKind.DYNAMIC:
-        return visitDynamicInvocation(node, semantics);
-      case AccessKind.LOCAL_FUNCTION:
-        return visitLocalFunctionInvocation(node, semantics);
-      case AccessKind.LOCAL_VARIABLE:
-        return visitLocalVariableInvocation(node, semantics);
-      case AccessKind.PARAMETER:
-        return visitParameterInvocation(node, semantics);
-      case AccessKind.STATIC_FIELD:
-        return visitStaticFieldInvocation(node, semantics);
-      case AccessKind.STATIC_METHOD:
-        return visitStaticMethodInvocation(node, semantics);
-      case AccessKind.STATIC_PROPERTY:
-        return visitStaticPropertyInvocation(node, semantics);
-      default:
-        // Unexpected access kind.
-        return giveUp(node,
-            'Unexpected ${semantics} in visitMethodInvocation.');
-    }
-  }
-
-  @override
-  R visitPropertyAccess(PropertyAccess node) {
-    if (node.target != null) {
-      node.target.accept(this);
-    }
-    return handlePropertyAccess(node);
-  }
-
-  R handlePropertyAccess(PropertyAccess node) {
-    return _handlePropertyAccess(node, node.accept(ACCESS_SEMANTICS_VISITOR));
-  }
-
-  @override
-  R visitPrefixedIdentifier(PrefixedIdentifier node) {
-    node.prefix.accept(this);
-    return handlePrefixedIdentifier(node);
-  }
-
-  R handlePrefixedIdentifier(PrefixedIdentifier node) {
-    return _handlePropertyAccess(node, node.accept(ACCESS_SEMANTICS_VISITOR));
-  }
-
-  @override
-  R visitSimpleIdentifier(SimpleIdentifier node) {
-    AccessSemantics semantics = node.accept(ACCESS_SEMANTICS_VISITOR);
-    if (semantics != null) {
-      return _handlePropertyAccess(node, semantics);
-    } else {
-      return null;
-    }
-  }
-
-  R visitDynamicAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitDynamicAccess of $semantics');
-  }
-
-  R visitLocalFunctionAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitLocalFunctionAccess of $semantics');
-  }
-
-  R visitLocalVariableAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitLocalVariableAccess of $semantics');
-  }
-
-  R visitParameterAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitParameterAccess of $semantics');
-  }
-
-  R visitStaticFieldAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticFieldAccess of $semantics');
-  }
-
-  R visitStaticMethodAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticMethodAccess of $semantics');
-  }
-
-  R visitStaticPropertyAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticPropertyAccess of $semantics');
-  }
-
-  R visitToplevelClassAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitToplevelClassAccess of $semantics');
-  }
-
-  R visitTypeParameterAccess(AstNode node, AccessSemantics semantics) {
-    return giveUp(node, 'visitTypeParameterAccess of $semantics');
-  }
-
-  R _handlePropertyAccess(AstNode node, AccessSemantics semantics) {
-    switch (semantics.kind) {
-      case AccessKind.DYNAMIC:
-        return visitDynamicAccess(node, semantics);
-      case AccessKind.LOCAL_FUNCTION:
-        return visitLocalFunctionAccess(node, semantics);
-      case AccessKind.LOCAL_VARIABLE:
-        return visitLocalVariableAccess(node, semantics);
-      case AccessKind.PARAMETER:
-        return visitParameterAccess(node, semantics);
-      case AccessKind.STATIC_FIELD:
-        return visitStaticFieldAccess(node, semantics);
-      case AccessKind.STATIC_METHOD:
-        return visitStaticMethodAccess(node, semantics);
-      case AccessKind.STATIC_PROPERTY:
-        return visitStaticPropertyAccess(node, semantics);
-      case AccessKind.TOPLEVEL_TYPE:
-        return visitToplevelClassAccess(node, semantics);
-      case AccessKind.TYPE_PARAMETER:
-        return visitTypeParameterAccess(node, semantics);
-      default:
-        // Unexpected access kind.
-        return giveUp(node,
-            'Unexpected ${semantics} in _handlePropertyAccess.');
-    }
-  }
-
-  R visitDynamicPropertyAssignment(AssignmentExpression node,
-                                   AccessSemantics semantics) {
-    return giveUp(node, 'visitDynamicPropertyAssignment of $semantics');
-  }
-
-  R visitLocalFunctionAssignment(AssignmentExpression node,
-                                 AccessSemantics semantics) {
-    return giveUp(node, 'visitLocalFunctionAssignment of $semantics');
-  }
-
-  R visitLocalVariableAssignment(AssignmentExpression node,
-                                 AccessSemantics semantics) {
-    return giveUp(node, 'visitLocalVariableAssignment of $semantics');
-  }
-
-  R visitParameterAssignment(AssignmentExpression node,
-                             AccessSemantics semantics) {
-    return giveUp(node, 'visitParameterAssignment of $semantics');
-  }
-
-  R visitStaticFieldAssignment(AssignmentExpression node,
-                               AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticFieldAssignment of $semantics');
-  }
-
-  R visitStaticMethodAssignment(AssignmentExpression node,
-                                AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticMethodAssignment of $semantics');
-  }
-
-  R visitStaticPropertyAssignment(AssignmentExpression node,
-                                  AccessSemantics semantics) {
-    return giveUp(node, 'visitStaticPropertyAssignment of $semantics');
-  }
-
-  @override
-  R visitAssignmentExpression(AssignmentExpression node) {
-    super.visitAssignmentExpression(node);
-    return handleAssignmentExpression(node);
-  }
-
-  R handleAssignmentExpression(AssignmentExpression node) {
-    AccessSemantics semantics =
-        node.leftHandSide.accept(ACCESS_SEMANTICS_VISITOR);
-    if (semantics == null) {
-      return giveUp(node, 'handleAssignmentExpression with no AccessSemantics');
-    } else {
-      switch (semantics.kind) {
-        case AccessKind.DYNAMIC:
-          return visitDynamicPropertyAssignment(node, semantics);
-        case AccessKind.LOCAL_FUNCTION:
-          return visitLocalFunctionAssignment(node, semantics);
-        case AccessKind.LOCAL_VARIABLE:
-          return visitLocalVariableAssignment(node, semantics);
-        case AccessKind.PARAMETER:
-          return visitParameterAssignment(node, semantics);
-        case AccessKind.STATIC_FIELD:
-          return visitStaticFieldAssignment(node, semantics);
-        case AccessKind.STATIC_METHOD:
-          return visitStaticMethodAssignment(node, semantics);
-        case AccessKind.STATIC_PROPERTY:
-          return visitStaticPropertyAssignment(node, semantics);
-        default:
-          // Unexpected access kind.
-          return giveUp(node,
-              'Unexpected ${semantics} in handleAssignmentExpression.');
-      }
-    }
-  }
-}
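
The deleted SemanticVisitor resolves each node to an AccessSemantics, switches on its AccessKind, and forwards to a fine-grained visit method whose default implementation is giveUp, so subclasses such as TreeShakingVisitor override only the cases they care about. Here is a hedged, self-contained sketch of that dispatch-on-kind shape; the enum values and node type below are simplified stand-ins, not the analyzer's API.

// Simplified stand-ins for AccessKind and an AST node.
enum AccessKind { dynamicAccess, localVariable, staticMethod }

class Node {
  final String name;
  final AccessKind kind;
  Node(this.name, this.kind);
}

abstract class KindVisitor<R> {
  R giveUp(Node node, String message) =>
      throw new UnimplementedError('$message: ${node.name}');

  // Fine-grained hooks; the default behavior is to give up, as in
  // SemanticVisitor.
  R visitDynamicAccess(Node node) => giveUp(node, 'dynamic access');
  R visitLocalVariableAccess(Node node) => giveUp(node, 'local variable');
  R visitStaticMethodAccess(Node node) => giveUp(node, 'static method');

  // Single entry point that routes on the precomputed access kind.
  R visit(Node node) {
    switch (node.kind) {
      case AccessKind.dynamicAccess:
        return visitDynamicAccess(node);
      case AccessKind.localVariable:
        return visitLocalVariableAccess(node);
      case AccessKind.staticMethod:
        return visitStaticMethodAccess(node);
      default:
        return giveUp(node, 'unexpected access kind');
    }
  }
}

// A concrete visitor overrides only the cases it handles.
class NamePrinter extends KindVisitor<void> {
  void visitDynamicAccess(Node node) => print('dynamic: ${node.name}');
  void visitLocalVariableAccess(Node node) {} // Locals are ignored.
}

void main() {
  new NamePrinter()
    ..visit(new Node('a.foo', AccessKind.dynamicAccess))
    ..visit(new Node('x', AccessKind.localVariable));
}
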
diff --git a/pkg/analyzer2dart/lib/src/tree_shaker.dart b/pkg/analyzer2dart/lib/src/tree_shaker.dart
deleted file mode 100644
index 6b9eaa6..0000000
--- a/pkg/analyzer2dart/lib/src/tree_shaker.dart
+++ /dev/null
@@ -1,418 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library analyzer2dart.treeShaker;
-
-import 'dart:collection';
-
-import 'package:analyzer/analyzer.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:analyzer/src/generated/resolver.dart';
-import 'package:compiler/src/universe/universe.dart';
-
-import 'closed_world.dart';
-import 'util.dart';
-import 'semantic_visitor.dart';
-import 'identifier_semantics.dart';
-
-/**
- * The result of performing local reachability analysis on a method.
- */
-class MethodAnalysis {
-  /**
-   * The AST for the method.
-   */
-  final Declaration declaration;
-
-  /**
-   * The functions statically called by the method.
-   */
-  final List<ExecutableElement> calls = <ExecutableElement>[];
-
-  /**
-   * The fields and top-level variables statically accessed by the method.
-   */
-  // TODO(johnniwinther): Should we split this into reads and writes?
-  final List<PropertyInducingElement> accesses = <PropertyInducingElement>[];
-
-  /**
-   * The selectors used by the method to perform dynamic invocation.
-   */
-  final List<Selector> invokes = <Selector>[];
-
-  /**
-   * The classes that are instantiated by the method.
-   */
-  // TODO(johnniwinther,paulberry): Register instantiated types.
-  // TODO(johnniwinther,paulberry): Register checked types from is/as checks,
-  // catch clauses and (checked) type annotations.
-  final List<ClassElement> instantiates = <ClassElement>[];
-
-  MethodAnalysis(this.declaration);
-}
-
-/**
- * The result of performing local reachability analysis on a class.
- *
- * TODO(paulberry): Do we need to do any other analysis of classes?  (For
- * example, detect annotations that are relevant to mirrors, detect that a
- * class might be used for custom HTML elements, or collect inherited and
- * mixed-in classes).
- */
-class ClassAnalysis {
-  /**
-   * The AST for the class.
-   */
-  final ClassDeclaration declaration;
-
-  ClassAnalysis(this.declaration);
-}
-
-/**
- * This class is responsible for performing local analysis of the source code
- * to provide the information needed to do tree shaking.
- */
-class LocalReachabilityComputer {
-  /**
-   * Perform local reachability analysis of [method].
-   */
-  MethodAnalysis analyzeMethod(ExecutableElement method) {
-    Declaration declaration = method.node;
-    MethodAnalysis analysis = new MethodAnalysis(declaration);
-    if (declaration != null) {
-      declaration.accept(new TreeShakingVisitor(analysis));
-    } else if (method is ConstructorElement) {
-      // This constructor has no associated declaration in the AST.  Either it
-      // is a default constructor for an ordinary class, or it's a synthetic
-      // constructor associated with a mixin.  For now we assume it's a default
-      // constructor, in which case all we need to do is record the class as
-      // being instantiated by this method.  TODO(paulberry): handle the
-      // mixin case.
-      ClassElement instantiatedClass = method.enclosingElement;
-      analysis.instantiates.add(instantiatedClass);
-      if (instantiatedClass.supertype != null) {
-        ClassElement superClass = instantiatedClass.supertype.element;
-        ConstructorElement superConstructor = superClass.unnamedConstructor;
-        if (superConstructor != null) {
-          // TODO(johnniwinther): Register instantiated type and selector.
-          analysis.calls.add(superConstructor);
-        }
-      }
-    } else {
-      // This is an executable element with no associated declaration in the
-      // AST, and it's not a constructor.  TODO(paulberry): can this ever
-      // happen?
-      throw new UnimplementedError();
-    }
-    return analysis;
-  }
-
-  /**
-   * Perform local reachability analysis of [classElement].
-   */
-  ClassAnalysis analyzeClass(ClassElement classElement) {
-    return new ClassAnalysis(classElement.node);
-  }
-
-  /**
-   * Determine which members of [classElement] are matched by the given
-   * [selector].
-   *
-   * [methods] is populated with all the class methods which are matched by the
-   * selector, [accessors] with all the getters and setters which are matched
-   * by the selector, and [fields] with all the fields which are matched by the
-   * selector.
-   */
-  void getMatchingClassMembers(ClassElement classElement, Selector selector,
-      List<MethodElement> methods, List<PropertyAccessorElement> accessors,
-      List<PropertyInducingElement> fields) {
-    // TODO(paulberry): should we walk through superclasses and mixins as well
-    // here?  Or would it be better to make [TreeShaker] responsible for those
-    // relationships (since they are non-local)?  Consider making use of
-    // InheritanceManager to do this.
-    for (MethodElement method in classElement.methods) {
-      // TODO(paulberry): account for arity and named arguments when matching
-      // the selector against the method.
-      if (selector.name == method.name) {
-        methods.add(method);
-      }
-    }
-    if (selector.kind == SelectorKind.GETTER) {
-      for (PropertyAccessorElement accessor in classElement.accessors) {
-        if (accessor.isGetter && selector.name == accessor.name) {
-          if (accessor.isSynthetic) {
-            // This accessor is implied by the corresponding field declaration.
-            fields.add(accessor.variable);
-          } else {
-            accessors.add(accessor);
-          }
-        }
-      }
-    } else if (selector.kind == SelectorKind.SETTER) {
-      // accessor.name uses the convention that setter names end in '='.
-      String selectorNameWithEquals = '${selector.name}=';
-      for (PropertyAccessorElement accessor in classElement.accessors) {
-        if (accessor.isSetter && selectorNameWithEquals == accessor.name) {
-          if (accessor.isSynthetic) {
-            // This accessor is implied by the corresponding field declaration.
-            // TODO(paulberry): should we distinguish reads and writes?
-            fields.add(accessor.variable);
-          } else {
-            accessors.add(accessor);
-          }
-        }
-      }
-    }
-  }
-}
-
-/**
- * This class is responsible for driving the tree shaking process and
- * performing the global inferences necessary to determine which methods
- * in the source program are reachable.  It makes use of
- * [LocalReachabilityComputer] to do local analysis of individual classes and
- * methods.
- */
-class TreeShaker {
-  List<Element> _queue = <Element>[];
-  Set<Element> _alreadyEnqueued = new HashSet<Element>();
-  ClosedWorld _world;
-  Set<Selector> _selectors = new HashSet<Selector>();
-  final LocalReachabilityComputer _localComputer =
-      new LocalReachabilityComputer();
-
-  TreeShaker(TypeProvider typeProvider, FunctionElement mainFunction)
-      : _world = new ClosedWorld(typeProvider, mainFunction);
-
-  void _addElement(Element element) {
-    if (_alreadyEnqueued.add(element)) {
-      _queue.add(element);
-    }
-  }
-
-  void _addSelector(Selector selector) {
-    if (_selectors.add(selector)) {
-      // New selector, so match it against all class methods.
-      _world.instantiatedClasses.forEach((ClassElement element, AstNode node) {
-        _matchClassToSelector(element, selector);
-      });
-    }
-  }
-
-  void _matchClassToSelector(ClassElement classElement, Selector selector) {
-    List<MethodElement> methods = <MethodElement>[];
-    List<PropertyAccessorElement> accessors = <PropertyAccessorElement>[];
-    List<PropertyInducingElement> fields = <PropertyInducingElement>[];
-    _localComputer.getMatchingClassMembers(
-        classElement,
-        selector,
-        methods,
-        accessors,
-        fields);
-    methods.forEach(_addElement);
-    accessors.forEach(_addElement);
-    fields.forEach(_addElement);
-  }
-
-  ClosedWorld shake() {
-    _addElement(_world.mainFunction);
-    while (_queue.isNotEmpty) {
-      Element element = _queue.removeLast();
-      if (element is ExecutableElement) {
-        MethodAnalysis analysis = _localComputer.analyzeMethod(element);
-        _world.executableElements[element] = analysis.declaration;
-        analysis.calls.forEach(_addElement);
-        analysis.invokes.forEach(_addSelector);
-        analysis.instantiates.forEach(_addElement);
-        analysis.accesses.forEach(_addElement);
-      } else if (element is ClassElement) {
-        ClassAnalysis analysis = _localComputer.analyzeClass(element);
-        _world.instantiatedClasses[element] = analysis.declaration;
-        for (Selector selector in _selectors) {
-          _matchClassToSelector(element, selector);
-        }
-      } else if (element is FieldElement) {
-        VariableDeclaration declaration = element.node;
-        _world.fields[element] = declaration;
-      } else if (element is TopLevelVariableElement) {
-        VariableDeclaration declaration = element.node;
-        _world.variables[element] = declaration;
-      } else {
-        throw new Exception(
-            'Unexpected element type while tree shaking: '
-                '$element (${element.runtimeType})');
-      }
-    }
-    return _world;
-  }
-}
-
-class TreeShakingVisitor extends SemanticVisitor {
-  final MethodAnalysis analysis;
-
-  TreeShakingVisitor(this.analysis);
-
-  Source get currentSource => analysis.declaration.element.source;
-
-  @override
-  void visitInstanceCreationExpression(InstanceCreationExpression node) {
-    ConstructorElement staticElement = node.staticElement;
-    if (staticElement != null) {
-      analysis.calls.add(staticElement);
-    } else {
-      // TODO(paulberry): deal with this situation.  This can happen, for
-      // example, in the case "main() => new Unresolved();" (which is a
-      // warning, not an error).
-    }
-    super.visitInstanceCreationExpression(node);
-  }
-
-  @override
-  void visitDynamicInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    analysis.invokes.add(createSelectorFromMethodInvocation(
-        node.argumentList, node.methodName.name));
-  }
-
-  @override
-  void visitLocalFunctionInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    // Locals don't need to be tree shaken.
-  }
-
-  @override
-  void visitLocalVariableInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    // Locals don't need to be tree shaken.
-  }
-
-  @override
-  void visitParameterInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    // Locals don't need to be tree shaken.
-  }
-
-  @override
-  void visitStaticFieldInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    // Invocation of a static field.
-    analysis.accesses.add(semantics.element);
-    analysis.invokes.add(createSelectorFromMethodInvocation(
-        node.argumentList, 'call'));
-  }
-
-  @override
-  void visitStaticMethodInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    analysis.calls.add(semantics.element);
-  }
-
-  @override
-  void visitStaticPropertyInvocation(MethodInvocation node,
-      AccessSemantics semantics) {
-    // Invocation of a property.  TODO(paulberry): handle this.
-    super.visitStaticPropertyInvocation(node, semantics);
-  }
-
-  void handleDynamicAccess(AccessSemantics semantics) {
-    if (semantics.isRead) {
-      analysis.invokes.add(
-          new Selector.getter(semantics.identifier.name, null));
-    }
-    if (semantics.isWrite) {
-      // Selector.setter constructor uses the convention that setter names
-      // don't end in '='.
-      analysis.invokes.add(
-          new Selector.setter(semantics.identifier.name, null));
-    }
-  }
-
-  @override
-  void visitDynamicAccess(AstNode node, AccessSemantics semantics) {
-    handleDynamicAccess(semantics);
-  }
-
-  @override
-  void visitLocalFunctionAccess(AstNode node, AccessSemantics semantics) {
-    // Locals don't need to be tree shaken.
-  }
-
-  @override
-  void visitLocalVariableAccess(AstNode node, AccessSemantics semantics) {
-    // Locals don't need to be tree shaken.
-  }
-
-  @override
-  void visitParameterAccess(AstNode node, AccessSemantics semantics) {
-    // Locals don't need to be tree shaken.
-  }
-
-  @override
-  void visitStaticFieldAccess(AstNode node, AccessSemantics semantics) {
-    analysis.accesses.add(semantics.element);
-  }
-
-  @override
-  void visitStaticMethodAccess(AstNode node, AccessSemantics semantics) {
-    // Method tear-off.  TODO(paulberry): implement.
-    super.visitStaticMethodAccess(node, semantics);
-  }
-
-  @override
-  void visitStaticPropertyAccess(AstNode node, AccessSemantics semantics) {
-    // TODO(paulberry): implement.
-    super.visitStaticPropertyAccess(node, semantics);
-  }
-
-  @override
-  void visitConstructorDeclaration(ConstructorDeclaration node) {
-    // TODO(paulberry): handle parameter list.
-    node.initializers.accept(this);
-    node.body.accept(this);
-    if (node.factoryKeyword == null) {
-      // This is a generative constructor.  Figure out if it is redirecting.
-      // If it isn't, then the constructor instantiates the class so we need to
-      // add the class to analysis.instantiates.  (If it is redirecting, then
-      // we don't need to, because the redirected-to constructor will take care
-      // of that).
-      if (node.initializers.length != 1 ||
-          node.initializers[0] is! RedirectingConstructorInvocation) {
-        ClassElement classElement = node.element.enclosingElement;
-        analysis.instantiates.add(node.element.enclosingElement);
-        if (!node.initializers.any((i) => i is SuperConstructorInvocation)) {
-          if (classElement.supertype != null) {
-            ClassElement superClass = classElement.supertype.element;
-            ConstructorElement superConstructor = superClass.unnamedConstructor;
-            if (superConstructor != null) {
-              // TODO(johnniwinther): Register instantiated type and selector.
-              analysis.calls.add(superConstructor);
-            }
-          }
-        }
-      }
-    } else if (node.redirectedConstructor != null) {
-      if (node.redirectedConstructor.staticElement == null) {
-        // Factory constructor redirects to a non-existent constructor.
-        // TODO(paulberry): handle this.
-        throw new UnimplementedError();
-      } else {
-        analysis.calls.add(node.redirectedConstructor.staticElement);
-      }
-    }
-  }
-
-  @override
-  void visitRedirectingConstructorInvocation(
-      RedirectingConstructorInvocation node) {
-    // Note: we don't have to worry about node.staticElement being
-    // null, because that would have been detected by the analyzer and
-    // reported as a compile time error.
-    analysis.calls.add(node.staticElement);
-  }
-
-  @override
-  void handleAssignmentExpression(AssignmentExpression node) {
-    // Don't special-case assignment expressions.
-  }
-}
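
At its core the deleted TreeShaker is a worklist algorithm: it seeds the queue with main, runs local analysis on each element it dequeues, and enqueues everything that analysis reports as called, accessed, or instantiated, using a seen-set so nothing is processed twice. A hedged sketch of that reachability loop follows, with plain strings standing in for program elements and a precomputed dependency map standing in for local analysis.

import 'dart:collection';

Set<String> computeReachable(
    String entryPoint, Map<String, List<String>> dependencies) {
  final Set<String> reachable = new HashSet<String>();
  final List<String> queue = <String>[];

  void enqueue(String element) {
    // Only enqueue elements that have not been seen before.
    if (reachable.add(element)) {
      queue.add(element);
    }
  }

  enqueue(entryPoint);
  while (queue.isNotEmpty) {
    String element = queue.removeLast();
    // "Local analysis": look up what this element refers to and enqueue it.
    for (String target in dependencies[element] ?? const <String>[]) {
      enqueue(target);
    }
  }
  return reachable;
}

void main() {
  Map<String, List<String>> world = {
    'main': ['foo'],
    'foo': [],
    'bar': [], // Unreachable from main, so it is shaken away.
  };
  print(computeReachable('main', world)); // {main, foo} (set order may vary)
}
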
diff --git a/pkg/analyzer2dart/lib/src/util.dart b/pkg/analyzer2dart/lib/src/util.dart
deleted file mode 100644
index fd1c20b..0000000
--- a/pkg/analyzer2dart/lib/src/util.dart
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-// Utility functions shared between different parts of analyzer2dart.
-
-library analyzer2dart.util;
-
-import 'package:analyzer/analyzer.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:compiler/src/elements/elements.dart' show PublicName;
-import 'package:compiler/src/universe/universe.dart';
-import 'package:compiler/src/io/source_file.dart';
-
-CallStructure createCallStructureFromMethodInvocation(ArgumentList node) {
-  int arity = 0;
-  List<String> namedArguments = <String>[];
-  for (Expression argument in node.arguments) {
-    if (argument is NamedExpression) {
-      namedArguments.add(argument.name.label.name);
-    } else {
-      arity++;
-    }
-  }
-  return new CallStructure(arity, namedArguments);
-}
-
-Selector createSelectorFromMethodInvocation(ArgumentList node,
-                                            String name) {
-  CallStructure callStructure = createCallStructureFromMethodInvocation(node);
-  // TODO(johnniwinther): Support private names.
-  return new Selector(SelectorKind.CALL, new PublicName(name), callStructure);
-}
-
-/// Prints [message] together with source code pointed to by [node] from
-/// [source].
-void reportSourceMessage(Source source, AstNode node, String message) {
-  SourceFile sourceFile =
-      new StringSourceFile.fromName(source.fullName, source.contents.data);
-
-  print(sourceFile.getLocationMessage(message, node.offset, node.end));
-}
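
The util.dart helpers above reduce an argument list to its call shape: the number of positional arguments becomes the arity, and the labels of named arguments are collected separately, so foo(1, 2, x: 3) has arity 2 and named arguments [x]. The following small sketch illustrates that split using a toy argument type instead of the analyzer's ArgumentList; all names here are invented for illustration.

// Toy stand-in for an argument: positional, or named with a label.
class Argument {
  final bool isNamed;
  final String label; // Empty for positional arguments.
  Argument.positional()
      : isNamed = false,
        label = '';
  Argument.named(this.label) : isNamed = true;
}

class CallShape {
  final int arity;
  final List<String> namedArguments;
  CallShape(this.arity, this.namedArguments);

  String toString() => 'CallShape(arity: $arity, named: $namedArguments)';
}

// Mirrors the split in createCallStructureFromMethodInvocation: positional
// arguments contribute to the arity, named arguments contribute their labels.
CallShape shapeOf(List<Argument> arguments) {
  int arity = 0;
  List<String> named = <String>[];
  for (Argument argument in arguments) {
    if (argument.isNamed) {
      named.add(argument.label);
    } else {
      arity++;
    }
  }
  return new CallShape(arity, named);
}

void main() {
  // foo(1, 2, x: 3)
  print(shapeOf([
    new Argument.positional(),
    new Argument.positional(),
    new Argument.named('x'),
  ]));
  // CallShape(arity: 2, named: [x])
}
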
diff --git a/pkg/analyzer2dart/test/driver_test.dart b/pkg/analyzer2dart/test/driver_test.dart
deleted file mode 100644
index 5807714..0000000
--- a/pkg/analyzer2dart/test/driver_test.dart
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-import 'mock_sdk.dart';
-import 'package:analyzer/file_system/memory_file_system.dart';
-import 'package:analyzer/src/generated/ast.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:compiler/src/dart2jslib.dart' show NullSink;
-import 'package:unittest/unittest.dart';
-
-import '../lib/src/closed_world.dart';
-import '../lib/src/driver.dart';
-
-main() {
-  MemoryResourceProvider provider;
-  Driver driver;
-  setUp(() {
-    provider = new MemoryResourceProvider();
-    DartSdk sdk = new MockSdk();
-    driver = new Driver(provider, sdk, NullSink.outputProvider);
-  });
-
-  Source setFakeRoot(String contents) {
-    String path = '/root.dart';
-    provider.newFile(path, contents);
-    return driver.setRoot(path);
-  }
-
-  test('resolveEntryPoint', () {
-    String contents = 'main() {}';
-    Source source = setFakeRoot(contents);
-    FunctionElement element = driver.resolveEntryPoint(source);
-    expect(element.name, equals('main'));
-  });
-
-  test('computeWorld', () {
-    String contents = '''
-main() {
-  foo();
-}
-
-foo() {
-}
-
-bar() {
-}
-''';
-    Source source = setFakeRoot(contents);
-    FunctionElement entryPoint = driver.resolveEntryPoint(source);
-    ClosedWorld world = driver.computeWorld(entryPoint);
-    expect(world.executableElements, hasLength(2));
-    CompilationUnitElement compilationUnit =
-        entryPoint.getAncestor((e) => e is CompilationUnitElement);
-    Map<String, FunctionElement> functions = {};
-    for (FunctionElement functionElement in compilationUnit.functions) {
-      functions[functionElement.name] = functionElement;
-    }
-    FunctionElement mainElement = functions['main'];
-    expect(world.executableElements.keys, contains(mainElement));
-    FunctionDeclaration mainAst = world.executableElements[mainElement];
-    expect(mainAst.element, equals(mainElement));
-    FunctionElement fooElement = functions['foo'];
-    expect(world.executableElements.keys, contains(fooElement));
-    FunctionDeclaration fooAst = world.executableElements[fooElement];
-    expect(fooAst.element, equals(fooElement));
-    FunctionElement barElement = functions['bar'];
-    expect(
-        world.executableElements.keys,
-        isNot(contains(barElement)));
-  });
-}
diff --git a/pkg/analyzer2dart/test/end2end_data.dart b/pkg/analyzer2dart/test/end2end_data.dart
deleted file mode 100644
index 20b680f..0000000
--- a/pkg/analyzer2dart/test/end2end_data.dart
+++ /dev/null
@@ -1,908 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Test data for the end2end test.
-library test.end2end.data;
-
-import 'test_helper.dart';
-
-class TestSpec extends TestSpecBase {
-  final String output;
-
-  const TestSpec(String input, [String output])
-      : this.output = output != null ? output : input,
-        super(input);
-}
-
-const List<Group> TEST_DATA = const <Group>[
-  const Group('Empty main', const <TestSpec>[
-    const TestSpec('''
-main() {}
-'''),
-
-    const TestSpec('''
-main() {}
-foo() {}
-''', '''
-main() {}
-'''),
-  ]),
-  const Group('Simple call-chains', const <TestSpec>[
-    const TestSpec('''
-foo() {}
-main() {
-  foo();
-}
-'''),
-
-    const TestSpec('''
-bar() {}
-foo() {
-  bar();
-}
-main() {
-  foo();
-}
-'''),
-
-    const TestSpec('''
-bar() {
-  main();
-}
-foo() {
-  bar();
-}
-main() {
-  foo();
-}
-'''),
-
-  ]),
-  const Group('Literals', const <TestSpec>[
-    const TestSpec('''
-main() {
-  return 0;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return 1.5;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return true;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return false;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return "a";
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return "a" "b";
-}
-''', '''
-main() {
-  return "ab";
-}
-'''),
-  ]),
-
-  const Group('Parameters', const <TestSpec>[
-    const TestSpec('''
-main(args) {}
-'''),
-
-    const TestSpec('''
-main(a, b) {}
-'''),
-  ]),
-
-  const Group('Typed parameters', const <TestSpec>[
-    const TestSpec('''
-void main(args) {}
-'''),
-
-    const TestSpec('''
-main(int a, String b) {}
-'''),
-
-    const TestSpec('''
-main(Comparator a, List b) {}
-'''),
-
-    const TestSpec('''
-main(Comparator<dynamic> a, List<dynamic> b) {}
-''','''
-main(Comparator a, List b) {}
-'''),
-
-    const TestSpec('''
-main(Map a, Map<dynamic, List<int>> b) {}
-'''),
-  ]),
-
-  const Group('Pass arguments', const <TestSpec>[
-    const TestSpec('''
-foo(a) {}
-main() {
-  foo(null);
-}
-'''),
-
-    const TestSpec('''
-bar(b, c) {}
-foo(a) {}
-main() {
-  foo(null);
-  bar(0, "");
-}
-'''),
-
-    const TestSpec('''
-bar(b) {}
-foo(a) {
-  bar(a);
-}
-main() {
-  foo(null);
-}
-'''),
-  ]),
-
-  const Group('Top level field', const <TestSpec>[
-    const TestSpec('''
-var field;
-main(args) {
-  return field;
-}
-'''),
-
-    // TODO(johnniwinther): Eliminate unneeded `null` initializers.
-    const TestSpec('''
-var field = null;
-main(args) {
-  return field;
-}
-'''),
-
-    const TestSpec('''
-var field = 0;
-main(args) {
-  return field;
-}
-'''),
-
-    const TestSpec('''
-var field;
-main(args) {
-  field = args.length;
-  return field;
-}
-'''),
-  ]),
-
-  const Group('Local variables', const <TestSpec>[
-    const TestSpec('''
-main() {
-  var a;
-  return a;
-}
-''','''
-main() {}
-'''),
-
-    const TestSpec('''
-main() {
-  var a = 0;
-  return a;
-}
-''','''
-main() {
-  return 0;
-}
-'''),
-  ]),
-
-  const Group('Local variable writes', const <TestSpec>[
-    const TestSpec('''
-main() {
-  var a;
-  a = 10;
-  return a;
-}
-''', '''
-main() {
-  return 10;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  var a = 0;
-  a = 10;
-  return a;
-}
-''', '''
-main() {
-  return 10;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  var a = 0;
-  print(a);
-  a = "";
-  print(a);
-  return a;
-}
-''', '''
-main() {
-  var a;
-  print(0);
-  a = "";
-  print(a);
-  return a;
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  print(a);
-  a = "";
-  print(a);
-  return a;
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    a = "";
-  }
-  print(a);
-  return a;
-}
-''', '''
-main(a) {
-  print(a = a ? "" : a);
-  return a;
-}
-'''),
-  ]),
-
-  const Group('Dynamic access', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return a.foo;
-}
-'''),
-
-    const TestSpec('''
-main() {
-  var a = "";
-  return a.foo;
-}
-''','''
-main() {
-  return "".foo;
-}
-'''),
-  ]),
-
-  const Group('Dynamic invocation', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return a.foo(0);
-}
-'''),
-
-    const TestSpec('''
-main() {
-  var a = "";
-  return a.foo(0, 1);
-}
-''','''
-main() {
-  return "".foo(0, 1);
-}
-'''),
-  ]),
-
-  const Group('Binary expressions', const <TestSpec>[
-    const TestSpec('''
-main(a, b) {
-  return a + b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a - b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a * b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a / b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a ~/ b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a % b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a < b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a <= b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a > b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a >= b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a << b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a >> b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a & b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a | b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a ^ b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a == b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a != b;
-}
-''','''
-main(a, b) {
-  return !(a == b);
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a && b;
-}
-'''),
-
-    const TestSpec('''
-main(a, b) {
-  return a || b;
-}
-'''),
-  ]),
-
-  const Group('If statement', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-  }
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-  } else {
-    print(1);
-  }
-}
-''','''
-main(a) {
-  a ? print(0) : print(1);
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-  } else {
-    print(1);
-    print(2);
-  }
-}
-'''),
-  ]),
-
-  const Group('Conditional expression', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return a ? print(0) : print(1);
-}
-'''),
-  ]),
-
-  // These test that unreachable statements are skipped within a block.
-  const Group('Block statements', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return 0;
-  return 1;
-}
-''', '''
-main(a) {
-  return 0;
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    return 0;
-    return 1;
-  } else {
-    return 2;
-    return 3;
-  }
-}
-''', '''
-main(a) {
-  return a ? 0 : 2;
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-    return 0;
-    return 1;
-  } else {
-    print(2);
-    return 2;
-    return 3;
-  }
-}
-''', '''
-main(a) {
-  if (a) {
-    print(0);
-    return 0;
-  } else {
-    print(2);
-    return 2;
-  }
-}
-'''),
-  ]),
-
-  const Group('List literal', const <TestSpec>[
-    const TestSpec('''
-main() {
-  return [];
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return <int>[];
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return <int>[0];
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  return <int>[0, 1, a];
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  return [0, [1], [a, <int>[3]]];
-}
-'''),
-  ]),
-
-  const Group('Constructor invocation', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  new Object();
-}
-'''),
-
-const TestSpec('''
-main(a) {
-  new Deprecated("");
-}
-'''),
-  ]),
-
-  const Group('Map literal', const <TestSpec>[
-    const TestSpec('''
-main() {
-  return {};
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return <int, String>{};
-}
-'''),
-
-    const TestSpec('''
-main() {
-  return <String, int>{"a": 0};
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  return <String, int>{"a": 0, "b": 1, "c": a};
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  return {0: "a", 1: {2: "b"}, a: {3: "c"}};
-}
-'''),
-  ]),
-  const Group('For loop', const <TestSpec>[
-    const TestSpec('''
-main() {
-  for (;;) {}
-}
-''', '''
-main() {
-  while (true) {}
-}
-'''),
-
-const TestSpec('''
-main() {
-  for (var i = 0; i < 10; i = i + 1) {
-    print(i);
-  }
-}
-''', '''
-main() {
-  var i = 0;
-  while (i < 10) {
-    print(i);
-    ++i;
-  }
-}
-'''),
-
-const TestSpec('''
-main(i) {
-  for (i = 0; i < 10; i = i + 1) {
-    print(i);
-  }
-}
-''', '''
-main(i) {
-  i = 0;
-  while (i < 10) {
-    print(i);
-    ++i;
-  }
-}
-'''),
-  ]),
-
-  const Group('While loop', const <TestSpec>[
-    const TestSpec('''
-main() {
-  while (true) {}
-}
-'''),
-
-const TestSpec('''
-main() {
-  var i = 0;
-  while (i < 10) {
-    print(i);
-    i = i + 1;
-  }
-}''', '''
-main() {
-  var i = 0;
-  while (i < 10) {
-    print(i);
-    ++i;
-  }
-}'''),
-  ]),
-
-  const Group('Type operators', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return a is String;
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  return a is List<String>;
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  return a is Comparator<String>;
-}
-'''),
-
-  const TestSpec('''
-main(a) {
-  return a is! String;
-}
-''', '''
-main(a) {
-  return !(a is String);
-}
-'''),
-
-const TestSpec('''
-main(a) {
-  return a as String;
-}
-'''),
-  ]),
-
-  const Group('For in loop', const <TestSpec>[
-// TODO(johnniwinther): Add tests for `i` as top-level, static and instance
-// fields.
-    const TestSpec('''
-main(a) {
-  for (var i in a) {
-    print(i);
-  }
-}
-''', '''
-main(a) {
-  var v0 = a.iterator;
-  while (v0.moveNext()) {
-    print(v0.current);
-  }
-}'''),
-
-    const TestSpec('''
-main(a) {
-  for (var i in a) {
-    print(i);
-    i = 0;
-    print(i);
-  }
-}
-''', '''
-main(a) {
-  var v0 = a.iterator;
-  while (v0.moveNext()) {
-    print(v0.current);
-    print(0);
-  }
-}
-'''),
-
-    const TestSpec('''
-main(a) {
-  var i;
-  for (i in a) {
-    print(i);
-  }
-}
-''', '''
-main(a) {
-  var v0 = a.iterator;
-  while (v0.moveNext()) {
-    print(v0.current);
-  }
-}
-'''),
-  ]),
-
-  const Group('Local functions', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  local() {}
-  return local();
-}
-''', '''
-main(a) {
-  return (() {})();
-}
-'''),
-
-  const TestSpec('''
-main(a) {
-  local() {}
-  var l = local;
-  return l();
-}
-''', '''
-main(a) {
-  return (() {})();
-}
-'''),
-
-  const TestSpec('''
-main(a) {
-  return () {}();
-}
-''', '''
-main(a) {
-  return (() {})();
-}
-'''),
-
-  const TestSpec('''
-main(a) {
-  var c = a ? () { return 0; } : () { return 1; };
-  return c();
-}
-''', '''
-main(a) {
-  return (a ? () {
-    return 0;
-  } : () {
-    return 1;
-  })();
-}
-'''),
-  ]),
-
-  const Group('Constructors', const <TestSpec>[
-    const TestSpec('''
-class C {}
-main() {
-  return new C();
-}
-'''),
-
-    const TestSpec('''
-class C {
-  C() {}
-}
-main() {
-  return new C();
-}
-'''),
-
-    const TestSpec('''
-class B {}
-class C extends B {
-  C() {}
-}
-main() {
-  return new C();
-}
-'''),
-
-    const TestSpec('''
-class B {
-  B() {}
-}
-class C extends B {}
-main() {
-  return new C();
-}
-'''),
-  ]),
-
-  const Group('Instance method', const <TestSpec>[
-    const TestSpec('''
-class C {
-  C() {}
-  foo() {}
-}
-main() {
-  return new C().foo();
-}
-'''),
-  ]),
-
-  const Group('Try-catch', const <TestSpec>[
-    const TestSpec('''
-main() {
-  try {} catch (e) {}
-}
-''',
-// TODO(kmillikin): Remove the unused stack trace parameter.
-'''
-main() {
-  try {} catch (e, v0) {}
-}
-'''),
-
-    const TestSpec('''
-main() {
-  try {
-    return;
-  } catch (e) {}
-}
-''',
-// TODO(kmillikin): Remove the unused stack trace parameter and unneeded return
-// statement(s).
-'''
-main() {
-  try {
-    return null;
-  } catch (e, v0) {
-    return null;
-  }
-}
-'''),
-  ]),
-];
diff --git a/pkg/analyzer2dart/test/end2end_test.dart b/pkg/analyzer2dart/test/end2end_test.dart
deleted file mode 100644
index 18fe84a..0000000
--- a/pkg/analyzer2dart/test/end2end_test.dart
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// End-to-end test of the analyzer2dart compiler.
-library test.end2end;
-
-import 'mock_sdk.dart';
-import 'package:analyzer/file_system/memory_file_system.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:unittest/unittest.dart';
-
-import '../lib/src/closed_world.dart';
-import '../lib/src/driver.dart';
-import '../lib/src/converted_world.dart';
-import '../lib/src/dart_backend.dart';
-
-import 'test_helper.dart' hide TestSpec;
-import 'output_helper.dart';
-import 'end2end_data.dart';
-
-main(List<String> args) {
-  performTests(TEST_DATA, unittester, checkResult, args);
-}
-
-checkResult(TestSpec result) {
-  String input = result.input;
-  String expectedOutput = result.output.trim();
-
-  CollectingOutputProvider outputProvider = new CollectingOutputProvider();
-  MemoryResourceProvider provider = new MemoryResourceProvider();
-  DartSdk sdk = new MockSdk();
-  Driver driver = new Driver(provider, sdk, outputProvider);
-  String rootFile = '/root.dart';
-  provider.newFile(rootFile, input);
-  Source rootSource = driver.setRoot(rootFile);
-  FunctionElement entryPoint = driver.resolveEntryPoint(rootSource);
-  ClosedWorld world = driver.computeWorld(entryPoint);
-  ConvertedWorld convertedWorld = convertWorld(world);
-  compileToDart(driver, convertedWorld);
-  String output = outputProvider.output.text.trim();
-  expect(output, equals(expectedOutput));
-}
diff --git a/pkg/analyzer2dart/test/identifier_semantics_test.dart b/pkg/analyzer2dart/test/identifier_semantics_test.dart
deleted file mode 100644
index 0801e7a..0000000
--- a/pkg/analyzer2dart/test/identifier_semantics_test.dart
+++ /dev/null
@@ -1,2536 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-import 'package:analyzer/analyzer.dart';
-import 'package:analyzer/file_system/file_system.dart';
-import 'package:analyzer/file_system/memory_file_system.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/engine.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:analyzer/src/generated/source_io.dart';
-import 'package:analyzer2dart/src/identifier_semantics.dart';
-import 'package:unittest/unittest.dart';
-
-import 'mock_sdk.dart';
-
-main() {
-  test('Call function defined at top level', () {
-    Helper helper = new Helper('''
-g() {}
-
-f() {
-  g();
-}
-''');
-    helper.checkStaticMethod('g()', null, 'g', true, isInvoke: true);
-  });
-
-  test('Call function defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.g();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-g() {}
-''');
-    helper.checkStaticMethod('l.g()', null, 'g', true, isInvoke: true);
-  });
-
-  test('Call method defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static g() {}
-
-  f() {
-    g();
-  }
-}
-''');
-    helper.checkStaticMethod('g()', 'A', 'g', true, isInvoke: true);
-  });
-
-  test('Call method defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static g() {}
-}
-f() {
-  A.g();
-}
-''');
-    helper.checkStaticMethod('A.g()', 'A', 'g', true, isInvoke: true);
-  });
-
-  test(
-      'Call method defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.g();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static g() {}
-}
-''');
-    helper.checkStaticMethod('l.A.g()', 'A', 'g', true, isInvoke: true);
-  });
-
-  test('Call method defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  g() {}
-
-  f() {
-    g();
-  }
-}
-''');
-    helper.checkDynamic('g()', null, 'g', isInvoke: true);
-  });
-
-  test(
-      'Call method defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  g() {}
-}
-f(A a) {
-  a.g();
-}
-''');
-    helper.checkDynamic('a.g()', 'a', 'g', isInvoke: true);
-  });
-
-  test(
-      'Call method defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  g() {}
-}
-A h() => null;
-f() {
-  h().g();
-}
-''');
-    helper.checkDynamic('h().g()', 'h()', 'g', isInvoke: true);
-  });
-
-  test(
-      'Call method defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  a.g();
-}
-''');
-    helper.checkDynamic('a.g()', 'a', 'g', isInvoke: true);
-  });
-
-  test(
-      'Call method defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-f() {
-  h().g();
-}
-''');
-    helper.checkDynamic('h().g()', 'h()', 'g', isInvoke: true);
-  });
-
-  test('Call method defined locally', () {
-    Helper helper = new Helper('''
-f() {
-  g() {}
-  g();
-}
-''');
-    helper.checkLocalFunction('g()', 'g', isInvoke: true);
-  });
-
-  test('Call method undefined at top level', () {
-    Helper helper = new Helper('''
-f() {
-  g();
-}
-''');
-    // Undefined top level invocations are treated as dynamic.
-    // TODO(paulberry): not sure if this is a good idea.  In general, when such
-    // a call appears inside an instance method, it is dynamic, because "this"
-    // might be an instance of a derived class that implements g().  However,
-    // in this case, we are not inside an instance method, so we know that the
-    // target is undefined.
-    helper.checkDynamic('g()', null, 'g', isInvoke: true);
-  });
-
-  test('Call method undefined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.g();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-''');
-    // Undefined top level invocations are treated as dynamic.
-    // TODO(paulberry): not sure if this is a good idea, for similar reasons to
-    // the case above.
-    helper.checkDynamic('l.g()', null, 'g', isInvoke: true);
-  });
-
-  test('Call method undefined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {}
-
-f() {
-  A.g();
-}
-''');
-    helper.checkStaticMethod('A.g()', 'A', 'g', false, isInvoke: true);
-  });
-
-  test(
-      'Call method undefined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.g();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-''');
-    helper.checkStaticMethod('l.A.g()', 'A', 'g', false, isInvoke: true);
-  });
-
-  test('Call method undefined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  f() {
-    g();
-  }
-}
-''');
-    helper.checkDynamic('g()', null, 'g', isInvoke: true);
-  });
-
-  test(
-      'Call method undefined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-f(A a) {
-  a.g();
-}
-''');
-    helper.checkDynamic('a.g()', 'a', 'g', isInvoke: true);
-  });
-
-  test(
-      'Call method undefined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-A h() => null;
-
-f() {
-  h().g();
-}
-''');
-    helper.checkDynamic('h().g()', 'h()', 'g', isInvoke: true);
-  });
-
-  test('Call variable defined at top level', () {
-    Helper helper = new Helper('''
-var x;
-
-f() {
-  x();
-}
-''');
-    helper.checkStaticField('x()', null, 'x', isInvoke: true);
-  });
-
-  test('Call variable defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.x();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-var x;
-''');
-    helper.checkStaticField('l.x()', null, 'x', isInvoke: true);
-  });
-
-  test('Call field defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-
-  f() {
-    return x();
-  }
-}
-''');
-    helper.checkStaticField('x()', 'A', 'x', isInvoke: true);
-  });
-
-  test('Call field defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-}
-
-f() {
-  return A.x();
-}
-''');
-    helper.checkStaticField('A.x()', 'A', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call field defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.A.x();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static var x;
-}
-''');
-    helper.checkStaticField('l.A.x()', 'A', 'x', isInvoke: true);
-  });
-
-  test('Call field defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-
-  f() {
-    return x();
-  }
-}
-''');
-    helper.checkDynamic('x()', null, 'x', isInvoke: true);
-  });
-
-  test(
-      'Call field defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-f(A a) {
-  return a.x();
-}
-''');
-    helper.checkDynamic('a.x()', 'a', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call field defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-A h() => null;
-
-f() {
-  return h().x();
-}
-''');
-    helper.checkDynamic('h().x()', 'h()', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call field defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  return a.x();
-}
-''');
-    helper.checkDynamic('a.x()', 'a', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call field defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-
-f() {
-  return h().x();
-}
-''');
-    helper.checkDynamic('h().x()', 'h()', 'x', isInvoke: true);
-  });
-
-  test('Call variable defined locally', () {
-    Helper helper = new Helper('''
-f() {
-  var x;
-  return x();
-}
-''');
-    helper.checkLocalVariable('x()', 'x', isInvoke: true);
-  });
-
-  test('Call variable defined in parameter', () {
-    Helper helper = new Helper('''
-f(x) {
-  return x();
-}
-''');
-    helper.checkParameter('x()', 'x', isInvoke: true);
-  });
-
-  test('Call accessor defined at top level', () {
-    Helper helper = new Helper('''
-get x => null;
-
-f() {
-  return x();
-}
-''');
-    helper.checkStaticProperty('x()', null, 'x', true, isInvoke: true);
-  });
-
-  test('Call accessor defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.x();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-get x => null;
-''');
-    helper.checkStaticProperty('l.x()', null, 'x', true, isInvoke: true);
-  });
-
-  test('Call accessor defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static get x => null;
-
-  f() {
-    return x();
-  }
-}
-''');
-    helper.checkStaticProperty('x()', 'A', 'x', true, isInvoke: true);
-  });
-
-  test('Call accessor defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static get x => null;
-}
-
-f() {
-  return A.x();
-}
-''');
-    helper.checkStaticProperty('A.x()', 'A', 'x', true, isInvoke: true);
-  });
-
-  test(
-      'Call accessor defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.A.x();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static get x => null;
-}
-''');
-    helper.checkStaticProperty('l.A.x()', 'A', 'x', true, isInvoke: true);
-  });
-
-  test('Call accessor defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  get x => null;
-
-  f() {
-    return x();
-  }
-}
-''');
-    helper.checkDynamic('x()', null, 'x', isInvoke: true);
-  });
-
-  test(
-      'Call accessor defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  get x => null;
-}
-
-f(A a) {
-  return a.x();
-}
-''');
-    helper.checkDynamic('a.x()', 'a', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call accessor defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  get x => null;
-}
-
-A h() => null;
-
-f() {
-  return h().x();
-}
-''');
-    helper.checkDynamic('h().x()', 'h()', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call accessor defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  return a.x();
-}
-''');
-    helper.checkDynamic('a.x()', 'a', 'x', isInvoke: true);
-  });
-
-  test(
-      'Call accessor defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-
-f() {
-  return h().x();
-}
-''');
-    helper.checkDynamic('h().x()', 'h()', 'x', isInvoke: true);
-  });
-
-  test('Call class defined at top level', () {
-    Helper helper = new Helper('''
-class A {}
-
-f() {
-  A();
-}
-''');
-    helper.checkTypeReference(
-        'A()',
-        'A',
-        AccessKind.TOPLEVEL_TYPE,
-        isInvoke: true);
-  });
-
-  test('Call class defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-''');
-    helper.checkTypeReference(
-        'l.A()',
-        'A',
-        AccessKind.TOPLEVEL_TYPE,
-        isInvoke: true);
-  });
-
-  test('Call dynamic type undefined at toplevel', () {
-    Helper helper = new Helper('''
-f() {
-  dynamic();
-}
-''');
-    // Since it is legal to define a toplevel function or a class member called
-    // dynamic, "dynamic()" must be treated as a dynamic access to a function
-    // called "dynamic".
-    helper.checkDynamic('dynamic()', null, 'dynamic', isInvoke: true);
-  });
-
-  test('Call function typedef defined at top level', () {
-    Helper helper = new Helper('''
-typedef F();
-
-f() {
-  F();
-}
-''');
-    helper.checkTypeReference(
-        'F()',
-        'F',
-        AccessKind.TOPLEVEL_TYPE,
-        isInvoke: true);
-  });
-
-  test('Call function typedef defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.F();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-typedef F();
-''');
-    helper.checkTypeReference(
-        'l.F()',
-        'F',
-        AccessKind.TOPLEVEL_TYPE,
-        isInvoke: true);
-  });
-
-  test('Call mixin application defined at top level', () {
-    Helper helper = new Helper('''
-class A {}
-class B {}
-class C = A with B;
-
-f() {
-  C();
-}
-''');
-    helper.checkTypeReference(
-        'C()',
-        'C',
-        AccessKind.TOPLEVEL_TYPE,
-        isInvoke: true);
-  });
-
-  test('Call mixin application defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.C();
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-class B {}
-class C = A with B;
-''');
-    helper.checkTypeReference(
-        'l.C()',
-        'C',
-        AccessKind.TOPLEVEL_TYPE,
-        isInvoke: true);
-  });
-
-  test('Call type parameter of enclosing class', () {
-    Helper helper = new Helper('''
-class A<T, U> {
-  f() {
-    U();
-  }
-}
-''');
-    helper.checkTypeReference(
-        'U()',
-        'U',
-        AccessKind.TYPE_PARAMETER,
-        isInvoke: true);
-  });
-
-  test('Get function defined at top level', () {
-    Helper helper = new Helper('''
-g() {}
-
-f() {
-  return g;
-}
-''');
-    helper.checkStaticMethod('g', null, 'g', true, isRead: true);
-  });
-
-  test('Get function defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.g;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-g() {}
-''');
-    helper.checkStaticMethod('l.g', null, 'g', true, isRead: true);
-  });
-
-  test('Get method defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static g() {}
-
-  f() {
-    return g;
-  }
-}
-''');
-    helper.checkStaticMethod('g', 'A', 'g', true, isRead: true);
-  });
-
-  test('Get method defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static g() {}
-}
-f() {
-  return A.g;
-}
-''');
-    helper.checkStaticMethod('A.g', 'A', 'g', true, isRead: true);
-  });
-
-  test(
-      'Get method defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.A.g;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static g() {}
-}
-''');
-    helper.checkStaticMethod('l.A.g', 'A', 'g', true, isRead: true);
-  });
-
-  test('Get method defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  g() {}
-
-  f() {
-    return g;
-  }
-}
-''');
-    helper.checkDynamic('g', null, 'g', isRead: true);
-  });
-
-  test(
-      'Get method defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  g() {}
-}
-f(A a) {
-  return a.g;
-}
-''');
-    helper.checkDynamic('a.g', 'a', 'g', isRead: true);
-  });
-
-  test(
-      'Get method defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  g() {}
-}
-A h() => null;
-f() {
-  return h().g;
-}
-''');
-    helper.checkDynamic('h().g', 'h()', 'g', isRead: true);
-  });
-
-  test('Get method defined locally', () {
-    Helper helper = new Helper('''
-f() {
-  g() {}
-  return g;
-}
-''');
-    helper.checkLocalFunction('g', 'g', isRead: true);
-  });
-
-  test('Get variable defined at top level', () {
-    Helper helper = new Helper('''
-var x;
-
-f() {
-  return x;
-}
-''');
-    helper.checkStaticField('x', null, 'x', isRead: true);
-  });
-
-  test('Get variable defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.x;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-var x;
-''');
-    helper.checkStaticField('l.x', null, 'x', isRead: true);
-  });
-
-  test('Get field defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-
-  f() {
-    return x;
-  }
-}
-''');
-    helper.checkStaticField('x', 'A', 'x', isRead: true);
-  });
-
-  test('Get field defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-}
-
-f() {
-  return A.x;
-}
-''');
-    helper.checkStaticField('A.x', 'A', 'x', isRead: true);
-  });
-
-  test(
-      'Get field defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.A.x;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static var x;
-}
-''');
-    helper.checkStaticField('l.A.x', 'A', 'x', isRead: true);
-  });
-
-  test('Get field defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-
-  f() {
-    return x;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true);
-  });
-
-  test(
-      'Get field defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-f(A a) {
-  return a.x;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true);
-  });
-
-  test(
-      'Get field defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-A h() => null;
-
-f() {
-  return h().x;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true);
-  });
-
-  test(
-      'Get field defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  return a.x;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true);
-  });
-
-  test(
-      'Get field defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-
-f() {
-  return h().x;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true);
-  });
-
-  test('Get variable defined locally', () {
-    Helper helper = new Helper('''
-f() {
-  var x;
-  return x;
-}
-''');
-    helper.checkLocalVariable('x', 'x', isRead: true);
-  });
-
-  test('Get variable defined in parameter', () {
-    Helper helper = new Helper('''
-f(x) {
-  return x;
-}
-''');
-    helper.checkParameter('x', 'x', isRead: true);
-  });
-
-  test('Get accessor defined at top level', () {
-    Helper helper = new Helper('''
-get x => null;
-
-f() {
-  return x;
-}
-''');
-    helper.checkStaticProperty('x', null, 'x', true, isRead: true);
-  });
-
-  test('Get accessor defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.x;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-get x => null;
-''');
-    helper.checkStaticProperty('l.x', null, 'x', true, isRead: true);
-  });
-
-  test('Get accessor defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static get x => null;
-
-  f() {
-    return x;
-  }
-}
-''');
-    helper.checkStaticProperty('x', 'A', 'x', true, isRead: true);
-  });
-
-  test('Get accessor defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static get x => null;
-}
-
-f() {
-  return A.x;
-}
-''');
-    helper.checkStaticProperty('A.x', 'A', 'x', true, isRead: true);
-  });
-
-  test(
-      'Get accessor defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.A.x;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static get x => null;
-}
-''');
-    helper.checkStaticProperty('l.A.x', 'A', 'x', true, isRead: true);
-  });
-
-  test('Get accessor defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  get x => null;
-
-  f() {
-    return x;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true);
-  });
-
-  test(
-      'Get accessor defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  get x => null;
-}
-
-f(A a) {
-  return a.x;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true);
-  });
-
-  test(
-      'Get accessor defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  get x => null;
-}
-
-A h() => null;
-
-f() {
-  return h().x;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true);
-  });
-
-  test(
-      'Get accessor defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  return a.x;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true);
-  });
-
-  test(
-      'Get accessor defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-
-f() {
-  return h().x;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true);
-  });
-
-  test('Get accessor undefined at top level', () {
-    Helper helper = new Helper('''
-f() {
-  return x;
-}
-''');
-    // Undefined top level property accesses are treated as dynamic.
-    // TODO(paulberry): not sure if this is a good idea.  In general, when such
-    // an access appears inside an instance method, it is dynamic, because
-    // "this" might be an instance of a derived class that implements x.
-    // However, in this case, we are not inside an instance method, so we know
-    // that the target is undefined.
-    helper.checkDynamic('x', null, 'x', isRead: true);
-  });
-
-  test('Get accessor undefined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.x;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-''');
-    // Undefined top level property accesses are treated as dynamic.
-    // TODO(paulberry): not sure if this is a good idea, for similar reasons to
-    // the case above.
-    helper.checkDynamic('l.x', null, 'x', isRead: true);
-  });
-
-  test('Get accessor undefined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {}
-
-f() {
-  return A.x;
-}
-''');
-    helper.checkStaticProperty('A.x', 'A', 'x', false, isRead: true);
-  });
-
-  test(
-      'Get accessor undefined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  return l.A.x;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-''');
-    helper.checkStaticProperty('l.A.x', 'A', 'x', false, isRead: true);
-  });
-
-  test('Get accessor undefined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  f() {
-    return x;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true);
-  });
-
-  test(
-      'Get accessor undefined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-f(A a) {
-  return a.x;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true);
-  });
-
-  test(
-      'Get accessor undefined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-A h() => null;
-
-f() {
-  return h().x;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true);
-  });
-
-  test('Get class defined at top level', () {
-    Helper helper = new Helper('''
-class A {}
-var t = A;
-''');
-    helper.checkTypeReference('A', 'A', AccessKind.TOPLEVEL_TYPE, isRead: true);
-  });
-
-  test('Get class defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-var t = l.A;
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-''');
-    helper.checkTypeReference(
-        'l.A',
-        'A',
-        AccessKind.TOPLEVEL_TYPE,
-        isRead: true);
-  });
-
-  test('Get dynamic type', () {
-    Helper helper = new Helper('''
-var t = dynamic;
-''');
-    helper.checkTypeReference(
-        'dynamic',
-        'dynamic',
-        AccessKind.TOPLEVEL_TYPE,
-        isRead: true);
-  });
-
-  test('Get function typedef defined at top level', () {
-    Helper helper = new Helper('''
-typedef F();
-var t = F;
-''');
-    helper.checkTypeReference('F', 'F', AccessKind.TOPLEVEL_TYPE, isRead: true);
-  });
-
-  test('Get function typedef defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-var t = l.F;
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-typedef F();
-''');
-    helper.checkTypeReference(
-        'l.F',
-        'F',
-        AccessKind.TOPLEVEL_TYPE,
-        isRead: true);
-  });
-
-  test('Get mixin application defined at top level', () {
-    Helper helper = new Helper('''
-class A {}
-class B {}
-class C = A with B;
-var t = C;
-''');
-    helper.checkTypeReference('C', 'C', AccessKind.TOPLEVEL_TYPE, isRead: true);
-  });
-
-  test('Get mixin application defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-var t = l.C;
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-class B {}
-class C = A with B;
-''');
-    helper.checkTypeReference(
-        'l.C',
-        'C',
-        AccessKind.TOPLEVEL_TYPE,
-        isRead: true);
-  });
-
-  test('Get type parameter of enclosing class', () {
-    Helper helper = new Helper('''
-class A<T, U> {
-  f() {
-    var t = U;
-  }
-}
-''');
-    helper.checkTypeReference(
-        'U',
-        'U',
-        AccessKind.TYPE_PARAMETER,
-        isRead: true);
-  });
-
-  test('Set variable defined at top level', () {
-    Helper helper = new Helper('''
-var x;
-
-f() {
-  x = 1;
-}
-''');
-    helper.checkStaticField('x', null, 'x', isWrite: true);
-  });
-
-  test('Set variable defined at top level in foreach loop', () {
-    Helper helper = new Helper('''
-var x;
-
-f() {
-  for (x in []) {}
-}
-''');
-    helper.checkStaticField('x', null, 'x', isWrite: true);
-  });
-
-  test('Set variable defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.x = 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-var x;
-''');
-    helper.checkStaticField('l.x', null, 'x', isWrite: true);
-  });
-
-  test('Set field defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-
-  f() {
-    x = 1;
-  }
-}
-''');
-    helper.checkStaticField('x', 'A', 'x', isWrite: true);
-  });
-
-  test(
-      'Set field defined statically in class from inside class in foreach' + ' loop',
-      () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-
-  f() {
-    for (x in []) {}
-  }
-}
-''');
-    helper.checkStaticField('x', 'A', 'x', isWrite: true);
-  });
-
-  test('Set field defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-}
-
-f() {
-  A.x = 1;
-}
-''');
-    helper.checkStaticField('A.x', 'A', 'x', isWrite: true);
-  });
-
-  test(
-      'Set field defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.x = 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static var x;
-}
-''');
-    helper.checkStaticField('l.A.x', 'A', 'x', isWrite: true);
-  });
-
-  test('Set field defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-
-  f() {
-    x = 1;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test(
-      'Set field defined dynamically in class from inside class in foreach' + ' loop',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-
-  f() {
-    for (x in []) {}
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test(
-      'Set field defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-f(A a) {
-  a.x = 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isWrite: true);
-  });
-
-  test(
-      'Set field defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-A h() => null;
-
-f() {
-  h().x = 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isWrite: true);
-  });
-
-  test('Set variable defined locally', () {
-    Helper helper = new Helper('''
-f() {
-  var x;
-  x = 1;
-}
-''');
-    helper.checkLocalVariable('x', 'x', isWrite: true);
-  });
-
-  test('Set variable defined locally in foreach loop', () {
-    Helper helper = new Helper('''
-f() {
-  var x;
-  for (x in []) {}
-}
-''');
-    helper.checkLocalVariable('x', 'x', isWrite: true);
-  });
-
-  test('Set variable defined in parameter', () {
-    Helper helper = new Helper('''
-f(x) {
-  x = 1;
-}
-''');
-    helper.checkParameter('x', 'x', isWrite: true);
-  });
-
-  test('Set variable defined in parameter in foreach loop', () {
-    Helper helper = new Helper('''
-f(x) {
-  for (x in []) {}
-}
-''');
-    helper.checkParameter('x', 'x', isWrite: true);
-  });
-
-  test('Set accessor defined at top level', () {
-    Helper helper = new Helper('''
-set x(value) {};
-
-f() {
-  x = 1;
-}
-''');
-    helper.checkStaticProperty('x', null, 'x', true, isWrite: true);
-  });
-
-  test('Set accessor defined at top level in foreach loop', () {
-    Helper helper = new Helper('''
-set x(value) {};
-
-f() {
-  for (x in []) {}
-}
-''');
-    helper.checkStaticProperty('x', null, 'x', true, isWrite: true);
-  });
-
-  test('Set accessor defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.x = 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-set x(value) {};
-''');
-    helper.checkStaticProperty('l.x', null, 'x', true, isWrite: true);
-  });
-
-  test('Set accessor defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static set x(value) {}
-
-  f() {
-    x = 1;
-  }
-}
-''');
-    helper.checkStaticProperty('x', 'A', 'x', true, isWrite: true);
-  });
-
-  test(
-      'Set accessor defined statically in class from inside class in' +
-          ' foreach loop',
-      () {
-    Helper helper = new Helper('''
-class A {
-  static set x(value) {}
-
-  f() {
-    for (x in []) {}
-  }
-}
-''');
-    helper.checkStaticProperty('x', 'A', 'x', true, isWrite: true);
-  });
-
-  test('Set accessor defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static set x(value) {}
-}
-
-f() {
-  A.x = 1;
-}
-''');
-    helper.checkStaticProperty('A.x', 'A', 'x', true, isWrite: true);
-  });
-
-  test(
-      'Set accessor defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.x = 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static set x(value) {}
-}
-''');
-    helper.checkStaticProperty('l.A.x', 'A', 'x', true, isWrite: true);
-  });
-
-  test('Set accessor defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-
-  f() {
-    x = 1;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor defined dynamically in class from inside class in' +
-          ' foreach loop',
-      () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-
-  f() {
-    for (x in []) {}
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-}
-
-f(A a) {
-  a.x = 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-}
-
-A h() => null;
-
-f() {
-  h().x = 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  a.x = 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-
-f() {
-  h().x = 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isWrite: true);
-  });
-
-  test('Set accessor undefined at top level', () {
-    Helper helper = new Helper('''
-f() {
-  x = 1;
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test('Set accessor undefined at top level in foreach loop', () {
-    Helper helper = new Helper('''
-f() {
-  for (x in []) {}
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test('Set accessor undefined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.x = 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-''');
-    helper.checkDynamic('l.x', null, 'x', isWrite: true);
-  });
-
-  test('Set accessor undefined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {}
-
-f() {
-  A.x = 1;
-}
-''');
-    helper.checkStaticProperty('A.x', 'A', 'x', false, isWrite: true);
-  });
-
-  test(
-      'Set accessor undefined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.x = 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-''');
-    helper.checkStaticProperty('l.A.x', 'A', 'x', false, isWrite: true);
-  });
-
-  test('Set accessor undefined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  f() {
-    x = 1;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor undefined dynamically in class from inside class in' +
-          ' foreach loop',
-      () {
-    Helper helper = new Helper('''
-class A {
-  f() {
-    for (x in []) {}
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor undefined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-f(A a) {
-  a.x = 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isWrite: true);
-  });
-
-  test(
-      'Set accessor undefined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-A h() => null;
-
-f() {
-  h().x = 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isWrite: true);
-  });
-
-  test('RMW variable defined at top level', () {
-    Helper helper = new Helper('''
-var x;
-
-f() {
-  x += 1;
-}
-''');
-    helper.checkStaticField('x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW variable defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.x += 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-var x;
-''');
-    helper.checkStaticField('l.x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW field defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-
-  f() {
-    x += 1;
-  }
-}
-''');
-    helper.checkStaticField('x', 'A', 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW field defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static var x;
-}
-
-f() {
-  A.x += 1;
-}
-''');
-    helper.checkStaticField('A.x', 'A', 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW field defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.x += 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static var x;
-}
-''');
-    helper.checkStaticField('l.A.x', 'A', 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW field defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-
-  f() {
-    x += 1;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW field defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-f(A a) {
-  a.x += 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW field defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  var x;
-}
-
-A h() => null;
-
-f() {
-  h().x += 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW variable defined locally', () {
-    Helper helper = new Helper('''
-f() {
-  var x;
-  x += 1;
-}
-''');
-    helper.checkLocalVariable('x', 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW variable defined in parameter', () {
-    Helper helper = new Helper('''
-f(x) {
-  x += 1;
-}
-''');
-    helper.checkParameter('x', 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW accessor defined at top level', () {
-    Helper helper = new Helper('''
-set x(value) {};
-
-f() {
-  x += 1;
-}
-''');
-    helper.checkStaticProperty(
-        'x',
-        null,
-        'x',
-        true,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test('RMW accessor defined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.x += 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-set x(value) {};
-''');
-    helper.checkStaticProperty(
-        'l.x',
-        null,
-        'x',
-        true,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test('RMW accessor defined statically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  static set x(value) {}
-
-  f() {
-    x += 1;
-  }
-}
-''');
-    helper.checkStaticProperty(
-        'x',
-        'A',
-        'x',
-        true,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test('RMW accessor defined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {
-  static set x(value) {}
-}
-
-f() {
-  A.x += 1;
-}
-''');
-    helper.checkStaticProperty(
-        'A.x',
-        'A',
-        'x',
-        true,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test(
-      'RMW accessor defined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.x += 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {
-  static set x(value) {}
-}
-''');
-    helper.checkStaticProperty(
-        'l.A.x',
-        'A',
-        'x',
-        true,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test('RMW accessor defined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-
-  f() {
-    x += 1;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW accessor defined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-}
-
-f(A a) {
-  a.x += 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW accessor defined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {
-  set x(value) {}
-}
-
-A h() => null;
-
-f() {
-  h().x += 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW accessor defined dynamically in class from outside class via dynamic var',
-      () {
-    Helper helper = new Helper('''
-f(a) {
-  a.x += 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW accessor defined dynamically in class from outside class via dynamic expression',
-      () {
-    Helper helper = new Helper('''
-h() => null;
-
-f() {
-  h().x += 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW accessor undefined at top level', () {
-    Helper helper = new Helper('''
-f() {
-  x += 1;
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW accessor undefined at top level via prefix', () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.x += 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-''');
-    helper.checkDynamic('l.x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test('RMW accessor undefined statically in class from outside class', () {
-    Helper helper = new Helper('''
-class A {}
-
-f() {
-  A.x += 1;
-}
-''');
-    helper.checkStaticProperty(
-        'A.x',
-        'A',
-        'x',
-        false,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test(
-      'RMW accessor undefined statically in class from outside class via prefix',
-      () {
-    Helper helper = new Helper('''
-import 'lib.dart' as l;
-
-f() {
-  l.A.x += 1;
-}
-''');
-    helper.addFile('/lib.dart', '''
-library lib;
-
-class A {}
-''');
-    helper.checkStaticProperty(
-        'l.A.x',
-        'A',
-        'x',
-        false,
-        isRead: true,
-        isWrite: true);
-  });
-
-  test('RMW accessor undefined dynamically in class from inside class', () {
-    Helper helper = new Helper('''
-class A {
-  f() {
-    x += 1;
-  }
-}
-''');
-    helper.checkDynamic('x', null, 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW accessor undefined dynamically in class from outside class via typed var',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-f(A a) {
-  a.x += 1;
-}
-''');
-    helper.checkDynamic('a.x', 'a', 'x', isRead: true, isWrite: true);
-  });
-
-  test(
-      'RMW accessor undefined dynamically in class from outside class via typed expression',
-      () {
-    Helper helper = new Helper('''
-class A {}
-
-A h() => null;
-
-f() {
-  h().x += 1;
-}
-''');
-    helper.checkDynamic('h().x', 'h()', 'x', isRead: true, isWrite: true);
-  });
-}
-
-typedef void AccessHandler(Expression node, AccessSemantics semantics);
-
-class Helper {
-  final MemoryResourceProvider provider = new MemoryResourceProvider();
-  Source rootSource;
-  AnalysisContext context;
-
-  Helper(String rootContents) {
-    DartSdk sdk = new MockSdk();
-    String rootFile = '/root.dart';
-    File file = provider.newFile(rootFile, rootContents);
-    rootSource = file.createSource();
-    context = AnalysisEngine.instance.createAnalysisContext();
-    // Set up the source factory.
-    List<UriResolver> uriResolvers = [
-        new ResourceUriResolver(provider),
-        new DartUriResolver(sdk)];
-    context.sourceFactory = new SourceFactory(uriResolvers);
-    // add the Source
-    ChangeSet changeSet = new ChangeSet();
-    changeSet.addedSource(rootSource);
-    context.applyChanges(changeSet);
-  }
-
-  LibraryElement get libraryElement {
-    return context.computeLibraryElement(rootSource);
-  }
-
-  void addFile(String path, String contents) {
-    provider.newFile(path, contents);
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as
-   * a dynamic method invocation.
-   */
-  void checkDynamic(String expectedSource, String expectedTarget,
-      String expectedName, {bool isRead: false, bool isWrite: false, bool isInvoke:
-      false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (AstNode node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.DYNAMIC));
-      if (expectedTarget == null) {
-        expect(semantics.target, isNull);
-      } else {
-        expect(semantics.target.toSource(), equals(expectedTarget));
-      }
-      expect(semantics.identifier.name, equals(expectedName));
-      expect(semantics.element, isNull);
-      expect(semantics.classElement, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as
-   * a local function invocation.
-   */
-  void checkLocalFunction(String expectedSource, String expectedName,
-      {bool isRead: false, bool isWrite: false, bool isInvoke: false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (AstNode node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.LOCAL_FUNCTION));
-      expect(semantics.identifier.name, equals(expectedName));
-      expect(semantics.element.displayName, equals(expectedName));
-      expect(semantics.classElement, isNull);
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as
-   * a local variable access.
-   */
-  void checkLocalVariable(String expectedSource, String expectedName,
-      {bool isRead: false, bool isWrite: false, bool isInvoke: false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (AstNode node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.LOCAL_VARIABLE));
-      expect(semantics.element.name, equals(expectedName));
-      expect(semantics.classElement, isNull);
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as a
-   * parameter access.
-   */
-  void checkParameter(String expectedSource, String expectedName, {bool isRead:
-      false, bool isWrite: false, bool isInvoke: false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (AstNode node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.PARAMETER));
-      expect(semantics.element.name, equals(expectedName));
-      expect(semantics.classElement, isNull);
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as
-   * a static field element reference.
-   */
-  void checkStaticField(String expectedSource, String expectedClass,
-      String expectedName, {bool isRead: false, bool isWrite: false, bool isInvoke:
-      false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (Expression node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.STATIC_FIELD));
-      expect(semantics.identifier.name, equals(expectedName));
-      expect(semantics.element.displayName, equals(expectedName));
-      if (expectedClass == null) {
-        expect(semantics.classElement, isNull);
-      } else {
-        expect(semantics.classElement, isNotNull);
-        expect(semantics.classElement.displayName, equals(expectedClass));
-      }
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as
-   * a static method.
-   */
-  void checkStaticMethod(String expectedSource, String expectedClass,
-      String expectedName, bool defined, {bool isRead: false, bool isWrite: false,
-      bool isInvoke: false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (AstNode node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.STATIC_METHOD));
-      expect(semantics.identifier.name, equals(expectedName));
-      if (expectedClass == null) {
-        expect(semantics.classElement, isNull);
-        if (defined) {
-          expect(semantics.element, new isInstanceOf<FunctionElement>());
-        }
-      } else {
-        expect(semantics.classElement, isNotNull);
-        expect(semantics.classElement.displayName, equals(expectedClass));
-        if (defined) {
-          expect(semantics.element, new isInstanceOf<MethodElement>());
-        }
-      }
-      if (defined) {
-        expect(semantics.element.displayName, equals(expectedName));
-      } else {
-        expect(semantics.element, isNull);
-      }
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as
-   * a static property access.
-   */
-  void checkStaticProperty(String expectedSource, String expectedClass,
-      String expectedName, bool defined, {bool isRead: false, bool isWrite: false,
-      bool isInvoke: false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (Expression node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(AccessKind.STATIC_PROPERTY));
-      expect(semantics.identifier.name, equals(expectedName));
-      if (expectedClass == null) {
-        expect(semantics.classElement, isNull);
-      } else {
-        expect(semantics.classElement, isNotNull);
-        expect(semantics.classElement.displayName, equals(expectedClass));
-      }
-      if (defined) {
-        expect(semantics.element.displayName, equals(expectedName));
-      } else {
-        expect(semantics.element, isNull);
-      }
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, equals(isWrite));
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-
-  /**
-   * Verify that the node represented by [expectedSource] is classified as a
-   * reference to a toplevel class or a type parameter.
-   */
-  void checkTypeReference(String expectedSource, String expectedName,
-      AccessKind expectedKind, {bool isRead: false, bool isInvoke: false}) {
-    TestVisitor visitor = new TestVisitor();
-    int count = 0;
-    visitor.onAccess = (AstNode node, AccessSemantics semantics) {
-      count++;
-      expect(node.toSource(), equals(expectedSource));
-      expect(semantics.kind, equals(expectedKind));
-      expect(semantics.element.name, equals(expectedName));
-      expect(semantics.classElement, isNull);
-      expect(semantics.target, isNull);
-      expect(semantics.isRead, equals(isRead));
-      expect(semantics.isWrite, isFalse);
-      expect(semantics.isInvoke, equals(isInvoke));
-    };
-    libraryElement.unit.accept(visitor);
-    expect(count, equals(1));
-  }
-}
-
-/**
- * Visitor class used to run the tests.
- */
-class TestVisitor extends RecursiveAstVisitor {
-  AccessHandler onAccess;
-
-  @override
-  visitMethodInvocation(MethodInvocation node) {
-    onAccess(node, node.accept(ACCESS_SEMANTICS_VISITOR));
-  }
-
-  @override
-  visitPrefixedIdentifier(PrefixedIdentifier node) {
-    onAccess(node, node.accept(ACCESS_SEMANTICS_VISITOR));
-  }
-
-  @override
-  visitPropertyAccess(PropertyAccess node) {
-    onAccess(node, node.accept(ACCESS_SEMANTICS_VISITOR));
-  }
-
-  @override
-  visitSimpleIdentifier(SimpleIdentifier node) {
-    AccessSemantics semantics = node.accept(ACCESS_SEMANTICS_VISITOR);
-    if (semantics != null) {
-      onAccess(node, semantics);
-    }
-  }
-}
diff --git a/pkg/analyzer2dart/test/mock_sdk.dart b/pkg/analyzer2dart/test/mock_sdk.dart
deleted file mode 100644
index 8cebd44..0000000
--- a/pkg/analyzer2dart/test/mock_sdk.dart
+++ /dev/null
@@ -1,250 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library testing.mock_sdk;
-
-import 'package:analyzer/file_system/file_system.dart' as resource;
-import 'package:analyzer/file_system/memory_file_system.dart' as resource;
-import 'package:analyzer/src/generated/engine.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source.dart';
-
-
-class MockSdk implements DartSdk {
-  static const _MockSdkLibrary LIB_CORE =
-      const _MockSdkLibrary('core', '/lib/core/core.dart', '''
-library dart.core;
-
-class Object {
-  bool operator ==(other) => identical(this, other);
-}
-
-class Function {}
-class StackTrace {}
-class Symbol {}
-class Type {}
-
-abstract class Comparable<T> {
-  int compareTo(T other);
-}
-
-class String implements Comparable<String> {
-  bool get isEmpty => false;
-  bool get isNotEmpty => false;
-  int get length => 0;
-}
-
-class bool extends Object {}
-abstract class num implements Comparable<num> {
-  bool operator <(num other);
-  num operator +(num other);
-  num operator -(num other);
-  num operator *(num other);
-  num operator /(num other);
-  int toInt();
-}
-abstract class int extends num {
-  bool get isEven => false;
-  int operator -();
-}
-class double extends num {}
-class DateTime extends Object {}
-class Null extends Object {}
-
-class Deprecated extends Object {
-  final String expires;
-  const Deprecated(this.expires);
-}
-const Object deprecated = const Deprecated("next release");
-
-abstract class Iterable<E> {}
-
-abstract class List<E> extends Object implements Iterable {
-  void add(E value);
-  E operator [](int index);
-  void operator []=(int index, E value);
-}
-class Map<K, V> extends Object {}
-
-external bool identical(Object a, Object b);
-
-void print(Object object) {}
-
-typedef int Comparator<T>(T a, T b);
-''');
-
-  static const _MockSdkLibrary LIB_ASYNC =
-      const _MockSdkLibrary('async', '/lib/async/async.dart', '''
-library dart.async;
-class Future<T> {
-  static Future wait(List<Future> futures) => null;
-}
-
-class Stream<T> {}
-''');
-
-  static const _MockSdkLibrary LIB_MATH =
-      const _MockSdkLibrary('math', '/lib/math/math.dart', '''
-library dart.math;
-const double E = 2.718281828459045;
-const double PI = 3.1415926535897932;
-num min(num a, num b) => 0;
-num max(num a, num b) => 0;
-class Random {}
-''');
-
-  static const _MockSdkLibrary LIB_HTML =
-      const _MockSdkLibrary('html', '/lib/html/dartium/html_dartium.dart', '''
-library dart.html;
-class HtmlElement {}
-''');
-
-  static const List<SdkLibrary> LIBRARIES = const [
-      LIB_CORE,
-      LIB_ASYNC,
-      LIB_MATH,
-      LIB_HTML,];
-
-  final resource.MemoryResourceProvider provider =
-      new resource.MemoryResourceProvider();
-
-  /**
-   * The [AnalysisContext] which is used for all of the sources.
-   */
-  InternalAnalysisContext _analysisContext;
-
-  MockSdk() {
-    LIBRARIES.forEach((_MockSdkLibrary library) {
-      provider.newFile(library.path, library.content);
-    });
-  }
-
-  @override
-  AnalysisContext get context {
-    if (_analysisContext == null) {
-      _analysisContext = new SdkAnalysisContext();
-      SourceFactory factory = new SourceFactory([new DartUriResolver(this)]);
-      _analysisContext.sourceFactory = factory;
-      ChangeSet changeSet = new ChangeSet();
-      for (String uri in uris) {
-        Source source = factory.forUri(uri);
-        changeSet.addedSource(source);
-      }
-      _analysisContext.applyChanges(changeSet);
-    }
-    return _analysisContext;
-  }
-
-  @override
-  List<SdkLibrary> get sdkLibraries => LIBRARIES;
-
-  @override
-  String get sdkVersion => throw unimplemented;
-
-  UnimplementedError get unimplemented => new UnimplementedError();
-
-  @override
-  List<String> get uris {
-    List<String> uris = <String>[];
-    for (SdkLibrary library in LIBRARIES) {
-      uris.add('dart:' + library.shortName);
-    }
-    return uris;
-  }
-
-  @override
-  Source fromFileUri(Uri uri) {
-    String filePath = uri.path;
-    String libPath = '/lib';
-    if (!filePath.startsWith("$libPath/")) {
-      return null;
-    }
-    for (SdkLibrary library in LIBRARIES) {
-      String libraryPath = library.path;
-      if (filePath.replaceAll('\\', '/') == libraryPath) {
-        String path = library.shortName;
-        try {
-          resource.File file = provider.getResource(uri.path);
-          Uri dartUri = new Uri(scheme: 'dart', path: library.shortName);
-          return file.createSource(dartUri);
-        } catch (exception) {
-          return null;
-        }
-      }
-      if (filePath.startsWith("$libraryPath/")) {
-        String pathInLibrary = filePath.substring(libraryPath.length + 1);
-        String path = '${library.shortName}/${pathInLibrary}';
-        try {
-          resource.File file = provider.getResource(uri.path);
-          Uri dartUri = new Uri(scheme: 'dart', path: path);
-          return file.createSource(dartUri);
-        } catch (exception) {
-          return null;
-        }
-      }
-    }
-    return null;
-  }
-
-  @override
-  SdkLibrary getSdkLibrary(String dartUri) {
-    // getSdkLibrary() is only used to determine whether a library is internal
-    // to the SDK.  The mock SDK doesn't have any internals, so it's safe to
-    // return null.
-    return null;
-  }
-
-  @override
-  Source mapDartUri(String dartUri) {
-    const Map<String, String> uriToPath = const {
-      "dart:core": "/lib/core/core.dart",
-      "dart:html": "/lib/html/dartium/html_dartium.dart",
-      "dart:async": "/lib/async/async.dart",
-      "dart:math": "/lib/math/math.dart"
-    };
-
-    String path = uriToPath[dartUri];
-    if (path != null) {
-      resource.File file = provider.getResource(path);
-      Uri uri = new Uri(scheme: 'dart', path: dartUri.substring(5));
-      return file.createSource(uri);
-    }
-
-    // If we reach here then we tried to use a dartUri that's not in the
-    // table above.
-    throw unimplemented;
-  }
-}
-
-
-class _MockSdkLibrary implements SdkLibrary {
-  final String shortName;
-  final String path;
-  final String content;
-
-  const _MockSdkLibrary(this.shortName, this.path, this.content);
-
-  @override
-  String get category => throw unimplemented;
-
-  @override
-  bool get isDart2JsLibrary => throw unimplemented;
-
-  @override
-  bool get isDocumented => throw unimplemented;
-
-  @override
-  bool get isImplementation => throw unimplemented;
-
-  @override
-  bool get isInternal => throw unimplemented;
-
-  @override
-  bool get isShared => throw unimplemented;
-
-  @override
-  bool get isVmLibrary => throw unimplemented;
-
-  UnimplementedError get unimplemented => new UnimplementedError();
-}
diff --git a/pkg/analyzer2dart/test/output_helper.dart b/pkg/analyzer2dart/test/output_helper.dart
deleted file mode 100644
index 5a3f6b4..0000000
--- a/pkg/analyzer2dart/test/output_helper.dart
+++ /dev/null
@@ -1,32 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Helper classes for testing compiler output.
-library test.output_helper;
-
-import 'dart:async';
-
-
-class CollectingOutputProvider {
-  StringBufferSink output;
-
-  EventSink<String> call(String name, String extension) {
-    return output = new StringBufferSink();
-  }
-}
-
-class StringBufferSink implements EventSink<String> {
-  StringBuffer sb = new StringBuffer();
-
-  void add(String text) {
-    sb.write(text);
-  }
-
-  void addError(errorEvent, [StackTrace stackTrace]) {}
-
-  void close() {}
-
-  String get text => sb.toString();
-}
-
diff --git a/pkg/analyzer2dart/test/sexpr_data.dart b/pkg/analyzer2dart/test/sexpr_data.dart
deleted file mode 100644
index e7d00ae..0000000
--- a/pkg/analyzer2dart/test/sexpr_data.dart
+++ /dev/null
@@ -1,1549 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Test data for sexpr_test.
-library test.sexpr.data;
-
-import 'test_helper.dart';
-
-class TestSpec extends TestSpecBase {
-  // A [String] or a [Map<String, String>].
-  final output;
-
-  /// True if the test should be skipped when testing analyzer2dart.
-  final bool skipInAnalyzerFrontend;
-
-  const TestSpec(String input, this.output,
-                 {this.skipInAnalyzerFrontend: false}) : super(input);
-}
-
-const List<Group> TEST_DATA = const [
-  const Group('Empty main', const [
-    const TestSpec('''
-main() {}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-foo() {}
-main() {
-  foo();
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetCont ((k0 (v0)
-      (LetPrim (v1 (Constant (Null)))
-        (InvokeContinuation return (v1)))))
-    (InvokeStatic foo () k0)))
-''')
-  ]),
-
-  const Group('Literals', const [
-    const TestSpec('''
-main() {
-  return 0;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 1.5;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Double 1.5)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  return true;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Bool true)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  return false;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Bool false)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  return "a";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (String "a")))
-    (InvokeContinuation return (v0))))
-'''),
-  ]),
-
-  const Group('Parameters', const [
-    const TestSpec('''
-main(args) {}
-''', '''
-(FunctionDefinition main () (args) return
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main(a, b) {}
-''', '''
-(FunctionDefinition main () (a b) return
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''),
-  ]),
-
-  const Group('Pass arguments', const [
-    const TestSpec('''
-foo(a) {}
-main() {
-  foo(null);
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Null)))
-    (LetCont ((k0 (v1)
-        (LetPrim (v2 (Constant (Null)))
-          (InvokeContinuation return (v2)))))
-      (InvokeStatic foo (v0) k0))))
-'''),
-
-    const TestSpec('''
-bar(b, c) {}
-foo(a) {}
-main() {
-  foo(null);
-  bar(0, "");
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Null)))
-    (LetCont ((k0 (v1)
-        (LetPrim (v2 (Constant (Int 0)))
-          (LetPrim (v3 (Constant (String "")))
-            (LetCont ((k1 (v4)
-                (LetPrim (v5 (Constant (Null)))
-                  (InvokeContinuation return (v5)))))
-              (InvokeStatic bar (v2 v3) k1))))))
-      (InvokeStatic foo (v0) k0))))
-'''),
-
-    const TestSpec('''
-foo(a) {}
-main() {
-  return foo(null);
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Null)))
-    (LetCont ((k0 (v1)
-        (InvokeContinuation return (v1))))
-      (InvokeStatic foo (v0) k0))))
-'''),
-  ]),
-
-  const Group('Local variables', const [
-    const TestSpec('''
-main() {
-  var a;
-  return a;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  var a = 0;
-  return a;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return a;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (InvokeContinuation return (a)))
-'''),
-    ]),
-
-  const Group('Local variable writes', const <TestSpec>[
-    const TestSpec('''
-main() {
-  var a;
-  a = 10;
-  return a;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Null)))
-    (LetPrim (v1 (Constant (Int 10)))
-      (InvokeContinuation return (v1)))))
-'''),
-
-    const TestSpec('''
-main() {
-  var a = 0;
-  a = 10;
-  return a;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (Int 10)))
-      (InvokeContinuation return (v1)))))
-'''),
-
-    const TestSpec('''
-main() {
-  var a = 0;
-  print(a);
-  a = "";
-  print(a);
-  return a;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((k0 (v1)
-        (LetPrim (v2 (Constant (String "")))
-          (LetCont ((k1 (v3)
-              (InvokeContinuation return (v2))))
-            (InvokeStatic print (v2) k1)))))
-      (InvokeStatic print (v0) k0))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  print(a);
-  a = "";
-  print(a);
-  return a;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetPrim (v1 (Constant (String "")))
-        (LetCont ((k1 (v2)
-            (InvokeContinuation return (v1))))
-          (InvokeStatic print (v1) k1)))))
-    (InvokeStatic print (a) k0)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    a = "";
-  }
-  print(a);
-  return a;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetCont ((k1 (v1)
-          (InvokeContinuation return (v0))))
-        (InvokeStatic print (v0) k1))))
-    (LetCont ((k2 ()
-        (LetPrim (v2 (Constant (String "")))
-          (InvokeContinuation k0 (v2))))
-              (k3 ()
-        (InvokeContinuation k0 (a))))
-      (Branch (IsTrue a) k2 k3))))
-'''),
-  ]),
-
-  const Group('Dynamic access', const [
-    const TestSpec('''
-main(a) {
-  return a.foo;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (InvokeMethod a foo () k0)))
-'''),
-
-    const TestSpec('''
-main() {
-  var a = "";
-  return a.foo;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (String "")))
-    (LetCont ((k0 (v1)
-        (InvokeContinuation return (v1))))
-      (InvokeMethod v0 foo () k0))))
-'''),
-    ]),
-
-  const Group('Dynamic invocation', const [
-    const TestSpec('''
-main(a) {
-  return a.foo(0);
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((k0 (v1)
-        (InvokeContinuation return (v1))))
-      (InvokeMethod a foo (v0) k0))))
-'''),
-
-    const TestSpec('''
-main() {
-  var a = "";
-  return a.foo(0, 1);
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (String "")))
-    (LetPrim (v1 (Constant (Int 0)))
-      (LetPrim (v2 (Constant (Int 1)))
-        (LetCont ((k0 (v3)
-            (InvokeContinuation return (v3))))
-          (InvokeMethod v0 foo (v1 v2) k0))))))
-'''),
-    ]),
-
-  const Group('Binary expressions', const [
-    const TestSpec('''
-main() {
-  return 0 + "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 + (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 - "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 - (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 * "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 * (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 / "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 / (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 ~/ "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 ~/ (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 < "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 < (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 <= "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 <= (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 > "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 > (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 >= "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 >= (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 << "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 << (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 >> "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 >> (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 & "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 & (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 | "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 | (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 ^ "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 ^ (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 == "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (InvokeContinuation return (v2))))
-        (InvokeMethod v0 == (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 != "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (LetCont ((k1 (v3)
-              (InvokeContinuation return (v3))))
-            (LetCont ((k2 ()
-                (LetPrim (v4 (Constant (Bool false)))
-                  (InvokeContinuation k1 (v4))))
-                      (k3 ()
-                (LetPrim (v5 (Constant (Bool true)))
-                  (InvokeContinuation k1 (v5)))))
-              (Branch (IsTrue v2) k2 k3)))))
-        (InvokeMethod v0 == (v1) k0)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 && "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((k0 (v1)
-        (InvokeContinuation return (v1))))
-      (LetCont ((k1 ()
-          (LetPrim (v2 (Constant (String "")))
-            (LetCont ((k2 ()
-                (LetPrim (v3 (Constant (Bool true)))
-                  (InvokeContinuation k0 (v3))))
-                      (k3 ()
-                (LetPrim (v4 (Constant (Bool false)))
-                  (InvokeContinuation k0 (v4)))))
-              (Branch (IsTrue v2) k2 k3))))
-                (k4 ()
-          (LetPrim (v5 (Constant (Bool false)))
-            (InvokeContinuation k0 (v5)))))
-        (Branch (IsTrue v0) k1 k4)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 || "";
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((k0 (v1)
-        (InvokeContinuation return (v1))))
-      (LetCont ((k1 ()
-          (LetPrim (v2 (Constant (Bool true)))
-            (InvokeContinuation k0 (v2))))
-                (k2 ()
-          (LetPrim (v3 (Constant (String "")))
-            (LetCont ((k3 ()
-                (LetPrim (v4 (Constant (Bool true)))
-                  (InvokeContinuation k0 (v4))))
-                      (k4 ()
-                (LetPrim (v5 (Constant (Bool false)))
-                  (InvokeContinuation k0 (v5)))))
-              (Branch (IsTrue v3) k3 k4)))))
-        (Branch (IsTrue v0) k1 k2)))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 + "" * 2;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetPrim (v2 (Constant (Int 2)))
-        (LetCont ((k0 (v3)
-            (LetCont ((k1 (v4)
-                (InvokeContinuation return (v4))))
-              (InvokeMethod v0 + (v3) k1))))
-          (InvokeMethod v1 * (v2) k0))))))
-'''),
-
-    const TestSpec('''
-main() {
-  return 0 * "" + 2;
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "")))
-      (LetCont ((k0 (v2)
-          (LetPrim (v3 (Constant (Int 2)))
-            (LetCont ((k1 (v4)
-                (InvokeContinuation return (v4))))
-              (InvokeMethod v2 + (v3) k1)))))
-        (InvokeMethod v0 * (v1) k0)))))
-'''),
-    ]),
-
-  const Group('If statement', const [
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Null)))
-        (InvokeContinuation return (v0)))))
-    (LetCont ((k1 ()
-        (LetPrim (v1 (Constant (Int 0)))
-          (LetCont ((k2 (v2)
-              (InvokeContinuation k0 ())))
-            (InvokeStatic print (v1) k2))))
-              (k3 ()
-        (InvokeContinuation k0 ())))
-      (Branch (IsTrue a) k1 k3))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-  } else {
-    print(1);
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Null)))
-        (InvokeContinuation return (v0)))))
-    (LetCont ((k1 ()
-        (LetPrim (v1 (Constant (Int 0)))
-          (LetCont ((k2 (v2)
-              (InvokeContinuation k0 ())))
-            (InvokeStatic print (v1) k2))))
-              (k3 ()
-        (LetPrim (v3 (Constant (Int 1)))
-          (LetCont ((k4 (v4)
-              (InvokeContinuation k0 ())))
-            (InvokeStatic print (v3) k4)))))
-      (Branch (IsTrue a) k1 k3))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-  } else {
-    print(1);
-    print(2);
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Null)))
-        (InvokeContinuation return (v0)))))
-    (LetCont ((k1 ()
-        (LetPrim (v1 (Constant (Int 0)))
-          (LetCont ((k2 (v2)
-              (InvokeContinuation k0 ())))
-            (InvokeStatic print (v1) k2))))
-              (k3 ()
-        (LetPrim (v3 (Constant (Int 1)))
-          (LetCont ((k4 (v4)
-              (LetPrim (v5 (Constant (Int 2)))
-                (LetCont ((k5 (v6)
-                    (InvokeContinuation k0 ())))
-                  (InvokeStatic print (v5) k5)))))
-            (InvokeStatic print (v3) k4)))))
-      (Branch (IsTrue a) k1 k3))))
-'''),
-    ]),
-
-  const Group('Conditional expression', const [
-    const TestSpec('''
-main(a) {
-  return a ? print(0) : print(1);
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (LetCont ((k1 ()
-        (LetPrim (v1 (Constant (Int 0)))
-          (LetCont ((k2 (v2)
-              (InvokeContinuation k0 (v2))))
-            (InvokeStatic print (v1) k2))))
-              (k3 ()
-        (LetPrim (v3 (Constant (Int 1)))
-          (LetCont ((k4 (v4)
-              (InvokeContinuation k0 (v4))))
-            (InvokeStatic print (v3) k4)))))
-      (Branch (IsTrue a) k1 k3))))
-'''),
-    ]),
-
-
-  // These test that unreachable statements are skipped within a block.
-  const Group('Block statements', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return 0;
-  return 1;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (Int 0)))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    return 0;
-    return 1;
-  } else {
-    return 2;
-    return 3;
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Int 0)))
-        (InvokeContinuation return (v0))))
-            (k1 ()
-      (LetPrim (v1 (Constant (Int 2)))
-        (InvokeContinuation return (v1)))))
-    (Branch (IsTrue a) k0 k1)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  if (a) {
-    print(0);
-    return 0;
-    return 1;
-  } else {
-    print(2);
-    return 2;
-    return 3;
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Int 0)))
-        (LetCont ((k1 (v1)
-            (LetPrim (v2 (Constant (Int 0)))
-              (InvokeContinuation return (v2)))))
-          (InvokeStatic print (v0) k1))))
-            (k2 ()
-      (LetPrim (v3 (Constant (Int 2)))
-        (LetCont ((k3 (v4)
-            (LetPrim (v5 (Constant (Int 2)))
-              (InvokeContinuation return (v5)))))
-          (InvokeStatic print (v3) k3)))))
-    (Branch (IsTrue a) k0 k2)))
-'''),
-  ]),
-
-  const Group('Constructor invocation', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  new Object();
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetPrim (v1 (Constant (Null)))
-        (InvokeContinuation return (v1)))))
-    (InvokeConstructor Object () k0)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  new Deprecated("");
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (String "")))
-    (LetCont ((k0 (v1)
-        (LetPrim (v2 (Constant (Null)))
-          (InvokeContinuation return (v2)))))
-      (InvokeConstructor Deprecated (v0) k0))))
-'''),
-  ]),
-
-  const Group('List literal', const <TestSpec>[
-    const TestSpec('''
-main() {
-  return [];
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (LiteralList ()))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  return [0];
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (LiteralList (v0)))
-      (InvokeContinuation return (v1)))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return [0, 1, a];
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (Int 1)))
-      (LetPrim (v2 (LiteralList (v0 v1 a)))
-        (InvokeContinuation return (v2))))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return [0, [1], [a, [3]]];
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (Int 1)))
-      (LetPrim (v2 (LiteralList (v1)))
-        (LetPrim (v3 (Constant (Int 3)))
-          (LetPrim (v4 (LiteralList (v3)))
-            (LetPrim (v5 (LiteralList (a v4)))
-              (LetPrim (v6 (LiteralList (v0 v2 v5)))
-                (InvokeContinuation return (v6))))))))))
-'''),
-  ]),
-
-  const Group('Map literal', const <TestSpec>[
-    const TestSpec('''
-main() {
-  return {};
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (LiteralMap () ()))
-    (InvokeContinuation return (v0))))
-'''),
-
-    const TestSpec('''
-main() {
-  return {"a": 0};
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (String "a")))
-    (LetPrim (v1 (Constant (Int 0)))
-      (LetPrim (v2 (LiteralMap (v0) (v1)))
-        (InvokeContinuation return (v2))))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return {"a": 0, "b": 1, "c": a};
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (String "a")))
-    (LetPrim (v1 (Constant (Int 0)))
-      (LetPrim (v2 (Constant (String "b")))
-        (LetPrim (v3 (Constant (Int 1)))
-          (LetPrim (v4 (Constant (String "c")))
-            (LetPrim (v5 (LiteralMap (v0 v2 v4) (v1 v3 a)))
-              (InvokeContinuation return (v5)))))))))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return {0: "a", 1: {2: "b"}, a: {3: "c"}};
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetPrim (v1 (Constant (String "a")))
-      (LetPrim (v2 (Constant (Int 1)))
-        (LetPrim (v3 (Constant (Int 2)))
-          (LetPrim (v4 (Constant (String "b")))
-            (LetPrim (v5 (LiteralMap (v3) (v4)))
-              (LetPrim (v6 (Constant (Int 3)))
-                (LetPrim (v7 (Constant (String "c")))
-                  (LetPrim (v8 (LiteralMap (v6) (v7)))
-                    (LetPrim (v9 (LiteralMap (v0 v2 a) (v1 v5 v8)))
-                      (InvokeContinuation return (v9)))))))))))))
-'''),
-  ]),
-
-  const Group('For loop', const <TestSpec>[
-    const TestSpec('''
-main() {
-  for (;;) {}
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetCont ((rec k0 ()
-      (LetPrim (v0 (Constant (Bool true)))
-        (LetCont ((k1 ()
-            (LetPrim (v1 (Constant (Null)))
-              (InvokeContinuation return (v1))))
-                  (k2 ()
-            (InvokeContinuation rec k0 ())))
-          (Branch (IsTrue v0) k2 k1)))))
-    (InvokeContinuation k0 ())))
-'''),
-
-const TestSpec('''
-main() {
-  for (var i = 0; i < 10; i = i + 1) {
-    print(i);
-  }
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((rec k0 (v1)
-        (LetPrim (v2 (Constant (Int 10)))
-          (LetCont ((k1 (v3)
-              (LetCont ((k2 ()
-                  (LetPrim (v4 (Constant (Null)))
-                    (InvokeContinuation return (v4))))
-                        (k3 ()
-                  (LetCont ((k4 (v5)
-                      (LetPrim (v6 (Constant (Int 1)))
-                        (LetCont ((k5 (v7)
-                            (InvokeContinuation rec k0 (v7))))
-                          (InvokeMethod v1 + (v6) k5)))))
-                    (InvokeStatic print (v1) k4))))
-                (Branch (IsTrue v3) k3 k2))))
-            (InvokeMethod v1 < (v2) k1)))))
-      (InvokeContinuation k0 (v0)))))
-'''),
-
-const TestSpec('''
-main(i) {
-  for (i = 0; i < 10; i = i + 1) {
-    print(i);
-  }
-}
-''', '''
-(FunctionDefinition main () (i) return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((rec k0 (v1)
-        (LetPrim (v2 (Constant (Int 10)))
-          (LetCont ((k1 (v3)
-              (LetCont ((k2 ()
-                  (LetPrim (v4 (Constant (Null)))
-                    (InvokeContinuation return (v4))))
-                        (k3 ()
-                  (LetCont ((k4 (v5)
-                      (LetPrim (v6 (Constant (Int 1)))
-                        (LetCont ((k5 (v7)
-                            (InvokeContinuation rec k0 (v7))))
-                          (InvokeMethod v1 + (v6) k5)))))
-                    (InvokeStatic print (v1) k4))))
-                (Branch (IsTrue v3) k3 k2))))
-            (InvokeMethod v1 < (v2) k1)))))
-      (InvokeContinuation k0 (v0)))))
-'''),
-  ]),
-
-  const Group('While loop', const <TestSpec>[
-    const TestSpec('''
-main() {
-  while (true) {}
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetCont ((rec k0 ()
-      (LetPrim (v0 (Constant (Bool true)))
-        (LetCont ((k1 ()
-            (LetPrim (v1 (Constant (Null)))
-              (InvokeContinuation return (v1))))
-                  (k2 ()
-            (InvokeContinuation rec k0 ())))
-          (Branch (IsTrue v0) k2 k1)))))
-    (InvokeContinuation k0 ())))
-'''),
-
-const TestSpec('''
-main() {
-  var i = 0;
-  while (i < 10) {
-    print(i);
-    i = i + 1;
-  }
-}
-''', '''
-(FunctionDefinition main () () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (LetCont ((rec k0 (v1)
-        (LetPrim (v2 (Constant (Int 10)))
-          (LetCont ((k1 (v3)
-              (LetCont ((k2 ()
-                  (LetPrim (v4 (Constant (Null)))
-                    (InvokeContinuation return (v4))))
-                        (k3 ()
-                  (LetCont ((k4 (v5)
-                      (LetPrim (v6 (Constant (Int 1)))
-                        (LetCont ((k5 (v7)
-                            (InvokeContinuation rec k0 (v7))))
-                          (InvokeMethod v1 + (v6) k5)))))
-                    (InvokeStatic print (v1) k4))))
-                (Branch (IsTrue v3) k3 k2))))
-            (InvokeMethod v1 < (v2) k1)))))
-      (InvokeContinuation k0 (v0)))))
-'''),
-  ]),
-
-  const Group('Type operators', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  return a is String;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (TypeOperator is a String () k0)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return a is List<String>;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (TypeOperator is a List<String> () k0)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  return a is Comparator<String>;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (TypeOperator is a Comparator<String> () k0)))
-'''),
-
-  const TestSpec('''
-main(a) {
-  return a is! String;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetCont ((k1 (v1)
-          (InvokeContinuation return (v1))))
-        (LetCont ((k2 ()
-            (LetPrim (v2 (Constant (Bool false)))
-              (InvokeContinuation k1 (v2))))
-                  (k3 ()
-            (LetPrim (v3 (Constant (Bool true)))
-              (InvokeContinuation k1 (v3)))))
-          (Branch (IsTrue v0) k2 k3)))))
-    (TypeOperator is a String () k0)))
-'''),
-
-const TestSpec('''
-main(a) {
-  return a as String;
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (TypeOperator as a String () k0)))
-'''),
-  ]),
-
-  const Group('For in loop', const <TestSpec>[
-// TODO(johnniwinther): Add tests for `i` as top-level, static and instance
-// fields.
-    const TestSpec('''
-main(a) {
-  for (var i in a) {
-    print(i);
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetCont ((rec k1 (v1)
-          (LetCont ((k2 (v2)
-              (LetCont ((k3 ()
-                  (LetPrim (v3 (Constant (Null)))
-                    (InvokeContinuation return (v3))))
-                        (k4 ()
-                  (LetPrim (v4 (Constant (Null)))
-                    (LetCont ((k5 (v5)
-                        (LetCont ((k6 (v6)
-                            (InvokeContinuation rec k1 (v1))))
-                          (InvokeStatic print (v5) k6))))
-                      (InvokeMethod v0 current () k5)))))
-                (Branch (IsTrue v2) k4 k3))))
-            (InvokeMethod v0 moveNext () k2))))
-        (InvokeContinuation k1 (a)))))
-    (InvokeMethod a iterator () k0)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  for (var i in a) {
-    print(i);
-    i = 0;
-    print(i);
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetCont ((rec k1 (v1)
-          (LetCont ((k2 (v2)
-              (LetCont ((k3 ()
-                  (LetPrim (v3 (Constant (Null)))
-                    (InvokeContinuation return (v3))))
-                        (k4 ()
-                  (LetPrim (v4 (Constant (Null)))
-                    (LetCont ((k5 (v5)
-                        (LetCont ((k6 (v6)
-                            (LetPrim (v7 (Constant (Int 0)))
-                              (LetCont ((k7 (v8)
-                                  (InvokeContinuation rec k1 (v1))))
-                                (InvokeStatic print (v7) k7)))))
-                          (InvokeStatic print (v5) k6))))
-                      (InvokeMethod v0 current () k5)))))
-                (Branch (IsTrue v2) k4 k3))))
-            (InvokeMethod v0 moveNext () k2))))
-        (InvokeContinuation k1 (a)))))
-    (InvokeMethod a iterator () k0)))
-'''),
-
-    const TestSpec('''
-main(a) {
-  var i;
-  for (i in a) {
-    print(i);
-  }
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (Constant (Null)))
-    (LetCont ((k0 (v1)
-        (LetCont ((rec k1 (v2 v3)
-            (LetCont ((k2 (v4)
-                (LetCont ((k3 ()
-                    (LetPrim (v5 (Constant (Null)))
-                      (InvokeContinuation return (v5))))
-                          (k4 ()
-                    (LetCont ((k5 (v6)
-                        (LetCont ((k6 (v7)
-                            (InvokeContinuation rec k1 (v2 v6))))
-                          (InvokeStatic print (v6) k6))))
-                      (InvokeMethod v1 current () k5))))
-                  (Branch (IsTrue v4) k4 k3))))
-              (InvokeMethod v1 moveNext () k2))))
-          (InvokeContinuation k1 (a v0)))))
-      (InvokeMethod a iterator () k0))))
-'''),
-  ]),
-
-  const Group('Local functions', const <TestSpec>[
-    const TestSpec('''
-main(a) {
-  local() {}
-  return local();
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (CreateFunction
-      (FunctionDefinition local () () return
-        (LetPrim (v1 (Constant (Null)))
-          (InvokeContinuation return (v1))))))
-    (LetCont ((k0 (v2)
-        (InvokeContinuation return (v2))))
-      (InvokeMethod v0 call () k0))))
-'''),
-
-  const TestSpec('''
-main(a) {
-  local() {}
-  var l = local;
-  return l();
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (CreateFunction
-      (FunctionDefinition local () () return
-        (LetPrim (v1 (Constant (Null)))
-          (InvokeContinuation return (v1))))))
-    (LetCont ((k0 (v2)
-        (InvokeContinuation return (v2))))
-      (InvokeMethod v0 call () k0))))
-'''),
-
-  const TestSpec('''
-main(a) {
-  return () {}();
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetPrim (v0 (CreateFunction
-      (FunctionDefinition  () () return
-        (LetPrim (v1 (Constant (Null)))
-          (InvokeContinuation return (v1))))))
-    (LetCont ((k0 (v2)
-        (InvokeContinuation return (v2))))
-      (InvokeMethod v0 call () k0))))
-'''),
-
-  const TestSpec('''
-main(a) {
-  var c = a ? () { return 0; } : () { return 1; }
-  return c();
-}
-''', '''
-(FunctionDefinition main () (a) return
-  (LetCont ((k0 (v0)
-      (LetCont ((k1 (v1)
-          (InvokeContinuation return (v1))))
-        (InvokeMethod v0 call () k1))))
-    (LetCont ((k2 ()
-        (LetPrim (v2 (CreateFunction
-            (FunctionDefinition  () () return
-              (LetPrim (v3 (Constant (Int 0)))
-                (InvokeContinuation return (v3))))))
-          (InvokeContinuation k0 (v2))))
-              (k3 ()
-        (LetPrim (v4 (CreateFunction
-            (FunctionDefinition  () () return
-              (LetPrim (v5 (Constant (Int 1)))
-                (InvokeContinuation return (v5))))))
-          (InvokeContinuation k0 (v4)))))
-      (Branch (IsTrue a) k2 k3))))
-'''),
-  ]),
-
-  const Group('Top level field', const <TestSpec>[
-    const TestSpec('''
-var field;
-main(args) {
-  return field;
-}
-''', const {
-      'main': '''
-(FunctionDefinition main () (args) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (GetLazyStatic field k0)))
-''',
-      'field': '''
-(FieldDefinition field)
-'''}),
-
-    const TestSpec('''
-var field = null;
-main(args) {
-  return field;
-}
-''', const {
-      'main': '''
-(FunctionDefinition main () (args) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (GetLazyStatic field k0)))
-''',
-      'field': '''
-(FieldDefinition field () return
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''}),
-
-    const TestSpec('''
-var field = 0;
-main(args) {
-  return field;
-}
-''', const {
-      'main': '''
-(FunctionDefinition main () (args) return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (GetLazyStatic field k0)))
-''',
-      'field': '''
-(FieldDefinition field () return
-  (LetPrim (v0 (Constant (Int 0)))
-    (InvokeContinuation return (v0))))
-'''}),
-
-    const TestSpec('''
-var field;
-main(args) {
-  field = args.length;
-  return field;
-}
-''', '''
-(FunctionDefinition main () (args) return
-  (LetCont ((k0 (v0)
-      (SetStatic field v0
-        (LetCont ((k1 (v1)
-            (InvokeContinuation return (v1))))
-          (GetLazyStatic field k1)))))
-    (InvokeMethod args length () k0)))
-'''),
-  ]),
-
-  const Group('Closure variables', const <TestSpec>[
-    const TestSpec('''
-main(x,foo) {
-  print(x);
-  getFoo() => foo;
-  print(getFoo());
-}
-''', '''
-(FunctionDefinition main () (x foo) return
-  (LetCont ((k0 (v0)
-      (LetPrim (v1 (CreateFunction
-          (FunctionDefinition getFoo () () return
-            (LetPrim (v2 (GetMutableVariable foo))
-              (InvokeContinuation return (v2))))))
-        (LetCont ((k1 (v3)
-            (LetCont ((k2 (v4)
-                (LetPrim (v5 (Constant (Null)))
-                  (InvokeContinuation return (v5)))))
-              (InvokeStatic print (v3) k2))))
-          (InvokeMethod v1 call () k1)))))
-    (InvokeStatic print (x) k0)))
-''', skipInAnalyzerFrontend: true)
-  ]),
-
-  const Group('Constructors', const <TestSpec>[
-    const TestSpec('''
-class C {}
-main() {
-  return new C();
-}
-''',
-    const {
-'main': '''
-(FunctionDefinition main () () return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (InvokeConstructor C () k0)))
-''',
-'C.':  '''
-(ConstructorDefinition (this) () return (
-    )
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''}),
-
-    const TestSpec('''
-class C {
-  C() {}
-}
-main() {
-  return new C();
-}
-''',
-    const {
-'main': '''
-(FunctionDefinition main () () return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (InvokeConstructor C () k0)))
-''',
-'C.': '''
-(ConstructorDefinition (this) () return (
-    )
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''}),
-
-    const TestSpec('''
-class B {}
-class C extends B {
-  C() {}
-}
-main() {
-  return new C();
-}
-''',
-    const {
-'main': '''
-(FunctionDefinition main () () return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (InvokeConstructor C () k0)))
-''',
-'B.':  '''
-(ConstructorDefinition (this) () return (
-    )
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-''',
-'C.': '''
-(ConstructorDefinition (this) () return (
-    )
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''}),
-
-    const TestSpec('''
-class B {
-  B() {}
-}
-class C extends B {}
-main() {
-  return new C();
-}
-''',
-    const {
-'main': '''
-(FunctionDefinition main () () return
-  (LetCont ((k0 (v0)
-      (InvokeContinuation return (v0))))
-    (InvokeConstructor C () k0)))
-''',
-'B.': '''
-(ConstructorDefinition (this) () return (
-    )
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-''',
-'C.':  '''
-(ConstructorDefinition (this) () return (
-    )
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''}),
-  ]),
-
-  const Group('Instance method', const <TestSpec>[
-    const TestSpec('''
-class C {
-  C() {}
-  foo() {}
-}
-main() {
-  return new C().foo();
-}
-''',
-    const {
-'main': '''
-(FunctionDefinition main () () return
-  (LetCont ((k0 (v0)
-      (LetCont ((k1 (v1)
-          (InvokeContinuation return (v1))))
-        (InvokeMethod v0 foo () k1))))
-    (InvokeConstructor C () k0)))
-''',
-'C.foo': '''
-(FunctionDefinition foo (this) () return
-  (LetPrim (v0 (Constant (Null)))
-    (InvokeContinuation return (v0))))
-'''}),
-  ]),
-
-
-  const Group('Try-catch', const <TestSpec>[
-    const TestSpec('''
-main() {
-  try {} catch (e) {}
-}
-''',
-'''
-(FunctionDefinition main () () return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Null)))
-        (InvokeContinuation return (v0)))))
-    (LetHandler ((v1 v2)
-        (InvokeContinuation k0 ()))
-      (InvokeContinuation k0 ()))))
-'''),
-
-    const TestSpec('''
-main() {
-  try {
-    return;
-  } catch (e) {}
-}
-''',
-'''
-(FunctionDefinition main () () return
-  (LetCont ((k0 ()
-      (LetPrim (v0 (Constant (Null)))
-        (InvokeContinuation return (v0)))))
-    (LetHandler ((v1 v2)
-        (InvokeContinuation k0 ()))
-      (LetPrim (v3 (Constant (Null)))
-        (InvokeContinuation return (v3))))))
-'''),
-  ]),
-];
diff --git a/pkg/analyzer2dart/test/sexpr_test.dart b/pkg/analyzer2dart/test/sexpr_test.dart
deleted file mode 100644
index 2c8da58..0000000
--- a/pkg/analyzer2dart/test/sexpr_test.dart
+++ /dev/null
@@ -1,88 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Unittest test of the CPS ir generated by the analyzer2dart compiler.
-
-import 'mock_sdk.dart';
-import 'package:analyzer/file_system/memory_file_system.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:compiler/src/cps_ir/cps_ir_nodes.dart';
-import 'package:compiler/src/cps_ir/cps_ir_nodes_sexpr.dart';
-import 'package:compiler/src/elements/elements.dart' as dart2js;
-import 'package:unittest/unittest.dart';
-
-import '../lib/src/closed_world.dart';
-import '../lib/src/driver.dart';
-import '../lib/src/converted_world.dart';
-import 'output_helper.dart';
-import 'test_helper.dart';
-import 'sexpr_data.dart';
-
-main(List<String> args) {
-  performTests(TEST_DATA, unittester, checkResult, args);
-}
-
-checkResult(TestSpec result) {
-  if (result.skipInAnalyzerFrontend) return;
-  String input = result.input.trim();
-  CollectingOutputProvider outputProvider = new CollectingOutputProvider();
-  MemoryResourceProvider provider = new MemoryResourceProvider();
-  DartSdk sdk = new MockSdk();
-  Driver driver = new Driver(provider, sdk, outputProvider);
-  String rootFile = '/root.dart';
-  provider.newFile(rootFile, input);
-  Source rootSource = driver.setRoot(rootFile);
-  FunctionElement entryPoint = driver.resolveEntryPoint(rootSource);
-  ClosedWorld world = driver.computeWorld(entryPoint);
-  ConvertedWorld convertedWorld = convertWorld(world);
-
-  void checkOutput(String elementName,
-                   dart2js.Element element,
-                   String expectedOutput) {
-    RootNode ir = convertedWorld.getIr(element);
-    if (expectedOutput == null) {
-      expect(ir, isNull,
-          reason: "\nInput:\n${result.input}\n"
-                  "No CPS IR expected for $element");
-    } else {
-      expect(ir, isNotNull,
-          reason: "\nInput:\n${result.input}\n"
-                  "No CPS IR for $element");
-      expectedOutput = expectedOutput.trim();
-      String output = ir.accept(new SExpressionStringifier());
-      expect(output, equals(expectedOutput),
-          reason: "\nInput:\n${result.input}\n"
-                  "Expected for '$elementName':\n$expectedOutput\n"
-                  "Actual for '$elementName':\n$output\n");
-    }
-  }
-
-  if (result.output is String) {
-    checkOutput('main', convertedWorld.mainFunction, result.output);
-  } else {
-    assert(result.output is Map<String, String>);
-    dart2js.LibraryElement mainLibrary = convertedWorld.mainFunction.library;
-    result.output.forEach((String elementName, String output) {
-      bool found = false;
-      List<String> names = <String>[];
-      convertedWorld.resolvedElements.forEach((dart2js.Element element) {
-        if (element.library == mainLibrary) {
-          String name = element.name;
-          if (element.enclosingClass != null) {
-            name = '${element.enclosingClass.name}.$name';
-          }
-          if (name == elementName) {
-            checkOutput(elementName, element, output);
-            found = true;
-          }
-          names.add(name);
-        }
-      });
-      expect(found, isTrue, reason: "'$elementName' not found in $names.");
-    });
-  }
-}
-
diff --git a/pkg/analyzer2dart/test/test_helper.dart b/pkg/analyzer2dart/test/test_helper.dart
deleted file mode 100644
index 86a52b0..0000000
--- a/pkg/analyzer2dart/test/test_helper.dart
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-/// Helpers for defining input/output based unittests through (constant) data.
-
-import 'package:unittest/unittest.dart';
-
-/// A unittest group with a name and a list of input/output results.
-class Group {
-  final String name;
-  final List<TestSpecBase> results;
-
-  const Group(this.name, this.results);
-}
-
-/// A [input] for which a certain processing result is expected.
-class TestSpecBase {
-  final String input;
-
-  const TestSpecBase(this.input);
-}
-
-typedef TestGroup(Group group, RunTest check);
-typedef RunTest(TestSpecBase result);
-
-/// Test [data] using [testGroup] and [check].
-void performTests(List<Group> data,
-                  TestGroup testGroup,
-                  RunTest runTest,
-                  List<String> groupsToRun) {
-  for (Group group in data) {
-    if (groupsToRun.isNotEmpty &&
-        !groupsToRun.contains(group.name)) {
-      // Skip this group.
-      continue;
-    }
-    testGroup(group, runTest);
-  }
-}
-
-/// Test group using unittest.
-unittester(Group group, RunTest runTest) {
-  test(group.name, () {
-    for (TestSpecBase result in group.results) {
-      runTest(result);
-    }
-  });
-}
\ No newline at end of file
diff --git a/pkg/analyzer2dart/test/tree_shaker_test.dart b/pkg/analyzer2dart/test/tree_shaker_test.dart
deleted file mode 100644
index cedb449..0000000
--- a/pkg/analyzer2dart/test/tree_shaker_test.dart
+++ /dev/null
@@ -1,546 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-import 'mock_sdk.dart';
-import 'package:analyzer/file_system/memory_file_system.dart';
-import 'package:analyzer/src/generated/ast.dart';
-import 'package:analyzer/src/generated/element.dart';
-import 'package:analyzer/src/generated/sdk.dart';
-import 'package:analyzer/src/generated/source.dart';
-import 'package:compiler/src/dart2jslib.dart' show NullSink;
-import 'package:unittest/unittest.dart';
-
-import '../lib/src/closed_world.dart';
-import '../lib/src/driver.dart';
-
-main() {
-  test('Toplevel function', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  foo();
-}
-foo() {
-}
-''');
-    helper.assertHasFunction('main');
-    helper.assertHasFunction('foo');
-  });
-
-  test('Toplevel field read', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  return foo;
-}
-var foo;
-var bar;
-''');
-    helper.assertHasFunction('main');
-    helper.assertHasVariable('foo');
-    helper.assertNoVariable('bar');
-  });
-
-  test('Toplevel field write', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  foo = 1;
-}
-var foo;
-var bar;
-''');
-    helper.assertHasFunction('main');
-    helper.assertHasVariable('foo');
-    helper.assertNoVariable('bar');
-  });
-
-  test('Toplevel field invocation', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  return foo();
-}
-var foo;
-var bar;
-''');
-    helper.assertHasFunction('main');
-    helper.assertHasVariable('foo');
-    helper.assertNoVariable('bar');
-  });
-
-  test('Member field invocation', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  void call() {}
-  void baz() {}
-}
-main() {
-  new A();
-  foo();
-}
-var foo;
-var bar;
-''');
-    helper.assertHasFunction('main');
-    helper.assertHasVariable('foo');
-    helper.assertNoVariable('bar');
-    helper.assertHasInstantiatedClass('A');
-    helper.assertHasMethod('A.call');
-    helper.assertNoMethod('A.baz');
-  });
-
-  test('Class instantiation', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  var x = new A();
-}
-class A {}
-class B {}
-''');
-    helper.assertHasInstantiatedClass('A');
-    helper.assertNoInstantiatedClass('B');
-  });
-
-  test('Super class instantiation', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  var x = new B();
-}
-class A {}
-class B extends A {}
-''');
-    helper.assertHasInstantiatedClass('A');
-    helper.assertHasInstantiatedClass('B');
-  });
-
-  test('Method invocation', () {
-    var helper = new TreeShakerTestHelper('''
-main() {
-  var x = new A().foo();
-}
-class A {
-  foo() {}
-  bar() {}
-}
-class B {
-  foo() {}
-  bar() {}
-}
-''');
-    helper.assertHasMethod('A.foo');
-    helper.assertNoMethod('A.bar');
-    helper.assertNoMethod('B.foo');
-    helper.assertNoMethod('B.bar');
-  });
-
-  test('Method invocation on dynamic', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  m1() {}
-  m2() {}
-}
-foo(dynamic x) {
-  x.m1();
-}
-main() {
-  foo(new A());
-}
-''');
-    helper.assertHasMethod('A.m1');
-    helper.assertNoMethod('A.m2');
-  });
-
-  test('Method invocation on dynamic via cascade', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  m1() {}
-  m2() {}
-}
-foo(dynamic x) {
-  x..m1()..m2();
-}
-main() {
-  foo(new A());
-}
-''');
-    helper.assertHasMethod('A.m1');
-    helper.assertHasMethod('A.m2');
-  });
-
-  test('Getter usage', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  get g1 => null;
-  get g2 => null;
-  set g1(x) {}
-  set g2(x) {}
-}
-class B {
-  get g1 => null;
-  get g2 => null;
-  set g1(x) {}
-  set g2(x) {}
-}
-main() {
-  new A().g1;
-}
-''');
-    helper.assertHasGetter('A.g1');
-    helper.assertNoGetter('A.g2');
-    helper.assertNoGetter('B.g1');
-    helper.assertNoGetter('B.g2');
-    helper.assertNoSetter('A.g1');
-    helper.assertNoSetter('A.g2');
-    helper.assertNoSetter('B.g1');
-    helper.assertNoSetter('B.g2');
-  });
-
-  test('Setter usage', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  get g1 => null;
-  get g2 => null;
-  set g1(x) {}
-  set g2(x) {}
-}
-class B {
-  get g1 => null;
-  get g2 => null;
-  set g1(x) {}
-  set g2(x) {}
-}
-main() {
-  new A().g1 = 1;
-}
-''');
-    helper.assertHasSetter('A.g1');
-    helper.assertNoSetter('A.g2');
-    helper.assertNoSetter('B.g1');
-    helper.assertNoSetter('B.g2');
-    helper.assertNoGetter('A.g1');
-    helper.assertNoGetter('A.g2');
-    helper.assertNoGetter('B.g1');
-    helper.assertNoGetter('B.g2');
-  });
-
-  test('Field read', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  var f1;
-  var f2;
-}
-class B {
-  var f1;
-  var f2;
-}
-main() {
-  new A().f1;
-}
-''');
-    helper.assertHasField('A.f1');
-    helper.assertNoField('A.f2');
-    helper.assertNoField('B.f1');
-    helper.assertNoField('B.f2');
-  });
-
-  test('Field write', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  var f1;
-  var f2;
-}
-class B {
-  var f1;
-  var f2;
-}
-main() {
-  new A().f1 = 1;
-}
-''');
-    helper.assertHasField('A.f1');
-    helper.assertNoField('A.f2');
-    helper.assertNoField('B.f1');
-    helper.assertNoField('B.f2');
-  });
-
-  test('Ordinary constructor with initializer list', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  A() : x = f();
-  var x;
-  foo() {}
-}
-f() {}
-main() {
-  new A().foo();
-}
-''');
-    helper.assertHasMethod('A.foo');
-    helper.assertHasFunction('f');
-  });
-
-  test('Redirecting constructor', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  A.a1() : this.a2();
-  A.a2();
-  foo() {}
-}
-main() {
-  new A.a1().foo();
-}
-''');
-    helper.assertHasMethod('A.foo');
-  });
-
-  test('Factory constructor', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  factory A() {
-    return new B();
-  }
-  foo() {}
-}
-class B {
-  B();
-  foo() {}
-}
-main() {
-  new A().foo();
-}
-''');
-    helper.assertHasMethod('B.foo');
-    helper.assertNoMethod('A.foo');
-  });
-
-  test('Redirecting factory constructor', () {
-    var helper = new TreeShakerTestHelper('''
-class A {
-  factory A() = B;
-  foo() {}
-}
-class B {
-  B();
-  foo() {}
-}
-main() {
-  new A().foo();
-}
-''');
-    helper.assertHasMethod('B.foo');
-    helper.assertNoMethod('A.foo');
-  });
-}
-
-class TreeShakerTestHelper {
-  /**
-   * The name of the root file.
-   */
-  String rootFile = '/root.dart';
-
-  /**
-   * ClosedWorld that resulted from tree shaking.
-   */
-  ClosedWorld world;
-
-  /**
-   * Functions contained in [world], indexed by name.
-   */
-  Map<String, FunctionDeclaration> functions = <String, FunctionDeclaration>{};
-
-  /**
-   * Methods contained in [world], indexed by className.methodName.
-   */
-  Map<String, MethodDeclaration> methods = <String, MethodDeclaration>{};
-
-  /**
-   * Getters contained in [world], indexed by className.propertyName.
-   */
-  Map<String, MethodDeclaration> getters = <String, MethodDeclaration>{};
-
-  /**
-   * Setters contained in [world], indexed by className.propertyName.
-   */
-  Map<String, MethodDeclaration> setters = <String, MethodDeclaration>{};
-
-  /**
-   * Fields contained in [world], indexed by className.fieldName.
-   */
-  Map<String, VariableDeclaration> fields = <String, VariableDeclaration>{};
-
-  /**
-   * Top level variables contained in [world], indexed by name.
-   */
-  Map<String, VariableDeclaration> variables = <String, VariableDeclaration>{};
-
-  /**
-   * Classes instantiated in [world], indexed by name.
-   */
-  Map<String, ClassDeclaration> instantiatedClasses = <String,
-      ClassDeclaration>{};
-
-  /**
-   * Create a TreeShakerTestHelper based on the given file contents.
-   */
-  TreeShakerTestHelper(String contents) {
-    MemoryResourceProvider provider = new MemoryResourceProvider();
-    DartSdk sdk = new MockSdk();
-    Driver driver = new Driver(provider, sdk, NullSink.outputProvider);
-    provider.newFile(rootFile, contents);
-    Source rootSource = driver.setRoot(rootFile);
-    FunctionElement entryPoint = driver.resolveEntryPoint(rootSource);
-    world = driver.computeWorld(entryPoint);
-    world.executableElements.forEach(
-        (ExecutableElement element, Declaration node) {
-      if (element is FunctionElement) {
-        FunctionDeclaration declaration = node as FunctionDeclaration;
-        expect(declaration, isNotNull);
-        expect(declaration.element, equals(element));
-        functions[element.name] = declaration;
-      } else if (element is MethodElement) {
-        MethodDeclaration declaration = node as MethodDeclaration;
-        expect(declaration, isNotNull);
-        expect(declaration.element, equals(element));
-        methods['${element.enclosingElement.name}.${element.name}'] =
-            declaration;
-      } else if (element is PropertyAccessorElement) {
-        MethodDeclaration declaration = node as MethodDeclaration;
-        expect(declaration, isNotNull);
-        expect(declaration.element, equals(element));
-        if (declaration.isGetter) {
-          getters['${element.enclosingElement.name}.${element.name}'] =
-              declaration;
-        } else if (declaration.isSetter) {
-          setters['${element.enclosingElement.name}.${element.displayName}'] =
-              declaration;
-        } else {
-          fail('Unexpected property accessor (neither getter nor setter)');
-        }
-      }
-    });
-    world.instantiatedClasses.forEach(
-        (ClassElement element, ClassDeclaration declaration) {
-      expect(declaration, isNotNull);
-      expect(declaration.element, equals(element));
-      instantiatedClasses[element.name] = declaration;
-    });
-    world.fields.forEach(
-        (FieldElement element, VariableDeclaration declaration) {
-      expect(declaration, isNotNull);
-      expect(declaration.element, equals(element));
-      fields['${element.enclosingElement.name}.${element.name}'] = declaration;
-    });
-    world.variables.forEach(
-        (TopLevelVariableElement element, VariableDeclaration declaration) {
-      expect(declaration, isNotNull);
-      expect(declaration.element, equals(element));
-      variables['${element.name}'] = declaration;
-    });
-  }
-
-  /**
-   * Asserts that [world] contains a field with the given qualified name.
-   */
-  void assertHasField(String qualifiedName) {
-    expect(fields, contains(qualifiedName));
-  }
-
-  /**
-   * Asserts that [world] contains a top level variable with the given name.
-   */
-  void assertHasVariable(String name) {
-    expect(variables, contains(name));
-  }
-
-  /**
-   * Asserts that [world] contains a top-level function with the given name.
-   */
-  void assertHasFunction(String name) {
-    expect(functions, contains(name));
-  }
-
-  /**
-   * Asserts that [world] contains a getter with the given qualified name.
-   */
-  void assertHasGetter(String qualifiedName) {
-    expect(getters, contains(qualifiedName));
-  }
-
-  /**
-   * Asserts that [world] contains a setter with the given qualified name.
-   */
-  void assertHasSetter(String qualifiedName) {
-    expect(setters, contains(qualifiedName));
-  }
-
-  /**
-   * Asserts that [world] instantiates a class with the given name.
-   */
-  void assertHasInstantiatedClass(String name) {
-    expect(instantiatedClasses, contains(name));
-  }
-
-  /**
-   * Asserts that [world] contains a method with the given qualified name.
-   *
-   * [qualifiedName] - the qualified name in form 'className.methodName'.
-   */
-  void assertHasMethod(String qualifiedName) {
-    expect(methods, contains(qualifiedName));
-  }
-
-  /**
-   * Asserts that [world] doesn't contain a field with the given qualified
-   * name.
-   */
-  void assertNoField(String qualifiedName) {
-    expect(fields, isNot(contains(qualifiedName)));
-  }
-
-  /**
-   * Asserts that [world] doesn't contain a top level variable with the given
-   * name.
-   */
-  void assertNoVariable(String name) {
-    expect(variables, isNot(contains(name)));
-  }
-
-  /**
-   * Asserts that [world] doesn't contain a top-level function with the given
-   * name.
-   */
-  void assertNoFunction(String name) {
-    expect(functions, isNot(contains(name)));
-  }
-
-  /**
-   * Asserts that [world] doesn't contain a getter with the given qualified
-   * name.
-   */
-  void assertNoGetter(String qualifiedName) {
-    expect(getters, isNot(contains(qualifiedName)));
-  }
-
-  /**
-   * Asserts that [world] doesn't contain a setter with the given qualified
-   * name.
-   */
-  void assertNoSetter(String qualifiedName) {
-    expect(setters, isNot(contains(qualifiedName)));
-  }
-
-  /**
-   * Asserts that [world] doesn't instantiate a class with the given name.
-   */
-  void assertNoInstantiatedClass(String name) {
-    expect(instantiatedClasses, isNot(contains(name)));
-  }
-
-  /**
-   * Asserts that [world] doesn't contain a method with the given qualified
-   * name.
-   *
-   * [qualifiedName] - the qualified name in form 'className.methodName'.
-   */
-  void assertNoMethod(String qualifiedName) {
-    expect(methods, isNot(contains(qualifiedName)));
-  }
-}
diff --git a/pkg/analyzer_cli/lib/src/analyzer_impl.dart b/pkg/analyzer_cli/lib/src/analyzer_impl.dart
index 6c4fbcd..da846c2 100644
--- a/pkg/analyzer_cli/lib/src/analyzer_impl.dart
+++ b/pkg/analyzer_cli/lib/src/analyzer_impl.dart
@@ -18,6 +18,7 @@
 import 'package:analyzer/src/generated/utilities_general.dart';
 import 'package:analyzer_cli/src/driver.dart';
 import 'package:analyzer_cli/src/error_formatter.dart';
+import 'package:analyzer_cli/src/incremental_analyzer.dart';
 import 'package:analyzer_cli/src/options.dart';
 import 'package:path/path.dart' as pathos;
 
@@ -38,6 +39,8 @@
 
   final AnalysisContext context;
 
+  final IncrementalAnalysisSession incrementalSession;
+
   /// Accumulated analysis statistics.
   final AnalysisStats stats;
 
@@ -60,8 +63,8 @@
   /// specified the "--package-warnings" option.
   String _selfPackageName;
 
-  AnalyzerImpl(this.context, this.librarySource, this.options, this.stats,
-      this.startTime);
+  AnalyzerImpl(this.context, this.incrementalSession, this.librarySource,
+      this.options, this.stats, this.startTime);
 
   /// Returns the maximal [ErrorSeverity] of the recorded errors.
   ErrorSeverity get maxErrorSeverity {
@@ -135,6 +138,7 @@
     var units = new Set<CompilationUnitElement>();
     var libraries = new Set<LibraryElement>();
     addLibrarySources(library, libraries, units);
+    incrementalSession?.setAnalyzedSources(sources);
   }
 
   /// Setup local fields such as the analysis context for analysis.
@@ -255,14 +259,12 @@
   static ErrorSeverity computeSeverity(
       AnalysisError error, CommandLineOptions options,
       [AnalysisContext context]) {
-    bool isStrongMode = false;
     if (context != null) {
       ErrorProcessor processor = ErrorProcessor.getProcessor(context, error);
       // If there is a processor for this error, defer to it.
       if (processor != null) {
         return processor.severity;
       }
-      isStrongMode = context.analysisOptions.strongMode;
     }
 
     if (!options.enableTypeChecks &&
@@ -272,10 +274,6 @@
       return ErrorSeverity.ERROR;
     } else if (options.lintsAreFatal && error.errorCode is LintCode) {
       return ErrorSeverity.ERROR;
-    } else if (isStrongMode &&
-        error is StaticWarningCode &&
-        (error as StaticWarningCode).isStrongModeError) {
-      return ErrorSeverity.ERROR;
     }
     return error.errorCode.errorSeverity;
   }
diff --git a/pkg/analyzer_cli/lib/src/build_mode.dart b/pkg/analyzer_cli/lib/src/build_mode.dart
index f6b3ddd..f79e401 100644
--- a/pkg/analyzer_cli/lib/src/build_mode.dart
+++ b/pkg/analyzer_cli/lib/src/build_mode.dart
@@ -184,8 +184,10 @@
 
     // Write summary.
     assembler = new PackageBundleAssembler(
-        excludeHashes: options.buildSummaryExcludeInformative);
-    if (options.buildSummaryOutput != null) {
+        excludeHashes: options.buildSummaryExcludeInformative &&
+            options.buildSummaryOutputSemantic == null);
+    if (options.buildSummaryOutput != null ||
+        options.buildSummaryOutputSemantic != null) {
       if (options.buildSummaryOnlyAst && !options.buildSummaryFallback) {
         _serializeAstBasedSummary(explicitSources);
       } else {
@@ -209,8 +211,17 @@
       if (options.buildSummaryExcludeInformative) {
         sdkBundle.flushInformative();
       }
-      io.File file = new io.File(options.buildSummaryOutput);
-      file.writeAsBytesSync(sdkBundle.toBuffer(), mode: io.FileMode.WRITE_ONLY);
+      if (options.buildSummaryOutput != null) {
+        io.File file = new io.File(options.buildSummaryOutput);
+        file.writeAsBytesSync(sdkBundle.toBuffer(),
+            mode: io.FileMode.WRITE_ONLY);
+      }
+      if (options.buildSummaryOutputSemantic != null) {
+        sdkBundle.flushInformative();
+        io.File file = new io.File(options.buildSummaryOutputSemantic);
+        file.writeAsBytesSync(sdkBundle.toBuffer(),
+            mode: io.FileMode.WRITE_ONLY);
+      }
     }
 
     if (options.buildSummaryOnly) {
@@ -257,7 +268,7 @@
           Driver.createAnalysisOptionsForCommandLineOptions(options);
       directorySdk.useSummary = !options.buildSummaryOnlyAst;
       sdk = directorySdk;
-      sdkBundle = directorySdk.getSummarySdkBundle();
+      sdkBundle = directorySdk.getSummarySdkBundle(options.strongMode);
     }
 
     // In AST mode include SDK bundle to avoid parsing SDK sources.
diff --git a/pkg/analyzer_cli/lib/src/driver.dart b/pkg/analyzer_cli/lib/src/driver.dart
index 4d80c61..44cc1d1 100644
--- a/pkg/analyzer_cli/lib/src/driver.dart
+++ b/pkg/analyzer_cli/lib/src/driver.dart
@@ -10,7 +10,6 @@
 
 import 'package:analyzer/file_system/file_system.dart' as file_system;
 import 'package:analyzer/file_system/physical_file_system.dart';
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/options.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer/source/analysis_options_provider.dart';
@@ -37,6 +36,7 @@
 import 'package:analyzer_cli/src/analyzer_impl.dart';
 import 'package:analyzer_cli/src/build_mode.dart';
 import 'package:analyzer_cli/src/error_formatter.dart';
+import 'package:analyzer_cli/src/incremental_analyzer.dart';
 import 'package:analyzer_cli/src/options.dart';
 import 'package:analyzer_cli/src/perf_report.dart';
 import 'package:analyzer_cli/starter.dart';
@@ -86,8 +86,7 @@
   /// creation.
   CommandLineOptions _previousOptions;
 
-  @override
-  EmbeddedResolverProvider embeddedUriResolverProvider;
+  IncrementalAnalysisSession incrementalSession;
 
   @override
   ResolverProvider packageResolverProvider;
@@ -217,6 +216,8 @@
       libUris.add(source.uri);
     }
 
+    incrementalSession?.finish();
+
     // Check that each part has a corresponding source in the input list.
     for (Source part in parts) {
       bool found = false;
@@ -297,6 +298,9 @@
     if (options.enableSuperMixins != _previousOptions.enableSuperMixins) {
       return false;
     }
+    if (options.incrementalCachePath != _previousOptions.incrementalCachePath) {
+      return false;
+    }
     return true;
   }
 
@@ -338,25 +342,13 @@
           PhysicalResourceProvider.INSTANCE.getResource('.');
       UriResolver resolver = packageResolverProvider(folder);
       if (resolver != null) {
-        UriResolver sdkResolver;
-
-        // Check for a resolver provider.
-        if (embeddedUriResolverProvider != null) {
-          EmbedderUriResolver embedderUriResolver =
-              embeddedUriResolverProvider(folder);
-          if (embedderUriResolver != null && embedderUriResolver.length != 0) {
-            sdkResolver = embedderUriResolver;
-          }
-        }
-
-        // Default to a Dart URI resolver if no embedder is found.
-        sdkResolver ??= new DartUriResolver(sdk);
+        UriResolver sdkResolver = new DartUriResolver(sdk);
 
         // TODO(brianwilkerson) This doesn't handle sdk extensions.
         List<UriResolver> resolvers = <UriResolver>[
           sdkResolver,
           resolver,
-          new FileUriResolver()
+          new file_system.ResourceUriResolver(PhysicalResourceProvider.INSTANCE)
         ];
         return new SourceFactory(resolvers);
       }
@@ -396,16 +388,15 @@
     // 'dart:' URIs come first.
 
     // Setup embedding.
-    EmbedderUriResolver embedderUriResolver =
-        new EmbedderUriResolver(embedderMap);
-    if (embedderUriResolver.length == 0) {
+    EmbedderSdk embedderSdk = new EmbedderSdk(embedderMap);
+    if (embedderSdk.libraryMap.size() == 0) {
       // The embedder uri resolver has no mappings. Use the default Dart SDK
       // uri resolver.
       resolvers.add(new DartUriResolver(sdk));
     } else {
       // The embedder uri resolver has mappings, use it instead of the default
       // Dart SDK uri resolver.
-      resolvers.add(embedderUriResolver);
+      resolvers.add(new DartUriResolver(embedderSdk));
     }
 
     // Next SdkExts.
@@ -419,7 +410,8 @@
     }
 
     // Finally files.
-    resolvers.add(new FileUriResolver());
+    resolvers.add(
+        new file_system.ResourceUriResolver(PhysicalResourceProvider.INSTANCE));
 
     return new SourceFactory(resolvers, packageInfo.packages);
   }
@@ -493,7 +485,7 @@
 
     // Process embedders.
     Map<file_system.Folder, YamlMap> embedderMap =
-        _findEmbedders(packageInfo.packageMap);
+        new EmbedderYamlLocator(packageInfo.packageMap).embedderYamls;
 
     // Scan for SDK extenders.
     bool hasSdkExt = _hasSdkExt(packageInfo.packageMap?.values);
@@ -510,6 +502,8 @@
         _chooseUriResolutionPolicy(options, embedderMap, packageInfo);
 
     _context.sourceFactory = sourceFactory;
+
+    incrementalSession = configureIncrementalAnalysis(options, context);
   }
 
   /// Return discovered packagespec, or `null` if none is found.
@@ -526,14 +520,6 @@
     return null;
   }
 
-  Map<file_system.Folder, YamlMap> _findEmbedders(
-      Map<String, List<file_system.Folder>> packageMap) {
-    EmbedderYamlLocator locator =
-        (_context as InternalAnalysisContext).embedderYamlLocator;
-    locator.refresh(packageMap);
-    return locator.embedderYamls;
-  }
-
   _PackageInfo _findPackages(CommandLineOptions options) {
     if (packageResolverProvider != null) {
       // The resolver provider will do all the work later.
@@ -617,8 +603,8 @@
   /// Analyze a single source.
   ErrorSeverity _runAnalyzer(Source source, CommandLineOptions options) {
     int startTime = currentTimeMillis();
-    AnalyzerImpl analyzer =
-        new AnalyzerImpl(_context, source, options, stats, startTime);
+    AnalyzerImpl analyzer = new AnalyzerImpl(
+        _context, incrementalSession, source, options, stats, startTime);
     var errorSeverity = analyzer.analyzeSync();
     if (errorSeverity == ErrorSeverity.ERROR) {
       exitCode = errorSeverity.ordinal;
@@ -654,6 +640,7 @@
   static AnalysisOptionsImpl createAnalysisOptionsForCommandLineOptions(
       CommandLineOptions options) {
     AnalysisOptionsImpl contextOptions = new AnalysisOptionsImpl();
+    contextOptions.trackCacheDependencies = false;
     contextOptions.hint = !options.disableHints;
     contextOptions.enableStrictCallChecks = options.enableStrictCallChecks;
     contextOptions.enableSuperMixins = options.enableSuperMixins;
diff --git a/pkg/analyzer_cli/lib/src/error_formatter.dart b/pkg/analyzer_cli/lib/src/error_formatter.dart
index c43fcd8..e0c9a8e 100644
--- a/pkg/analyzer_cli/lib/src/error_formatter.dart
+++ b/pkg/analyzer_cli/lib/src/error_formatter.dart
@@ -105,9 +105,13 @@
 /// The two format options are a user consumable format and a machine consumable
 /// format.
 class ErrorFormatter {
+  static final int _pipeCodeUnit = '|'.codeUnitAt(0);
+  static final int _slashCodeUnit = '\\'.codeUnitAt(0);
+
   final StringSink out;
   final CommandLineOptions options;
   final AnalysisStats stats;
+
   final _SeverityProcessor processSeverity;
 
   ErrorFormatter(this.out, this.options, this.stats,
@@ -167,7 +171,6 @@
     }
     out.writeln();
   }
-
   void formatErrors(List<AnalysisErrorInfo> errorInfos) {
     stats.unfilteredCount += errorInfos.length;
 
@@ -223,9 +226,9 @@
   }
 
   static String escapePipe(String input) {
-    var result = new StringBuffer();
-    for (var c in input.codeUnits) {
-      if (c == '\\' || c == '|') {
+    StringBuffer result = new StringBuffer();
+    for (int c in input.codeUnits) {
+      if (c == _slashCodeUnit || c == _pipeCodeUnit) {
         result.write('\\');
       }
       result.writeCharCode(c);
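// A small usage sketch (not part of the patch above): the old escapePipe
// compared each code unit (an int) against the strings '\\' and '|', which
// never matched, so nothing was escaped in machine-format output. With the
// code-unit comparison above both characters are escaped. This assumes
// escapePipe remains a static member of ErrorFormatter, as the hunk suggests.
import 'package:analyzer_cli/src/error_formatter.dart';

void main() {
  print(ErrorFormatter.escapePipe(r'path|with\pipes'));
  // Expected output: path\|with\\pipes
}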
diff --git a/pkg/analyzer_cli/lib/src/incremental_analyzer.dart b/pkg/analyzer_cli/lib/src/incremental_analyzer.dart
new file mode 100644
index 0000000..d57327a
--- /dev/null
+++ b/pkg/analyzer_cli/lib/src/incremental_analyzer.dart
@@ -0,0 +1,226 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library analyzer_cli.src.incremental_analyzer;
+
+import 'dart:io' as io;
+
+import 'package:analyzer/dart/element/element.dart';
+import 'package:analyzer/file_system/physical_file_system.dart';
+import 'package:analyzer/src/context/cache.dart';
+import 'package:analyzer/src/context/context.dart';
+import 'package:analyzer/src/generated/engine.dart';
+import 'package:analyzer/src/generated/error.dart';
+import 'package:analyzer/src/generated/sdk.dart';
+import 'package:analyzer/src/generated/sdk_io.dart';
+import 'package:analyzer/src/generated/source.dart';
+import 'package:analyzer/src/summary/incremental_cache.dart';
+import 'package:analyzer/src/summary/package_bundle_reader.dart';
+import 'package:analyzer/src/task/dart.dart';
+import 'package:analyzer/task/dart.dart';
+import 'package:analyzer/task/general.dart';
+import 'package:analyzer/task/model.dart';
+import 'package:analyzer_cli/src/options.dart';
+
+/**
+ * If the given [options] enable incremental analysis, and the [context] and
+ * Dart SDK implementations support it, configure incremental analysis for
+ * the given [context] and return a handle for working with it.
+ */
+IncrementalAnalysisSession configureIncrementalAnalysis(
+    CommandLineOptions options, AnalysisContext context) {
+  String cachePath = options.incrementalCachePath;
+  DartSdk sdk = context.sourceFactory.dartSdk;
+  // If the implementations are supported, configure incremental analysis.
+  if (cachePath != null &&
+      context is InternalAnalysisContext &&
+      sdk is DirectoryBasedDartSdk) {
+    context.typeProvider = sdk.context.typeProvider;
+    // Set the result provider from the cache.
+    CacheStorage storage = new FolderCacheStorage(
+        PhysicalResourceProvider.INSTANCE.getFolder(cachePath),
+        '${io.pid}.temp');
+    List<int> configSalt = <int>[
+      context.analysisOptions.encodeCrossContextOptions()
+    ];
+    IncrementalCache cache = new IncrementalCache(storage, context, configSalt);
+    context.resultProvider = new _CacheBasedResultProvider(context, cache);
+    // Listen for new libraries to put into the cache.
+    _IncrementalAnalysisSession session =
+        new _IncrementalAnalysisSession(options, storage, context, cache);
+    context
+        .onResultChanged(LIBRARY_ELEMENT1)
+        .listen((ResultChangedEvent event) {
+      if (event.wasComputed) {
+        session.newLibrarySources.add(event.target.source);
+      }
+    });
+    return session;
+  }
+  // Incremental analysis cannot be used.
+  return null;
+}
+
+/**
+ * Interface that is exposed to the clients of incremental analysis.
+ */
+abstract class IncrementalAnalysisSession {
+  /**
+   * Finish tasks required after incremental analysis - save results into the
+   * cache, evict old results, etc.
+   */
+  void finish();
+
+  /**
+   * Sets the set of [Source]s analyzed in the context, both explicit and
+   * implicit, for which errors might be requested.  This set is used to compute
+   * containing libraries for every source in the context.
+   */
+  void setAnalyzedSources(Iterable<Source> sources);
+}
+
+/**
+ * The [ResultProvider] that provides results from [IncrementalCache].
+ */
+class _CacheBasedResultProvider extends ResynthesizerResultProvider {
+  final IncrementalCache cache;
+
+  final Set<Source> sourcesWithSummaries = new Set<Source>();
+  final Set<Source> sourcesWithoutSummaries = new Set<Source>();
+  final Set<String> addedLibraryBundleIds = new Set<String>();
+
+  _CacheBasedResultProvider(InternalAnalysisContext context, this.cache)
+      : super(context, new SummaryDataStore(<String>[])) {
+    AnalysisContext sdkContext = context.sourceFactory.dartSdk.context;
+    createResynthesizer(sdkContext, sdkContext.typeProvider);
+  }
+
+  @override
+  bool compute(CacheEntry entry, ResultDescriptor result) {
+    AnalysisTarget target = entry.target;
+    // Source based results.
+    if (target is Source) {
+      if (result == SOURCE_KIND) {
+        SourceKind kind = cache.getSourceKind(target);
+        if (kind != null) {
+          entry.setValue(result, kind, TargetedResult.EMPTY_LIST);
+          return true;
+        } else {
+          return false;
+        }
+      }
+      if (result == INCLUDED_PARTS) {
+        List<Source> parts = cache.getLibraryParts(target);
+        if (parts != null) {
+          entry.setValue(result, parts, TargetedResult.EMPTY_LIST);
+          return true;
+        } else {
+          return false;
+        }
+      }
+      if (result == DART_ERRORS) {
+        List<Source> librarySources = context.getLibrariesContaining(target);
+        List<List<AnalysisError>> errorList = <List<AnalysisError>>[];
+        for (Source librarySource in librarySources) {
+          List<AnalysisError> errors =
+              cache.getSourceErrorsInLibrary(librarySource, target);
+          if (errors == null) {
+            return false;
+          }
+          errorList.add(errors);
+        }
+        List<AnalysisError> mergedErrors = AnalysisError.mergeLists(errorList);
+        // Filter the errors.
+        IgnoreInfo ignoreInfo = context.getResult(target, IGNORE_INFO);
+        LineInfo lineInfo = context.getResult(target, LINE_INFO);
+        List<AnalysisError> filteredErrors =
+            DartErrorsTask.filterIgnored(mergedErrors, ignoreInfo, lineInfo);
+        // Set the result.
+        entry.setValue(result, filteredErrors, TargetedResult.EMPTY_LIST);
+        return true;
+      }
+    }
+    return super.compute(entry, result);
+  }
+
+  @override
+  bool hasResultsForSource(Source source) {
+    // Check cache states.
+    if (sourcesWithSummaries.contains(source)) {
+      return true;
+    }
+    if (sourcesWithoutSummaries.contains(source)) {
+      return false;
+    }
+    // Try to load bundles.
+    List<LibraryBundleWithId> bundles = cache.getLibraryClosureBundles(source);
+    if (bundles == null) {
+      sourcesWithoutSummaries.add(source);
+      return false;
+    }
+    // Fill the resynthesizer.
+    sourcesWithSummaries.add(source);
+    for (LibraryBundleWithId bundleWithId in bundles) {
+      if (addedLibraryBundleIds.add(bundleWithId.id)) {
+        addBundle(null, bundleWithId.bundle);
+      }
+    }
+    return true;
+  }
+}
+
+class _IncrementalAnalysisSession implements IncrementalAnalysisSession {
+  final CommandLineOptions commandLineOptions;
+  final CacheStorage cacheStorage;
+  final AnalysisContext context;
+  final IncrementalCache cache;
+
+  final Set<Source> newLibrarySources = new Set<Source>();
+
+  _IncrementalAnalysisSession(
+      this.commandLineOptions, this.cacheStorage, this.context, this.cache);
+
+  @override
+  void finish() {
+    // Finish computing new libraries and put them into the cache.
+    for (Source librarySource in newLibrarySources) {
+      if (!commandLineOptions.machineFormat) {
+        print('Compute library element for $librarySource');
+      }
+      _putLibrary(librarySource);
+    }
+    // Compact the cache.
+    cacheStorage.compact();
+  }
+
+  @override
+  void setAnalyzedSources(Iterable<Source> sources) {
+    for (Source source in sources) {
+      SourceKind kind = context.computeKindOf(source);
+      if (kind == SourceKind.LIBRARY) {
+        context.computeResult(source, LINE_INFO);
+        context.computeResult(source, IGNORE_INFO);
+        context.computeResult(source, INCLUDED_PARTS);
+      }
+    }
+  }
+
+  void _putLibrary(Source librarySource) {
+    LibraryElement libraryElement =
+        context.computeResult(librarySource, LIBRARY_ELEMENT);
+    try {
+      cache.putLibrary(libraryElement);
+    } catch (e) {
+      return;
+    }
+    // Write errors for the library units.
+    for (CompilationUnitElement unit in libraryElement.units) {
+      Source unitSource = unit.source;
+      List<AnalysisError> errors = context.computeResult(
+          new LibrarySpecificUnit(librarySource, unitSource),
+          LIBRARY_UNIT_ERRORS);
+      cache.putSourceErrorsInLibrary(librarySource, unitSource, errors);
+    }
+  }
+}
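// A minimal sketch (not part of the patch) of how the hooks above are driven,
// mirroring the calls the command-line driver makes: configure incremental
// analysis once per context, report the analyzed sources, and finish to flush
// newly computed libraries into the cache. The function name is illustrative.
import 'package:analyzer/src/generated/engine.dart';
import 'package:analyzer/src/generated/source.dart';
import 'package:analyzer_cli/src/incremental_analyzer.dart';
import 'package:analyzer_cli/src/options.dart';

void analyzeIncrementally(CommandLineOptions options, AnalysisContext context,
    Iterable<Source> sources) {
  // Returns null when --incremental-cache-path is not set or the context/SDK
  // implementations do not support incremental analysis.
  IncrementalAnalysisSession session =
      configureIncrementalAnalysis(options, context);
  // ... run the usual per-source analysis here ...
  session?.setAnalyzedSources(sources);
  session?.finish();
}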
diff --git a/pkg/analyzer_cli/lib/src/options.dart b/pkg/analyzer_cli/lib/src/options.dart
index 5a2e939..3706464 100644
--- a/pkg/analyzer_cli/lib/src/options.dart
+++ b/pkg/analyzer_cli/lib/src/options.dart
@@ -62,6 +62,10 @@
   /// The path to output the summary when creating summaries in build mode.
   final String buildSummaryOutput;
 
+  /// The path to output the semantic-only summary when creating summaries in
+  /// build mode.
+  final String buildSummaryOutputSemantic;
+
   /// Whether to output a summary in "fallback mode".
   final bool buildSummaryFallback;
 
@@ -112,6 +116,9 @@
   /// Whether to use machine format for error display
   final bool machineFormat;
 
+  /// The path to the root folder of the incremental cache.
+  final String incrementalCachePath;
+
   /// The path to the package root
   final String packageRootPath;
 
@@ -160,6 +167,7 @@
         buildSummaryExcludeInformative =
             args['build-summary-exclude-informative'],
         buildSummaryOutput = args['build-summary-output'],
+        buildSummaryOutputSemantic = args['build-summary-output-semantic'],
         buildSuppressExitCode = args['build-suppress-exit-code'],
         dartSdkPath = args['dart-sdk'],
         dartSdkSummaryPath = args['dart-sdk-summary'],
@@ -176,6 +184,7 @@
         lints = args['lints'],
         log = args['log'],
         machineFormat = args['machine'] || args['format'] == 'machine',
+        incrementalCachePath = args['incremental-cache-path'],
         packageConfigPath = args['packages'],
         packageRootPath = args['package-root'],
         perfReport = args['x-perf-report'],
@@ -351,6 +360,13 @@
           allowMultiple: true,
           splitCommas: false)
       //
+      // Incremental analysis.
+      //
+      ..addOption('incremental-cache-path',
+          help: 'The path to the folder with information to support '
+              'incremental analysis, e.g. summary files, errors, etc.',
+          hide: true)
+      //
       // Build mode.
       //
       ..addFlag('persistent_worker',
@@ -374,8 +390,12 @@
           allowMultiple: true,
           hide: true)
       ..addOption('build-summary-output',
-          help: 'Specifies the path to the file where the summary information '
-              'should be written.',
+          help: 'Specifies the path to the file where the full summary '
+              'information should be written.',
+          hide: true)
+      ..addOption('build-summary-output-semantic',
+          help: 'Specifies the path to the file where the semantic summary '
+              'information should be written.',
           hide: true)
       ..addFlag('build-summary-only',
           help: 'Disable analysis (only generate summaries).',
@@ -393,7 +413,8 @@
           negatable: false,
           hide: true)
       ..addFlag('build-summary-exclude-informative',
-          help: 'Exclude @informative information (docs, offsets, etc).',
+          help: 'Exclude @informative information (docs, offsets, etc).  '
+              'Deprecated: please use --build-summary-output-semantic instead.',
           defaultsTo: false,
           negatable: false,
           hide: true)
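// A hedged example of the two options added above, assuming the
// CommandLineOptions.parse entry point used by the driver and a discoverable
// Dart SDK; all paths are illustrative. --incremental-cache-path (hidden)
// points at the incremental-analysis cache folder, and
// --build-summary-output-semantic writes a summary with informative data
// (docs, offsets) flushed.
import 'package:analyzer_cli/src/options.dart';

void main() {
  CommandLineOptions options = CommandLineOptions.parse([
    '--incremental-cache-path=/tmp/analyzer_cache',
    'lib/main.dart',
  ]);
  print(options.incrementalCachePath); // /tmp/analyzer_cache

  CommandLineOptions buildOptions = CommandLineOptions.parse([
    '--build-mode',
    '--build-summary-output-semantic=/tmp/p.semantic.sum',
    'package:p/foo.dart|/path/to/p/lib/foo.dart',
  ]);
  print(buildOptions.buildSummaryOutputSemantic); // /tmp/p.semantic.sum
}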
diff --git a/pkg/analyzer_cli/lib/starter.dart b/pkg/analyzer_cli/lib/starter.dart
index e42d39a..a784a75 100644
--- a/pkg/analyzer_cli/lib/starter.dart
+++ b/pkg/analyzer_cli/lib/starter.dart
@@ -4,7 +4,6 @@
 
 library analyzer_cli.starter;
 
-import 'package:analyzer/plugin/embedded_resolver_provider.dart';
 import 'package:analyzer/plugin/resolver_provider.dart';
 import 'package:analyzer_cli/src/driver.dart';
 import 'package:plugin/plugin.dart';
@@ -23,14 +22,6 @@
   factory CommandLineStarter() = Driver;
 
   /**
-   * Set the embedded resolver provider used to override the way embedded
-   * library URI's are resolved in some contexts. The provider should return
-   * `null` if the embedded library URI resolution scheme should be used
-   * instead.
-   */
-  void set embeddedUriResolverProvider(EmbeddedResolverProvider provider);
-
-  /**
    * Set the package resolver provider used to override the way package URI's
    * are resolved in some contexts. The provider should return `null` if the
    * default package resolution scheme should be used instead.
diff --git a/pkg/analyzer_cli/pubspec.yaml b/pkg/analyzer_cli/pubspec.yaml
index a0ded9b..4e45320 100644
--- a/pkg/analyzer_cli/pubspec.yaml
+++ b/pkg/analyzer_cli/pubspec.yaml
@@ -11,7 +11,7 @@
   bazel_worker: ^0.1.0
   cli_util: ^0.0.1
   linter: ^0.1.16
-  package_config: ^0.1.1
+  package_config: ^0.1.5
   plugin: '>=0.1.0 <0.3.0'
   protobuf: ^0.5.0
   yaml: ^2.1.2
diff --git a/pkg/analyzer_cli/test/options_test.dart b/pkg/analyzer_cli/test/options_test.dart
index 29bde19..50ee98c 100644
--- a/pkg/analyzer_cli/test/options_test.dart
+++ b/pkg/analyzer_cli/test/options_test.dart
@@ -25,6 +25,7 @@
         expect(options.buildSummaryInputs, isEmpty);
         expect(options.buildSummaryOnly, isFalse);
         expect(options.buildSummaryOutput, isNull);
+        expect(options.buildSummaryOutputSemantic, isNull);
         expect(options.buildSuppressExitCode, isFalse);
         expect(options.dartSdkPath, isNotNull);
         expect(options.disableHints, isFalse);
@@ -318,6 +319,16 @@
     expect(options.buildSummaryOutput, '//path/to/output.sum');
   }
 
+  test_buildSummaryOutputSemantic() {
+    _parse([
+      '--build-mode',
+      '--build-summary-output-semantic=//path/to/output.sum',
+      'package:p/foo.dart|/path/to/p/lib/foo.dart'
+    ]);
+    expect(options.buildMode, isTrue);
+    expect(options.buildSummaryOutputSemantic, '//path/to/output.sum');
+  }
+
   test_buildSuppressExitCode() {
     _parse([
       '--build-mode',
diff --git a/pkg/compiler/bin/resolver.dart b/pkg/compiler/bin/resolver.dart
new file mode 100644
index 0000000..aa70e4d
--- /dev/null
+++ b/pkg/compiler/bin/resolver.dart
@@ -0,0 +1,71 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'dart:io';
+
+import 'package:args/args.dart';
+import 'package:compiler/src/apiimpl.dart';
+import 'package:compiler/src/dart2js.dart';
+import 'package:compiler/src/filenames.dart';
+import 'package:compiler/src/null_compiler_output.dart';
+import 'package:compiler/src/source_file_provider.dart';
+import 'package:compiler/src/options.dart';
+import 'package:compiler/src/serialization/json_serializer.dart';
+import 'package:package_config/discovery.dart';
+
+main(var argv) async {
+  var parser = new ArgParser();
+  parser.addOption('deps', abbr: 'd', allowMultiple: true);
+  parser.addOption('out', abbr: 'o');
+  parser.addOption('library-root', abbr: 'l');
+  var args = parser.parse(argv);
+
+  var resolutionInputs = args['deps']
+      .map((uri) => currentDirectory.resolve(nativeToUriPath(uri)))
+      .toList();
+  var root = args['library-root'];
+  var libraryRoot = root == null
+      ? Platform.script.resolve('../../../sdk/')
+      : currentDirectory.resolve(nativeToUriPath(root));
+  var options = new CompilerOptions(
+      libraryRoot: libraryRoot,
+      resolveOnly: true,
+      resolutionInputs: resolutionInputs,
+      packagesDiscoveryProvider: findPackages);
+  var inputProvider = new CompilerSourceFileProvider();
+  var outputProvider = const NullCompilerOutput();
+  var diagnostics = new FormattingDiagnosticHandler(inputProvider);
+
+  var compiler =
+      new CompilerImpl(inputProvider, outputProvider, diagnostics, options);
+
+  var inputs = args.rest
+      .map((uri) => currentDirectory.resolve(nativeToUriPath(uri)))
+      .toList();
+
+  await compiler.setupSdk();
+  await compiler.setupPackages(inputs.first);
+
+  for (var library in inputs) {
+    await compiler.libraryLoader.loadLibrary(library);
+  }
+
+  for (var library in inputs) {
+    compiler.fullyEnqueueLibrary(compiler.libraryLoader.lookupLibrary(library),
+        compiler.enqueuer.resolution);
+  }
+
+  compiler.processQueue(compiler.enqueuer.resolution, null);
+
+  var librariesToSerialize =
+      inputs.map((lib) => compiler.libraryLoader.lookupLibrary(lib)).toList();
+
+  var serializer =
+      compiler.serialization.createSerializer(librariesToSerialize);
+  var text = serializer.toText(const JsonSerializationEncoder());
+
+  var outFile = args['out'] ?? 'out.data';
+
+  await new File(outFile).writeAsString(text);
+}
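// A hypothetical way (not part of the patch) to invoke the new resolver tool
// above from Dart: it resolves the listed libraries, optionally loading
// serialized dependency data passed via --deps, and writes JSON-serialized
// resolution data to --out (defaulting to 'out.data'). Paths are illustrative.
import 'dart:io';

main() async {
  ProcessResult result = await Process.run(Platform.executable, [
    'pkg/compiler/bin/resolver.dart',
    '--library-root=sdk/',
    '--out=foo.data',
    'lib/foo.dart',
  ]);
  stdout.write(result.stdout);
  stderr.write(result.stderr);
  exitCode = result.exitCode;
}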
diff --git a/pkg/compiler/lib/src/commandline_options.dart b/pkg/compiler/lib/src/commandline_options.dart
index 013e997..bf08051 100644
--- a/pkg/compiler/lib/src/commandline_options.dart
+++ b/pkg/compiler/lib/src/commandline_options.dart
@@ -65,6 +65,7 @@
   // account).
   static const String genericMethodSyntax = '--generic-method-syntax';
   static const String resolveOnly = '--resolve-only';
+  static const String initializingFormalAccess = '--initializing-formal-access';
 }
 
 class Option {
diff --git a/pkg/compiler/lib/src/common/backend_api.dart b/pkg/compiler/lib/src/common/backend_api.dart
index 080a03b..c4c9020 100644
--- a/pkg/compiler/lib/src/common/backend_api.dart
+++ b/pkg/compiler/lib/src/common/backend_api.dart
@@ -44,7 +44,7 @@
 import 'tasks.dart' show CompilerTask;
 import 'work.dart' show ItemCompilationContext;
 
-abstract class Backend implements Target {
+abstract class Backend extends Target {
   final Compiler compiler;
 
   Backend(this.compiler);
@@ -131,11 +131,6 @@
   /// Called during codegen when [constant] has been used.
   void registerCompileTimeConstant(ConstantValue constant, Registry registry) {}
 
-  /// Called during resolution when a constant value for [metadata] on
-  /// [annotatedElement] has been evaluated.
-  void registerMetadataConstant(MetadataAnnotation metadata,
-      Element annotatedElement, Registry registry) {}
-
   /// Called to notify to the backend that a class is being instantiated.
   // TODO(johnniwinther): Remove this. It's only called once for each [cls] and
   // only with [Compiler.globalDependencies] as [registry].
@@ -278,23 +273,25 @@
   /// been scanned.
   Future onLibraryScanned(LibraryElement library, LibraryLoader loader) {
     // TODO(johnniwinther): Move this to [JavaScriptBackend].
-    if (canLibraryUseNative(library)) {
+    if (!compiler.serialization.isDeserialized(library)) {
+      if (canLibraryUseNative(library)) {
+        library.forEachLocalMember((Element element) {
+          if (element.isClass) {
+            checkNativeAnnotation(compiler, element);
+          }
+        });
+      }
+      checkJsInteropAnnotation(compiler, library);
       library.forEachLocalMember((Element element) {
-        if (element.isClass) {
-          checkNativeAnnotation(compiler, element);
+        checkJsInteropAnnotation(compiler, element);
+        if (element.isClass && isJsInterop(element)) {
+          ClassElement classElement = element;
+          classElement.forEachMember((_, memberElement) {
+            checkJsInteropAnnotation(compiler, memberElement);
+          });
         }
       });
     }
-    checkJsInteropAnnotation(compiler, library);
-    library.forEachLocalMember((Element element) {
-      checkJsInteropAnnotation(compiler, element);
-      if (element.isClass && isJsInterop(element)) {
-        ClassElement classElement = element;
-        classElement.forEachMember((_, memberElement) {
-          checkJsInteropAnnotation(compiler, memberElement);
-        });
-      }
-    });
     return new Future.value();
   }
 
@@ -404,6 +401,12 @@
   EnqueueTask makeEnqueuer() => new EnqueueTask(compiler);
 }
 
+/// Interface for resolving native data for a target specific element.
+abstract class NativeRegistry {
+  /// Registers [nativeData] as part of the resolution impact.
+  void registerNativeData(dynamic nativeData);
+}
+
 /// Interface for resolving calls to foreign functions.
 abstract class ForeignResolver {
   /// Returns the constant expression of [node], or `null` if [node] is not
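// A minimal sketch of the NativeRegistry interface introduced above: a
// frontend would hand an implementation like this to
// Target.resolveNativeElement (see the common/resolution.dart change below)
// so that resolved native data ends up in the resolution impact. The class
// name is illustrative, not taken from the compiler sources.
import 'package:compiler/src/common/backend_api.dart' show NativeRegistry;

class CollectingNativeRegistry implements NativeRegistry {
  final List<dynamic> nativeData = <dynamic>[];

  @override
  void registerNativeData(dynamic data) {
    nativeData.add(data);
  }
}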
diff --git a/pkg/compiler/lib/src/common/codegen.dart b/pkg/compiler/lib/src/common/codegen.dart
index 9860b08..8a26860 100644
--- a/pkg/compiler/lib/src/common/codegen.dart
+++ b/pkg/compiler/lib/src/common/codegen.dart
@@ -251,4 +251,6 @@
     registry = new CodegenRegistry(compiler, element);
     return compiler.codegen(this, world);
   }
+
+  String toString() => 'CodegenWorkItem(${resolvedAst.element})';
 }
diff --git a/pkg/compiler/lib/src/common/names.dart b/pkg/compiler/lib/src/common/names.dart
index 300d176..2fa535b 100644
--- a/pkg/compiler/lib/src/common/names.dart
+++ b/pkg/compiler/lib/src/common/names.dart
@@ -26,6 +26,9 @@
   /// The name of the iterator property used in for-each loops.
   static const String iterator = 'iterator';
 
+  /// The name of the `loadLibrary` getter defined on deferred prefixes.
+  static const String loadLibrary = 'loadLibrary';
+
   /// The name of the main method.
   static const String main = 'main';
 
diff --git a/pkg/compiler/lib/src/common/resolution.dart b/pkg/compiler/lib/src/common/resolution.dart
index 7380911..515aa68 100644
--- a/pkg/compiler/lib/src/common/resolution.dart
+++ b/pkg/compiler/lib/src/common/resolution.dart
@@ -202,6 +202,10 @@
   /// have special treatment, such as being allowed to extend blacklisted
   /// classes or members being eagerly resolved.
   bool isTargetSpecificLibrary(LibraryElement element);
+
+  /// Resolve target specific information for [element] and register it with
+  /// [registry].
+  void resolveNativeElement(Element element, NativeRegistry registry) {}
 }
 
 // TODO(johnniwinther): Rename to `Resolver` or `ResolverContext`.
@@ -257,6 +261,9 @@
   /// Computes the [WorldImpact] for [element].
   WorldImpact computeWorldImpact(Element element);
 
+  WorldImpact transformResolutionImpact(
+      Element element, ResolutionImpact resolutionImpact);
+
   /// Removes the [WorldImpact] for [element] from the resolution cache. Later
   /// calls to [getWorldImpact] or [computeWorldImpact] returns an empty impact.
   void uncacheWorldImpact(Element element);
diff --git a/pkg/compiler/lib/src/compile_time_constants.dart b/pkg/compiler/lib/src/compile_time_constants.dart
index 9bab3a5..1c066ed 100644
--- a/pkg/compiler/lib/src/compile_time_constants.dart
+++ b/pkg/compiler/lib/src/compile_time_constants.dart
@@ -104,6 +104,9 @@
 
   /// Returns the compile-time constant value of [metadata].
   ConstantValue getConstantValueForMetadata(MetadataAnnotation metadata);
+
+  /// Register that [element] needs lazy initialization.
+  void registerLazyStatic(FieldElement element);
 }
 
 /// Interface for the task that compiles the constant environments for the
@@ -142,6 +145,8 @@
    *
    * Invariant: The keys in this map are declarations.
    */
+  // TODO(johnniwinther): Make this purely internal when no longer used by
+  // poi/forget_element_test.
   final Map<VariableElement, ConstantExpression> initialVariableValues =
       new Map<VariableElement, ConstantExpression>();
 
@@ -160,7 +165,9 @@
   @override
   @deprecated
   ConstantValue getConstantValueForVariable(VariableElement element) {
-    return getConstantValue(initialVariableValues[element.declaration]);
+    ConstantExpression constant = initialVariableValues[element.declaration];
+    // TODO(johnniwinther): Support eager evaluation of the constant.
+    return constant != null ? getConstantValue(constant) : null;
   }
 
   ConstantExpression compileConstant(VariableElement element) {
@@ -315,8 +322,27 @@
     return constantValueMap.containsKey(expression);
   }
 
+  @override
   ConstantValue getConstantValue(ConstantExpression expression) {
-    return constantValueMap[expression];
+    assert(invariant(CURRENT_ELEMENT_SPANNABLE, expression != null,
+        message: "ConstantExpression is null in getConstantValue."));
+    // TODO(johnniwinther): ensure expressions have been evaluated at this
+    // point. This can't be enabled today due to dartbug.com/26406.
+    if (compiler.serialization.supportsDeserialization) {
+      evaluate(expression);
+    }
+    ConstantValue value = constantValueMap[expression];
+    if (value == null &&
+        expression != null &&
+        expression.kind == ConstantExpressionKind.ERRONEOUS) {
+      // TODO(johnniwinther): When the Dart constant system sees a constant
+      // expression as erroneous but the JavaScript constant system finds it
+      // ok, we have to store a constant value for the erroneous constant
+      // expression. Ensure that the computed constant expressions are always
+      // the same; only the constant values may differ.
+      value = new NullConstantValue();
+    }
+    return value;
   }
 
   ConstantExpression compileNode(Node node, TreeElements elements,
@@ -354,7 +380,7 @@
       Node node, TreeElements definitions,
       {bool isConst: true}) {
     ConstantExpression constant = definitions.getConstant(node);
-    if (constant != null && getConstantValue(constant) != null) {
+    if (constant != null && hasConstantValue(constant)) {
       return constant;
     }
     constant =
@@ -388,7 +414,10 @@
   AstConstant evaluate(Node node) {
     // TODO(johnniwinther): should there be a visitErrorNode?
     if (node is ErrorNode) return new ErroneousAstConstant(context, node);
-    return node.accept(this);
+    AstConstant result = node.accept(this);
+    assert(invariant(node, !isEvaluatingConstant || result != null,
+        message: "No AstConstant computed for the node."));
+    return result;
   }
 
   AstConstant evaluateConstant(Node node) {
@@ -396,7 +425,8 @@
     isEvaluatingConstant = true;
     AstConstant result = node.accept(this);
     isEvaluatingConstant = oldIsEvaluatingConstant;
-    assert(result != null);
+    assert(invariant(node, result != null,
+        message: "No AstConstant computed for the node."));
     return result;
   }
 
@@ -435,8 +465,8 @@
         !link.isEmpty;
         link = link.tail) {
       AstConstant argument = evaluateConstant(link.head);
-      if (argument == null) {
-        return null;
+      if (argument == null || argument.isError) {
+        return argument;
       }
       argumentExpressions.add(argument.expression);
       argumentValues.add(argument.value);
@@ -462,12 +492,12 @@
         link = link.tail) {
       LiteralMapEntry entry = link.head;
       AstConstant key = evaluateConstant(entry.key);
-      if (key == null) {
-        return null;
+      if (key == null || key.isError) {
+        return key;
       }
       AstConstant value = evaluateConstant(entry.value);
-      if (value == null) {
-        return null;
+      if (value == null || value.isError) {
+        return value;
       }
       if (!map.containsKey(key.value)) {
         keyValues.add(key.value);
@@ -504,7 +534,12 @@
   AstConstant visitStringJuxtaposition(StringJuxtaposition node) {
     AstConstant left = evaluate(node.first);
     AstConstant right = evaluate(node.second);
-    if (left == null || right == null) return null;
+    if (left == null || left.isError) {
+      return left;
+    }
+    if (right == null || right.isError) {
+      return right;
+    }
     StringConstantValue leftValue = left.value;
     StringConstantValue rightValue = right.value;
     return new AstConstant(
@@ -518,16 +553,16 @@
   AstConstant visitStringInterpolation(StringInterpolation node) {
     List<ConstantExpression> subexpressions = <ConstantExpression>[];
     AstConstant initialString = evaluate(node.string);
-    if (initialString == null) {
-      return null;
+    if (initialString == null || initialString.isError) {
+      return initialString;
     }
     subexpressions.add(initialString.expression);
     StringConstantValue initialStringValue = initialString.value;
     DartString accumulator = initialStringValue.primitiveValue;
     for (StringInterpolationPart part in node.parts) {
       AstConstant subexpression = evaluate(part.expression);
-      if (subexpression == null) {
-        return null;
+      if (subexpression == null || subexpression.isError) {
+        return subexpression;
       }
       subexpressions.add(subexpression.expression);
       ConstantValue expression = subexpression.value;
@@ -552,7 +587,6 @@
       accumulator =
           new DartString.concat(accumulator, partStringValue.primitiveValue);
     }
-    ;
     return new AstConstant(
         context,
         node,
@@ -699,8 +733,8 @@
     } else if (send.isPrefix) {
       assert(send.isOperator);
       AstConstant receiverConstant = evaluate(send.receiver);
-      if (receiverConstant == null) {
-        return null;
+      if (receiverConstant == null || receiverConstant.isError) {
+        return receiverConstant;
       }
       Operator node = send.selector;
       UnaryOperator operator = UnaryOperator.parse(node.source);
@@ -721,8 +755,11 @@
       assert(send.argumentCount() == 1);
       AstConstant left = evaluate(send.receiver);
       AstConstant right = evaluate(send.argumentsNode.nodes.head);
-      if (left == null || right == null) {
-        return null;
+      if (left == null || left.isError) {
+        return left;
+      }
+      if (right == null || right.isError) {
+        return right;
       }
       ConstantValue leftValue = left.value;
       ConstantValue rightValue = right.value;
@@ -770,8 +807,8 @@
 
   AstConstant visitConditional(Conditional node) {
     AstConstant condition = evaluate(node.condition);
-    if (condition == null) {
-      return null;
+    if (condition == null || condition.isError) {
+      return condition;
     } else if (!condition.value.isBool) {
       DartType conditionType = condition.value.getType(coreTypes);
       if (isEvaluatingConstant) {
@@ -783,8 +820,11 @@
     }
     AstConstant thenExpression = evaluate(node.thenExpression);
     AstConstant elseExpression = evaluate(node.elseExpression);
-    if (thenExpression == null || elseExpression == null) {
-      return null;
+    if (thenExpression == null || thenExpression.isError) {
+      return thenExpression;
+    }
+    if (elseExpression == null || elseExpression.isError) {
+      return elseExpression;
     }
     BoolConstantValue boolCondition = condition.value;
     return new AstConstant(
@@ -922,10 +962,7 @@
       assert(normalizedArguments != null);
       concreteArguments = normalizedArguments;
     }
-
-    if (constructor == compiler.intEnvironment ||
-        constructor == compiler.boolEnvironment ||
-        constructor == compiler.stringEnvironment) {
+    if (constructor.isFromEnvironmentConstructor) {
       return createFromEnvironmentConstant(node, constructedType, constructor,
           callStructure, normalizedArguments, concreteArguments);
     } else {
@@ -955,48 +992,43 @@
     ConstantValue defaultValue = normalizedArguments[1].value;
 
     if (firstArgument.isNull) {
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[0].node, MessageKind.NULL_NOT_ALLOWED);
-      return null;
     }
 
     if (!firstArgument.isString) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[0].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.stringType});
-      return null;
     }
 
-    if (constructor == compiler.intEnvironment &&
+    if (constructor.isIntFromEnvironmentConstructor &&
         !(defaultValue.isNull || defaultValue.isInt)) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[1].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.intType});
-      return null;
     }
 
-    if (constructor == compiler.boolEnvironment &&
+    if (constructor.isBoolFromEnvironmentConstructor &&
         !(defaultValue.isNull || defaultValue.isBool)) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[1].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.boolType});
-      return null;
     }
 
-    if (constructor == compiler.stringEnvironment &&
+    if (constructor.isStringFromEnvironmentConstructor &&
         !(defaultValue.isNull || defaultValue.isString)) {
       DartType type = defaultValue.getType(coreTypes);
-      reporter.reportErrorMessage(
+      return reportNotCompileTimeConstant(
           normalizedArguments[1].node,
           MessageKind.NOT_ASSIGNABLE,
           {'fromType': type, 'toType': coreTypes.stringType});
-      return null;
     }
 
     String name = firstArgument.primitiveValue.slowToString();
@@ -1009,13 +1041,13 @@
       if (concreteArguments.length > 1) {
         defaultValue = concreteArguments[1].expression;
       }
-      if (constructor == compiler.intEnvironment) {
+      if (constructor.isIntFromEnvironmentConstructor) {
         expression =
             new IntFromEnvironmentConstantExpression(name, defaultValue);
-      } else if (constructor == compiler.boolEnvironment) {
+      } else if (constructor.isBoolFromEnvironmentConstructor) {
         expression =
             new BoolFromEnvironmentConstantExpression(name, defaultValue);
-      } else if (constructor == compiler.stringEnvironment) {
+      } else if (constructor.isStringFromEnvironmentConstructor) {
         expression =
             new StringFromEnvironmentConstantExpression(name, defaultValue);
       }
@@ -1025,11 +1057,11 @@
 
     if (value == null) {
       return createEvaluatedConstant(defaultValue);
-    } else if (constructor == compiler.intEnvironment) {
+    } else if (constructor.isIntFromEnvironmentConstructor) {
       int number = int.parse(value, onError: (_) => null);
       return createEvaluatedConstant(
           (number == null) ? defaultValue : constantSystem.createInt(number));
-    } else if (constructor == compiler.boolEnvironment) {
+    } else if (constructor.isBoolFromEnvironmentConstructor) {
       if (value == 'true') {
         return createEvaluatedConstant(constantSystem.createBool(true));
       } else if (value == 'false') {
@@ -1038,7 +1070,7 @@
         return createEvaluatedConstant(defaultValue);
       }
     } else {
-      assert(constructor == compiler.stringEnvironment);
+      assert(constructor.isStringFromEnvironmentConstructor);
       return createEvaluatedConstant(
           constantSystem.createString(new DartString.literal(value)));
     }
@@ -1080,6 +1112,11 @@
     fieldConstants.forEach((FieldElement field, AstConstant astConstant) {
       fieldValues[field] = astConstant.value;
     });
+    for (AstConstant fieldValue in fieldConstants.values) {
+      if (fieldValue.isError) {
+        return fieldValue;
+      }
+    }
     return new AstConstant(
         context,
         node,
@@ -1092,13 +1129,18 @@
     return node.expression.accept(this);
   }
 
-  AstConstant signalNotCompileTimeConstant(Node node,
-      {MessageKind message: MessageKind.NOT_A_COMPILE_TIME_CONSTANT}) {
-    if (isEvaluatingConstant) {
-      reporter.reportErrorMessage(node, message);
+  AstConstant reportNotCompileTimeConstant(Node node, MessageKind message,
+      [Map arguments = const {}]) {
+    reporter.reportErrorMessage(node, message, arguments);
+    return new AstConstant(context, node, new ErroneousConstantExpression(),
+        new NullConstantValue());
+  }
 
-      return new AstConstant(context, node, new ErroneousConstantExpression(),
-          new NullConstantValue());
+  AstConstant signalNotCompileTimeConstant(Node node,
+      {MessageKind message: MessageKind.NOT_A_COMPILE_TIME_CONSTANT,
+      Map arguments: const {}}) {
+    if (isEvaluatingConstant) {
+      return reportNotCompileTimeConstant(node, message, arguments);
     }
     // Else we don't need to do anything. The final handler is only
     // optimistically trying to compile constants. So it is normal that we
@@ -1182,6 +1224,9 @@
       if (parameter.isInitializingFormal) {
         InitializingFormalElement initializingFormal = parameter;
         updateFieldValue(node, initializingFormal.fieldElement, argument);
+        if (compiler.options.enableInitializingFormalAccess) {
+          definitions[parameter] = argument;
+        }
       } else {
         potentiallyCheckType(parameter, argument);
         definitions[parameter] = argument;
@@ -1346,6 +1391,8 @@
         value);
   }
 
+  bool get isError => expression.kind == ConstantExpressionKind.ERRONEOUS;
+
   String toString() => expression.toString();
 }
 
diff --git a/pkg/compiler/lib/src/compiler.dart b/pkg/compiler/lib/src/compiler.dart
index 7f7a37f..d567bba 100644
--- a/pkg/compiler/lib/src/compiler.dart
+++ b/pkg/compiler/lib/src/compiler.dart
@@ -209,15 +209,6 @@
   Element loadLibraryFunction;
   Element functionApplyMethod;
 
-  /// The [int.fromEnvironment] constructor.
-  ConstructorElement intEnvironment;
-
-  /// The [bool.fromEnvironment] constructor.
-  ConstructorElement boolEnvironment;
-
-  /// The [String.fromEnvironment] constructor.
-  ConstructorElement stringEnvironment;
-
   // TODO(zarah): Remove this map and incorporate compile-time errors
   // in the model.
   /// Tracks elements with compile-time errors.
@@ -623,12 +614,6 @@
       mirrorSystemGetNameFunction = cls.lookupLocalMember('getName');
     } else if (mirrorsUsedClass == cls) {
       mirrorsUsedConstructor = cls.constructors.head;
-    } else if (coreClasses.intClass == cls) {
-      intEnvironment = cls.lookupConstructor(Identifiers.fromEnvironment);
-    } else if (coreClasses.stringClass == cls) {
-      stringEnvironment = cls.lookupConstructor(Identifiers.fromEnvironment);
-    } else if (coreClasses.boolClass == cls) {
-      boolEnvironment = cls.lookupConstructor(Identifiers.fromEnvironment);
     }
   }
 
@@ -825,7 +810,14 @@
             supportSerialization: serialization.supportSerialization);
 
         phase = PHASE_RESOLVING;
-        if (analyzeAll) {
+        if (options.resolveOnly) {
+          libraryLoader.libraries.where((LibraryElement library) {
+            return !serialization.isDeserialized(library);
+          }).forEach((LibraryElement library) {
+            reporter.log('Enqueuing ${library.canonicalUri}');
+            fullyEnqueueLibrary(library, enqueuer.resolution);
+          });
+        } else if (analyzeAll) {
           libraryLoader.libraries.forEach((LibraryElement library) {
             reporter.log('Enqueuing ${library.canonicalUri}');
             fullyEnqueueLibrary(library, enqueuer.resolution);
@@ -861,9 +853,11 @@
 
         if (options.resolveOnly) {
           reporter.log('Serializing to ${options.resolutionOutput}');
-          serialization.serializeToSink(
-              userOutputProvider.createEventSink('', 'data'),
-              libraryLoader.libraries);
+          serialization
+              .serializeToSink(userOutputProvider.createEventSink('', 'data'),
+                  libraryLoader.libraries.where((LibraryElement library) {
+            return !serialization.isDeserialized(library);
+          }));
         }
         if (options.analyzeOnly) {
           if (!analyzeAll && !compilationFailed) {
@@ -926,6 +920,16 @@
       fullyEnqueueTopLevelElement(element, world);
     }
     library.implementation.forEachLocalMember(enqueueAll);
+    library.imports.forEach((ImportElement import) {
+      if (import.isDeferred) {
+        // `import.prefix` and `loadLibrary` may be `null` when the deferred
+        // import has compile-time errors.
+        GetterElement loadLibrary = import.prefix?.loadLibrary;
+        if (loadLibrary != null) {
+          world.addToWorkList(loadLibrary);
+        }
+      }
+    });
   }
 
   void fullyEnqueueTopLevelElement(Element element, Enqueuer world) {
@@ -1079,7 +1083,6 @@
           return const WorldImpact();
         }
         WorldImpact worldImpact = analyzeElement(element);
-        backend.onElementResolved(element);
         world.registerProcessedElement(element);
         return worldImpact;
       });
@@ -1674,7 +1677,12 @@
             Token from = astElement.node.getBeginToken();
             Token to = astElement.node.getEndToken();
             if (astElement.metadata.isNotEmpty) {
-              from = astElement.metadata.first.beginToken;
+              if (!astElement.metadata.first.hasNode) {
+                // We might try to report an error while parsing the metadata
+                // itself.
+                return true;
+              }
+              from = astElement.metadata.first.node.getBeginToken();
             }
             return validateToken(from, to);
           }
@@ -1755,8 +1763,7 @@
     } else if (node is Element) {
       return spanFromElement(node);
     } else if (node is MetadataAnnotation) {
-      Uri uri = node.annotatedElement.compilationUnit.script.resourceUri;
-      return spanFromTokens(node.beginToken, node.endToken, uri);
+      return node.sourcePosition;
     } else if (node is Local) {
       Local local = node;
       return spanFromElement(local.executableContext);
@@ -2022,13 +2029,20 @@
         // Only analyze nodes with a corresponding [TreeElements].
         compiler.checker.check(element);
       }
-      WorldImpact worldImpact = compiler.backend.impactTransformer
-          .transformResolutionImpact(resolutionImpact);
-      return worldImpact;
+      return transformResolutionImpact(element, resolutionImpact);
     });
   }
 
   @override
+  WorldImpact transformResolutionImpact(
+      Element element, ResolutionImpact resolutionImpact) {
+    WorldImpact worldImpact = compiler.backend.impactTransformer
+        .transformResolutionImpact(resolutionImpact);
+    _worldImpactCache[element] = worldImpact;
+    return worldImpact;
+  }
+
+  @override
   void uncacheWorldImpact(Element element) {
     assert(invariant(element, element.isDeclaration,
         message: "Element $element must be the declaration."));
diff --git a/pkg/compiler/lib/src/constants/constant_constructors.dart b/pkg/compiler/lib/src/constants/constant_constructors.dart
index c389eef..4628f28 100644
--- a/pkg/compiler/lib/src/constants/constant_constructors.dart
+++ b/pkg/compiler/lib/src/constants/constant_constructors.dart
@@ -87,6 +87,11 @@
     applyParameters(parameters, _);
     ConstructedConstantExpression constructorInvocation =
         applyInitializers(node, _);
+    constructor.enclosingClass.forEachInstanceField((_, FieldElement field) {
+      if (!fieldMap.containsKey(field)) {
+        fieldMap[field] = field.constant;
+      }
+    });
     return new GenerativeConstantConstructor(
         currentClass.thisType, defaultValues, fieldMap, constructorInvocation);
   }
@@ -285,6 +290,11 @@
   }
 
   @override
+  ConstantExpression visitLiteralDouble(LiteralDouble node) {
+    return new DoubleConstantExpression(node.value);
+  }
+
+  @override
   ConstantExpression visitLiteralBool(LiteralBool node) {
     return new BoolConstantExpression(node.value);
   }
@@ -329,4 +339,10 @@
   ConstantExpression visitNamedArgument(NamedArgument node) {
     return apply(node.expression);
   }
+
+  @override
+  ConstantExpression visitIfNull(Send node, Node left, Node right, _) {
+    return new BinaryConstantExpression(
+        apply(left), BinaryOperator.IF_NULL, apply(right));
+  }
 }
diff --git a/pkg/compiler/lib/src/constants/expressions.dart b/pkg/compiler/lib/src/constants/expressions.dart
index 3b4317f..922ae06 100644
--- a/pkg/compiler/lib/src/constants/expressions.dart
+++ b/pkg/compiler/lib/src/constants/expressions.dart
@@ -549,7 +549,7 @@
 
   @override
   void _createStructuredText(StringBuffer sb) {
-    sb.write('Constructored(type=$type,constructor=$target,'
+    sb.write('Constructed(type=$type,constructor=$target,'
         'callStructure=$callStructure,arguments=[');
     String delimiter = '';
     for (ConstantExpression value in arguments) {
@@ -561,6 +561,8 @@
   }
 
   Map<FieldElement, ConstantExpression> computeInstanceFields() {
+    assert(invariant(target, target.constantConstructor != null,
+        message: "No constant constructor computed for $target."));
     return target.constantConstructor
         .computeInstanceFields(arguments, callStructure);
   }
@@ -868,9 +870,18 @@
   @override
   ConstantValue evaluate(
       Environment environment, ConstantSystem constantSystem) {
-    return constantSystem.lookupBinary(operator).fold(
-        left.evaluate(environment, constantSystem),
-        right.evaluate(environment, constantSystem));
+    ConstantValue leftValue = left.evaluate(environment, constantSystem);
+    ConstantValue rightValue = right.evaluate(environment, constantSystem);
+    switch (operator.kind) {
+      case BinaryOperatorKind.NOT_EQ:
+        BoolConstantValue equals =
+            constantSystem.equal.fold(leftValue, rightValue);
+        return equals.negate();
+      default:
+        return constantSystem
+            .lookupBinary(operator)
+            .fold(leftValue, rightValue);
+    }
   }
 
   ConstantExpression apply(NormalizedArguments arguments) {
diff --git a/pkg/compiler/lib/src/constants/values.dart b/pkg/compiler/lib/src/constants/values.dart
index 37f5646..62749bf 100644
--- a/pkg/compiler/lib/src/constants/values.dart
+++ b/pkg/compiler/lib/src/constants/values.dart
@@ -28,6 +28,7 @@
   R visitInterceptor(InterceptorConstantValue constant, A arg);
   R visitSynthetic(SyntheticConstantValue constant, A arg);
   R visitDeferred(DeferredConstantValue constant, A arg);
+  R visitNonConstant(NonConstantValue constant, A arg);
 }
 
 abstract class ConstantValue {
@@ -657,13 +658,15 @@
 }
 
 class ConstructedConstantValue extends ObjectConstantValue {
+  // TODO(johnniwinther): Make [fields] private to avoid misuse of the map
+  // ordering and mutability.
   final Map<FieldElement, ConstantValue> fields;
   final int hashCode;
 
   ConstructedConstantValue(
       InterfaceType type, Map<FieldElement, ConstantValue> fields)
       : this.fields = fields,
-        hashCode = Hashing.mapHash(fields, Hashing.objectHash(type)),
+        hashCode = Hashing.unorderedMapHash(fields, Hashing.objectHash(type)),
         super(type) {
     assert(type != null);
     assert(!fields.containsValue(null));
@@ -751,7 +754,9 @@
 
   String toDartText() => 'deferred(${referenced.toDartText()})';
 
-  String toStructuredText() => 'DeferredConstant($referenced)';
+  String toStructuredText() {
+    return 'DeferredConstant(${referenced.toStructuredText()})';
+  }
 }
 
 /// A constant value resulting from a non constant or erroneous constant
@@ -762,7 +767,7 @@
 
   @override
   accept(ConstantValueVisitor visitor, arg) {
-    // TODO(johnniwinther): Should this be part of the visiting?
+    return visitor.visitNonConstant(this, arg);
   }
 
   @override
diff --git a/pkg/compiler/lib/src/cps_ir/cps_ir_builder_task.dart b/pkg/compiler/lib/src/cps_ir/cps_ir_builder_task.dart
index 18eeb52..28cc94a 100644
--- a/pkg/compiler/lib/src/cps_ir/cps_ir_builder_task.dart
+++ b/pkg/compiler/lib/src/cps_ir/cps_ir_builder_task.dart
@@ -3859,7 +3859,7 @@
     if (Elements.isLocal(element)) {
       LocalElement local = element;
       if (insideInitializer &&
-          local.isParameter &&
+          (local.isParameter || local.isInitializingFormal) &&
           local.enclosingElement == currentFunction) {
         assert(local.enclosingElement.isConstructor);
         // Initializers in an initializer-list can communicate via parameters.
@@ -3871,7 +3871,9 @@
         // outlive the activation of the function).
         markAsCaptured(local);
       } else if (inTryStatement) {
-        assert(local.isParameter || local.isVariable);
+        assert(local.isParameter ||
+            local.isVariable ||
+            local.isInitializingFormal);
         // Search for the position of the try block containing the variable
         // declaration, or -1 if it is declared outside the outermost try.
         int i = tryNestingStack.length - 1;
diff --git a/pkg/compiler/lib/src/cps_ir/cps_ir_nodes_sexpr.dart b/pkg/compiler/lib/src/cps_ir/cps_ir_nodes_sexpr.dart
index 1402b1d8..d788549 100644
--- a/pkg/compiler/lib/src/cps_ir/cps_ir_nodes_sexpr.dart
+++ b/pkg/compiler/lib/src/cps_ir/cps_ir_nodes_sexpr.dart
@@ -475,6 +475,10 @@
     return '(Null)';
   }
 
+  String visitNonConstant(NonConstantValue constant, _) {
+    return '(NonConstant)';
+  }
+
   String visitInt(IntConstantValue constant, _) {
     return '(Int ${constant.toDartText()})';
   }
diff --git a/pkg/compiler/lib/src/dart2js.dart b/pkg/compiler/lib/src/dart2js.dart
index 64bb70a..4301a55 100644
--- a/pkg/compiler/lib/src/dart2js.dart
+++ b/pkg/compiler/lib/src/dart2js.dart
@@ -338,6 +338,7 @@
     new OptionHandler(Flags.allowMockCompilation, passThrough),
     new OptionHandler(Flags.fastStartup, passThrough),
     new OptionHandler(Flags.genericMethodSyntax, passThrough),
+    new OptionHandler(Flags.initializingFormalAccess, passThrough),
     new OptionHandler('${Flags.minify}|-m', implyCompilation),
     new OptionHandler(Flags.preserveUris, passThrough),
     new OptionHandler('--force-strip=.*', setStrip),
@@ -470,6 +471,12 @@
         api.Diagnostic.INFO);
   }
   if (resolveOnly) {
+    if (resolutionInputs != null &&
+        resolutionInputs.contains(resolutionOutput)) {
+      helpAndFail("Resolution input '${resolutionOutput}' can't be used as "
+          "resolution output. Use the '--out' option to specify another "
+          "resolution output.");
+    }
     analyzeOnly = analyzeAll = true;
   } else if (analyzeAll) {
     analyzeOnly = true;
@@ -529,7 +536,7 @@
       packageConfig: packageConfig,
       packagesDiscoveryProvider: findPackages,
       resolutionInputs: resolutionInputs,
-      resolutionOutput: resolutionOutput,
+      resolutionOutput: resolveOnly ? resolutionOutput : null,
       options: options,
       environment: environment);
   return compileFunc(
@@ -834,60 +841,162 @@
   });
 }
 
+// TODO(johnniwinther): Add corresponding options to the test script and change
+// these to use 'bool.fromEnvironment'.
 final bool USE_SERIALIZED_DART_CORE =
     Platform.environment['USE_SERIALIZED_DART_CORE'] == 'true';
 
-/// Mock URI used only in testing when [USE_SERIALIZED_DART_CORE] is enabled.
-final Uri _SERIALIZED_URI = Uri.parse('file:fake.data');
+final bool SERIALIZED_COMPILATION =
+    Platform.environment['SERIALIZED_COMPILATION'] == 'true';
+
+/// Mock URI used only in testing when [USE_SERIALIZED_DART_CORE] or
+/// [SERIALIZED_COMPILATION] is enabled.
+final Uri _SERIALIZED_DART_CORE_URI = Uri.parse('file:core.data');
+final Uri _SERIALIZED_TEST_URI = Uri.parse('file:test.data');
 
 void _useSerializedDataForDartCore(CompileFunc oldCompileFunc) {
-  String serializedData;
-
+  /// Run the [oldCompileFunc] with [serializedData] added as resolution input.
   Future<api.CompilationResult> compileWithSerializedData(
       CompilerOptions compilerOptions,
       api.CompilerInput compilerInput,
       api.CompilerDiagnostics compilerDiagnostics,
-      api.CompilerOutput compilerOutput) async {
-    CompilerImpl compiler = new CompilerImpl(
-        compilerInput, compilerOutput, compilerDiagnostics, compilerOptions);
-    compiler.serialization.deserializeFromText(_SERIALIZED_URI, serializedData);
-    return compiler.run(compilerOptions.entryPoint).then((bool success) {
-      return new api.CompilationResult(compiler, isSuccess: success);
-    });
+      api.CompilerOutput compilerOutput,
+      List<_SerializedData> serializedData) {
+    api.CompilerInput input = compilerInput;
+    CompilerOptions options = compilerOptions;
+    if (serializedData != null && serializedData.isNotEmpty) {
+      Map<Uri, String> dataMap = <Uri, String>{};
+      for (_SerializedData data in serializedData) {
+        dataMap[data.uri] = data.data;
+      }
+      input = new _CompilerInput(input, dataMap);
+      List<Uri> resolutionInputs = dataMap.keys.toList();
+      if (compilerOptions.resolutionInputs != null) {
+        for (Uri uri in compilerOptions.resolutionInputs) {
+          if (!dataMap.containsKey(uri)) {
+            resolutionInputs.add(uri);
+          }
+        }
+      }
+      options = options.copy(resolutionInputs: resolutionInputs);
+    }
+    return oldCompileFunc(options, input, compilerDiagnostics, compilerOutput);
   }
 
+  /// Serialize [entryPoint] using [serializedData] if provided.
+  Future<api.CompilationResult> serialize(
+      Uri entryPoint,
+      Uri serializedUri,
+      CompilerOptions compilerOptions,
+      api.CompilerInput compilerInput,
+      api.CompilerDiagnostics compilerDiagnostics,
+      api.CompilerOutput compilerOutput,
+      [List<_SerializedData> serializedData]) {
+    CompilerOptions options = new CompilerOptions.parse(
+        entryPoint: entryPoint,
+        libraryRoot: compilerOptions.libraryRoot,
+        packageRoot: compilerOptions.packageRoot,
+        packageConfig: compilerOptions.packageConfig,
+        packagesDiscoveryProvider: compilerOptions.packagesDiscoveryProvider,
+        environment: compilerOptions.environment,
+        resolutionOutput: serializedUri,
+        options: [Flags.resolveOnly]);
+    return compileWithSerializedData(options, compilerInput,
+        compilerDiagnostics, compilerOutput, serializedData);
+  }
+
+  // Local cache for the serialized data for dart:core.
+  _SerializedData serializedDartCore;
+
+  /// Serialize the entry point using serialized data from dart:core and run
+  /// [oldCompileFunc] using serialized data for the whole program.
+  Future<api.CompilationResult> compileFromSerializedData(
+      CompilerOptions compilerOptions,
+      api.CompilerInput compilerInput,
+      api.CompilerDiagnostics compilerDiagnostics,
+      api.CompilerOutput compilerOutput) async {
+    _CompilerOutput output = new _CompilerOutput(_SERIALIZED_TEST_URI);
+    api.CompilationResult result = await serialize(
+        compilerOptions.entryPoint,
+        output.uri,
+        compilerOptions,
+        compilerInput,
+        compilerDiagnostics,
+        output,
+        [serializedDartCore]);
+    if (!result.isSuccess) {
+      return result;
+    }
+    return compileWithSerializedData(
+        compilerOptions,
+        compilerInput,
+        compilerDiagnostics,
+        compilerOutput,
+        [serializedDartCore, output.serializedData]);
+  }
+
+  /// Compiles the entry point using the serialized data from dart:core.
+  Future<api.CompilationResult> compileWithSerializedDartCoreData(
+      CompilerOptions compilerOptions,
+      api.CompilerInput compilerInput,
+      api.CompilerDiagnostics compilerDiagnostics,
+      api.CompilerOutput compilerOutput) async {
+    return compileWithSerializedData(compilerOptions, compilerInput,
+        compilerDiagnostics, compilerOutput, [serializedDartCore]);
+  }
+
+  /// Serialize dart:core data into [serializedDartCore] and set up the
+  /// [compileFunc] to run the compiler using this data.
   Future<api.CompilationResult> generateSerializedDataForDartCore(
       CompilerOptions compilerOptions,
       api.CompilerInput compilerInput,
       api.CompilerDiagnostics compilerDiagnostics,
       api.CompilerOutput compilerOutput) async {
-    _CompilerOutput output = new _CompilerOutput();
-    api.CompilationResult result = await oldCompileFunc(
-        new CompilerOptions.parse(
-            entryPoint: Uris.dart_core,
-            libraryRoot: compilerOptions.libraryRoot,
-            packageRoot: compilerOptions.packageRoot,
-            packageConfig: compilerOptions.packageConfig,
-            packagesDiscoveryProvider:
-                compilerOptions.packagesDiscoveryProvider,
-            environment: compilerOptions.environment,
-            resolutionOutput: _SERIALIZED_URI,
-            options: [Flags.resolveOnly]),
-        compilerInput,
-        compilerDiagnostics,
-        output);
-    serializedData = output.serializedData;
-    compileFunc = compileWithSerializedData;
-    return compileWithSerializedData(
+    _CompilerOutput output = new _CompilerOutput(_SERIALIZED_DART_CORE_URI);
+    await serialize(Uris.dart_core, output.uri, compilerOptions, compilerInput,
+        compilerDiagnostics, output);
+    serializedDartCore = output.serializedData;
+    if (SERIALIZED_COMPILATION) {
+      compileFunc = compileFromSerializedData;
+    } else {
+      compileFunc = compileWithSerializedDartCoreData;
+    }
+    return compileFunc(
         compilerOptions, compilerInput, compilerDiagnostics, compilerOutput);
   }
 
   compileFunc = generateSerializedDataForDartCore;
 }
 
+class _CompilerInput implements api.CompilerInput {
+  final api.CompilerInput _input;
+  final Map<Uri, String> _data;
+
+  _CompilerInput(this._input, this._data);
+
+  @override
+  Future readFromUri(Uri uri) {
+    String data = _data[uri];
+    if (data != null) {
+      return new Future.value(data);
+    }
+    return _input.readFromUri(uri);
+  }
+}
+
+class _SerializedData {
+  final Uri uri;
+  final String data;
+
+  _SerializedData(this.uri, this.data);
+}
+
 class _CompilerOutput extends NullCompilerOutput {
+  final Uri uri;
   _BufferedEventSink sink;
 
+  _CompilerOutput(this.uri);
+
   @override
   EventSink<String> createEventSink(String name, String extension) {
     if (name == '' && extension == 'data') {
@@ -896,7 +1005,9 @@
     return super.createEventSink(name, extension);
   }
 
-  String get serializedData => sink.sb.toString();
+  _SerializedData get serializedData {
+    return new _SerializedData(uri, sink.sb.toString());
+  }
 }
 
 class _BufferedEventSink implements EventSink<String> {
diff --git a/pkg/compiler/lib/src/dart_backend/backend.dart b/pkg/compiler/lib/src/dart_backend/backend.dart
index 9a1162e..371eed6 100644
--- a/pkg/compiler/lib/src/dart_backend/backend.dart
+++ b/pkg/compiler/lib/src/dart_backend/backend.dart
@@ -540,4 +540,9 @@
     constantCompiler.constantValueMap
         .addAll(task.constantCompiler.constantValueMap);
   }
+
+  @override
+  void registerLazyStatic(FieldElement element) {
+    // Do nothing.
+  }
 }
diff --git a/pkg/compiler/lib/src/deferred_load.dart b/pkg/compiler/lib/src/deferred_load.dart
index 18e85ad..0f4f8d7 100644
--- a/pkg/compiler/lib/src/deferred_load.dart
+++ b/pkg/compiler/lib/src/deferred_load.dart
@@ -349,6 +349,13 @@
         // See dartbug.com/26406 for context.
         treeElements
             .forEachConstantNode((Node node, ConstantExpression expression) {
+          if (compiler.serialization.isDeserialized(analyzableElement)) {
+            if (!expression.isImplicit && !expression.isPotential) {
+              // Enforce evaluation of [expression].
+              backend.constants.getConstantValue(expression);
+            }
+          }
+
           // Explicitly depend on the backend constants.
           if (backend.constants.hasConstantValue(expression)) {
             ConstantValue value =
diff --git a/pkg/compiler/lib/src/diagnostics/messages.dart b/pkg/compiler/lib/src/diagnostics/messages.dart
index ad689fd..dbcab63 100644
--- a/pkg/compiler/lib/src/diagnostics/messages.dart
+++ b/pkg/compiler/lib/src/diagnostics/messages.dart
@@ -665,6 +665,7 @@
       MessageKind.DUPLICATE_DEFINITION: const MessageTemplate(
           MessageKind.DUPLICATE_DEFINITION,
           "Duplicate definition of '#{name}'.",
+          options: const ["--initializing-formal-access"],
           howToFix: "Try to rename or remove this definition.",
           examples: const [
             """
@@ -676,7 +677,36 @@
 main() {
   new C();
 }
+""",
+            """
+class C {
+  int x;
+  C(this.x, int x);
+}
 
+main() {
+  new C(4, 2);
+}
+""",
+            """
+class C {
+  int x;
+  C(int x, this.x);
+}
+
+main() {
+  new C(4, 2);
+}
+""",
+            """
+class C {
+  int x;
+  C(this.x, this.x);
+}
+
+main() {
+  new C(4, 2);
+}
 """
           ]),
 
diff --git a/pkg/compiler/lib/src/elements/common.dart b/pkg/compiler/lib/src/elements/common.dart
index 6c9a013..5ff8341 100644
--- a/pkg/compiler/lib/src/elements/common.dart
+++ b/pkg/compiler/lib/src/elements/common.dart
@@ -6,7 +6,7 @@
 
 library elements.common;
 
-import '../common/names.dart' show Names, Uris;
+import '../common/names.dart' show Identifiers, Names, Uris;
 import '../core_types.dart' show CoreClasses;
 import '../dart_types.dart' show DartType, InterfaceType, FunctionType;
 import '../util/util.dart' show Link;
@@ -594,3 +594,62 @@
     });
   }
 }
+
+abstract class AbstractFieldElementCommon implements AbstractFieldElement {
+  @override
+  bool get isInstanceMember {
+    return isClassMember && !isStatic;
+  }
+
+  @override
+  bool get isAbstract {
+    return getter != null && getter.isAbstract ||
+        setter != null && setter.isAbstract;
+  }
+}
+
+enum _FromEnvironmentState { NOT, BOOL, INT, STRING }
+
+abstract class ConstructorElementCommon implements ConstructorElement {
+  _FromEnvironmentState _fromEnvironmentState;
+
+  _FromEnvironmentState get fromEnvironmentState {
+    if (_fromEnvironmentState == null) {
+      _fromEnvironmentState = _FromEnvironmentState.NOT;
+      if (name == Identifiers.fromEnvironment && library.isDartCore) {
+        switch (enclosingClass.name) {
+          case 'bool':
+            _fromEnvironmentState = _FromEnvironmentState.BOOL;
+            break;
+          case 'int':
+            _fromEnvironmentState = _FromEnvironmentState.INT;
+            break;
+          case 'String':
+            _fromEnvironmentState = _FromEnvironmentState.STRING;
+            break;
+        }
+      }
+    }
+    return _fromEnvironmentState;
+  }
+
+  @override
+  bool get isFromEnvironmentConstructor {
+    return fromEnvironmentState != _FromEnvironmentState.NOT;
+  }
+
+  @override
+  bool get isIntFromEnvironmentConstructor {
+    return fromEnvironmentState == _FromEnvironmentState.INT;
+  }
+
+  @override
+  bool get isBoolFromEnvironmentConstructor {
+    return fromEnvironmentState == _FromEnvironmentState.BOOL;
+  }
+
+  @override
+  bool get isStringFromEnvironmentConstructor {
+    return fromEnvironmentState == _FromEnvironmentState.STRING;
+  }
+}
diff --git a/pkg/compiler/lib/src/elements/elements.dart b/pkg/compiler/lib/src/elements/elements.dart
index dc708d0..52f5f30 100644
--- a/pkg/compiler/lib/src/elements/elements.dart
+++ b/pkg/compiler/lib/src/elements/elements.dart
@@ -941,6 +941,9 @@
 
   /// Import that declared this deferred prefix.
   ImportElement get deferredImport;
+
+  /// The `loadLibrary` getter implicitly defined on deferred prefixes.
+  GetterElement get loadLibrary;
 }
 
 /// A type alias definition.
@@ -1067,7 +1070,7 @@
 ///
 /// Normal parameter that introduce a local variable are modeled by
 /// [LocalParameterElement] whereas initializing formals, that is parameter of
-/// the form `this.x`, are modeled by [InitializingFormalParameter].
+/// the form `this.x`, are modeled by [InitializingFormalElement].
 abstract class ParameterElement extends Element
     implements VariableElement, FormalElement, LocalElement {
   /// Use [functionDeclaration] instead.
@@ -1092,7 +1095,7 @@
 /// A formal parameter in a constructor that directly initializes a field.
 ///
 /// For example: `A(this.field)`.
-abstract class InitializingFormalElement extends ParameterElement {
+abstract class InitializingFormalElement extends LocalParameterElement {
   /// The field initialized by this initializing formal.
   FieldElement get fieldElement;
 
@@ -1324,6 +1327,15 @@
   /// `int.fromEnvironment`, or `String.fromEnvironment`.
   bool get isFromEnvironmentConstructor;
 
+  /// `true` if this constructor is `int.fromEnvironment`.
+  bool get isIntFromEnvironmentConstructor;
+
+  /// `true` if this constructor is `bool.fromEnvironment`.
+  bool get isBoolFromEnvironmentConstructor;
+
+  /// `true` if this constructor is `String.fromEnvironment`.
+  bool get isStringFromEnvironmentConstructor;
+
   /// Use [enclosingClass] instead.
   @deprecated
   get enclosingElement;
@@ -1490,6 +1502,8 @@
 
   Element lookupSuperMemberInLibrary(String memberName, LibraryElement library);
 
+  // TODO(johnniwinther): Clean up semantics. Can the default constructor take
+  // optional arguments? Must it be resolved?
   ConstructorElement lookupDefaultConstructor();
   ConstructorElement lookupConstructor(String name);
 
@@ -1526,6 +1540,10 @@
 abstract class MixinApplicationElement extends ClassElement {
   ClassElement get mixin;
   InterfaceType get mixinType;
+
+  /// If this is an unnamed mixin application [subclass] is the subclass for
+  /// which this mixin application is created.
+  ClassElement get subclass;
 }
 
 /// Enum declaration.
@@ -1605,9 +1623,7 @@
   /// The front-end constant of this metadata annotation.
   ConstantExpression get constant;
   Element get annotatedElement;
-  int get resolutionState;
-  Token get beginToken;
-  Token get endToken;
+  SourceSpan get sourcePosition;
 
   bool get hasNode;
   Node get node;
@@ -1678,6 +1694,10 @@
   /// The element is an implicit forwarding constructor on a mixin application.
   /// No AST or [TreeElements] are provided.
   FORWARDING_CONSTRUCTOR,
+
+  /// The element is the `loadLibrary` getter implicitly defined on a deferred
+  /// prefix.
+  DEFERRED_LOAD_LIBRARY,
 }
 
 /// [ResolvedAst] contains info that define the semantics of an element.
diff --git a/pkg/compiler/lib/src/elements/modelx.dart b/pkg/compiler/lib/src/elements/modelx.dart
index 2381d64..bb331d9 100644
--- a/pkg/compiler/lib/src/elements/modelx.dart
+++ b/pkg/compiler/lib/src/elements/modelx.dart
@@ -5,6 +5,7 @@
 library elements.modelx;
 
 import '../common.dart';
+import '../common/names.dart' show Identifiers;
 import '../common/resolution.dart' show Resolution, ParsingContext;
 import '../compiler.dart' show Compiler;
 import '../constants/constant_constructors.dart';
@@ -206,7 +207,9 @@
   }
 }
 
-class ErroneousElementX extends ElementX implements ErroneousElement {
+class ErroneousElementX extends ElementX
+    with ConstructorElementCommon
+    implements ErroneousElement {
   final MessageKind messageKind;
   final Map messageArguments;
 
@@ -284,9 +287,6 @@
   }
 
   @override
-  bool get isFromEnvironmentConstructor => false;
-
-  @override
   List<DartType> get typeVariables => unsupported();
 }
 
@@ -1260,6 +1260,11 @@
     return visitor.visitPrefixElement(this, arg);
   }
 
+  @override
+  GetterElement get loadLibrary {
+    return isDeferred ? lookupLocalMember(Identifiers.loadLibrary) : null;
+  }
+
   String toString() => '$kind($name)';
 }
 
@@ -1420,11 +1425,22 @@
       // constant for a variable already known to be erroneous.
       return;
     }
-    assert(invariant(this, constantCache == null || constantCache == value,
-        message: "Constant has already been computed for $this. "
-            "Existing constant: "
-            "${constantCache != null ? constantCache.toStructuredText() : ''}, "
-            "New constant: ${value != null ? value.toStructuredText() : ''}."));
+    if (constantCache != null && constantCache != value) {
+      // Allow setting the constant as erroneous. Constants computed during
+      // resolution are locally valid but might be effectively erroneous. For
+      // instance `a ? true : false` where `a` is declared as `const a = m()`.
+      // Since `a` is declared to be constant, the conditional is assumed
+      // valid, but when computing the value we see that it isn't.
+      // TODO(johnniwinther): Remove this exception when all constant
+      // expressions are computed during resolution.
+      assert(invariant(
+          this, value == null || value.kind == ConstantExpressionKind.ERRONEOUS,
+          message: "Constant has already been computed for $this. "
+              "Existing constant: "
+              "${constantCache != null ? constantCache.toStructuredText() : ''}"
+              ", New constant: "
+              "${value != null ? value.toStructuredText() : ''}."));
+    }
     constantCache = value;
   }
 }
@@ -1820,7 +1836,11 @@
 
   MemberElement get memberContext => enclosingElement;
 
-  bool get isLocal => false;
+  @override
+  bool get isFinal => true;
+
+  @override
+  bool get isLocal => true;
 }
 
 class ErroneousInitializingFormalElementX extends ParameterElementX
@@ -1845,7 +1865,9 @@
   DynamicType get type => const DynamicType();
 }
 
-class AbstractFieldElementX extends ElementX implements AbstractFieldElement {
+class AbstractFieldElementX extends ElementX
+    with AbstractFieldElementCommon
+    implements AbstractFieldElement {
   GetterElementX getter;
   SetterElementX setter;
 
@@ -1888,18 +1910,9 @@
     }
   }
 
-  bool get isInstanceMember {
-    return isClassMember && !isStatic;
-  }
-
   accept(ElementVisitor visitor, arg) {
     return visitor.visitAbstractFieldElement(this, arg);
   }
-
-  bool get isAbstract {
-    return getter != null && getter.isAbstract ||
-        setter != null && setter.isAbstract;
-  }
 }
 
 // TODO(johnniwinther): [FunctionSignature] should be merged with
@@ -2170,21 +2183,13 @@
     }
   }
 
-  bool get isFromEnvironmentConstructor {
-    return name == 'fromEnvironment' &&
-        library.isDartCore &&
-        (enclosingClass.name == 'bool' ||
-            enclosingClass.name == 'int' ||
-            enclosingClass.name == 'String');
-  }
-
   /// Returns the empty list of type variables by default.
   @override
   List<DartType> get typeVariables => functionSignature.typeVariables;
 }
 
 abstract class ConstructorElementX extends FunctionElementX
-    with ConstantConstructorMixin
+    with ConstantConstructorMixin, ConstructorElementCommon
     implements ConstructorElement {
   bool isRedirectingGenerative = false;
 
@@ -2266,7 +2271,7 @@
 
   DeferredLoaderGetterElementX(PrefixElement prefix)
       : this.prefix = prefix,
-        super("loadLibrary", Modifiers.EMPTY, prefix, false) {
+        super(Identifiers.loadLibrary, Modifiers.EMPTY, prefix, false) {
     functionSignature = new FunctionSignatureX(type: new FunctionType(this));
   }
 
@@ -2277,6 +2282,7 @@
   bool get isDeferredLoaderGetter => true;
 
   bool get isTopLevel => true;
+
   // By having position null, the enclosing elements location is printed in
   // error messages.
   Token get position => null;
@@ -2287,6 +2293,13 @@
 
   FunctionExpression get node => null;
 
+  bool get hasResolvedAst => true;
+
+  ResolvedAst get resolvedAst {
+    return new SynthesizedResolvedAst(
+        this, ResolvedAstKind.DEFERRED_LOAD_LIBRARY);
+  }
+
   @override
   SetterElement get setter => null;
 }
@@ -2995,7 +3008,6 @@
   ClassElement get mixin => mixinType != null ? mixinType.element : null;
 
   bool get isMixinApplication => true;
-  bool get isUnnamedMixinApplication => node is! NamedMixinApplication;
   bool get hasConstructor => !constructors.isEmpty;
   bool get hasLocalScopeMembers => !constructors.isEmpty;
 
@@ -3053,14 +3065,20 @@
   Modifiers get modifiers => node.modifiers;
 
   DeclarationSite get declarationSite => this;
+
+  ClassElement get subclass => null;
 }
 
 class UnnamedMixinApplicationElementX extends MixinApplicationElementX {
   final Node node;
+  final ClassElement subclass;
 
   UnnamedMixinApplicationElementX(
-      String name, CompilationUnitElement enclosing, int id, this.node)
-      : super(name, enclosing, id);
+      String name, ClassElement subclass, int id, this.node)
+      : this.subclass = subclass,
+        super(name, subclass.compilationUnit, id);
+
+  bool get isUnnamedMixinApplication => true;
 
   bool get isAbstract => true;
 }
@@ -3200,6 +3218,8 @@
    */
   Token get beginToken;
 
+  Token get endToken;
+
   MetadataAnnotationX([this.resolutionState = STATE_NOT_STARTED]);
 
   MetadataAnnotation ensureResolved(Resolution resolution) {
@@ -3215,6 +3235,11 @@
 
   Node parseNode(ParsingContext parsing);
 
+  SourceSpan get sourcePosition {
+    Uri uri = annotatedElement.compilationUnit.script.resourceUri;
+    return new SourceSpan.fromTokens(uri, beginToken, endToken);
+  }
+
   String toString() => 'MetadataAnnotation($constant, $resolutionState)';
 }
 
diff --git a/pkg/compiler/lib/src/enqueue.dart b/pkg/compiler/lib/src/enqueue.dart
index 29ae8ea..01b258e 100644
--- a/pkg/compiler/lib/src/enqueue.dart
+++ b/pkg/compiler/lib/src/enqueue.dart
@@ -199,25 +199,12 @@
       // classes, which may not be the case when a native class is subclassed.
       if (compiler.backend.isNative(cls)) {
         compiler.world.registerUsedElement(member);
-        nativeEnqueuer.handleFieldAnnotations(member);
         if (universe.hasInvokedGetter(member, compiler.world) ||
             universe.hasInvocation(member, compiler.world)) {
-          nativeEnqueuer.registerFieldLoad(member);
-          // In handleUnseenSelector we can't tell if the field is loaded or
-          // stored.  We need the basic algorithm to be Church-Rosser, since the
-          // resolution 'reduction' order is different to the codegen order. So
-          // register that the field is also stored.  In other words: if we
-          // don't register the store here during resolution, the store could be
-          // registered during codegen on the handleUnseenSelector path, and
-          // cause the set of codegen elements to include unresolved elements.
-          nativeEnqueuer.registerFieldStore(member);
           addToWorkList(member);
           return;
         }
         if (universe.hasInvokedSetter(member, compiler.world)) {
-          nativeEnqueuer.registerFieldStore(member);
-          // See comment after registerFieldLoad above.
-          nativeEnqueuer.registerFieldLoad(member);
           addToWorkList(member);
           return;
         }
@@ -462,6 +449,7 @@
     bool includeLibrary =
         shouldIncludeElementDueToMirrors(lib, includedEnclosing: false);
     lib.forEachLocalMember((Element member) {
+      if (member.isInjected) return;
       if (member.isClass) {
         enqueueReflectiveElementsInClass(member, recents, includeLibrary);
       } else {
@@ -549,25 +537,6 @@
         if (member.isFunction && selector.isGetter) {
           registerClosurizedMember(member);
         }
-        if (member.isField &&
-            compiler.backend.isNative(member.enclosingClass)) {
-          if (selector.isGetter || selector.isCall) {
-            nativeEnqueuer.registerFieldLoad(member);
-            // We have to also handle storing to the field because we only get
-            // one look at each member and there might be a store we have not
-            // seen yet.
-            // TODO(sra): Process fields for storing separately.
-            nativeEnqueuer.registerFieldStore(member);
-          } else {
-            assert(selector.isSetter);
-            nativeEnqueuer.registerFieldStore(member);
-            // We have to also handle loading from the field because we only get
-            // one look at each member and there might be a load we have not
-            // seen yet.
-            // TODO(sra): Process fields for storing separately.
-            nativeEnqueuer.registerFieldLoad(member);
-          }
-        }
         addToWorkList(member);
         return true;
       }
@@ -747,6 +716,7 @@
   /// Registers [element] as processed by the resolution enqueuer.
   void registerProcessedElement(AstElement element) {
     processedElements.add(element);
+    compiler.backend.onElementResolved(element);
   }
 
   /**
@@ -810,7 +780,6 @@
       compiler.enabledFunctionApply = true;
     }
 
-    nativeEnqueuer.registerElement(element);
     return true;
   }
 
diff --git a/pkg/compiler/lib/src/inferrer/simple_types_inferrer.dart b/pkg/compiler/lib/src/inferrer/simple_types_inferrer.dart
index a65d3e3..c2956ff 100644
--- a/pkg/compiler/lib/src/inferrer/simple_types_inferrer.dart
+++ b/pkg/compiler/lib/src/inferrer/simple_types_inferrer.dart
@@ -290,10 +290,7 @@
   }
 
   bool isNativeElement(Element element) {
-    if (compiler.backend.isNative(element)) return true;
-    return element.isClassMember &&
-        compiler.backend.isNative(element.enclosingClass) &&
-        element.isField;
+    return compiler.backend.isNative(element);
   }
 
   void analyze(ResolvedAst resolvedAst, ArgumentsTypes arguments);
diff --git a/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart b/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart
index f81a6ca..f8ae6ea 100644
--- a/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart
+++ b/pkg/compiler/lib/src/inferrer/type_graph_inferrer.dart
@@ -856,20 +856,27 @@
             if (constant != null) {
               ConstantValue value =
                   compiler.backend.constants.getConstantValue(constant);
-              assert(invariant(fieldElement, value != null,
-                  message: "Constant expression without value: "
-                      "${constant.toStructuredText()}."));
-              if (value.isFunction) {
-                FunctionConstantValue functionConstant = value;
-                type = types.allocateClosure(node, functionConstant.element);
+              if (value != null) {
+                if (value.isFunction) {
+                  FunctionConstantValue functionConstant = value;
+                  type = types.allocateClosure(node, functionConstant.element);
+                } else {
+                  // Although we might find a better type, we have to keep
+                  // the old type around to ensure that we get a complete view
+                  // of the type graph and do not drop any flow edges.
+                  TypeMask refinedType = computeTypeMask(compiler, value);
+                  assert(TypeMask.assertIsNormalized(refinedType, classWorld));
+                  type = new NarrowTypeInformation(type, refinedType);
+                  types.allocatedTypes.add(type);
+                }
               } else {
-                // Although we might find a better type, we have to keep
-                // the old type around to ensure that we get a complete view
-                // of the type graph and do not drop any flow edges.
-                TypeMask refinedType = computeTypeMask(compiler, value);
-                assert(TypeMask.assertIsNormalized(refinedType, classWorld));
-                type = new NarrowTypeInformation(type, refinedType);
-                types.allocatedTypes.add(type);
+                assert(invariant(
+                    fieldElement,
+                    fieldElement.isInstanceMember ||
+                        constant.isImplicit ||
+                        constant.isPotential,
+                    message: "Constant expression without value: "
+                        "${constant.toStructuredText()}."));
               }
             }
           }
diff --git a/pkg/compiler/lib/src/inferrer/type_graph_nodes.dart b/pkg/compiler/lib/src/inferrer/type_graph_nodes.dart
index 2bc3670..3b0127a 100644
--- a/pkg/compiler/lib/src/inferrer/type_graph_nodes.dart
+++ b/pkg/compiler/lib/src/inferrer/type_graph_nodes.dart
@@ -473,7 +473,7 @@
       if (element.isField) {
         return inferrer
             .typeOfNativeBehavior(
-                native.NativeBehavior.ofFieldLoad(element, inferrer.compiler))
+                inferrer.backend.getNativeFieldLoadBehavior(element))
             .type;
       } else {
         assert(element.isFunction ||
@@ -487,22 +487,25 @@
         } else {
           return inferrer
               .typeOfNativeBehavior(
-                  native.NativeBehavior.ofMethod(element, inferrer.compiler))
+                  inferrer.backend.getNativeMethodBehavior(element))
               .type;
         }
       }
     }
 
     Compiler compiler = inferrer.compiler;
-    if (element.declaration == compiler.intEnvironment) {
-      giveUp(inferrer);
-      return compiler.typesTask.intType.nullable();
-    } else if (element.declaration == compiler.boolEnvironment) {
-      giveUp(inferrer);
-      return compiler.typesTask.boolType.nullable();
-    } else if (element.declaration == compiler.stringEnvironment) {
-      giveUp(inferrer);
-      return compiler.typesTask.stringType.nullable();
+    if (element.isConstructor) {
+      ConstructorElement constructor = element;
+      if (constructor.isIntFromEnvironmentConstructor) {
+        giveUp(inferrer);
+        return compiler.typesTask.intType.nullable();
+      } else if (constructor.isBoolFromEnvironmentConstructor) {
+        giveUp(inferrer);
+        return compiler.typesTask.boolType.nullable();
+      } else if (constructor.isStringFromEnvironmentConstructor) {
+        giveUp(inferrer);
+        return compiler.typesTask.stringType.nullable();
+      }
     }
     return null;
   }
diff --git a/pkg/compiler/lib/src/js_backend/backend.dart b/pkg/compiler/lib/src/js_backend/backend.dart
index e8f43ca..cbae712 100644
--- a/pkg/compiler/lib/src/js_backend/backend.dart
+++ b/pkg/compiler/lib/src/js_backend/backend.dart
@@ -40,6 +40,11 @@
   final Map<FunctionElement, int> _cachedDecisions =
       new Map<FunctionElement, int>();
 
+  /// Returns the current cache decision. This should only be used for testing.
+  int getCurrentCacheDecisionForTesting(Element element) {
+    return _cachedDecisions[element];
+  }
+
   // Returns `true`/`false` if we have a cached decision.
   // Returns `null` otherwise.
   bool canInline(FunctionElement element, {bool insideLoop}) {
@@ -449,6 +454,9 @@
   /// these constants must be registered.
   final List<Dependency> metadataConstants = <Dependency>[];
 
+  /// Set of elements for which metadata has been registered as dependencies.
+  final Set<Element> _registeredMetadata = new Set<Element>();
+
   /// List of elements that the user has requested for reflection.
   final Set<Element> targetsUsed = new Set<Element>();
 
@@ -736,6 +744,56 @@
   @override
   bool isNative(Element element) => nativeData.isNative(element);
 
+  /// Returns the [NativeBehavior] for calling the native [method].
+  native.NativeBehavior getNativeMethodBehavior(FunctionElement method) {
+    return nativeData.getNativeMethodBehavior(method);
+  }
+
+  /// Returns the [NativeBehavior] for reading from the native [field].
+  native.NativeBehavior getNativeFieldLoadBehavior(FieldElement field) {
+    return nativeData.getNativeFieldLoadBehavior(field);
+  }
+
+  /// Returns the [NativeBehavior] for writing to the native [field].
+  native.NativeBehavior getNativeFieldStoreBehavior(FieldElement field) {
+    return nativeData.getNativeFieldStoreBehavior(field);
+  }
+
+  @override
+  void resolveNativeElement(Element element, NativeRegistry registry) {
+    if (element.isFunction ||
+        element.isConstructor ||
+        element.isGetter ||
+        element.isSetter) {
+      compiler.enqueuer.resolution.nativeEnqueuer
+          .handleMethodAnnotations(element);
+      if (isNative(element)) {
+        native.NativeBehavior behavior =
+            native.NativeBehavior.ofMethod(element, compiler);
+        nativeData.setNativeMethodBehavior(element, behavior);
+        registry.registerNativeData(behavior);
+      }
+    } else if (element.isField) {
+      compiler.enqueuer.resolution.nativeEnqueuer
+          .handleFieldAnnotations(element);
+      if (isNative(element)) {
+        native.NativeBehavior fieldLoadBehavior =
+            native.NativeBehavior.ofFieldLoad(element, compiler);
+        native.NativeBehavior fieldStoreBehavior =
+            native.NativeBehavior.ofFieldStore(element, compiler);
+        nativeData.setNativeFieldLoadBehavior(element, fieldLoadBehavior);
+        nativeData.setNativeFieldStoreBehavior(element, fieldStoreBehavior);
+
+        // TODO(sra): Process fields for storing separately.
+        // We have to handle both loading and storing to the field because we
+        // only get one look at each member and there might be a load or store
+        // we have not seen yet.
+        registry.registerNativeData(fieldLoadBehavior);
+        registry.registerNativeData(fieldStoreBehavior);
+      }
+    }
+  }
+
   bool isNativeOrExtendsNative(ClassElement element) {
     if (element == null) return false;
     if (isNative(element) || isJsInterop(element)) {
@@ -997,21 +1055,12 @@
         // helper so we register a use of that.
         registry.registerStaticUse(new StaticUse.staticInvoke(
             // TODO(johnniwinther): Find the right [CallStructure].
-
             helpers.createRuntimeType,
             null));
       }
     }
   }
 
-  void registerMetadataConstant(MetadataAnnotation metadata,
-      Element annotatedElement, Registry registry) {
-    assert(registry.isForResolution);
-    ConstantValue constant = constants.getConstantValueForMetadata(metadata);
-    registerCompileTimeConstant(constant, registry);
-    metadataConstants.add(new Dependency(constant, annotatedElement));
-  }
-
   void registerInstantiatedClass(
       ClassElement cls, Enqueuer enqueuer, Registry registry) {
     _processClass(cls, enqueuer, registry);
@@ -1222,6 +1271,7 @@
     super.onResolutionComplete();
     computeMembersNeededForReflection();
     rti.computeClassesNeedingRti();
+    _registeredMetadata.clear();
   }
 
   onTypeInferenceComplete() {
@@ -1477,17 +1527,24 @@
       ConstantExpression constant = variableElement.constant;
       if (constant != null) {
         ConstantValue initialValue = constants.getConstantValue(constant);
-        assert(invariant(variableElement, initialValue != null,
-            message: "Constant expression without value: "
-                "${constant.toStructuredText()}."));
-        registerCompileTimeConstant(initialValue, work.registry);
-        addCompileTimeConstantForEmission(initialValue);
-        // We don't need to generate code for static or top-level
-        // variables. For instance variables, we may need to generate
-        // the checked setter.
-        if (Elements.isStaticOrTopLevel(element)) {
-          return impactTransformer
-              .transformCodegenImpact(work.registry.worldImpact);
+        if (initialValue != null) {
+          registerCompileTimeConstant(initialValue, work.registry);
+          addCompileTimeConstantForEmission(initialValue);
+          // We don't need to generate code for static or top-level
+          // variables. For instance variables, we may need to generate
+          // the checked setter.
+          if (Elements.isStaticOrTopLevel(element)) {
+            return impactTransformer
+                .transformCodegenImpact(work.registry.worldImpact);
+          }
+        } else {
+          assert(invariant(
+              variableElement,
+              variableElement.isInstanceMember ||
+                  constant.isImplicit ||
+                  constant.isPotential,
+              message: "Constant expression without value: "
+                  "${constant.toStructuredText()}."));
         }
       } else {
         // If the constant-handler was not able to produce a result we have to
@@ -2254,17 +2311,67 @@
       reporter.log('Retaining metadata.');
 
       compiler.libraryLoader.libraries.forEach(retainMetadataOf);
-      for (Dependency dependency in metadataConstants) {
-        registerCompileTimeConstant(dependency.constant,
-            new EagerRegistry('EagerRegistry for ${dependency}', enqueuer));
-      }
-      if (!enqueuer.isResolutionQueue) {
+
+      if (enqueuer.isResolutionQueue && !enqueuer.queueIsClosed) {
+        /// Register the constant value of [metadata] as live in resolution.
+        void registerMetadataConstant(MetadataAnnotation metadata) {
+          ConstantValue constant =
+              constants.getConstantValueForMetadata(metadata);
+          Dependency dependency =
+              new Dependency(constant, metadata.annotatedElement);
+          metadataConstants.add(dependency);
+          registerCompileTimeConstant(dependency.constant,
+              new EagerRegistry('EagerRegistry for ${dependency}', enqueuer));
+        }
+
+        // TODO(johnniwinther): We should have access to all recently processed
+        // elements and process these instead.
+        processMetadata(compiler.enqueuer.resolution.processedElements,
+            registerMetadataConstant);
+      } else {
+        for (Dependency dependency in metadataConstants) {
+          registerCompileTimeConstant(dependency.constant,
+              new EagerRegistry('EagerRegistry for ${dependency}', enqueuer));
+        }
         metadataConstants.clear();
       }
     }
     return true;
   }
 
+  /// Call [registerMetadataConstant] on all metadata from [elements].
+  void processMetadata(Iterable<Element> elements,
+      void onMetadata(MetadataAnnotation metadata)) {
+    void processLibraryMetadata(LibraryElement library) {
+      if (_registeredMetadata.add(library)) {
+        library.metadata.forEach(onMetadata);
+        library.entryCompilationUnit.metadata.forEach(onMetadata);
+        for (ImportElement import in library.imports) {
+          import.metadata.forEach(onMetadata);
+        }
+      }
+    }
+
+    void processElementMetadata(Element element) {
+      if (_registeredMetadata.add(element)) {
+        element.metadata.forEach(onMetadata);
+        if (element.isFunction) {
+          FunctionElement function = element;
+          for (ParameterElement parameter in function.parameters) {
+            parameter.metadata.forEach(onMetadata);
+          }
+        }
+        if (element.enclosingClass != null) {
+          processElementMetadata(element.enclosingClass);
+        } else {
+          processLibraryMetadata(element.library);
+        }
+      }
+    }
+
+    elements.forEach(processElementMetadata);
+  }
+
   void onQueueClosed() {
     lookupMapAnalysis.onQueueClosed();
     jsInteropAnalysis.onQueueClosed();
@@ -2987,6 +3094,8 @@
   final Element annotatedElement;
 
   const Dependency(this.constant, this.annotatedElement);
+
+  String toString() => '$annotatedElement:${constant.toStructuredText()}';
 }
 
 class JavaScriptImpactStrategy extends ImpactStrategy {
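
The new processMetadata helper above registers metadata constants lazily: it walks an element's own annotations, then the annotations of its enclosing class or, failing that, of its library and imports, and uses _registeredMetadata so that each element is visited at most once. The following standalone sketch (plain Dart; Node, parent and the annotation strings are illustrative stand-ins, not compiler classes) shows the same deduplicated walk:

class Node {
  final String name;
  final List<String> metadata;
  final Node parent; // Enclosing class or library; null at the root.
  Node(this.name, this.metadata, [this.parent]);
}

/// Calls [onMetadata] for the annotations of each node and its enclosing
/// nodes, visiting every node at most once.
void processMetadata(Iterable<Node> nodes, void onMetadata(String m)) {
  final Set<Node> registered = new Set<Node>();
  void process(Node node) {
    if (registered.add(node)) {
      node.metadata.forEach(onMetadata);
      if (node.parent != null) process(node.parent);
    }
  }

  nodes.forEach(process);
}

void main() {
  var lib = new Node('lib', ['@Deprecated("testing")']);
  var cls = new Node('C', [], lib);
  var foo = new Node('C.foo', ['@override'], cls);
  var bar = new Node('C.bar', ['@override'], cls);
  // The library annotation is reported once although both members reach it.
  processMetadata([foo, bar], print);
}
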
diff --git a/pkg/compiler/lib/src/js_backend/backend_serialization.dart b/pkg/compiler/lib/src/js_backend/backend_serialization.dart
index d0594f4..97da2f5 100644
--- a/pkg/compiler/lib/src/js_backend/backend_serialization.dart
+++ b/pkg/compiler/lib/src/js_backend/backend_serialization.dart
@@ -4,6 +4,7 @@
 
 library js_backend.serialization;
 
+import '../common.dart';
 import '../common/backend_api.dart' show BackendSerialization;
 import '../dart_types.dart';
 import '../elements/elements.dart';
@@ -38,6 +39,9 @@
 const Key JS_INTEROP_NAME = const Key('jsInteropName');
 const Key NATIVE_MEMBER_NAME = const Key('nativeMemberName');
 const Key NATIVE_CLASS_TAG_INFO = const Key('nativeClassTagInfo');
+const Key NATIVE_METHOD_BEHAVIOR = const Key('nativeMethodBehavior');
+const Key NATIVE_FIELD_LOAD_BEHAVIOR = const Key('nativeFieldLoadBehavior');
+const Key NATIVE_FIELD_STORE_BEHAVIOR = const Key('nativeFieldStoreBehavior');
 
 class JavaScriptBackendSerializer implements SerializerPlugin {
   final JavaScriptBackend backend;
@@ -63,41 +67,31 @@
     if (nativeClassTagInfo != null) {
       getEncoder().setString(NATIVE_CLASS_TAG_INFO, nativeClassTagInfo);
     }
-  }
-
-  /// Returns a list of the [DartType]s in [types].
-  static List<DartType> filterDartTypes(List types) {
-    return types.where((type) => type is DartType).toList();
-  }
-
-  /// Returns a list of the names of the [SpecialType]s in [types].
-  static List<String> filterSpecialTypes(List types) {
-    return types
-        .where((type) => type is SpecialType)
-        .map((SpecialType type) => type.name)
-        .toList();
+    NativeBehavior nativeMethodBehavior =
+        backend.nativeData.nativeMethodBehavior[element];
+    if (nativeMethodBehavior != null) {
+      NativeBehaviorSerialization.serializeNativeBehavior(nativeMethodBehavior,
+          getEncoder().createObject(NATIVE_METHOD_BEHAVIOR));
+    }
+    NativeBehavior nativeFieldLoadBehavior =
+        backend.nativeData.nativeFieldLoadBehavior[element];
+    if (nativeFieldLoadBehavior != null) {
+      NativeBehaviorSerialization.serializeNativeBehavior(
+          nativeFieldLoadBehavior,
+          getEncoder().createObject(NATIVE_FIELD_LOAD_BEHAVIOR));
+    }
+    NativeBehavior nativeFieldStoreBehavior =
+        backend.nativeData.nativeFieldStoreBehavior[element];
+    if (nativeFieldStoreBehavior != null) {
+      NativeBehaviorSerialization.serializeNativeBehavior(
+          nativeFieldStoreBehavior,
+          getEncoder().createObject(NATIVE_FIELD_STORE_BEHAVIOR));
+    }
   }
 
   @override
   void onData(NativeBehavior behavior, ObjectEncoder encoder) {
-    encoder.setTypes(
-        DART_TYPES_RETURNED, filterDartTypes(behavior.typesReturned));
-    encoder.setStrings(
-        SPECIAL_TYPES_RETURNED, filterSpecialTypes(behavior.typesReturned));
-
-    encoder.setTypes(
-        DART_TYPES_INSTANTIATED, filterDartTypes(behavior.typesInstantiated));
-    encoder.setStrings(SPECIAL_TYPES_INSTANTIATED,
-        filterSpecialTypes(behavior.typesInstantiated));
-
-    if (behavior.codeTemplateText != null) {
-      encoder.setString(CODE_TEMPLATE, behavior.codeTemplateText);
-    }
-
-    encoder.setInt(SIDE_EFFECTS, behavior.sideEffects.flags);
-    encoder.setEnum(THROW_BEHAVIOR, behavior.throwBehavior);
-    encoder.setBool(IS_ALLOCATION, behavior.isAllocation);
-    encoder.setBool(USE_GVN, behavior.useGvn);
+    NativeBehaviorSerialization.serializeNativeBehavior(behavior, encoder);
   }
 }
 
@@ -125,11 +119,73 @@
       if (nativeClassTagInfo != null) {
         backend.nativeData.nativeClassTagInfo[element] = nativeClassTagInfo;
       }
+      ObjectDecoder nativeMethodBehavior =
+          decoder.getObject(NATIVE_METHOD_BEHAVIOR, isOptional: true);
+      if (nativeMethodBehavior != null) {
+        backend.nativeData.nativeMethodBehavior[element] =
+            NativeBehaviorSerialization
+                .deserializeNativeBehavior(nativeMethodBehavior);
+      }
+      ObjectDecoder nativeFieldLoadBehavior =
+          decoder.getObject(NATIVE_FIELD_LOAD_BEHAVIOR, isOptional: true);
+      if (nativeFieldLoadBehavior != null) {
+        backend.nativeData.nativeFieldLoadBehavior[element] =
+            NativeBehaviorSerialization
+                .deserializeNativeBehavior(nativeFieldLoadBehavior);
+      }
+      ObjectDecoder nativeFieldStoreBehavior =
+          decoder.getObject(NATIVE_FIELD_STORE_BEHAVIOR, isOptional: true);
+      if (nativeFieldStoreBehavior != null) {
+        backend.nativeData.nativeFieldStoreBehavior[element] =
+            NativeBehaviorSerialization
+                .deserializeNativeBehavior(nativeFieldStoreBehavior);
+      }
     }
   }
 
   @override
   NativeBehavior onData(ObjectDecoder decoder) {
+    return NativeBehaviorSerialization.deserializeNativeBehavior(decoder);
+  }
+}
+
+class NativeBehaviorSerialization {
+  /// Returns a list of the [DartType]s in [types].
+  static List<DartType> filterDartTypes(List types) {
+    return types.where((type) => type is DartType).toList();
+  }
+
+  /// Returns a list of the names of the [SpecialType]s in [types].
+  static List<String> filterSpecialTypes(List types) {
+    return types
+        .where((type) => type is SpecialType)
+        .map((SpecialType type) => type.name)
+        .toList();
+  }
+
+  static void serializeNativeBehavior(
+      NativeBehavior behavior, ObjectEncoder encoder) {
+    encoder.setTypes(
+        DART_TYPES_RETURNED, filterDartTypes(behavior.typesReturned));
+    encoder.setStrings(
+        SPECIAL_TYPES_RETURNED, filterSpecialTypes(behavior.typesReturned));
+
+    encoder.setTypes(
+        DART_TYPES_INSTANTIATED, filterDartTypes(behavior.typesInstantiated));
+    encoder.setStrings(SPECIAL_TYPES_INSTANTIATED,
+        filterSpecialTypes(behavior.typesInstantiated));
+
+    if (behavior.codeTemplateText != null) {
+      encoder.setString(CODE_TEMPLATE, behavior.codeTemplateText);
+    }
+
+    encoder.setInt(SIDE_EFFECTS, behavior.sideEffects.flags);
+    encoder.setEnum(THROW_BEHAVIOR, behavior.throwBehavior);
+    encoder.setBool(IS_ALLOCATION, behavior.isAllocation);
+    encoder.setBool(USE_GVN, behavior.useGvn);
+  }
+
+  static NativeBehavior deserializeNativeBehavior(ObjectDecoder decoder) {
     SideEffects sideEffects =
         new SideEffects.fromFlags(decoder.getInt(SIDE_EFFECTS));
     NativeBehavior behavior = new NativeBehavior.internal(sideEffects);
diff --git a/pkg/compiler/lib/src/js_backend/constant_emitter.dart b/pkg/compiler/lib/src/js_backend/constant_emitter.dart
index fc53cb4..eba60d9 100644
--- a/pkg/compiler/lib/src/js_backend/constant_emitter.dart
+++ b/pkg/compiler/lib/src/js_backend/constant_emitter.dart
@@ -64,6 +64,11 @@
     return new jsAst.LiteralNull();
   }
 
+  @override
+  jsAst.Expression visitNonConstant(NonConstantValue constant, [_]) {
+    return new jsAst.LiteralNull();
+  }
+
   static final _exponentialRE = new RegExp('^'
       '\([-+]?\)' // 1: sign
       '\([0-9]+\)' // 2: leading digit(s)
@@ -280,7 +285,7 @@
 
   @override
   jsAst.Expression visitConstructed(ConstructedConstantValue constant, [_]) {
-    Element element = constant.type.element;
+    ClassElement element = constant.type.element;
     if (backend.isForeign(element) && element.name == 'JS_CONST') {
       StringConstantValue str = constant.fields.values.single;
       String value = str.primitiveValue.slowToString();
@@ -288,9 +293,10 @@
     }
     jsAst.Expression constructor =
         backend.emitter.constructorAccess(constant.type.element);
-    List<jsAst.Expression> fields = constant.fields.values
-        .map(constantReferenceGenerator)
-        .toList(growable: false);
+    List<jsAst.Expression> fields = <jsAst.Expression>[];
+    element.forEachInstanceField((_, FieldElement field) {
+      fields.add(constantReferenceGenerator(constant.fields[field]));
+    }, includeSuperAndInjectedMembers: true);
     jsAst.New instantiation = new jsAst.New(constructor, fields);
     return maybeAddTypeArguments(constant.type, instantiation);
   }
diff --git a/pkg/compiler/lib/src/js_backend/constant_handler_javascript.dart b/pkg/compiler/lib/src/js_backend/constant_handler_javascript.dart
index da3efb5..8e6de54 100644
--- a/pkg/compiler/lib/src/js_backend/constant_handler_javascript.dart
+++ b/pkg/compiler/lib/src/js_backend/constant_handler_javascript.dart
@@ -116,6 +116,8 @@
       new Map<Node, ConstantExpression>();
 
   // Constants computed for metadata.
+  // TODO(johnniwinther): Remove this when no longer used by
+  // poi/forget_element_test.
   final Map<MetadataAnnotation, ConstantExpression> metadataConstantMap =
       new Map<MetadataAnnotation, ConstantExpression>();
 
@@ -132,28 +134,18 @@
         element, definitions,
         isConst: isConst, checkType: checkType);
     if (!isConst && value == null) {
-      lazyStatics.add(element);
+      registerLazyStatic(element);
     }
     return value;
   }
 
-  void addCompileTimeConstantForEmission(ConstantValue constant) {
-    compiledConstants.add(constant);
+  @override
+  void registerLazyStatic(FieldElement element) {
+    lazyStatics.add(element);
   }
 
-  /**
-   * Returns an [Iterable] of static non final fields that need to be
-   * initialized. The fields list must be evaluated in order since they might
-   * depend on each other.
-   */
-  Iterable<VariableElement> getStaticNonFinalFieldsForEmission() {
-    return initialVariableValues.keys.where((element) {
-      return element.kind == ElementKind.FIELD &&
-          !element.isInstanceMember &&
-          !element.modifiers.isFinal &&
-          // The const fields are all either emitted elsewhere or inlined.
-          !element.modifiers.isConst;
-    });
+  void addCompileTimeConstantForEmission(ConstantValue constant) {
+    compiledConstants.add(constant);
   }
 
   List<VariableElement> getLazilyInitializedFieldsForEmission() {
@@ -189,15 +181,6 @@
     return result;
   }
 
-  ConstantValue getInitialValueFor(VariableElement element) {
-    ConstantExpression initialValue =
-        initialVariableValues[element.declaration];
-    if (initialValue == null) {
-      reporter.internalError(element, "No initial value for given element.");
-    }
-    return getConstantValue(initialValue);
-  }
-
   ConstantExpression compileNode(Node node, TreeElements elements,
       {bool enforceConst: true}) {
     return compileNodeWithDefinitions(node, elements, isConst: enforceConst);
@@ -231,9 +214,10 @@
   }
 
   ConstantValue getConstantValueForMetadata(MetadataAnnotation metadata) {
-    return getConstantValue(metadataConstantMap[metadata]);
+    return getConstantValue(metadata.constant);
   }
 
+  @override
   ConstantExpression compileMetadata(
       MetadataAnnotation metadata, Node node, TreeElements elements) {
     ConstantExpression constant =
@@ -249,29 +233,6 @@
       element.node.accept(new ForgetConstantNodeVisitor(this));
     }
   }
-
-  @override
-  ConstantValue getConstantValue(ConstantExpression expression) {
-    assert(invariant(CURRENT_ELEMENT_SPANNABLE, expression != null,
-        message: "ConstantExpression is null in getConstantValue."));
-    // TODO(johhniwinther): ensure expressions have been evaluated at this
-    // point. This can't be enabled today due to dartbug.com/26406.
-    if (compiler.serialization.supportsDeserialization) {
-      evaluate(expression);
-    }
-    ConstantValue value = super.getConstantValue(expression);
-    if (value == null &&
-        expression != null &&
-        expression.kind == ConstantExpressionKind.ERRONEOUS) {
-      // TODO(johnniwinther): When the Dart constant system sees a constant
-      // expression as erroneous but the JavaScript constant system finds it ok
-      // we have store a constant value for the erroneous constant expression.
-      // Ensure the computed constant expressions are always the same; that only
-      // the constant values may be different.
-      value = new NullConstantValue();
-    }
-    return value;
-  }
 }
 
 class ForgetConstantElementVisitor
diff --git a/pkg/compiler/lib/src/js_backend/constant_system_javascript.dart b/pkg/compiler/lib/src/js_backend/constant_system_javascript.dart
index f3d2b4c..bfd156e 100644
--- a/pkg/compiler/lib/src/js_backend/constant_system_javascript.dart
+++ b/pkg/compiler/lib/src/js_backend/constant_system_javascript.dart
@@ -349,14 +349,15 @@
   @override
   ConstantValue createSymbol(Compiler compiler, String text) {
     // TODO(johnniwinther): Create a backend agnostic value.
-    InterfaceType type = compiler.coreTypes.symbolType;
+    JavaScriptBackend backend = compiler.backend;
+    ClassElement symbolClass = backend.helpers.symbolImplementationClass;
+    InterfaceType type = symbolClass.rawType;
     ConstantValue argument = createString(new DartString.literal(text));
     Map<FieldElement, ConstantValue> fields = <FieldElement, ConstantValue>{};
-    JavaScriptBackend backend = compiler.backend;
-    backend.helpers.symbolImplementationClass.forEachInstanceField(
+    symbolClass.forEachInstanceField(
         (ClassElement enclosingClass, FieldElement field) {
       fields[field] = argument;
-    });
+    }, includeSuperAndInjectedMembers: true);
     assert(fields.length == 1);
     return new ConstructedConstantValue(type, fields);
   }
diff --git a/pkg/compiler/lib/src/js_backend/js_backend.dart b/pkg/compiler/lib/src/js_backend/js_backend.dart
index 900a965..5e5424f 100644
--- a/pkg/compiler/lib/src/js_backend/js_backend.dart
+++ b/pkg/compiler/lib/src/js_backend/js_backend.dart
@@ -13,7 +13,7 @@
 import '../closure.dart';
 import '../common.dart';
 import '../common/backend_api.dart'
-    show Backend, ImpactTransformer, ForeignResolver;
+    show Backend, ImpactTransformer, ForeignResolver, NativeRegistry;
 import '../common/codegen.dart' show CodegenImpact, CodegenWorkItem;
 import '../common/names.dart' show Identifiers, Names, Selectors, Uris;
 import '../common/registry.dart' show EagerRegistry, Registry;
@@ -38,8 +38,6 @@
 import '../diagnostics/invariant.dart' show DEBUG_MODE;
 import '../dump_info.dart' show DumpInfoTask;
 import '../elements/elements.dart';
-import '../elements/modelx.dart'
-    show ConstructorBodyElementX, FieldElementX, DeferredLoaderGetterElementX;
 import '../elements/visitor.dart' show BaseElementVisitor;
 import '../enqueue.dart' show Enqueuer, ResolutionEnqueuer;
 import '../io/code_output.dart';
diff --git a/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart b/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart
index 448f979..83f6284 100644
--- a/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart
+++ b/pkg/compiler/lib/src/js_backend/js_interop_analysis.dart
@@ -5,8 +5,14 @@
 /// Analysis to determine how to generate code for typed JavaScript interop.
 library compiler.src.js_backend.js_interop_analysis;
 
+import '../common.dart';
 import '../constants/values.dart'
     show ConstantValue, ConstructedConstantValue, StringConstantValue;
+import '../dart_types.dart'
+    show
+        DartType,
+        DynamicType,
+        FunctionType;
 import '../diagnostics/messages.dart' show MessageKind;
 import '../elements/elements.dart'
     show
@@ -54,6 +60,8 @@
 
   void processJsInteropAnnotation(Element e) {
     for (MetadataAnnotation annotation in e.implementation.metadata) {
+      // TODO(johnniwinther): Avoid processing unresolved elements.
+      if (annotation.constant == null) continue;
       ConstantValue constant =
           backend.compiler.constants.getConstantValue(annotation.constant);
       if (constant == null || constant is! ConstructedConstantValue) continue;
@@ -182,4 +190,14 @@
     });
     return new jsAst.Block(statements);
   }
+
+  FunctionType buildJsFunctionType() {
+    // TODO(jacobr): consider using codegenWorld.isChecks to determine the
+    // range of positional arguments that need to be supported by JavaScript
+    // function types.
+    return new FunctionType.synthesized(
+      const DynamicType(),
+      [],
+      new List<DartType>.filled(16, const DynamicType()));
+  }
 }
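
The buildJsFunctionType helper above synthesizes a function type with a dynamic return type, no required parameters, and sixteen optional positional parameters of type dynamic; its encoding is installed as the signature of jsJavaScriptFunctionClass in runtime_type_generator.dart further down. A plain-Dart analogue of such a signature, with four optionals standing in for sixteen purely for illustration:

// Dynamic return type, no required parameters, only optional positional
// dynamic parameters.
typedef dynamic JsFunctionLike([dynamic a, dynamic b, dynamic c, dynamic d]);

dynamic _acceptAnything([dynamic a, dynamic b, dynamic c, dynamic d]) => a;

void main() {
  JsFunctionLike f = _acceptAnything;
  print(f());     // null
  print(f(1, 2)); // 1
}
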
diff --git a/pkg/compiler/lib/src/js_backend/namer.dart b/pkg/compiler/lib/src/js_backend/namer.dart
index c0f21c5..c4a86ef 100644
--- a/pkg/compiler/lib/src/js_backend/namer.dart
+++ b/pkg/compiler/lib/src/js_backend/namer.dart
@@ -331,6 +331,7 @@
   final String callPrefix = 'call';
   final String callCatchAllName = r'call*';
   final String callNameField = r'$callName';
+  final String stubNameField = r'$stubName';
   final String reflectableField = r'$reflectable';
   final String reflectionInfoField = r'$reflectionInfo';
   final String reflectionNameField = r'$reflectionName';
@@ -1672,6 +1673,11 @@
   }
 
   @override
+  void visitNonConstant(NonConstantValue constant, [_]) {
+    add('null');
+  }
+
+  @override
   void visitInt(IntConstantValue constant, [_]) {
     // No `addRoot` since IntConstants are always inlined.
     if (constant.primitiveValue < 0) {
@@ -1730,10 +1736,10 @@
   @override
   void visitConstructed(ConstructedConstantValue constant, [_]) {
     addRoot(constant.type.element.name);
-    for (ConstantValue value in constant.fields.values) {
-      _visit(value);
+    constant.type.element.forEachInstanceField((_, FieldElement field) {
       if (failed) return;
-    }
+      _visit(constant.fields[field]);
+    }, includeSuperAndInjectedMembers: true);
   }
 
   @override
@@ -1816,6 +1822,9 @@
   int visitNull(NullConstantValue constant, [_]) => 1;
 
   @override
+  int visitNonConstant(NonConstantValue constant, [_]) => 1;
+
+  @override
   int visitBool(BoolConstantValue constant, [_]) {
     return constant.isTrue ? 2 : 3;
   }
@@ -1854,9 +1863,9 @@
   @override
   int visitConstructed(ConstructedConstantValue constant, [_]) {
     int hash = _hashString(3, constant.type.element.name);
-    for (ConstantValue value in constant.fields.values) {
-      hash = _combine(hash, _visit(value));
-    }
+    constant.type.element.forEachInstanceField((_, FieldElement field) {
+      hash = _combine(hash, _visit(constant.fields[field]));
+    }, includeSuperAndInjectedMembers: true);
     return hash;
   }
 
diff --git a/pkg/compiler/lib/src/js_backend/native_data.dart b/pkg/compiler/lib/src/js_backend/native_data.dart
index 49a207d..67b473b 100644
--- a/pkg/compiler/lib/src/js_backend/native_data.dart
+++ b/pkg/compiler/lib/src/js_backend/native_data.dart
@@ -6,7 +6,8 @@
 
 import '../common.dart';
 import '../elements/elements.dart'
-    show ClassElement, Element, FunctionElement, MemberElement;
+    show ClassElement, Element, FieldElement, FunctionElement, MemberElement;
+import '../native/behavior.dart' show NativeBehavior;
 
 /// Additional element information for native classes and methods and js-interop
 /// methods.
@@ -22,6 +23,18 @@
   /// [setNativeClassTagInfo].
   Map<ClassElement, String> nativeClassTagInfo = <ClassElement, String>{};
 
+  /// Cache for [NativeBehavior]s for calling native methods.
+  Map<FunctionElement, NativeBehavior> nativeMethodBehavior =
+      <FunctionElement, NativeBehavior>{};
+
+  /// Cache for [NativeBehavior]s for reading from native fields.
+  Map<FieldElement, NativeBehavior> nativeFieldLoadBehavior =
+      <FieldElement, NativeBehavior>{};
+
+  /// Cache for [NativeBehavior]s for writing to native fields.
+  Map<FieldElement, NativeBehavior> nativeFieldStoreBehavior =
+      <FieldElement, NativeBehavior>{};
+
   /// Returns `true` if [element] is explicitly marked as part of JsInterop.
   bool _isJsInterop(Element element) {
     return jsInteropNames.containsKey(element.declaration);
@@ -163,4 +176,44 @@
   bool hasNativeTagsForcedNonLeaf(ClassElement cls) {
     return getNativeTagsOfClassRaw(cls).contains('!nonleaf');
   }
+
+  /// Returns the [NativeBehavior] for calling the native [method].
+  NativeBehavior getNativeMethodBehavior(FunctionElement method) {
+    assert(invariant(method, nativeMethodBehavior.containsKey(method),
+        message: "No native method behavior has been computed for $method."));
+    return nativeMethodBehavior[method];
+  }
+
+  /// Returns the [NativeBehavior] for reading from the native [field].
+  NativeBehavior getNativeFieldLoadBehavior(FieldElement field) {
+    assert(invariant(field, nativeFieldLoadBehavior.containsKey(field),
+        message: "No native field load behavior has been "
+            "computed for $field."));
+    return nativeFieldLoadBehavior[field];
+  }
+
+  /// Returns the [NativeBehavior] for writing to the native [field].
+  NativeBehavior getNativeFieldStoreBehavior(FieldElement field) {
+    assert(invariant(field, nativeFieldStoreBehavior.containsKey(field),
+        message: "No native field store behavior has been "
+            "computed for $field."));
+    return nativeFieldStoreBehavior[field];
+  }
+
+  /// Registers the [behavior] for calling the native [method].
+  void setNativeMethodBehavior(
+      FunctionElement method, NativeBehavior behavior) {
+    nativeMethodBehavior[method] = behavior;
+  }
+
+  /// Registers the [behavior] for reading from the native [field].
+  void setNativeFieldLoadBehavior(FieldElement field, NativeBehavior behavior) {
+    nativeFieldLoadBehavior[field] = behavior;
+  }
+
+  /// Registers the [behavior] for writing to the native [field].
+  void setNativeFieldStoreBehavior(
+      FieldElement field, NativeBehavior behavior) {
+    nativeFieldStoreBehavior[field] = behavior;
+  }
 }
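
The new accessors above enforce a compute-then-query contract: resolveNativeElement in backend.dart populates the caches via the set* methods during resolution, and the get* methods assert that a behavior is present before handing it out. A minimal standalone sketch of that contract (BehaviorCache and its type parameters are illustrative, not compiler classes):

class BehaviorCache<K, V> {
  final Map<K, V> _behaviors = <K, V>{};

  /// Registers the computed [behavior] for [key].
  void register(K key, V behavior) {
    _behaviors[key] = behavior;
  }

  /// Returns the behavior for [key]; it must have been registered first.
  V lookup(K key) {
    if (!_behaviors.containsKey(key)) {
      throw new StateError('No behavior has been computed for $key.');
    }
    return _behaviors[key];
  }
}

void main() {
  var cache = new BehaviorCache<String, String>();
  cache.register('method', 'returns:num;effects:none');
  print(cache.lookup('method'));
}
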
diff --git a/pkg/compiler/lib/src/js_backend/no_such_method_registry.dart b/pkg/compiler/lib/src/js_backend/no_such_method_registry.dart
index 4cbc011..d9e8681 100644
--- a/pkg/compiler/lib/src/js_backend/no_such_method_registry.dart
+++ b/pkg/compiler/lib/src/js_backend/no_such_method_registry.dart
@@ -193,8 +193,16 @@
     // At this point we know that this is signature-compatible with
     // Object.noSuchMethod, but it may have more than one argument as long as
     // it only has one required argument.
+    if (!element.hasResolvedAst) {
+      // TODO(johnniwinther): Why do we see unresolved elements here?
+      return false;
+    }
+    ResolvedAst resolvedAst = element.resolvedAst;
+    if (resolvedAst.kind != ResolvedAstKind.PARSED) {
+      return false;
+    }
     String param = element.parameters.first.name;
-    Statement body = element.node.body;
+    Statement body = resolvedAst.body;
     Expression expr;
     if (body is Return && body.isArrowBody) {
       expr = body.expression;
@@ -231,7 +239,15 @@
   }
 
   bool _hasThrowingSyntax(FunctionElement element) {
-    Statement body = element.node.body;
+    if (!element.hasResolvedAst) {
+      // TODO(johnniwinther): Why do we see unresolved elements here?
+      return false;
+    }
+    ResolvedAst resolvedAst = element.resolvedAst;
+    if (resolvedAst.kind != ResolvedAstKind.PARSED) {
+      return false;
+    }
+    Statement body = resolvedAst.body;
     if (body is Return && body.isArrowBody) {
       if (body.expression is Throw) {
         return true;
diff --git a/pkg/compiler/lib/src/js_backend/type_variable_handler.dart b/pkg/compiler/lib/src/js_backend/type_variable_handler.dart
index b3b4354..faf8539 100644
--- a/pkg/compiler/lib/src/js_backend/type_variable_handler.dart
+++ b/pkg/compiler/lib/src/js_backend/type_variable_handler.dart
@@ -9,7 +9,7 @@
  */
 class TypeVariableHandler {
   final Compiler _compiler;
-  FunctionElement _typeVariableConstructor;
+  ConstructorElement _typeVariableConstructor;
 
   /**
    * Set to 'true' on first encounter of a class with type variables.
@@ -77,43 +77,23 @@
     for (TypeVariableType currentTypeVariable in cls.typeVariables) {
       TypeVariableElement typeVariableElement = currentTypeVariable.element;
 
-      AstConstant name = new AstConstant(
-          typeVariableElement,
-          typeVariableElement.node,
-          new StringConstantExpression(currentTypeVariable.name),
-          _backend.constantSystem
-              .createString(new DartString.literal(currentTypeVariable.name)));
       jsAst.Expression boundIndex =
           _metadataCollector.reifyType(typeVariableElement.bound);
       ConstantValue boundValue = new SyntheticConstantValue(
           SyntheticConstantKind.TYPEVARIABLE_REFERENCE, boundIndex);
       ConstantExpression boundExpression =
           new SyntheticConstantExpression(boundValue);
-      AstConstant bound = new AstConstant(typeVariableElement,
-          typeVariableElement.node, boundExpression, boundValue);
-      AstConstant type = new AstConstant(
-          typeVariableElement,
-          typeVariableElement.node,
-          new TypeConstantExpression(cls.rawType),
-          _backend.constantSystem.createType(_backend.compiler, cls.rawType));
-      List<AstConstant> arguments = [type, name, bound];
+      ConstantExpression constant = new ConstructedConstantExpression(
+          _typeVariableConstructor.enclosingClass.thisType,
+          _typeVariableConstructor,
+          const CallStructure.unnamed(3), [
+        new TypeConstantExpression(cls.rawType),
+        new StringConstantExpression(currentTypeVariable.name),
+        new SyntheticConstantExpression(boundValue)
+      ]);
 
-      // TODO(johnniwinther): Support a less front-end specific creation of
-      // constructed constants.
-      AstConstant constant =
-          CompileTimeConstantEvaluator.makeConstructedConstant(
-              _compiler,
-              _backend.constants,
-              typeVariableElement,
-              typeVariableElement.node,
-              typeVariableType,
-              _typeVariableConstructor,
-              typeVariableType,
-              _typeVariableConstructor,
-              const CallStructure.unnamed(3),
-              arguments,
-              arguments);
-      ConstantValue value = constant.value;
+      _backend.constants.evaluate(constant);
+      ConstantValue value = _backend.constants.getConstantValue(constant);
       _backend.registerCompileTimeConstant(value, _compiler.globalDependencies);
       _backend.addCompileTimeConstantForEmission(value);
       _backend.constants.addCompileTimeConstantForEmission(value);
diff --git a/pkg/compiler/lib/src/js_emitter/constant_ordering.dart b/pkg/compiler/lib/src/js_emitter/constant_ordering.dart
index a2a0d7d..27651c2 100644
--- a/pkg/compiler/lib/src/js_emitter/constant_ordering.dart
+++ b/pkg/compiler/lib/src/js_emitter/constant_ordering.dart
@@ -76,6 +76,10 @@
     return 0;
   }
 
+  int visitNonConstant(NonConstantValue a, NonConstantValue b) {
+    return 0;
+  }
+
   int visitInt(IntConstantValue a, IntConstantValue b) {
     return a.primitiveValue.compareTo(b.primitiveValue);
   }
@@ -189,12 +193,14 @@
   static const int INTERCEPTOR = 11;
   static const int SYNTHETIC = 12;
   static const int DEFERRED = 13;
+  static const int NONCONSTANT = 14;
 
   static int kind(ConstantValue constant) =>
       constant.accept(const _KindVisitor(), null);
 
   int visitFunction(FunctionConstantValue a, _) => FUNCTION;
   int visitNull(NullConstantValue a, _) => NULL;
+  int visitNonConstant(NonConstantValue a, _) => NONCONSTANT;
   int visitInt(IntConstantValue a, _) => INT;
   int visitDouble(DoubleConstantValue a, _) => DOUBLE;
   int visitBool(BoolConstantValue a, _) => BOOL;
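
The _KindVisitor above assigns each constant-value class its own integer so that unrelated constants order stably by kind before a per-kind visit method compares their payloads; two classes sharing a kind would end up dispatched into a visit method that expects the other type. A standalone sketch of kind-first comparison (the kinds and payload comparison are made up):

int compareKindThenValue(int kindA, int kindB, int compareWithinKind()) {
  if (kindA != kindB) return kindA.compareTo(kindB);
  return compareWithinKind();
}

void main() {
  // Different kinds: ordered by kind, the payloads are never compared.
  print(compareKindThenValue(2, 5, () => throw 'unreached'));
  // Same kind: fall back to the per-kind comparison.
  print(compareKindThenValue(3, 3, () => 'a'.compareTo('b')));
}
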
diff --git a/pkg/compiler/lib/src/js_emitter/full_emitter/emitter.dart b/pkg/compiler/lib/src/js_emitter/full_emitter/emitter.dart
index 0e02c6c..5bd1cb1 100644
--- a/pkg/compiler/lib/src/js_emitter/full_emitter/emitter.dart
+++ b/pkg/compiler/lib/src/js_emitter/full_emitter/emitter.dart
@@ -614,9 +614,9 @@
     // If the outputUnit does not contain any static non-final fields, then
     // [fields] is `null`.
     if (fields != null) {
-      for (Element element in fields) {
+      for (FieldElement element in fields) {
         reporter.withCurrentElement(element, () {
-          ConstantValue constant = handler.getInitialValueFor(element);
+          ConstantValue constant = handler.getConstantValue(element.constant);
           parts.add(buildInitialization(element, constantReference(constant)));
         });
       }
diff --git a/pkg/compiler/lib/src/js_emitter/program_builder/collector.dart b/pkg/compiler/lib/src/js_emitter/program_builder/collector.dart
index 6c76418..a32f462 100644
--- a/pkg/compiler/lib/src/js_emitter/program_builder/collector.dart
+++ b/pkg/compiler/lib/src/js_emitter/program_builder/collector.dart
@@ -282,18 +282,21 @@
       list.add(element);
     }
 
-    Iterable<VariableElement> staticNonFinalFields = handler
-        .getStaticNonFinalFieldsForEmission()
-        .where(compiler.codegenWorld.allReferencedStaticFields.contains);
+    Iterable<Element> fields = compiler.codegenWorld.allReferencedStaticFields
+        .where((FieldElement field) {
+      if (!field.isConst) {
+        return field.isField &&
+            !field.isInstanceMember &&
+            !field.isFinal &&
+            field.constant != null;
+      } else {
+        // We also need to emit static const fields if they are available for
+        // reflection.
+        return backend.isAccessibleByReflection(field);
+      }
+    });
 
-    Elements.sortedByPosition(staticNonFinalFields).forEach(addToOutputUnit);
-
-    // We also need to emit static const fields if they are available for
-    // reflection.
-    compiler.codegenWorld.allReferencedStaticFields
-        .where((FieldElement field) => field.isConst)
-        .where(backend.isAccessibleByReflection)
-        .forEach(addToOutputUnit);
+    Elements.sortedByPosition(fields).forEach(addToOutputUnit);
   }
 
   void computeNeededLibraries() {
diff --git a/pkg/compiler/lib/src/js_emitter/program_builder/program_builder.dart b/pkg/compiler/lib/src/js_emitter/program_builder/program_builder.dart
index b779ef7..1f81f6d 100644
--- a/pkg/compiler/lib/src/js_emitter/program_builder/program_builder.dart
+++ b/pkg/compiler/lib/src/js_emitter/program_builder/program_builder.dart
@@ -260,9 +260,9 @@
     return staticNonFinalFields.map(_buildStaticField).toList(growable: false);
   }
 
-  StaticField _buildStaticField(Element element) {
+  StaticField _buildStaticField(FieldElement element) {
     JavaScriptConstantCompiler handler = backend.constants;
-    ConstantValue initialValue = handler.getInitialValueFor(element);
+    ConstantValue initialValue = handler.getConstantValue(element.constant);
     // TODO(zarah): The holder should not be registered during building of
     // a static field.
     _registry.registerHolder(namer.globalObjectForConstant(initialValue),
diff --git a/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart b/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart
index f2997aa..ddef40b 100644
--- a/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart
+++ b/pkg/compiler/lib/src/js_emitter/runtime_type_generator.dart
@@ -76,8 +76,8 @@
         FunctionElement method, FunctionType type) {
       assert(method.isImplementation);
       jsAst.Expression thisAccess = new jsAst.This();
-      ClosureClassMap closureData =
-          compiler.closureToClassMapper.closureMappingCache[method.node];
+      ClosureClassMap closureData = compiler
+          .closureToClassMapper.closureMappingCache[method.resolvedAst.node];
       if (closureData != null) {
         ClosureFieldElement thisLocal =
             closureData.freeVariableMap[closureData.thisLocal];
@@ -132,6 +132,17 @@
     _generateIsTestsOn(classElement, generateIsTest,
         generateFunctionTypeSignature, generateSubstitution, generateTypeCheck);
 
+    if (classElement == backend.helpers.jsJavaScriptFunctionClass) {
+      var type = backend.jsInteropAnalysis.buildJsFunctionType();
+      if (type != null) {
+        jsAst.Expression thisAccess = new jsAst.This();
+        RuntimeTypesEncoder rtiEncoder = backend.rtiEncoder;
+        jsAst.Expression encoding =
+            rtiEncoder.getSignatureEncoding(type, thisAccess);
+        jsAst.Name operatorSignature = namer.asName(namer.operatorSignature);
+        result.properties[operatorSignature] = encoding;
+      }
+    }
     return result;
   }
 
diff --git a/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart b/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart
index 48ccd6d..1b7beca 100644
--- a/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart
+++ b/pkg/compiler/lib/src/js_emitter/startup_emitter/fragment_emitter.dart
@@ -233,6 +233,7 @@
     reflectionInfo = reflectionInfo + typesOffset;
   }
   var name = funsOrNames[0];
+  fun.#stubName = name;
   var getterFunction =
       tearOff(funs, reflectionInfo, isStatic, name, isIntercepted);
   container[getterName] = getterFunction;
@@ -455,6 +456,7 @@
       'constantHolderReference': buildConstantHolderReference(program),
       'holders': emitHolders(program.holders, fragment),
       'callName': js.string(namer.callNameField),
+      'stubName': js.string(namer.stubNameField),
       'argumentCount': js.string(namer.requiredParameterField),
       'defaultArgumentValues': js.string(namer.defaultValuesField),
       'prototypes': emitPrototypes(fragment),
@@ -831,6 +833,9 @@
     // TODO(herhut): Replace [js.LiteralNull] with [js.ArrayHole].
     if (method.optionalParameterDefaultValues is List) {
       List<ConstantValue> defaultValues = method.optionalParameterDefaultValues;
+      if (defaultValues.isEmpty) {
+        return new js.LiteralNull();
+      }
       Iterable<js.Expression> elements =
           defaultValues.map(generateConstantReference);
       return js.js('function() { return #; }',
diff --git a/pkg/compiler/lib/src/library_loader.dart b/pkg/compiler/lib/src/library_loader.dart
index 3a2e2a3..be0cb41 100644
--- a/pkg/compiler/lib/src/library_loader.dart
+++ b/pkg/compiler/lib/src/library_loader.dart
@@ -131,7 +131,7 @@
  * point to the 'packages' folder.
  *
  */
-abstract class LibraryLoaderTask implements CompilerTask {
+abstract class LibraryLoaderTask implements LibraryProvider, CompilerTask {
   factory LibraryLoaderTask(
       ResolvedUriTranslator uriTranslator,
       ScriptLoader scriptLoader,
@@ -145,9 +145,6 @@
   /// Returns all libraries that have been loaded.
   Iterable<LibraryElement> get libraries;
 
-  /// Looks up the library with the [canonicalUri].
-  LibraryElement lookupLibrary(Uri canonicalUri);
-
   /// Loads the library specified by the [resolvedUri] and returns its
   /// [LibraryElement].
   ///
@@ -176,6 +173,14 @@
   Future<Null> resetLibraries(ReuseLibrariesFunction reuseLibraries);
 }
 
+/// Interface for an entity that provides libraries, for instance from normal
+/// library loading or from deserialization.
+// TODO(johnniwinther): Use this to integrate deserialized libraries better.
+abstract class LibraryProvider {
+  /// Looks up the library with the [canonicalUri].
+  LibraryElement lookupLibrary(Uri canonicalUri);
+}
+
 /// Handle for creating synthesized/patch libraries during library loading.
 abstract class LibraryLoader {
   /// This method must be called when a new synthesized/patch library has been
@@ -299,9 +304,15 @@
 
   final DiagnosticReporter reporter;
 
-  _LibraryLoaderTask(this.uriTranslator, this.scriptLoader,
-      this.scanner, this.deserializer, this.listener, this.environment,
-      this.reporter, Measurer measurer)
+  _LibraryLoaderTask(
+      this.uriTranslator,
+      this.scriptLoader,
+      this.scanner,
+      this.deserializer,
+      this.listener,
+      this.environment,
+      this.reporter,
+      Measurer measurer)
       : super(measurer);
 
   String get name => 'LibraryLoader';
@@ -638,10 +649,16 @@
     handler.registerNewLibrary(library);
     return listener.onLibraryScanned(library, handler).then((_) {
       return Future.forEach(library.imports, (ImportElement import) {
-        return createLibrary(handler, library, import.uri);
+        Uri resolvedUri = library.canonicalUri.resolveUri(import.uri);
+        return createLibrary(handler, library, resolvedUri);
       }).then((_) {
         return Future.forEach(library.exports, (ExportElement export) {
-          return createLibrary(handler, library, export.uri);
+          Uri resolvedUri = library.canonicalUri.resolveUri(export.uri);
+          return createLibrary(handler, library, resolvedUri);
+        }).then((_) {
+          // TODO(johnniwinther): Shouldn't there be an [ImportElement] for the
+          // implicit import of dart:core?
+          return createLibrary(handler, library, Uris.dart_core);
         }).then((_) => library);
       });
     });
@@ -1384,8 +1401,6 @@
       suffixChainMap[library] = const <Link<Uri>>[];
       List<Link<Uri>> suffixes = [];
       if (targetUri != canonicalUri) {
-        LibraryDependencyNode node = nodeMap[library];
-
         /// Process the import (or export) of [importedLibrary].
         void processLibrary(LibraryElement importedLibrary) {
           bool suffixesArePrecomputed =
@@ -1416,12 +1431,12 @@
           }
         }
 
-        for (ImportLink import in node.imports.reverse()) {
+        for (ImportElement import in library.imports) {
           processLibrary(import.importedLibrary);
           if (aborted) return;
         }
-        for (LibraryElement exportedLibrary in node.exports.reverse()) {
-          processLibrary(exportedLibrary);
+        for (ExportElement export in library.exports) {
+          processLibrary(export.exportedLibrary);
           if (aborted) return;
         }
       } else {
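
createLibrary above now receives import and export URIs resolved against the importing library's canonical URI (and dart:core is loaded implicitly for every library). The resolution step itself only needs dart:core's Uri, as in this small example (the URIs are made up):

void main() {
  Uri canonicalUri = Uri.parse('package:compiler/src/library_loader.dart');
  Uri importUri = Uri.parse('../common.dart');
  Uri resolvedUri = canonicalUri.resolveUri(importUri);
  print(resolvedUri); // package:compiler/common.dart
}
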
diff --git a/pkg/compiler/lib/src/native/behavior.dart b/pkg/compiler/lib/src/native/behavior.dart
index daa6ddc..a4e2922 100644
--- a/pkg/compiler/lib/src/native/behavior.dart
+++ b/pkg/compiler/lib/src/native/behavior.dart
@@ -705,13 +705,12 @@
     behavior._capture(type, compiler.resolution,
         isInterop: isInterop, compiler: compiler);
 
-    // TODO(sra): Optional arguments are currently missing from the
-    // DartType. This should be fixed so the following work-around can be
-    // removed.
-    method.functionSignature
-        .forEachOptionalParameter((ParameterElement parameter) {
-      behavior._escape(parameter.type, compiler.resolution);
-    });
+    for (DartType type in type.optionalParameterTypes) {
+      behavior._escape(type, compiler.resolution);
+    }
+    for (DartType type in type.namedParameterTypes) {
+      behavior._escape(type, compiler.resolution);
+    }
 
     behavior._overrideWithAnnotations(method, compiler);
     return behavior;
diff --git a/pkg/compiler/lib/src/native/enqueue.dart b/pkg/compiler/lib/src/native/enqueue.dart
index e15f185..e8f3340 100644
--- a/pkg/compiler/lib/src/native/enqueue.dart
+++ b/pkg/compiler/lib/src/native/enqueue.dart
@@ -34,19 +34,17 @@
   /// types to the world.
   void registerNativeBehavior(NativeBehavior nativeBehavior, cause) {}
 
-  /// Notification of a main Enqueuer worklist element.  For methods, adds
-  /// information from metadata attributes, and computes types instantiated due
-  /// to calling the method.
-  void registerElement(Element element) {}
-
-  /// Notification of native field.  Adds information from metadata attributes.
+  // TODO(johnniwinther): Move [handleFieldAnnotations] and
+  // [handleMethodAnnotations] to [JavaScriptBackend] or [NativeData].
+  // TODO(johnniwinther): Change the return type to 'bool' and rename them to
+  // something like `computeNativeField`.
+  /// Processes the potentially native [field], adding information from
+  /// metadata attributes.
   void handleFieldAnnotations(Element field) {}
 
-  /// Computes types instantiated due to getting a native field.
-  void registerFieldLoad(Element field) {}
-
-  /// Computes types instantiated due to setting a native field.
-  void registerFieldStore(Element field) {}
+  /// Processes the potentially native [method], adding information from
+  /// metadata attributes.
+  void handleMethodAnnotations(Element method) {}
 
   /// Returns whether native classes are being used.
   bool hasInstantiatedNativeClasses() => false;
@@ -320,7 +318,7 @@
       // TODO(sra): Better validation of the constant.
       if (fields.length != 1 || fields.single is! StringConstantValue) {
         reporter.internalError(
-            annotation, 'Annotations needs one string: ${annotation.node}');
+            annotation, 'Annotation needs one string: ${annotation}');
       }
       StringConstantValue specStringConstant = fields.single;
       String specString = specStringConstant.toDartString().slowToString();
@@ -328,7 +326,7 @@
         name = specString;
       } else {
         reporter.internalError(
-            annotation, 'Too many JSName annotations: ${annotation.node}');
+            annotation, 'Too many JSName annotations: ${annotation}');
       }
     }
     return name;
@@ -371,26 +369,6 @@
     }
   }
 
-  registerElement(Element element) {
-    reporter.withCurrentElement(element, () {
-      if (element.isFunction ||
-          element.isFactoryConstructor ||
-          element.isGetter ||
-          element.isSetter) {
-        handleMethodAnnotations(element);
-        if (backend.isNative(element)) {
-          registerMethodUsed(element);
-        }
-      } else if (element.isField) {
-        handleFieldAnnotations(element);
-        if (backend.isNative(element)) {
-          registerFieldLoad(element);
-          registerFieldStore(element);
-        }
-      }
-    });
-  }
-
   void handleFieldAnnotations(Element element) {
     if (compiler.serialization.isDeserialized(element)) {
       return;
@@ -475,18 +453,6 @@
     flushQueue();
   }
 
-  void registerMethodUsed(Element method) {
-    registerNativeBehavior(NativeBehavior.ofMethod(method, compiler), method);
-  }
-
-  void registerFieldLoad(Element field) {
-    registerNativeBehavior(NativeBehavior.ofFieldLoad(field, compiler), field);
-  }
-
-  void registerFieldStore(Element field) {
-    registerNativeBehavior(NativeBehavior.ofFieldStore(field, compiler), field);
-  }
-
   processNativeBehavior(NativeBehavior behavior, cause) {
     // TODO(ahe): Is this really a global dependency?
     Registry registry = compiler.globalDependencies;
@@ -520,12 +486,23 @@
             .isSubtype(type, backend.listImplementation.rawType)) {
           backend.registerInstantiatedType(type, world, registry);
         }
+        // TODO(johnniwinther): Improve spec string precision to handle type
+        // arguments and implements relations that preserve generics. Currently
+        // we cannot distinguish between `List`, `List<dynamic>`, and
+        // `List<int>` and take all to mean `List<E>`; in effect not including
+        // any native subclasses of generic classes.
+        // TODO(johnniwinther,sra): Find and replace uses of `List` with the
+        // actual implementation classes such as `JSArray` et al.
+        enqueueUnusedClassesMatching((ClassElement nativeClass) {
+          InterfaceType nativeType = nativeClass.thisType;
+          InterfaceType specType = type.element.thisType;
+          return compiler.types.isSubtype(nativeType, specType);
+        }, cause, 'subtypeof($type)');
+      } else if (type.isDynamic) {
+        enqueueUnusedClassesMatching((_) => true, cause, 'subtypeof($type)');
+      } else {
+        assert(type is VoidType);
       }
-      assert(type is DartType);
-      enqueueUnusedClassesMatching(
-          (nativeClass) => compiler.types.isSubtype(nativeClass.thisType, type),
-          cause,
-          'subtypeof($type)');
     }
 
     // Give an info so that library developers can compile with -v to find why
diff --git a/pkg/compiler/lib/src/options.dart b/pkg/compiler/lib/src/options.dart
index e839a17..ee3370e 100644
--- a/pkg/compiler/lib/src/options.dart
+++ b/pkg/compiler/lib/src/options.dart
@@ -149,6 +149,10 @@
   /// methods where type arguments are passed.
   final bool enableGenericMethodSyntax;
 
+  /// Support access to initializing formal constructor arguments, e.g., the
+  /// use of `x` to initialize `y` in `C(this.x) : y = x`.
+  final bool enableInitializingFormalAccess;
+
   /// Whether the user specified a flag to allow the use of dart:mirrors. This
   /// silences a warning produced by the compiler.
   final bool enableExperimentalMirrors;
@@ -292,6 +296,8 @@
         enableAssertMessage: _hasOption(options, Flags.enableAssertMessage),
         enableGenericMethodSyntax:
             _hasOption(options, Flags.genericMethodSyntax),
+        enableInitializingFormalAccess:
+            _hasOption(options, Flags.initializingFormalAccess),
         enableExperimentalMirrors:
             _hasOption(options, Flags.enableExperimentalMirrors),
         enableMinification: _hasOption(options, Flags.minify),
@@ -361,6 +367,7 @@
       bool emitJavaScript: true,
       bool enableAssertMessage: false,
       bool enableGenericMethodSyntax: false,
+      bool enableInitializingFormalAccess: false,
       bool enableExperimentalMirrors: false,
       bool enableMinification: false,
       bool enableNativeLiveTypeAnalysis: true,
@@ -434,6 +441,7 @@
         emitJavaScript: emitJavaScript,
         enableAssertMessage: enableAssertMessage,
         enableGenericMethodSyntax: enableGenericMethodSyntax,
+        enableInitializingFormalAccess: enableInitializingFormalAccess,
         enableExperimentalMirrors: enableExperimentalMirrors,
         enableMinification: enableMinification,
         enableNativeLiveTypeAnalysis: enableNativeLiveTypeAnalysis,
@@ -487,6 +495,7 @@
       this.emitJavaScript: true,
       this.enableAssertMessage: false,
       this.enableGenericMethodSyntax: false,
+      this.enableInitializingFormalAccess: false,
       this.enableExperimentalMirrors: false,
       this.enableMinification: false,
       this.enableNativeLiveTypeAnalysis: false,
@@ -516,6 +525,131 @@
       this.verbose: false})
       : _shownPackageWarnings = shownPackageWarnings;
 
+  /// Creates a copy of this [CompilerOptions] where the provided non-null
+  /// option values replace the existing values.
+  CompilerOptions copy(
+      {entryPoint,
+      libraryRoot,
+      packageRoot,
+      packageConfig,
+      packagesDiscoveryProvider,
+      environment,
+      allowMockCompilation,
+      allowNativeExtensions,
+      analyzeAll,
+      analyzeMain,
+      analyzeOnly,
+      analyzeSignaturesOnly,
+      buildId,
+      dart2dartMultiFile,
+      deferredMapUri,
+      fatalWarnings,
+      terseDiagnostics,
+      suppressWarnings,
+      suppressHints,
+      List<String> shownPackageWarnings,
+      disableInlining,
+      disableTypeInference,
+      dumpInfo,
+      emitJavaScript,
+      enableAssertMessage,
+      enableGenericMethodSyntax,
+      enableInitializingFormalAccess,
+      enableExperimentalMirrors,
+      enableMinification,
+      enableNativeLiveTypeAnalysis,
+      enableTypeAssertions,
+      enableUserAssertions,
+      generateCodeWithCompileTimeErrors,
+      generateSourceMap,
+      hasIncrementalSupport,
+      outputUri,
+      platformConfigUri,
+      preserveComments,
+      preserveUris,
+      resolutionInputs,
+      resolutionOutput,
+      resolveOnly,
+      sourceMapUri,
+      strips,
+      testMode,
+      trustJSInteropTypeAnnotations,
+      trustPrimitives,
+      trustTypeAnnotations,
+      useContentSecurityPolicy,
+      useCpsIr,
+      useFrequencyNamer,
+      useNewSourceInfo,
+      useStartupEmitter,
+      verbose}) {
+    return new CompilerOptions._(
+        entryPoint ?? this.entryPoint,
+        libraryRoot ?? this.libraryRoot,
+        packageRoot ?? this.packageRoot,
+        packageConfig ?? this.packageConfig,
+        packagesDiscoveryProvider ?? this.packagesDiscoveryProvider,
+        environment ?? this.environment,
+        allowMockCompilation: allowMockCompilation ?? this.allowMockCompilation,
+        allowNativeExtensions:
+            allowNativeExtensions ?? this.allowNativeExtensions,
+        analyzeAll: analyzeAll ?? this.analyzeAll,
+        analyzeMain: analyzeMain ?? this.analyzeMain,
+        analyzeOnly: analyzeOnly ?? this.analyzeOnly,
+        analyzeSignaturesOnly:
+            analyzeSignaturesOnly ?? this.analyzeSignaturesOnly,
+        buildId: buildId ?? this.buildId,
+        dart2dartMultiFile: dart2dartMultiFile ?? this.dart2dartMultiFile,
+        deferredMapUri: deferredMapUri ?? this.deferredMapUri,
+        fatalWarnings: fatalWarnings ?? this.fatalWarnings,
+        terseDiagnostics: terseDiagnostics ?? this.terseDiagnostics,
+        suppressWarnings: suppressWarnings ?? this.suppressWarnings,
+        suppressHints: suppressHints ?? this.suppressHints,
+        shownPackageWarnings:
+            shownPackageWarnings ?? this._shownPackageWarnings,
+        disableInlining: disableInlining ?? this.disableInlining,
+        disableTypeInference: disableTypeInference ?? this.disableTypeInference,
+        dumpInfo: dumpInfo ?? this.dumpInfo,
+        emitJavaScript: emitJavaScript ?? this.emitJavaScript,
+        enableAssertMessage: enableAssertMessage ?? this.enableAssertMessage,
+        enableGenericMethodSyntax:
+            enableGenericMethodSyntax ?? this.enableGenericMethodSyntax,
+        enableInitializingFormalAccess: enableInitializingFormalAccess ??
+            this.enableInitializingFormalAccess,
+        enableExperimentalMirrors:
+            enableExperimentalMirrors ?? this.enableExperimentalMirrors,
+        enableMinification: enableMinification ?? this.enableMinification,
+        enableNativeLiveTypeAnalysis:
+            enableNativeLiveTypeAnalysis ?? this.enableNativeLiveTypeAnalysis,
+        enableTypeAssertions: enableTypeAssertions ?? this.enableTypeAssertions,
+        enableUserAssertions: enableUserAssertions ?? this.enableUserAssertions,
+        generateCodeWithCompileTimeErrors: generateCodeWithCompileTimeErrors ??
+            this.generateCodeWithCompileTimeErrors,
+        generateSourceMap: generateSourceMap ?? this.generateSourceMap,
+        hasIncrementalSupport:
+            hasIncrementalSupport ?? this.hasIncrementalSupport,
+        outputUri: outputUri ?? this.outputUri,
+        platformConfigUri: platformConfigUri ?? this.platformConfigUri,
+        preserveComments: preserveComments ?? this.preserveComments,
+        preserveUris: preserveUris ?? this.preserveUris,
+        resolutionInputs: resolutionInputs ?? this.resolutionInputs,
+        resolutionOutput: resolutionOutput ?? this.resolutionOutput,
+        resolveOnly: resolveOnly ?? this.resolveOnly,
+        sourceMapUri: sourceMapUri ?? this.sourceMapUri,
+        strips: strips ?? this.strips,
+        testMode: testMode ?? this.testMode,
+        trustJSInteropTypeAnnotations:
+            trustJSInteropTypeAnnotations ?? this.trustJSInteropTypeAnnotations,
+        trustPrimitives: trustPrimitives ?? this.trustPrimitives,
+        trustTypeAnnotations: trustTypeAnnotations ?? this.trustTypeAnnotations,
+        useContentSecurityPolicy:
+            useContentSecurityPolicy ?? this.useContentSecurityPolicy,
+        useCpsIr: useCpsIr ?? this.useCpsIr,
+        useFrequencyNamer: useFrequencyNamer ?? this.useFrequencyNamer,
+        useNewSourceInfo: useNewSourceInfo ?? this.useNewSourceInfo,
+        useStartupEmitter: useStartupEmitter ?? this.useStartupEmitter,
+        verbose: verbose ?? this.verbose);
+  }
+
   /// Returns `true` if warnings and hints are shown for all packages.
   bool get showAllPackageWarnings {
     return _shownPackageWarnings != null && _shownPackageWarnings.isEmpty;
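
The `copyWith` method above follows the usual null-coalescing copy pattern: every named parameter defaults to `null`, and `??` falls back to the receiver's current value, so callers only spell out the options they want to change. A minimal sketch of the same pattern with hypothetical names (`Options`, `verbose`, and `buildId` are illustrative, not the real `CompilerOptions` fields):

class Options {
  final bool verbose;
  final String buildId;

  const Options({this.verbose: false, this.buildId: 'N/A'});

  /// Returns a copy in which every argument left as `null` keeps its
  /// current value, mirroring the `x ?? this.x` pattern used above.
  Options copyWith({bool verbose, String buildId}) {
    return new Options(
        verbose: verbose ?? this.verbose,
        buildId: buildId ?? this.buildId);
  }
}

void main() {
  Options base = const Options(verbose: true);
  Options copy = base.copyWith(buildId: '42');
  print(copy.verbose); // true, inherited from `base`.
  print(copy.buildId); // '42', overridden by the caller.
}

One consequence of this pattern is that a caller cannot reset a field back to `null` through `copyWith`, since `null` is indistinguishable from "not provided".
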
diff --git a/pkg/compiler/lib/src/parser/element_listener.dart b/pkg/compiler/lib/src/parser/element_listener.dart
index 6fe019c..6d9bd82 100644
--- a/pkg/compiler/lib/src/parser/element_listener.dart
+++ b/pkg/compiler/lib/src/parser/element_listener.dart
@@ -17,6 +17,7 @@
         EnumClassElementX,
         FieldElementX,
         LibraryElementX,
+        MetadataAnnotationX,
         NamedMixinApplicationElementX,
         VariableList;
 import '../id_generator.dart';
@@ -233,8 +234,8 @@
 
   void endTopLevelDeclaration(Token token) {
     if (!metadata.isEmpty) {
-      recoverableError(
-          metadata.first.beginToken, 'Metadata not supported here.');
+      MetadataAnnotationX first = metadata.first;
+      recoverableError(first.beginToken, 'Metadata not supported here.');
       metadata.clear();
     }
   }
diff --git a/pkg/compiler/lib/src/parser/listener.dart b/pkg/compiler/lib/src/parser/listener.dart
index afd2997..ce29d16 100644
--- a/pkg/compiler/lib/src/parser/listener.dart
+++ b/pkg/compiler/lib/src/parser/listener.dart
@@ -319,9 +319,7 @@
 
   void endWhileStatement(Token whileKeyword, Token endToken) {}
 
-  void handleAsOperator(Token operathor, Token endToken) {
-    // TODO(ahe): Rename [operathor] to "operator" when VM bug is fixed.
-  }
+  void handleAsOperator(Token operator, Token endToken) {}
 
   void handleAssignmentExpression(Token token) {}
 
@@ -338,9 +336,7 @@
   void handleIndexedExpression(
       Token openCurlyBracket, Token closeCurlyBracket) {}
 
-  void handleIsOperator(Token operathor, Token not, Token endToken) {
-    // TODO(ahe): Rename [operathor] to "operator" when VM bug is fixed.
-  }
+  void handleIsOperator(Token operator, Token not, Token endToken) {}
 
   void handleLiteralBool(Token token) {}
 
diff --git a/pkg/compiler/lib/src/parser/node_listener.dart b/pkg/compiler/lib/src/parser/node_listener.dart
index b5cb91d..feaa44c 100644
--- a/pkg/compiler/lib/src/parser/node_listener.dart
+++ b/pkg/compiler/lib/src/parser/node_listener.dart
@@ -265,11 +265,11 @@
     pushNode(new Cascade(popNode()));
   }
 
-  void handleAsOperator(Token operathor, Token endToken) {
+  void handleAsOperator(Token operator, Token endToken) {
     TypeAnnotation type = popNode();
     Expression expression = popNode();
     NodeList arguments = new NodeList.singleton(type);
-    pushNode(new Send(expression, new Operator(operathor), arguments));
+    pushNode(new Send(expression, new Operator(operator), arguments));
   }
 
   void handleAssignmentExpression(Token token) {
@@ -759,7 +759,7 @@
         Modifiers.EMPTY, null, null, asyncModifier));
   }
 
-  void handleIsOperator(Token operathor, Token not, Token endToken) {
+  void handleIsOperator(Token operator, Token not, Token endToken) {
     TypeAnnotation type = popNode();
     Expression expression = popNode();
     Node argument;
@@ -770,7 +770,7 @@
     }
 
     NodeList arguments = new NodeList.singleton(argument);
-    pushNode(new Send(expression, new Operator(operathor), arguments));
+    pushNode(new Send(expression, new Operator(operator), arguments));
   }
 
   void handleLabel(Token colon) {
diff --git a/pkg/compiler/lib/src/parser/partial_parser.dart b/pkg/compiler/lib/src/parser/partial_parser.dart
index 67f320d..514a058 100644
--- a/pkg/compiler/lib/src/parser/partial_parser.dart
+++ b/pkg/compiler/lib/src/parser/partial_parser.dart
@@ -49,7 +49,8 @@
       }
       if (identical(value, '=') ||
           identical(value, '?') ||
-          identical(value, ':')) {
+          identical(value, ':') ||
+          identical(value, '??')) {
         var nextValue = token.next.stringValue;
         if (identical(nextValue, 'const')) {
           token = token.next;
diff --git a/pkg/compiler/lib/src/patch_parser.dart b/pkg/compiler/lib/src/patch_parser.dart
index a2246c4..9f51036 100644
--- a/pkg/compiler/lib/src/patch_parser.dart
+++ b/pkg/compiler/lib/src/patch_parser.dart
@@ -128,6 +128,7 @@
         ClassElementX,
         GetterElementX,
         LibraryElementX,
+        MetadataAnnotationX,
         SetterElementX;
 import 'id_generator.dart';
 import 'js_backend/js_backend.dart' show JavaScriptBackend;
@@ -403,7 +404,7 @@
 class NativeAnnotationHandler implements EagerAnnotationHandler<String> {
   const NativeAnnotationHandler();
 
-  String getNativeAnnotation(MetadataAnnotation annotation) {
+  String getNativeAnnotation(MetadataAnnotationX annotation) {
     if (annotation.beginToken != null &&
         annotation.beginToken.next.value == 'Native') {
       // Skipping '@', 'Native', and '('.
@@ -443,7 +444,7 @@
 class JsInteropAnnotationHandler implements EagerAnnotationHandler<bool> {
   const JsInteropAnnotationHandler();
 
-  bool hasJsNameAnnotation(MetadataAnnotation annotation) =>
+  bool hasJsNameAnnotation(MetadataAnnotationX annotation) =>
       annotation.beginToken != null && annotation.beginToken.next.value == 'JS';
 
   bool apply(
@@ -474,7 +475,7 @@
 class PatchAnnotationHandler implements EagerAnnotationHandler<PatchVersion> {
   const PatchAnnotationHandler();
 
-  PatchVersion getPatchVersion(MetadataAnnotation annotation) {
+  PatchVersion getPatchVersion(MetadataAnnotationX annotation) {
     if (annotation.beginToken != null) {
       if (annotation.beginToken.next.value == 'patch') {
         return const PatchVersion(null);
diff --git a/pkg/compiler/lib/src/resolution/class_hierarchy.dart b/pkg/compiler/lib/src/resolution/class_hierarchy.dart
index e136a14..87c00a8 100644
--- a/pkg/compiler/lib/src/resolution/class_hierarchy.dart
+++ b/pkg/compiler/lib/src/resolution/class_hierarchy.dart
@@ -303,11 +303,8 @@
     String superName = supertype.name;
     String mixinName = mixinType.name;
     MixinApplicationElementX mixinApplication =
-        new UnnamedMixinApplicationElementX(
-            "${superName}+${mixinName}",
-            element.compilationUnit,
-            compiler.idGenerator.getNextFreeId(),
-            node);
+        new UnnamedMixinApplicationElementX("${superName}+${mixinName}",
+            element, compiler.idGenerator.getNextFreeId(), node);
     // Create synthetic type variables for the mixin application.
     List<DartType> typeVariables = <DartType>[];
     int index = 0;
diff --git a/pkg/compiler/lib/src/resolution/class_members.dart b/pkg/compiler/lib/src/resolution/class_members.dart
index 6cbbf70..7c28777 100644
--- a/pkg/compiler/lib/src/resolution/class_members.dart
+++ b/pkg/compiler/lib/src/resolution/class_members.dart
@@ -824,8 +824,30 @@
   /// includes `call`.
   Iterable<String> computedMemberNames;
 
+  bool _interfaceMembersAreClassMembers;
+
+  /// Computes the value of [_interfaceMembersAreClassMembers] for this class
+  /// and its superclasses.
+  void _computeInterfaceMembersAreClassMembers(Resolution resolution) {
+    if (_interfaceMembersAreClassMembers == null) {
+      ensureResolved(resolution);
+      ClassMemberMixin superclass = this.superclass;
+      if (superclass != null) {
+        superclass._computeInterfaceMembersAreClassMembers(resolution);
+      }
+      if ((superclass != null &&
+              (!superclass.interfaceMembersAreClassMembers ||
+                  superclass.isMixinApplication)) ||
+          !interfaces.isEmpty) {
+        _interfaceMembersAreClassMembers = false;
+      } else {
+        _interfaceMembersAreClassMembers = true;
+      }
+    }
+  }
+
   /// If `true`, interface members are the non-static class members.
-  bool interfaceMembersAreClassMembers = true;
+  bool get interfaceMembersAreClassMembers => _interfaceMembersAreClassMembers;
 
   Map<Name, Member> classMembers;
   Map<Name, MemberSignature> interfaceMembers;
@@ -834,18 +856,8 @@
   /// this class.
   MembersCreator _prepareCreator(Resolution resolution) {
     if (classMembers == null) {
-      ensureResolved(resolution);
+      _computeInterfaceMembersAreClassMembers(resolution);
       classMembers = new Map<Name, Member>();
-
-      if (interfaceMembersAreClassMembers) {
-        ClassMemberMixin superclass = this.superclass;
-        if ((superclass != null &&
-                (!superclass.interfaceMembersAreClassMembers ||
-                    superclass.isMixinApplication)) ||
-            !interfaces.isEmpty) {
-          interfaceMembersAreClassMembers = false;
-        }
-      }
       if (!interfaceMembersAreClassMembers) {
         interfaceMembers = new Map<Name, MemberSignature>();
       }
@@ -864,6 +876,9 @@
   /// and private names.
   void computeClassMember(
       Resolution resolution, String name, Setlet<Name> names) {
+    // TODO(johnniwinther): Should we assert that the class has been resolved
+    // instead?
+    ensureResolved(resolution);
     if (isMemberComputed(name)) return;
     if (Name.isPrivateName(name)) {
       names
@@ -887,6 +902,9 @@
   }
 
   void computeAllClassMembers(Resolution resolution) {
+    // TODO(johnniwinther): Should we assert that the class has been resolved
+    // instead?
+    ensureResolved(resolution);
     if (areAllMembersComputed()) return;
     MembersCreator creator = _prepareCreator(resolution);
     creator.computeAllMembers();
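
The new `_computeInterfaceMembersAreClassMembers` above turns an eagerly mutated boolean into a lazily computed, memoized value: `null` means "not computed yet", the superclass is computed first, and each class is visited at most once. A simplified sketch of that memoization shape (a hypothetical `Node` class; it ignores the mixin-application special case handled in the real code):

class Node {
  final Node superclass;
  final bool hasInterfaces;
  bool _flag; // null until computed, then cached.

  Node(this.superclass, {this.hasInterfaces: false});

  bool get flag {
    if (_flag == null) {
      bool superOk = superclass == null || superclass.flag;
      _flag = superOk && !hasInterfaces;
    }
    return _flag;
  }
}
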
diff --git a/pkg/compiler/lib/src/resolution/constructors.dart b/pkg/compiler/lib/src/resolution/constructors.dart
index 7a4dbc6..90f4716 100644
--- a/pkg/compiler/lib/src/resolution/constructors.dart
+++ b/pkg/compiler/lib/src/resolution/constructors.dart
@@ -32,6 +32,7 @@
 import 'registry.dart' show ResolutionRegistry;
 import 'resolution_common.dart' show CommonResolverVisitor;
 import 'resolution_result.dart';
+import 'scope.dart' show Scope, ExtensionScope;
 
 class InitializerResolver {
   final ResolverVisitor visitor;
@@ -294,14 +295,30 @@
    * Resolve all initializers of this constructor. In the case of a redirecting
    * constructor, the resolved constructor's function element is returned.
    */
-  ConstructorElement resolveInitializers() {
+  ConstructorElement resolveInitializers(
+      {bool enableInitializingFormalAccess: false}) {
     Map<dynamic /*String|int*/, ConstantExpression> defaultValues =
         <dynamic /*String|int*/, ConstantExpression>{};
     ConstructedConstantExpression constructorInvocation;
     // Keep track of all "this.param" parameters specified for constructor so
     // that we can ensure that fields are initialized only once.
     FunctionSignature functionParameters = constructor.functionSignature;
+    Scope oldScope = visitor.scope;
+    if (enableInitializingFormalAccess) {
+      // In order to get the correct detection of name clashes between all
+      // parameters (regular ones and initializing formals) we must extend
+      // the parameter scope rather than adding a new nested scope.
+      visitor.scope = new ExtensionScope(visitor.scope);
+    }
+    Link<Node> parameterNodes = (functionNode.parameters == null)
+        ? const Link<Node>()
+        : functionNode.parameters.nodes;
     functionParameters.forEachParameter((ParameterElementX element) {
+      List<Element> optionals = functionParameters.optionalParameters;
+      if (!optionals.isEmpty && element == optionals.first) {
+        NodeList nodes = parameterNodes.head;
+        parameterNodes = nodes.nodes;
+      }
       if (isConst) {
         if (element.isOptional) {
           if (element.constantCache == null) {
@@ -325,9 +342,15 @@
         }
       }
       if (element.isInitializingFormal) {
+        VariableDefinitions variableDefinitions = parameterNodes.head;
+        Node parameterNode = variableDefinitions.definitions.nodes.head;
         InitializingFormalElementX initializingFormal = element;
         FieldElement field = initializingFormal.fieldElement;
         checkForDuplicateInitializers(field, element.initializer);
+        if (enableInitializingFormalAccess) {
+          visitor.defineLocalVariable(parameterNode, initializingFormal);
+          visitor.addToScope(initializingFormal);
+        }
         if (isConst) {
           if (element.isNamed) {
             fieldInitializers[field] = new NamedArgumentReference(element.name);
@@ -339,6 +362,7 @@
           isValidAsConstant = false;
         }
       }
+      parameterNodes = parameterNodes.tail;
     });
 
     if (functionNode.initializers == null) {
@@ -424,12 +448,26 @@
       constructorInvocation = resolveImplicitSuperConstructorSend();
     }
     if (isConst && isValidAsConstant) {
-      constructor.constantConstructor = new GenerativeConstantConstructor(
-          constructor.enclosingClass.thisType,
-          defaultValues,
-          fieldInitializers,
-          constructorInvocation);
+      constructor.enclosingClass.forEachInstanceField((_, FieldElement field) {
+        if (!fieldInitializers.containsKey(field)) {
+          visitor.resolution.ensureResolved(field);
+          // TODO(johnniwinther): Report error if `field.constant` is `null`.
+          if (field.constant != null) {
+            fieldInitializers[field] = field.constant;
+          } else {
+            isValidAsConstant = false;
+          }
+        }
+      });
+      if (isValidAsConstant) {
+        constructor.constantConstructor = new GenerativeConstantConstructor(
+            constructor.enclosingClass.thisType,
+            defaultValues,
+            fieldInitializers,
+            constructorInvocation);
+      }
     }
+    visitor.scope = oldScope;
     return null; // If there was no redirection always return null.
   }
 }
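
The `enableInitializingFormalAccess` flag threaded into `resolveInitializers` above puts each initializing formal (`this.x`) into an `ExtensionScope`, so later initializers can read it. Roughly the kind of constructor this makes resolvable (an illustrative class, not from the SDK):

class Point {
  final int x;
  final int y;

  // With initializing formal access enabled, `x` in the initializer list
  // refers to the value bound by `this.x`.
  Point(this.x) : y = x * 2;
}
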
diff --git a/pkg/compiler/lib/src/resolution/member_impl.dart b/pkg/compiler/lib/src/resolution/member_impl.dart
index 7597d36..8c4eb1c 100644
--- a/pkg/compiler/lib/src/resolution/member_impl.dart
+++ b/pkg/compiler/lib/src/resolution/member_impl.dart
@@ -138,13 +138,17 @@
   }
 
   DeclaredMember inheritFrom(InterfaceType newInstance) {
-    assert(() {
+    assert(invariant(declaration.element, () {
       // Assert that if [instance] contains type variables, then these are
       // defined in the declaration of [newInstance] and will therefore be
       // substituted into the context of [newInstance] in the created member.
       ClassElement contextClass = Types.getClassContext(instance);
       return contextClass == null || contextClass == newInstance.element;
-    });
+    }, message: () {
+      return "Context mismatch: Context class "
+          "${Types.getClassContext(instance)} from $instance does not match "
+          "the new instance $newInstance.";
+    }));
     return _newInheritedMember(newInstance);
   }
 
@@ -169,7 +173,7 @@
 
   String toString() {
     StringBuffer sb = new StringBuffer();
-    printOn(sb, instance);
+    printOn(sb, type);
     return sb.toString();
   }
 }
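
The hunk above swaps a bare `assert` for `invariant` with a `message` closure; the closure only runs when the condition fails, so the interpolated string is not built on the happy path. A generic sketch of that lazy-message shape, using a hypothetical `check` helper rather than dart2js's `invariant`:

void check(bool condition(), {String message()}) {
  if (!condition()) {
    throw new StateError(message != null ? message() : 'check failed');
  }
}

void main() {
  int contextClass = 1;
  int newInstance = 1;
  check(() => contextClass == newInstance,
      message: () => 'Context mismatch: $contextClass does not match '
          '$newInstance.');
}
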
diff --git a/pkg/compiler/lib/src/resolution/members.dart b/pkg/compiler/lib/src/resolution/members.dart
index b69287a..b12218b 100644
--- a/pkg/compiler/lib/src/resolution/members.dart
+++ b/pkg/compiler/lib/src/resolution/members.dart
@@ -17,10 +17,10 @@
 import '../elements/elements.dart';
 import '../elements/modelx.dart'
     show
-        BaseFunctionElementX,
         ConstructorElementX,
         ErroneousElementX,
         FunctionElementX,
+        InitializingFormalElementX,
         JumpTargetX,
         LocalFunctionElementX,
         LocalParameterElementX,
@@ -886,6 +886,8 @@
       } else {
         return new StaticAccess.parameter(target);
       }
+    } else if (target.isInitializingFormal) {
+      return new StaticAccess.finalParameter(target);
     } else if (target.isVariable) {
       if (target.isFinal || target.isConst) {
         return new StaticAccess.finalLocalVariable(target);
@@ -2566,6 +2568,11 @@
       } else {
         semantics = new StaticAccess.parameter(element);
       }
+    } else if (element.isInitializingFormal &&
+        compiler.options.enableInitializingFormalAccess) {
+      error = reportAndCreateErroneousElement(node.selector, name.text,
+          MessageKind.UNDEFINED_STATIC_SETTER_BUT_GETTER, {'name': name});
+      semantics = new StaticAccess.finalParameter(element);
     } else if (element.isVariable) {
       if (element.isFinal || element.isConst) {
         error = reportAndCreateErroneousElement(node.selector, name.text,
diff --git a/pkg/compiler/lib/src/resolution/registry.dart b/pkg/compiler/lib/src/resolution/registry.dart
index 6d56c8e..e52b185 100644
--- a/pkg/compiler/lib/src/resolution/registry.dart
+++ b/pkg/compiler/lib/src/resolution/registry.dart
@@ -5,7 +5,8 @@
 library dart2js.resolution.registry;
 
 import '../common.dart';
-import '../common/backend_api.dart' show Backend, ForeignResolver;
+import '../common/backend_api.dart'
+    show Backend, ForeignResolver, NativeRegistry;
 import '../common/resolution.dart'
     show Feature, ListLiteralUse, MapLiteralUse, ResolutionImpact;
 import '../common/registry.dart' show Registry;
@@ -28,7 +29,9 @@
 import 'members.dart' show ResolverVisitor;
 import 'tree_elements.dart' show TreeElementMapping;
 
-class _ResolutionWorldImpact extends ResolutionImpact with WorldImpactBuilder {
+class _ResolutionWorldImpact extends ResolutionImpact
+    with WorldImpactBuilder
+    implements NativeRegistry {
   final String name;
   EnumSet<Feature> _features;
   Setlet<MapLiteralUse> _mapLiterals;
@@ -331,10 +334,6 @@
     worldImpact.registerStaticUse(staticUse);
   }
 
-  void registerMetadataConstant(MetadataAnnotation metadata) {
-    backend.registerMetadataConstant(metadata, metadata.annotatedElement, this);
-  }
-
   /// Register the use of a type.
   void registerTypeUse(TypeUse typeUse) {
     worldImpact.registerTypeUse(typeUse);
diff --git a/pkg/compiler/lib/src/resolution/resolution.dart b/pkg/compiler/lib/src/resolution/resolution.dart
index 4ea5aff..f775190 100644
--- a/pkg/compiler/lib/src/resolution/resolution.dart
+++ b/pkg/compiler/lib/src/resolution/resolution.dart
@@ -53,6 +53,8 @@
 import 'constructors.dart';
 import 'members.dart';
 import 'registry.dart';
+import 'resolution_result.dart';
+import 'scope.dart' show MutableScope;
 import 'signatures.dart';
 import 'tree_elements.dart';
 import 'typedefs.dart';
@@ -236,7 +238,7 @@
       ResolverVisitor visitor = visitorFor(element);
       ResolutionRegistry registry = visitor.registry;
       registry.defineFunction(tree, element);
-      visitor.setupFunction(tree, element);
+      visitor.setupFunction(tree, element); // Modifies the scope.
       processAsyncMarker(compiler, element, registry);
 
       if (element.isGenerativeConstructor) {
@@ -244,7 +246,9 @@
         // resolution in case there is an implicit super constructor call.
         InitializerResolver resolver =
             new InitializerResolver(visitor, element, tree);
-        FunctionElement redirection = resolver.resolveInitializers();
+        FunctionElement redirection = resolver.resolveInitializers(
+            enableInitializingFormalAccess:
+                compiler.options.enableInitializingFormalAccess);
         if (redirection != null) {
           resolveRedirectingConstructor(resolver, tree, element, redirection);
         }
@@ -285,6 +289,8 @@
         reporter.reportErrorMessage(tree, MessageKind.NO_SUCH_METHOD_IN_NATIVE);
       }
 
+      resolution.target.resolveNativeElement(element, registry.worldImpact);
+
       return registry.worldImpact;
     });
   }
@@ -359,8 +365,21 @@
     // TODO(johnniwinther): Share the resolved type between all variables
     // declared in the same declaration.
     if (tree.type != null) {
-      element.variables.type = visitor.resolveTypeAnnotation(tree.type);
-    } else {
+      DartType type = visitor.resolveTypeAnnotation(tree.type);
+      assert(invariant(
+          element,
+          element.variables.type == null ||
+              // Crude check but we have no equivalence relation that
+              // equates malformed types, like matching creations of type
+              // `Foo<Unresolved>`.
+              element.variables.type.toString() == type.toString(),
+          message: "Unexpected type computed for $element. "
+              "Was ${element.variables.type}, computed $type."));
+      element.variables.type = type;
+    } else if (element.variables.type == null) {
+      // Only assign the dynamic type if the element has no known type. This
+      // happens for enum fields where the type is known but is not in the
+      // synthesized AST.
       element.variables.type = const DynamicType();
     }
 
@@ -369,7 +388,10 @@
     if (initializer != null) {
       // TODO(johnniwinther): Avoid analyzing initializers if
       // [Compiler.analyzeSignaturesOnly] is set.
-      visitor.visit(initializer);
+      ResolutionResult result = visitor.visit(initializer);
+      if (result.isConstant) {
+        element.constant = result.constant;
+      }
     } else if (modifiers.isConst) {
       reporter.reportErrorMessage(
           element, MessageKind.CONST_WITHOUT_INITIALIZER);
@@ -401,6 +423,8 @@
     // Perform various checks as side effect of "computing" the type.
     element.computeType(resolution);
 
+    resolution.target.resolveNativeElement(element, registry.worldImpact);
+
     return registry.worldImpact;
   }
 
@@ -1071,7 +1095,6 @@
               // and the annotated element instead. This will allow the backend to
               // retrieve the backend constant and only register metadata on the
               // elements for which it is needed. (Issue 17732).
-              registry.registerMetadataConstant(annotation);
               annotation.resolutionState = STATE_DONE;
             }));
   }
diff --git a/pkg/compiler/lib/src/resolution/scope.dart b/pkg/compiler/lib/src/resolution/scope.dart
index 75c3de9..e03304e 100644
--- a/pkg/compiler/lib/src/resolution/scope.dart
+++ b/pkg/compiler/lib/src/resolution/scope.dart
@@ -9,8 +9,11 @@
 
 abstract class Scope {
   /**
-   * Adds [element] to this scope. This operation is only allowed on mutable
-   * scopes such as [MethodScope] and [BlockScope].
+   * If an [Element] named `element.name` has already been added to this
+   * [Scope], return that element and make no changes. If no such element has
+   * been added, add the given [element] to this [Scope], and return [element].
+   * Note that this operation is only allowed on mutable scopes such as
+   * [MethodScope] and [BlockScope].
    */
   Element add(Element element);
 
@@ -123,6 +126,46 @@
   Element localLookup(String name) => elements[name];
 }
 
+/**
+ * [ExtensionScope] enables the creation of an extended version of an
+ * existing [NestedScope], received during construction and stored in
+ * [extendee]. An [ExtensionScope] will treat an added `element` as conflicting
+ * if an element `e` where `e.name == element.name` exists among the elements
+ * added to this [ExtensionScope], or among the ones added to [extendee]
+ * (according to `extendee.localLookup`). In this sense, it represents the
+ * union of the bindings stored locally in [elements] and the bindings in
+ * [extendee], not a new scope which is nested inside [extendee].
+ *
+ * Note that it is required that no bindings are added to [extendee] during the
+ * lifetime of this [ExtensionScope]: That would enable duplicates to be
+ * introduced into the extended scope consisting of [this] plus [extendee]
+ * without detection.
+ */
+class ExtensionScope extends Scope {
+  final NestedScope extendee;
+  final Map<String, Element> elements;
+
+  ExtensionScope(this.extendee) : this.elements = new Map<String, Element>() {
+    assert(extendee != null);
+  }
+
+  Element lookup(String name) {
+    Element result = elements[name];
+    if (result != null) return result;
+    return extendee.lookup(name);
+  }
+
+  Element add(Element newElement) {
+    if (elements.containsKey(newElement.name)) {
+      return elements[newElement.name];
+    }
+    Element existing = extendee.localLookup(newElement.name);
+    if (existing != null) return existing;
+    elements[newElement.name] = newElement;
+    return newElement;
+  }
+}
+
 class MethodScope extends MutableScope {
   final Element element;
 
diff --git a/pkg/compiler/lib/src/serialization/constant_serialization.dart b/pkg/compiler/lib/src/serialization/constant_serialization.dart
index b041347..ae3040a 100644
--- a/pkg/compiler/lib/src/serialization/constant_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/constant_serialization.dart
@@ -169,8 +169,8 @@
 
   @override
   void visitDeferred(DeferredConstantExpression exp, ObjectEncoder encoder) {
-    throw new UnsupportedError(
-        "ConstantSerializer.visitDeferred: ${exp.toDartText()}");
+    encoder.setElement(Key.PREFIX, exp.prefix);
+    encoder.setConstant(Key.EXPRESSION, exp.expression);
   }
 }
 
@@ -267,6 +267,9 @@
       case ConstantExpressionKind.NAMED_REFERENCE:
         return new NamedArgumentReference(decoder.getString(Key.NAME));
       case ConstantExpressionKind.DEFERRED:
+        return new DeferredConstantExpression(
+            decoder.getConstant(Key.EXPRESSION),
+            decoder.getElement(Key.PREFIX));
       case ConstantExpressionKind.SYNTHETIC:
     }
     throw new UnsupportedError("Unexpected constant kind: ${kind} in $decoder");
diff --git a/pkg/compiler/lib/src/serialization/element_serialization.dart b/pkg/compiler/lib/src/serialization/element_serialization.dart
index ab10cc8..d51c67a 100644
--- a/pkg/compiler/lib/src/serialization/element_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/element_serialization.dart
@@ -5,10 +5,14 @@
 library dart2js.serialization.elements;
 
 import '../common.dart';
+import '../common/names.dart';
 import '../constants/constructors.dart';
 import '../constants/expressions.dart';
 import '../dart_types.dart';
+import '../diagnostics/messages.dart';
 import '../elements/elements.dart';
+import '../elements/modelx.dart'
+    show DeferredLoaderGetterElementX, ErroneousElementX;
 import 'constant_serialization.dart';
 import 'keys.dart';
 import 'modelz.dart';
@@ -17,6 +21,7 @@
 
 /// Enum kinds used for encoding [Element]s.
 enum SerializedElementKind {
+  ERROR,
   LIBRARY,
   COMPILATION_UNIT,
   CLASS,
@@ -48,6 +53,7 @@
   IMPORT,
   EXPORT,
   PREFIX,
+  DEFERRED_LOAD_LIBRARY,
   LOCAL_VARIABLE,
   EXTERNAL_LIBRARY,
   EXTERNAL_LIBRARY_MEMBER,
@@ -63,8 +69,11 @@
 /// and [ConstantExpression] that the serialized [Element] depends upon are also
 /// serialized.
 const List<ElementSerializer> ELEMENT_SERIALIZERS = const [
+  const ErrorSerializer(),
   const LibrarySerializer(),
   const CompilationUnitSerializer(),
+  const PrefixSerializer(),
+  const DeferredLoadLibrarySerializer(),
   const ClassSerializer(),
   const ConstructorSerializer(),
   const FieldSerializer(),
@@ -74,7 +83,6 @@
   const ParameterSerializer(),
   const ImportSerializer(),
   const ExportSerializer(),
-  const PrefixSerializer(),
   const LocalVariableSerializer(),
 ];
 
@@ -120,6 +128,31 @@
     }
   }
 
+  /// Serialize the metadata of [element] into [encoder].
+  static void serializeMetadata(Element element, ObjectEncoder encoder) {
+    ListEncoder list;
+
+    void encodeAnnotation(MetadataAnnotation metadata) {
+      ObjectEncoder object = list.createObject();
+      object.setElement(Key.ELEMENT, metadata.annotatedElement);
+      SourceSpan sourcePosition = metadata.sourcePosition;
+      // TODO(johnniwinther): What is the base URI here?
+      object.setUri(Key.URI, sourcePosition.uri, sourcePosition.uri);
+      object.setInt(Key.OFFSET, sourcePosition.begin);
+      object.setInt(Key.LENGTH, sourcePosition.end - sourcePosition.begin);
+      object.setConstant(Key.CONSTANT, metadata.constant);
+    }
+
+    if (element.metadata.isNotEmpty) {
+      list = encoder.createList(Key.METADATA);
+      element.metadata.forEach(encodeAnnotation);
+    }
+    if (element.isPatched && element.implementation.metadata.isNotEmpty) {
+      list ??= encoder.createList(Key.METADATA);
+      element.implementation.metadata.forEach(encodeAnnotation);
+    }
+  }
+
   /// Serialize the parent relation for [element] into [encoder], i.e library,
   /// enclosing class, and compilation unit references.
   static void serializeParentRelation(Element element, ObjectEncoder encoder) {
@@ -168,6 +201,30 @@
   }
 }
 
+class ErrorSerializer implements ElementSerializer {
+  const ErrorSerializer();
+
+  SerializedElementKind getSerializedKind(Element element) {
+    if (element.isError) {
+      return SerializedElementKind.ERROR;
+    }
+    return null;
+  }
+
+  void serialize(ErroneousElement element, ObjectEncoder encoder,
+      SerializedElementKind kind) {
+    encoder.setElement(Key.ENCLOSING, element.enclosingElement);
+    encoder.setString(Key.NAME, element.name);
+    encoder.setEnum(Key.MESSAGE_KIND, element.messageKind);
+    if (element.messageArguments.isNotEmpty) {
+      MapEncoder mapEncoder = encoder.createMap(Key.ARGUMENTS);
+      element.messageArguments.forEach((String key, var value) {
+        mapEncoder.setString(key, Message.convertToString(value));
+      });
+    }
+  }
+}
+
 class LibrarySerializer implements ElementSerializer {
   const LibrarySerializer();
 
@@ -225,6 +282,7 @@
 
   void serialize(LibraryElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
+    SerializerUtil.serializeMetadata(element, encoder);
     encoder.setUri(
         Key.CANONICAL_URI, element.canonicalUri, element.canonicalUri);
     encoder.setString(Key.LIBRARY_NAME, element.libraryName);
@@ -235,7 +293,6 @@
     encoder.setElements(Key.EXPORTS, element.exports);
 
     encoder.setElements(Key.IMPORT_SCOPE, getImportedElements(element));
-
     encoder.setElements(Key.EXPORT_SCOPE, getExportedElements(element));
   }
 }
@@ -252,6 +309,7 @@
 
   void serialize(CompilationUnitElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
+    SerializerUtil.serializeMetadata(element, encoder);
     encoder.setElement(Key.LIBRARY, element.library);
     encoder.setUri(
         Key.URI, element.library.canonicalUri, element.script.resourceUri);
@@ -299,6 +357,7 @@
 
   void serialize(
       ClassElement element, ObjectEncoder encoder, SerializedElementKind kind) {
+    SerializerUtil.serializeMetadata(element, encoder);
     encoder.setElement(Key.LIBRARY, element.library);
     encoder.setElement(Key.COMPILATION_UNIT, element.compilationUnit);
     encoder.setString(Key.NAME, element.name);
@@ -307,6 +366,7 @@
     encoder.setBool(Key.IS_ABSTRACT, element.isAbstract);
     SerializerUtil.serializeMembers(getMembers(element), encoder);
     encoder.setBool(Key.IS_PROXY, element.isProxy);
+    encoder.setBool(Key.IS_INJECTED, element.isInjected);
     if (kind == SerializedElementKind.ENUM) {
       EnumClassElement enumClass = element;
       encoder.setElements(Key.FIELDS, enumClass.enumValues);
@@ -366,14 +426,17 @@
       SerializedElementKind kind) {
     SerializerUtil.serializeParentRelation(element, encoder);
     if (kind == SerializedElementKind.FORWARDING_CONSTRUCTOR) {
-      encoder.setElement(Key.ELEMENT, element.definingConstructor);
+      serializeElementReference(element.enclosingClass, Key.ELEMENT, Key.NAME,
+          encoder, element.definingConstructor);
     } else {
+      SerializerUtil.serializeMetadata(element, encoder);
       encoder.setType(Key.TYPE, element.type);
       encoder.setString(Key.NAME, element.name);
       SerializerUtil.serializePosition(element, encoder);
       SerializerUtil.serializeParameters(element, encoder);
       encoder.setBool(Key.IS_CONST, element.isConst);
       encoder.setBool(Key.IS_EXTERNAL, element.isExternal);
+      encoder.setBool(Key.IS_INJECTED, element.isInjected);
       if (element.isConst && !element.isFromEnvironmentConstructor) {
         ConstantConstructor constantConstructor = element.constantConstructor;
         ObjectEncoder constantEncoder = encoder.createObject(Key.CONSTRUCTOR);
@@ -419,10 +482,12 @@
   void serialize(
       FieldElement element, ObjectEncoder encoder, SerializedElementKind kind) {
     encoder.setString(Key.NAME, element.name);
+    SerializerUtil.serializeMetadata(element, encoder);
     SerializerUtil.serializePosition(element, encoder);
     encoder.setType(Key.TYPE, element.type);
     encoder.setBool(Key.IS_FINAL, element.isFinal);
     encoder.setBool(Key.IS_CONST, element.isConst);
+    encoder.setBool(Key.IS_INJECTED, element.isInjected);
     ConstantExpression constant = element.constant;
     if (constant != null) {
       encoder.setConstant(Key.CONSTANT, constant);
@@ -439,6 +504,9 @@
   const FunctionSerializer();
 
   SerializedElementKind getSerializedKind(Element element) {
+    if (element.isDeferredLoaderGetter) {
+      return null;
+    }
     if (element.isFunction) {
       if (element.isTopLevel) return SerializedElementKind.TOPLEVEL_FUNCTION;
       if (element.isStatic) return SerializedElementKind.STATIC_FUNCTION;
@@ -469,6 +537,7 @@
   void serialize(FunctionElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
     encoder.setString(Key.NAME, element.name);
+    SerializerUtil.serializeMetadata(element, encoder);
     SerializerUtil.serializePosition(element, encoder);
     SerializerUtil.serializeParameters(element, encoder);
     encoder.setType(Key.TYPE, element.type);
@@ -478,6 +547,8 @@
     }
     SerializerUtil.serializeParentRelation(element, encoder);
     encoder.setBool(Key.IS_EXTERNAL, element.isExternal);
+    encoder.setBool(Key.IS_ABSTRACT, element.isAbstract);
+    encoder.setBool(Key.IS_INJECTED, element.isInjected);
     if (element.isLocal) {
       LocalFunctionElement localFunction = element;
       encoder.setElement(
@@ -499,6 +570,7 @@
   void serialize(TypedefElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
     encoder.setString(Key.NAME, element.name);
+    SerializerUtil.serializeMetadata(element, encoder);
     SerializerUtil.serializePosition(element, encoder);
     encoder.setType(Key.ALIAS, element.alias);
     encoder.setElement(Key.LIBRARY, element.library);
@@ -521,6 +593,7 @@
       SerializedElementKind kind) {
     encoder.setElement(Key.TYPE_DECLARATION, element.typeDeclaration);
     encoder.setString(Key.NAME, element.name);
+    SerializerUtil.serializeMetadata(element, encoder);
     SerializerUtil.serializePosition(element, encoder);
     encoder.setType(Key.TYPE, element.type);
     encoder.setInt(Key.INDEX, element.index);
@@ -544,6 +617,7 @@
       SerializedElementKind kind) {
     encoder.setElement(Key.FUNCTION, element.functionDeclaration);
     encoder.setString(Key.NAME, element.name);
+    SerializerUtil.serializeMetadata(element, encoder);
     SerializerUtil.serializePosition(element, encoder);
     encoder.setType(Key.TYPE, element.type);
     encoder.setBool(Key.IS_OPTIONAL, element.isOptional);
@@ -572,6 +646,7 @@
   void serialize(LocalVariableElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
     encoder.setString(Key.NAME, element.name);
+    SerializerUtil.serializeMetadata(element, encoder);
     SerializerUtil.serializePosition(element, encoder);
     encoder.setType(Key.TYPE, element.type);
     encoder.setBool(Key.IS_FINAL, element.isFinal);
@@ -596,6 +671,7 @@
 
   void serialize(ImportElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
+    SerializerUtil.serializeMetadata(element, encoder);
     encoder.setElement(Key.LIBRARY, element.library);
     encoder.setElement(Key.COMPILATION_UNIT, element.compilationUnit);
     encoder.setElement(Key.LIBRARY_DEPENDENCY, element.importedLibrary);
@@ -620,6 +696,7 @@
 
   void serialize(ExportElement element, ObjectEncoder encoder,
       SerializedElementKind kind) {
+    SerializerUtil.serializeMetadata(element, encoder);
     encoder.setElement(Key.LIBRARY, element.library);
     encoder.setElement(Key.COMPILATION_UNIT, element.compilationUnit);
     encoder.setElement(Key.LIBRARY_DEPENDENCY, element.exportedLibrary);
@@ -643,10 +720,27 @@
     encoder.setString(Key.NAME, element.name);
     encoder.setElement(Key.LIBRARY, element.library);
     encoder.setElement(Key.COMPILATION_UNIT, element.compilationUnit);
-    if (element.deferredImport != null) {
-      encoder.setElement(Key.IMPORT, element.deferredImport);
-    }
     encoder.setBool(Key.IS_DEFERRED, element.isDeferred);
+    if (element.isDeferred) {
+      encoder.setElement(Key.IMPORT, element.deferredImport);
+      encoder.setElement(Key.GETTER, element.loadLibrary);
+    }
+  }
+}
+
+class DeferredLoadLibrarySerializer implements ElementSerializer {
+  const DeferredLoadLibrarySerializer();
+
+  SerializedElementKind getSerializedKind(Element element) {
+    if (element.isDeferredLoaderGetter) {
+      return SerializedElementKind.DEFERRED_LOAD_LIBRARY;
+    }
+    return null;
+  }
+
+  void serialize(GetterElement element, ObjectEncoder encoder,
+      SerializedElementKind kind) {
+    encoder.setElement(Key.PREFIX, element.enclosingElement);
   }
 }
 
@@ -663,6 +757,19 @@
   static Element deserialize(
       ObjectDecoder decoder, SerializedElementKind elementKind) {
     switch (elementKind) {
+      case SerializedElementKind.ERROR:
+        Element enclosing = decoder.getElement(Key.ENCLOSING);
+        String name = decoder.getString(Key.NAME);
+        MessageKind messageKind =
+            decoder.getEnum(Key.MESSAGE_KIND, MessageKind.values);
+        Map<String, String> arguments = <String, String>{};
+        MapDecoder mapDecoder = decoder.getMap(Key.ARGUMENTS, isOptional: true);
+        if (mapDecoder != null) {
+          mapDecoder.forEachKey((String key) {
+            arguments[key] = mapDecoder.getString(key);
+          });
+        }
+        return new ErroneousElementX(messageKind, arguments, name, enclosing);
       case SerializedElementKind.LIBRARY:
         return new LibraryElementZ(decoder);
       case SerializedElementKind.COMPILATION_UNIT:
@@ -690,8 +797,10 @@
       case SerializedElementKind.REDIRECTING_FACTORY_CONSTRUCTOR:
         return new RedirectingFactoryConstructorElementZ(decoder);
       case SerializedElementKind.FORWARDING_CONSTRUCTOR:
-        return new ForwardingConstructorElementZ(
-            decoder.getElement(Key.CLASS), decoder.getElement(Key.ELEMENT));
+        ClassElement cls = decoder.getElement(Key.CLASS);
+        Element definingConstructor =
+            deserializeElementReference(cls, Key.ELEMENT, Key.NAME, decoder);
+        return new ForwardingConstructorElementZ(cls, definingConstructor);
       case SerializedElementKind.TOPLEVEL_FUNCTION:
         return new TopLevelFunctionElementZ(decoder);
       case SerializedElementKind.STATIC_FUNCTION:
@@ -726,6 +835,8 @@
         return new ExportElementZ(decoder);
       case SerializedElementKind.PREFIX:
         return new PrefixElementZ(decoder);
+      case SerializedElementKind.DEFERRED_LOAD_LIBRARY:
+        return new DeferredLoaderGetterElementX(decoder.getElement(Key.PREFIX));
       case SerializedElementKind.LOCAL_VARIABLE:
         return new LocalVariableElementZ(decoder);
       case SerializedElementKind.EXTERNAL_LIBRARY:
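
The serializers in `ELEMENT_SERIALIZERS` appear to be tried in order, each one either claiming an element by returning a kind from `getSerializedKind` or declining with `null`; moving `PrefixSerializer` and adding `DeferredLoadLibrarySerializer` near the front presumably relies on that first-match dispatch. A self-contained sketch of the idea using hypothetical mini-types rather than the real dart2js classes:

abstract class MiniSerializer {
  String kindOf(String element); // Returns null to decline the element.
}

class MiniDeferredSerializer implements MiniSerializer {
  String kindOf(String element) =>
      element == 'loadLibrary' ? 'DEFERRED_LOAD_LIBRARY' : null;
}

class MiniFunctionSerializer implements MiniSerializer {
  String kindOf(String element) => 'FUNCTION';
}

String dispatch(String element, List<MiniSerializer> serializers) {
  for (MiniSerializer serializer in serializers) {
    String kind = serializer.kindOf(element);
    if (kind != null) return kind;
  }
  return null;
}

void main() {
  var serializers = <MiniSerializer>[
    new MiniDeferredSerializer(),
    new MiniFunctionSerializer()
  ];
  print(dispatch('loadLibrary', serializers)); // DEFERRED_LOAD_LIBRARY
  print(dispatch('main', serializers)); // FUNCTION
}
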
diff --git a/pkg/compiler/lib/src/serialization/equivalence.dart b/pkg/compiler/lib/src/serialization/equivalence.dart
index ab8f78e..a7c08b5 100644
--- a/pkg/compiler/lib/src/serialization/equivalence.dart
+++ b/pkg/compiler/lib/src/serialization/equivalence.dart
@@ -14,7 +14,7 @@
 import '../elements/elements.dart';
 import '../elements/visitor.dart';
 import '../js_backend/backend_serialization.dart'
-    show JavaScriptBackendSerializer;
+    show NativeBehaviorSerialization;
 import '../native/native.dart' show NativeBehavior;
 import '../resolution/access_semantics.dart';
 import '../resolution/send_structure.dart';
@@ -383,9 +383,31 @@
 
   @override
   bool visitClassElement(ClassElement element1, ClassElement element2) {
-    return strategy.test(
-            element1, element2, 'name', element1.name, element2.name) &&
-        visit(element1.library, element2.library);
+    if (!strategy.test(
+        element1,
+        element2,
+        'isUnnamedMixinApplication',
+        element1.isUnnamedMixinApplication,
+        element2.isUnnamedMixinApplication)) {
+      return false;
+    }
+    if (element1.isUnnamedMixinApplication) {
+      MixinApplicationElement mixin1 = element1;
+      MixinApplicationElement mixin2 = element2;
+      return strategy.testElements(
+              mixin1, mixin2, 'subclass', mixin1.subclass, mixin2.subclass) &&
+          // Using the [mixinType] is more precise but requires the test to
+          // handle self references: The identity of a type variable is based on
+          // its type declaration and if [mixin1] is generic the [mixinType]
+          // will contain the type variables declared by [mixin1], i.e.
+          // `abstract class Mixin<T> implements MixinType<T> {}`
+          strategy.testElements(
+              mixin1, mixin2, 'mixin', mixin1.mixin, mixin2.mixin);
+    } else {
+      return strategy.test(
+              element1, element2, 'name', element1.name, element2.name) &&
+          visit(element1.library, element2.library);
+    }
   }
 
   bool checkMembers(Element element1, Element element2) {
@@ -838,6 +860,43 @@
   return false;
 }
 
+bool testNativeBehavior(NativeBehavior a, NativeBehavior b,
+    [TestStrategy strategy = const TestStrategy()]) {
+  if (identical(a, b)) return true;
+  if (a == null || b == null) return false;
+  return strategy.test(
+          a, b, 'codeTemplateText', a.codeTemplateText, b.codeTemplateText) &&
+      strategy.test(a, b, 'isAllocation', a.isAllocation, b.isAllocation) &&
+      strategy.test(a, b, 'sideEffects', a.sideEffects, b.sideEffects) &&
+      strategy.test(a, b, 'throwBehavior', a.throwBehavior, b.throwBehavior) &&
+      strategy.testTypeLists(
+          a,
+          b,
+          'dartTypesReturned',
+          NativeBehaviorSerialization.filterDartTypes(a.typesReturned),
+          NativeBehaviorSerialization.filterDartTypes(b.typesReturned)) &&
+      strategy.testLists(
+          a,
+          b,
+          'specialTypesReturned',
+          NativeBehaviorSerialization.filterSpecialTypes(a.typesReturned),
+          NativeBehaviorSerialization.filterSpecialTypes(b.typesReturned)) &&
+      strategy.testTypeLists(
+          a,
+          b,
+          'dartTypesInstantiated',
+          NativeBehaviorSerialization.filterDartTypes(a.typesInstantiated),
+          NativeBehaviorSerialization.filterDartTypes(b.typesInstantiated)) &&
+      strategy.testLists(
+          a,
+          b,
+          'specialTypesInstantiated',
+          NativeBehaviorSerialization.filterSpecialTypes(a.typesInstantiated),
+          NativeBehaviorSerialization
+              .filterSpecialTypes(b.typesInstantiated)) &&
+      strategy.test(a, b, 'useGvn', a.useGvn, b.useGvn);
+}
+
 /// Visitor that checks the equivalence of [TreeElements] data.
 class TreeElementsEquivalenceVisitor extends Visitor {
   final TestStrategy strategy;
@@ -889,41 +948,7 @@
     if (identical(a, b)) return true;
     if (a == null || b == null) return false;
     if (a is NativeBehavior && b is NativeBehavior) {
-      return strategy.test(a, b, 'codeTemplateText', a.codeTemplateText,
-              b.codeTemplateText) &&
-          strategy.test(a, b, 'isAllocation', a.isAllocation, b.isAllocation) &&
-          strategy.test(a, b, 'sideEffects', a.sideEffects, b.sideEffects) &&
-          strategy.test(
-              a, b, 'throwBehavior', a.throwBehavior, b.throwBehavior) &&
-          strategy.testTypeLists(
-              a,
-              b,
-              'dartTypesReturned',
-              JavaScriptBackendSerializer.filterDartTypes(a.typesReturned),
-              JavaScriptBackendSerializer.filterDartTypes(b.typesReturned)) &&
-          strategy.testLists(
-              a,
-              b,
-              'specialTypesReturned',
-              JavaScriptBackendSerializer.filterSpecialTypes(a.typesReturned),
-              JavaScriptBackendSerializer
-                  .filterSpecialTypes(b.typesReturned)) &&
-          strategy.testTypeLists(
-              a,
-              b,
-              'dartTypesInstantiated',
-              JavaScriptBackendSerializer.filterDartTypes(a.typesInstantiated),
-              JavaScriptBackendSerializer
-                  .filterDartTypes(b.typesInstantiated)) &&
-          strategy.testLists(
-              a,
-              b,
-              'specialTypesInstantiated',
-              JavaScriptBackendSerializer
-                  .filterSpecialTypes(a.typesInstantiated),
-              JavaScriptBackendSerializer
-                  .filterSpecialTypes(b.typesInstantiated)) &&
-          strategy.test(a, b, 'useGvn', a.useGvn, b.useGvn);
+      return testNativeBehavior(a, b, strategy);
     }
     return true;
   }
@@ -1807,3 +1832,12 @@
     throw new UnsupportedError('Unexpected nodes: $node1 <> $node2');
   }
 }
+
+bool areMetadataAnnotationsEquivalent(
+    MetadataAnnotation metadata1, MetadataAnnotation metadata2) {
+  if (metadata1 == metadata2) return true;
+  if (metadata1 == null || metadata2 == null) return false;
+  return areElementsEquivalent(
+          metadata1.annotatedElement, metadata2.annotatedElement) &&
+      areConstantsEquivalent(metadata1.constant, metadata2.constant);
+}
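
The extracted `testNativeBehavior` and the new `areMetadataAnnotationsEquivalent` both follow the same short-circuit shape: identical objects are equivalent, a single `null` is not, and otherwise the comparison proceeds field by field. A tiny sketch of that shape with a hypothetical `Span` type (not dart2js's `SourceSpan`):

class Span {
  final Uri uri;
  final int begin;
  final int end;
  Span(this.uri, this.begin, this.end);
}

bool areSpansEquivalent(Span a, Span b) {
  if (identical(a, b)) return true;
  if (a == null || b == null) return false;
  return a.uri == b.uri && a.begin == b.begin && a.end == b.end;
}
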
diff --git a/pkg/compiler/lib/src/serialization/keys.dart b/pkg/compiler/lib/src/serialization/keys.dart
index 179b727..2c70168 100644
--- a/pkg/compiler/lib/src/serialization/keys.dart
+++ b/pkg/compiler/lib/src/serialization/keys.dart
@@ -31,6 +31,7 @@
   static const Key EFFECTIVE_TARGET_TYPE = const Key('effectiveTargetType');
   static const Key ELEMENT = const Key('element');
   static const Key ELEMENTS = const Key('elements');
+  static const Key ENCLOSING = const Key('enclosing');
   static const Key EXECUTABLE_CONTEXT = const Key('executable-context');
   static const Key EXPORTS = const Key('exports');
   static const Key EXPORT_SCOPE = const Key('export-scope');
@@ -59,6 +60,7 @@
   static const Key IS_EMPTY = const Key('isEmpty');
   static const Key IS_EXTERNAL = const Key('isExternal');
   static const Key IS_FINAL = const Key('isFinal');
+  static const Key IS_INJECTED = const Key('isInjected');
   static const Key IS_NAMED = const Key('isNamed');
   static const Key IS_OPERATOR = const Key('isOperator');
   static const Key IS_OPTIONAL = const Key('isOptional');
@@ -83,6 +85,8 @@
   static const Key LISTS = const Key('lists');
   static const Key MAPS = const Key('maps');
   static const Key MEMBERS = const Key('members');
+  static const Key MESSAGE_KIND = const Key('messageKind');
+  static const Key METADATA = const Key('metadata');
   static const Key MIXIN = const Key('mixin');
   static const Key MIXINS = const Key('mixins');
   static const Key NAME = const Key('name');
diff --git a/pkg/compiler/lib/src/serialization/modelz.dart b/pkg/compiler/lib/src/serialization/modelz.dart
index 36ec45c..4c932f3 100644
--- a/pkg/compiler/lib/src/serialization/modelz.dart
+++ b/pkg/compiler/lib/src/serialization/modelz.dart
@@ -112,7 +112,6 @@
   @override
   bool get isTopLevel => false;
 
-  // TODO(johnniwinther): Support metadata.
   @override
   Iterable<MetadataAnnotation> get metadata => const <MetadataAnnotation>[];
 
@@ -122,6 +121,7 @@
 
 abstract class DeserializedElementZ extends ElementZ {
   ObjectDecoder _decoder;
+  List<MetadataAnnotation> _metadata;
 
   DeserializedElementZ(this._decoder);
 
@@ -147,6 +147,27 @@
     }
     return new SourceSpan(uri, offset, offset + length);
   }
+
+  @override
+  Iterable<MetadataAnnotation> get metadata {
+    if (_metadata == null) {
+      _metadata = <MetadataAnnotation>[];
+      ListDecoder list = _decoder.getList(Key.METADATA, isOptional: true);
+      if (list != null) {
+        for (int index = 0; index < list.length; index++) {
+          ObjectDecoder object = list.getObject(index);
+          Element element = object.getElement(Key.ELEMENT);
+          Uri uri = object.getUri(Key.URI);
+          int offset = object.getInt(Key.OFFSET);
+          int length = object.getInt(Key.LENGTH);
+          ConstantExpression constant = object.getConstant(Key.CONSTANT);
+          _metadata.add(new MetadataAnnotationZ(
+              element, new SourceSpan(uri, offset, offset + length), constant));
+        }
+      }
+    }
+    return _metadata;
+  }
 }
 
 /// Deserializer for a collection of member elements serialized as a map from
@@ -301,7 +322,9 @@
   }
 }
 
-class AbstractFieldElementZ extends ElementZ implements AbstractFieldElement {
+class AbstractFieldElementZ extends ElementZ
+    with AbstractFieldElementCommon
+    implements AbstractFieldElement {
   final String name;
   final GetterElementZ getter;
   final SetterElementZ setter;
@@ -346,6 +369,18 @@
 
   @override
   ClassElement get enclosingClass => _canonicalElement.enclosingClass;
+
+  @override
+  bool get isClassMember => _canonicalElement.isClassMember;
+
+  @override
+  bool get isInstanceMember => _canonicalElement.isInstanceMember;
+
+  @override
+  bool get isStatic => _canonicalElement.isStatic;
+
+  @override
+  bool get isTopLevel => _canonicalElement.isTopLevel;
 }
 
 class LibraryElementZ extends DeserializedElementZ
@@ -423,7 +458,8 @@
 
   void _ensureExports() {
     if (_exportsMap == null) {
-      _exportsMap = new ListedContainer(_decoder.getElements(Key.EXPORT_SCOPE));
+      _exportsMap = new ListedContainer(
+          _decoder.getElements(Key.EXPORT_SCOPE, isOptional: true));
     }
   }
 
@@ -468,7 +504,9 @@
 
   @override
   Iterable<ImportElement> getImportsFor(Element element) {
-    return _unsupported('getImportsFor');
+    // TODO(johnniwinther): Serialize this to support deferred access to
+    // serialized entities.
+    return <ImportElement>[];
   }
 
   String toString() {
@@ -568,6 +606,9 @@
   SourceSpan get sourcePosition => new SourceSpan(script.resourceUri, 0, 0);
 
   @override
+  bool get isTopLevel => false;
+
+  @override
   accept(ElementVisitor visitor, arg) {
     return visitor.visitCompilationUnitElement(this, arg);
   }
@@ -669,6 +710,9 @@
 
   @override
   bool get isInstanceMember => true;
+
+  @override
+  bool get isClassMember => true;
 }
 
 abstract class StaticMemberMixin implements DeserializedElementZ {
@@ -677,6 +721,9 @@
 
   @override
   bool get isStatic => true;
+
+  @override
+  bool get isClassMember => true;
 }
 
 abstract class TypedElementMixin implements DeserializedElementZ, TypedElement {
@@ -810,7 +857,8 @@
   @override
   ConstructorElement lookupDefaultConstructor() {
     ConstructorElement constructor = lookupConstructor("");
-    if (constructor != null && constructor.parameters.isEmpty) {
+    if (constructor != null &&
+        constructor.functionSignature.requiredParameterCount == 0) {
       return constructor;
     }
     return null;
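
The relaxed check above accepts an unnamed constructor as the default constructor as long as it has no required parameters, so optional parameters no longer disqualify it. Illustrative classes (not from the SDK):

class A {
  // Only optional parameters: `new A()` is valid, so this now counts as a
  // default constructor under the relaxed check.
  A([int x = 0]);
}

class B {
  // A required parameter: `new B()` is not valid, so lookup still fails.
  B(int x);
}
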
@@ -823,8 +871,10 @@
   void ensureResolved(Resolution resolution) {
     if (!_isResolved) {
       _isResolved = true;
-      class_members.MembersCreator
-          .computeClassMembersByName(resolution, this, Identifiers.call);
+      // TODO(johnniwinther): Avoid eager computation of all members. `call` is
+      // always needed, but the remaining should be computed on-demand or on
+      // type instantiation.
+      class_members.MembersCreator.computeAllClassMembers(resolution, this);
       resolution.registerClass(this);
     }
   }
@@ -918,6 +968,9 @@
   bool get isProxy => _decoder.getBool(Key.IS_PROXY);
 
   @override
+  bool get isInjected => _decoder.getBool(Key.IS_INJECTED);
+
+  @override
   bool get isUnnamedMixinApplication => false;
 
   @override
@@ -946,6 +999,9 @@
 
   @override
   InterfaceType get mixinType => _mixinType ??= _decoder.getType(Key.MIXIN);
+
+  @override
+  ClassElement get subclass => null;
 }
 
 class UnnamedMixinApplicationElementZ extends ElementZ
@@ -959,26 +1015,30 @@
         MixinApplicationElementCommon,
         MixinApplicationElementMixin {
   final String name;
-  final ClassElement _subclass;
-  final InterfaceType supertype;
-  final Link<DartType> interfaces;
+  final ClassElement subclass;
+  final InterfaceType _supertypeBase;
+  final InterfaceType _mixinBase;
+  InterfaceType _supertype;
+  Link<DartType> _interfaces;
   OrderedTypeSet _allSupertypesAndSelf;
   Link<ConstructorElement> _constructors;
 
   UnnamedMixinApplicationElementZ(
-      ClassElement subclass, InterfaceType supertype, InterfaceType mixin)
-      : this._subclass = subclass,
-        this.supertype = supertype,
-        this.interfaces = const Link<DartType>().prepend(mixin),
+      this.subclass, InterfaceType supertype, InterfaceType mixin)
+      : this._supertypeBase = supertype,
+        this._mixinBase = mixin,
         this.name = "${supertype.name}+${mixin.name}";
 
   @override
-  CompilationUnitElement get compilationUnit => _subclass.compilationUnit;
+  CompilationUnitElement get compilationUnit => subclass.compilationUnit;
 
   @override
   bool get isTopLevel => true;
 
   @override
+  bool get isAbstract => true;
+
+  @override
   bool get isUnnamedMixinApplication => true;
 
   Link<ConstructorElement> get constructors {
@@ -1005,7 +1065,7 @@
     // Create synthetic type variables for the mixin application.
     List<DartType> typeVariables = <DartType>[];
     int index = 0;
-    for (TypeVariableType type in _subclass.typeVariables) {
+    for (TypeVariableType type in subclass.typeVariables) {
       SyntheticTypeVariableElementZ typeVariableElement =
           new SyntheticTypeVariableElementZ(this, index, type.name);
       TypeVariableType typeVariable = new TypeVariableType(typeVariableElement);
@@ -1013,17 +1073,56 @@
       index++;
     }
     // Setup bounds on the synthetic type variables.
-    for (TypeVariableType type in _subclass.typeVariables) {
+    for (TypeVariableType type in subclass.typeVariables) {
       TypeVariableType typeVariable = typeVariables[type.element.index];
       SyntheticTypeVariableElementZ typeVariableElement = typeVariable.element;
       typeVariableElement._type = typeVariable;
       typeVariableElement._bound =
-          type.element.bound.subst(typeVariables, _subclass.typeVariables);
+          type.element.bound.subst(typeVariables, subclass.typeVariables);
     }
     return typeVariables;
   }
 
   @override
+  InterfaceType get supertype {
+    if (_supertype == null) {
+      // Substitute the type variables in [_supertypeBase] provided by
+      // [subclass] with the type variables in this unnamed mixin application.
+      //
+      // For instance
+      //    class S<S.T> {}
+      //    class M<M.T> {}
+      //    class C<C.T> extends S<C.T> with M<C.T> {}
+      // the unnamed mixin application should be
+      //    abstract class S+M<S+M.T> extends S<S+M.T> implements M<S+M.T> {}
+      // but the supertype is provided as S<C.T> and we need to substitute S+M.T
+      // for C.T.
+      _supertype = _supertypeBase.subst(typeVariables, subclass.typeVariables);
+    }
+    return _supertype;
+  }
+
+  @override
+  Link<DartType> get interfaces {
+    if (_interfaces == null) {
+      // Substitute the type variables in [_mixinBase] provided by
+      // [subclass] with the type variables in this unnamed mixin application.
+      //
+      // For instance
+      //    class S<S.T> {}
+      //    class M<M.T> {}
+      //    class C<C.T> extends S<C.T> with M<C.T> {}
+      // the unnamed mixin application should be
+      //    abstract class S+M<S+M.T> extends S<S+M.T> implements M<S+M.T> {}
+      // but the mixin is provided as M<C.T> and we need to substitute S+M.T
+      // for C.T.
+      _interfaces = const Link<DartType>()
+          .prepend(_mixinBase.subst(typeVariables, subclass.typeVariables));
+    }
+    return _interfaces;
+  }
+
+  @override
   accept(ElementVisitor visitor, arg) {
     return visitor.visitMixinApplicationElement(this, arg);
   }
@@ -1038,7 +1137,7 @@
   }
 
   @override
-  Element get enclosingElement => _subclass.enclosingElement;
+  Element get enclosingElement => subclass.enclosingElement;
 
   @override
   bool get isObject => false;
@@ -1053,10 +1152,10 @@
   InterfaceType get mixinType => interfaces.head;
 
   @override
-  int get sourceOffset => _subclass.sourceOffset;
+  int get sourceOffset => subclass.sourceOffset;
 
   @override
-  SourceSpan get sourcePosition => _subclass.sourcePosition;
+  SourceSpan get sourcePosition => subclass.sourcePosition;
 }
 
 class EnumClassElementZ extends ClassElementZ implements EnumClassElement {
@@ -1084,7 +1183,8 @@
         FunctionTypedElementMixin,
         ParametersMixin,
         TypedElementMixin,
-        MemberElementMixin
+        MemberElementMixin,
+        ConstructorElementCommon
     implements
         ConstructorElement,
         // TODO(johnniwinther): Sort out whether a constructor is a method.
@@ -1104,14 +1204,6 @@
   @override
   bool get isExternal => _decoder.getBool(Key.IS_EXTERNAL);
 
-  bool get isFromEnvironmentConstructor {
-    return name == 'fromEnvironment' &&
-        library.isDartCore &&
-        (enclosingClass.name == 'bool' ||
-            enclosingClass.name == 'int' ||
-            enclosingClass.name == 'String');
-  }
-
   ConstantConstructor get constantConstructor {
     if (isConst && _constantConstructor == null) {
       ObjectDecoder data =
@@ -1263,8 +1355,13 @@
 }
 
 class ForwardingConstructorElementZ extends ElementZ
-    with AnalyzableElementMixin, AstElementMixinZ
-    implements ConstructorElement {
+    with
+        AnalyzableElementMixin,
+        AstElementMixinZ
+    implements
+        ConstructorElement,
+        // TODO(johnniwinther): Sort out whether a constructor is a method.
+        MethodElement {
   final MixinApplicationElement enclosingClass;
   final ConstructorElement definingConstructor;
 
@@ -1293,6 +1390,9 @@
   bool get isConst => false;
 
   @override
+  bool get isClassMember => true;
+
+  @override
   ConstantConstructor get constantConstructor => null;
 
   @override
@@ -1332,6 +1432,15 @@
   bool get isFromEnvironmentConstructor => false;
 
   @override
+  bool get isIntFromEnvironmentConstructor => false;
+
+  @override
+  bool get isBoolFromEnvironmentConstructor => false;
+
+  @override
+  bool get isStringFromEnvironmentConstructor => false;
+
+  @override
   bool get isRedirectingFactory => false;
 
   @override
@@ -1396,6 +1505,9 @@
 
   @override
   List<FunctionElement> get nestedClosures => <FunctionElement>[];
+
+  @override
+  bool get isInjected => _decoder.getBool(Key.IS_INJECTED);
 }
 
 abstract class FieldElementZ extends DeserializedElementZ
@@ -1493,6 +1605,9 @@
   }
 
   @override
+  bool get isAbstract => _decoder.getBool(Key.IS_ABSTRACT);
+
+  @override
   bool get isOperator => _decoder.getBool(Key.IS_OPERATOR);
 }
 
@@ -1594,6 +1709,9 @@
   }
 
   @override
+  bool get isAbstract => _decoder.getBool(Key.IS_ABSTRACT);
+
+  @override
   AsyncMarker get asyncMarker => AsyncMarker.SYNC;
 }
 
@@ -1634,6 +1752,9 @@
   }
 
   @override
+  bool get isAbstract => _decoder.getBool(Key.IS_ABSTRACT);
+
+  @override
   AsyncMarker get asyncMarker => AsyncMarker.SYNC;
 }
 
@@ -1960,7 +2081,7 @@
   ElementKind get kind => ElementKind.PARAMETER;
 }
 
-class InitializingFormalElementZ extends ParameterElementZ
+class InitializingFormalElementZ extends LocalParameterElementZ
     implements InitializingFormalElement {
   FieldElement _fieldElement;
 
@@ -1981,6 +2102,9 @@
 
   @override
   ElementKind get kind => ElementKind.INITIALIZING_FORMAL;
+
+  @override
+  bool get isLocal => true;
 }
 
 class LocalVariableElementZ extends DeserializedElementZ
@@ -2134,6 +2258,7 @@
     implements PrefixElement {
   bool _isDeferred;
   ImportElement _deferredImport;
+  GetterElement _loadLibrary;
 
   PrefixElementZ(ObjectDecoder decoder) : super(decoder);
 
@@ -2143,7 +2268,10 @@
   void _ensureDeferred() {
     if (_isDeferred == null) {
       _isDeferred = _decoder.getBool(Key.IS_DEFERRED);
-      _deferredImport = _decoder.getElement(Key.IMPORT, isOptional: true);
+      if (_isDeferred) {
+        _deferredImport = _decoder.getElement(Key.IMPORT);
+        _loadLibrary = _decoder.getElement(Key.GETTER);
+      }
     }
   }
 
@@ -2160,6 +2288,11 @@
   }
 
   @override
+  GetterElement get loadLibrary {
+    return _loadLibrary;
+  }
+
+  @override
   ElementKind get kind => ElementKind.PREFIX;
 
   @override
@@ -2167,3 +2300,25 @@
     return _unsupported('lookupLocalMember');
   }
 }
+
+class MetadataAnnotationZ implements MetadataAnnotation {
+  final Element annotatedElement;
+  final SourceSpan sourcePosition;
+  final ConstantExpression constant;
+
+  MetadataAnnotationZ(
+      this.annotatedElement, this.sourcePosition, this.constant);
+
+  @override
+  MetadataAnnotation ensureResolved(Resolution resolution) {
+    // Do nothing.
+  }
+
+  @override
+  Node get node => throw new UnsupportedError('${this}.node');
+
+  @override
+  bool get hasNode => false;
+
+  String toString() => 'MetadataAnnotationZ(${constant.toDartText()})';
+}
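
The `supertype` and `interfaces` getters added above rewrite types that were recorded against the subclass's type variables (`C.T`) into the synthetic variables of the unnamed mixin application (`S+M.T`). Below is a minimal standalone sketch of that substitution idea, using toy type classes rather than the dart2js `DartType`/`subst` API.

// Toy stand-ins for type variables and generic interface types.
class TypeVar {
  final String name;
  const TypeVar(this.name);
  String toString() => name;
}

class Interface {
  final String name;
  final List arguments; // TypeVar or Interface entries.
  Interface(this.name, this.arguments);
  String toString() => '$name<${arguments.join(', ')}>';
}

// Replaces occurrences of the [from] variables with the matching [to] types.
Object subst(Object type, List<TypeVar> from, List to) {
  if (type is TypeVar) {
    int index = from.indexOf(type);
    return index == -1 ? type : to[index];
  }
  Interface i = type;
  return new Interface(
      i.name, i.arguments.map((a) => subst(a, from, to)).toList());
}

main() {
  // class C<C.T> extends S<C.T> with M<C.T> {}
  TypeVar cT = const TypeVar('C.T');    // type variable of the subclass C
  TypeVar smT = const TypeVar('S+M.T'); // synthetic variable of S+M
  Interface supertypeBase = new Interface('S', [cT]); // recorded as S<C.T>
  // The mixin application substitutes its own variable: prints S<S+M.T>.
  print(subst(supertypeBase, [cT], [smT]));
}
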
diff --git a/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart b/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart
index ff8eef4..b5d6cae 100644
--- a/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/resolved_ast_serialization.dart
@@ -100,6 +100,7 @@
         break;
       case ResolvedAstKind.DEFAULT_CONSTRUCTOR:
       case ResolvedAstKind.FORWARDING_CONSTRUCTOR:
+      case ResolvedAstKind.DEFERRED_LOAD_LIBRARY:
         // No additional properties.
         break;
     }
@@ -108,10 +109,7 @@
   /// Serialize [ResolvedAst] that is defined in terms of an AST together with
   /// [TreeElements].
   void serializeParsed() {
-    objectEncoder.setUri(
-        Key.URI,
-        elements.analyzedElement.compilationUnit.script.resourceUri,
-        elements.analyzedElement.compilationUnit.script.resourceUri);
+    objectEncoder.setUri(Key.URI, resolvedAst.sourceUri, resolvedAst.sourceUri);
     AstKind kind;
     if (element.enclosingClass is EnumClassElement) {
       if (element.name == 'index') {
@@ -358,6 +356,8 @@
         (element as AstElementMixinZ).resolvedAst =
             new SynthesizedResolvedAst(element, kind);
         break;
+      case ResolvedAstKind.DEFERRED_LOAD_LIBRARY:
+        break;
     }
   }
 
@@ -515,7 +515,7 @@
               reporter.internalError(
                   element,
                   "No token found for $element in "
-                  "${objectDecoder.getUri(Key.URI)} @ $getOrSetOffset");
+                  "${uri} @ $getOrSetOffset");
             }
           }
           return doParse((parser) {
diff --git a/pkg/compiler/lib/src/serialization/serialization.dart b/pkg/compiler/lib/src/serialization/serialization.dart
index 74dd4a0..1b97c3d 100644
--- a/pkg/compiler/lib/src/serialization/serialization.dart
+++ b/pkg/compiler/lib/src/serialization/serialization.dart
@@ -5,9 +5,11 @@
 library dart2js.serialization;
 
 import '../common.dart';
+import '../common/resolution.dart';
 import '../constants/expressions.dart';
 import '../dart_types.dart';
 import '../elements/elements.dart';
+import '../library_loader.dart' show LibraryProvider;
 import '../util/enumset.dart';
 
 import 'constant_serialization.dart';
@@ -921,13 +923,17 @@
 /// Context for parallel deserialization.
 class DeserializationContext {
   final DiagnosticReporter reporter;
+  final Resolution resolution;
+  final LibraryProvider libraryProvider;
   Map<Uri, LibraryElement> _uriMap = <Uri, LibraryElement>{};
   List<Deserializer> deserializers = <Deserializer>[];
   List<DeserializerPlugin> plugins = <DeserializerPlugin>[];
 
-  DeserializationContext(this.reporter);
+  DeserializationContext(this.reporter, this.resolution, this.libraryProvider);
 
   LibraryElement lookupLibrary(Uri uri) {
+    // TODO(johnniwinther): Move this to the library loader by making a
+    // [Deserializer] a [LibraryProvider].
     return _uriMap.putIfAbsent(uri, () {
       Uri foundUri;
       LibraryElement foundLibrary;
@@ -949,6 +955,11 @@
       return foundLibrary;
     });
   }
+
+  LibraryElement findLibrary(Uri uri) {
+    LibraryElement library = lookupLibrary(uri);
+    return library ?? libraryProvider.lookupLibrary(uri);
+  }
 }
 
 /// Deserializer for a closed collection of libraries.
@@ -1024,7 +1035,7 @@
           decoder.getEnum(Key.KIND, SerializedElementKind.values);
       if (elementKind == SerializedElementKind.EXTERNAL_LIBRARY) {
         Uri uri = decoder.getUri(Key.URI);
-        element = context.lookupLibrary(uri);
+        element = context.findLibrary(uri);
         if (element == null) {
           throw new StateError("Missing library for $uri.");
         }
@@ -1047,6 +1058,7 @@
         }
       } else if (elementKind == SerializedElementKind.EXTERNAL_CLASS_MEMBER) {
         ClassElement cls = decoder.getElement(Key.CLASS);
+        cls.ensureResolved(context.resolution);
         String name = decoder.getString(Key.NAME);
         bool isGetter = decoder.getBool(Key.GETTER, isOptional: true);
         element = cls.lookupLocalMember(name);
@@ -1063,6 +1075,7 @@
         }
       } else if (elementKind == SerializedElementKind.EXTERNAL_CONSTRUCTOR) {
         ClassElement cls = decoder.getElement(Key.CLASS);
+        cls.ensureResolved(context.resolution);
         String name = decoder.getString(Key.NAME);
         element = cls.lookupConstructor(name);
         if (element == null) {
diff --git a/pkg/compiler/lib/src/serialization/serialization_util.dart b/pkg/compiler/lib/src/serialization/serialization_util.dart
index 24aa6f8..1e40000 100644
--- a/pkg/compiler/lib/src/serialization/serialization_util.dart
+++ b/pkg/compiler/lib/src/serialization/serialization_util.dart
@@ -506,10 +506,19 @@
     if (elementName == null) {
       return null;
     }
-    assert(invariant(NO_LOCATION_SPANNABLE, context.isConstructor,
-        message: "Unexpected reference of forwarding constructor "
-            "'${elementName}' from $context."));
-    ClassElement superclass = context.enclosingClass.superclass;
+    ClassElement cls;
+    if (context is ClassElement) {
+      assert(invariant(NO_LOCATION_SPANNABLE, context.isNamedMixinApplication,
+          message: "Unexpected reference of forwarding constructor "
+              "'${elementName}' from $context."));
+      cls = context;
+    } else {
+      assert(invariant(NO_LOCATION_SPANNABLE, context.isConstructor,
+          message: "Unexpected reference of forwarding constructor "
+              "'${elementName}' from $context."));
+      cls = context.enclosingClass;
+    }
+    ClassElement superclass = cls.superclass;
     element = superclass.lookupConstructor(elementName);
     assert(invariant(NO_LOCATION_SPANNABLE, element != null,
         message: "Unresolved reference of forwarding constructor "
diff --git a/pkg/compiler/lib/src/serialization/system.dart b/pkg/compiler/lib/src/serialization/system.dart
index 8b54e98d..5e2a24b 100644
--- a/pkg/compiler/lib/src/serialization/system.dart
+++ b/pkg/compiler/lib/src/serialization/system.dart
@@ -28,16 +28,15 @@
 
 class DeserializerSystemImpl extends DeserializerSystem {
   final Compiler _compiler;
+  final Resolution resolution;
   final DeserializationContext deserializationContext;
   final List<LibraryElement> deserializedLibraries = <LibraryElement>[];
   final ResolutionImpactDeserializer _resolutionImpactDeserializer;
   final ResolvedAstDeserializerPlugin _resolvedAstDeserializer;
-  final ImpactTransformer _impactTransformer;
 
-  factory DeserializerSystemImpl(
-      Compiler compiler, ImpactTransformer impactTransformer) {
-    DeserializationContext context =
-        new DeserializationContext(compiler.reporter);
+  factory DeserializerSystemImpl(Compiler compiler) {
+    DeserializationContext context = new DeserializationContext(
+        compiler.reporter, compiler.resolution, compiler.libraryLoader);
     DeserializerPlugin backendDeserializer =
         compiler.backend.serialization.deserializer;
     context.plugins.add(backendDeserializer);
@@ -48,14 +47,14 @@
         new ResolvedAstDeserializerPlugin(
             compiler.parsingContext, backendDeserializer);
     context.plugins.add(resolvedAstDeserializer);
-    return new DeserializerSystemImpl._(compiler, context, impactTransformer,
+    return new DeserializerSystemImpl._(compiler, compiler.resolution, context,
         resolutionImpactDeserializer, resolvedAstDeserializer);
   }
 
   DeserializerSystemImpl._(
       this._compiler,
+      this.resolution,
       this.deserializationContext,
-      this._impactTransformer,
       this._resolutionImpactDeserializer,
       this._resolvedAstDeserializer);
 
@@ -96,7 +95,7 @@
         element.enclosingClass.isUnnamedMixinApplication) {
       return true;
     }
-    return _resolutionImpactDeserializer.impactMap.containsKey(element);
+    return _resolutionImpactDeserializer.hasResolutionImpact(element);
   }
 
   @override
@@ -111,14 +110,15 @@
               "${element} not found in ${superclass}."));
       // TODO(johnniwinther): Compute callStructure. Currently not used.
       CallStructure callStructure;
-      return _resolutionImpactDeserializer.impactMap.putIfAbsent(element, () {
+      return _resolutionImpactDeserializer.registerResolutionImpact(element,
+          () {
         return new DeserializedResolutionImpact(staticUses: <StaticUse>[
           new StaticUse.superConstructorInvoke(
               superclassConstructor, callStructure)
         ]);
       });
     }
-    return _resolutionImpactDeserializer.impactMap[element];
+    return _resolutionImpactDeserializer.getResolutionImpact(element);
   }
 
   @override
@@ -129,7 +129,18 @@
     if (element is ExecutableElement) {
       getResolvedAst(element);
     }
-    return _impactTransformer.transformResolutionImpact(resolutionImpact);
+    if (element.isField && !element.isConst) {
+      FieldElement field = element;
+      if (field.isTopLevel || field.isStatic) {
+        if (field.constant == null) {
+          // TODO(johnniwinther): Find a cleaner way to do this. Maybe
+          // `Feature.LAZY_FIELD` of the resolution impact should be used
+          // instead.
+          _compiler.backend.constants.registerLazyStatic(element);
+        }
+      }
+    }
+    return resolution.transformResolutionImpact(element, resolutionImpact);
   }
 
   @override
@@ -158,7 +169,8 @@
 }
 
 class ResolutionImpactDeserializer extends DeserializerPlugin {
-  Map<Element, ResolutionImpact> impactMap = <Element, ResolutionImpact>{};
+  Map<Element, ObjectDecoder> _decoderMap = <Element, ObjectDecoder>{};
+  Map<Element, ResolutionImpact> _impactMap = <Element, ResolutionImpact>{};
   final DeserializerPlugin nativeDataDeserializer;
 
   ResolutionImpactDeserializer(this.nativeDataDeserializer);
@@ -167,10 +179,30 @@
   void onElement(Element element, ObjectDecoder getDecoder(String tag)) {
     ObjectDecoder decoder = getDecoder(WORLD_IMPACT_TAG);
     if (decoder != null) {
-      impactMap[element] = ImpactDeserializer.deserializeImpact(
-          element, decoder, nativeDataDeserializer);
+      _decoderMap[element] = decoder;
     }
   }
+
+  bool hasResolutionImpact(Element element) {
+    return _impactMap.containsKey(element) || _decoderMap.containsKey(element);
+  }
+
+  ResolutionImpact registerResolutionImpact(
+      Element element, ResolutionImpact ifAbsent()) {
+    return _impactMap.putIfAbsent(element, ifAbsent);
+  }
+
+  ResolutionImpact getResolutionImpact(Element element) {
+    return registerResolutionImpact(element, () {
+      ObjectDecoder decoder = _decoderMap[element];
+      if (decoder != null) {
+        _decoderMap.remove(element);
+        return ImpactDeserializer.deserializeImpact(
+            element, decoder, nativeDataDeserializer);
+      }
+      return null;
+    });
+  }
 }
 
 const String RESOLVED_AST_TAG = 'resolvedAst';
@@ -185,6 +217,7 @@
   void onElement(Element element, ObjectEncoder createEncoder(String tag)) {
     assert(invariant(element, element.isDeclaration,
         message: "Element $element must be the declaration"));
+    if (element.isError) return;
     if (element is MemberElement) {
       assert(invariant(element, resolution.hasResolvedAst(element),
           message: "Element $element must have a resolved ast"));
diff --git a/pkg/compiler/lib/src/serialization/task.dart b/pkg/compiler/lib/src/serialization/task.dart
index 80d98d6..f57ade6 100644
--- a/pkg/compiler/lib/src/serialization/task.dart
+++ b/pkg/compiler/lib/src/serialization/task.dart
@@ -117,8 +117,7 @@
   void deserializeFromText(Uri sourceUri, String serializedData) {
     measure(() {
       if (deserializer == null) {
-        deserializer = new DeserializerSystemImpl(
-            compiler, compiler.backend.impactTransformer);
+        deserializer = new DeserializerSystemImpl(compiler);
       }
       DeserializerSystemImpl deserializerImpl = deserializer;
       DeserializationContext context = deserializerImpl.deserializationContext;
diff --git a/pkg/compiler/lib/src/serialization/type_serialization.dart b/pkg/compiler/lib/src/serialization/type_serialization.dart
index 3d4cb9f..24a46ae 100644
--- a/pkg/compiler/lib/src/serialization/type_serialization.dart
+++ b/pkg/compiler/lib/src/serialization/type_serialization.dart
@@ -35,7 +35,9 @@
     encoder.setTypes(Key.NAMED_PARAMETER_TYPES, type.namedParameterTypes);
   }
 
-  void visitMalformedType(MalformedType type, ObjectEncoder encoder) {}
+  void visitMalformedType(MalformedType type, ObjectEncoder encoder) {
+    encoder.setElement(Key.ELEMENT, type.element);
+  }
 
   void visitInterfaceType(InterfaceType type, ObjectEncoder encoder) {
     encoder.setElement(Key.ELEMENT, type.element);
@@ -79,8 +81,11 @@
         return new TypedefType(decoder.getElement(Key.ELEMENT),
             decoder.getTypes(Key.TYPE_ARGUMENTS, isOptional: true));
       case TypeKind.STATEMENT:
-      case TypeKind.MALFORMED_TYPE:
         throw new UnsupportedError("Unexpected type kind '${typeKind}.");
+      case TypeKind.MALFORMED_TYPE:
+        // TODO(johnniwinther): Do we need the 'userProvidedBadType' or maybe
+        // just a toString of it?
+        return new MalformedType(decoder.getElement(Key.ELEMENT), null);
       case TypeKind.DYNAMIC:
         return const DynamicType();
       case TypeKind.VOID:
diff --git a/pkg/compiler/lib/src/ssa/codegen.dart b/pkg/compiler/lib/src/ssa/codegen.dart
index 0ff32f2..9637549 100644
--- a/pkg/compiler/lib/src/ssa/codegen.dart
+++ b/pkg/compiler/lib/src/ssa/codegen.dart
@@ -1496,8 +1496,25 @@
     js.Statement elsePart =
         unwrapStatement(generateStatementsInNewBlock(elseGraph));
 
-    pushStatement(new js.If(test, thenPart, elsePart)
-        .withSourceInformation(node.sourceInformation));
+    js.Statement code;
+    // Peephole rewrites:
+    //
+    //     if (e); else S;   -->   if(!e) S;
+    //
+    //     if (e);   -->   e;
+    //
+    // TODO(sra): This peephole optimization would be better done as an SSA
+    // optimization.
+    if (thenPart is js.EmptyStatement) {
+      if (elsePart is js.EmptyStatement) {
+        code = new js.ExpressionStatement(test);
+      } else {
+        code = new js.If.noElse(new js.Prefix('!', test), elsePart);
+      }
+    } else {
+      code = new js.If(test, thenPart, elsePart);
+    }
+    pushStatement(code.withSourceInformation(node.sourceInformation));
   }
 
   visitIf(HIf node) {
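
The peephole above avoids emitting `if` statements with empty branches. The following sketch applies the same rewrite to toy statement nodes; these classes are stand-ins, not the compiler's js_ast types.

abstract class Stmt {}

class Empty extends Stmt {}

class ExprStmt extends Stmt {
  final String expression;
  ExprStmt(this.expression);
  String toString() => '$expression;';
}

class If extends Stmt {
  final String test;
  final Stmt thenPart;
  final Stmt elsePart;
  If(this.test, this.thenPart, [this.elsePart]);
  String toString() => elsePart == null
      ? 'if ($test) $thenPart'
      : 'if ($test) $thenPart else $elsePart';
}

// Mirrors the rewrite: drop empty branches rather than emitting them.
Stmt rewriteIf(String test, Stmt thenPart, Stmt elsePart) {
  if (thenPart is Empty) {
    if (elsePart is Empty) {
      return new ExprStmt(test); // if (e); else ;  -->  e;
    }
    return new If('!($test)', elsePart); // if (e); else S;  -->  if (!e) S;
  }
  return new If(test, thenPart, elsePart);
}

main() {
  print(rewriteIf('e', new Empty(), new ExprStmt('S()'))); // if (!(e)) S();
  print(rewriteIf('e', new Empty(), new Empty())); // e;
}
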
diff --git a/pkg/compiler/lib/src/ssa/optimize.dart b/pkg/compiler/lib/src/ssa/optimize.dart
index 20aaf32..e1eee00c 100644
--- a/pkg/compiler/lib/src/ssa/optimize.dart
+++ b/pkg/compiler/lib/src/ssa/optimize.dart
@@ -505,7 +505,7 @@
     // Strengthen instruction type from annotations to help optimize
     // dependent instructions.
     native.NativeBehavior nativeBehavior =
-        native.NativeBehavior.ofMethod(method, compiler);
+        backend.getNativeMethodBehavior(method);
     TypeMask returnType =
         TypeMaskFactory.fromNativeBehavior(nativeBehavior, compiler);
     HInvokeDynamicMethod result =
@@ -869,7 +869,7 @@
     TypeMask type;
     if (backend.isNative(field.enclosingClass)) {
       type = TypeMaskFactory.fromNativeBehavior(
-          native.NativeBehavior.ofFieldLoad(field, compiler), compiler);
+          backend.getNativeFieldLoadBehavior(field), compiler);
     } else {
       type = TypeMaskFactory.inferredTypeForElement(field, compiler);
     }
diff --git a/pkg/compiler/lib/src/typechecker.dart b/pkg/compiler/lib/src/typechecker.dart
index 750357e..69137ab 100644
--- a/pkg/compiler/lib/src/typechecker.dart
+++ b/pkg/compiler/lib/src/typechecker.dart
@@ -40,7 +40,7 @@
         TypeDeclarationElement,
         TypedElement,
         VariableElement;
-import 'resolution/class_members.dart' show MembersCreator;
+import 'resolution/class_members.dart' show MembersCreator, ErroneousMember;
 import 'resolution/tree_elements.dart' show TreeElements;
 import 'tree/tree.dart';
 import 'util/util.dart' show Link, LinkBuilder;
@@ -681,7 +681,12 @@
       assert(invariant(node, element != null,
           message: 'Missing element for identifier'));
       assert(invariant(
-          node, element.isVariable || element.isParameter || element.isField,
+          node,
+          element.isVariable ||
+              element.isParameter ||
+              element.isField ||
+              (element.isInitializingFormal &&
+                  compiler.options.enableInitializingFormalAccess),
           message: 'Unexpected context element ${element}'));
       return element.computeType(resolution);
     }
@@ -733,7 +738,11 @@
         Name name, DartType unaliasedBound, InterfaceType interface) {
       MemberSignature member = lookupMemberSignature(memberName, interface);
       if (member != null) {
-        return new MemberAccess(member);
+        if (member is ErroneousMember) {
+          return const DynamicAccess();
+        } else {
+          return new MemberAccess(member);
+        }
       }
       if (name == const PublicName('call')) {
         if (unaliasedBound.isFunctionType) {
@@ -762,7 +771,10 @@
       return access;
     }
     if (receiverElement != null &&
-        (receiverElement.isVariable || receiverElement.isParameter)) {
+        (receiverElement.isVariable ||
+            receiverElement.isParameter ||
+            (receiverElement.isInitializingFormal &&
+                compiler.options.enableInitializingFormalAccess))) {
       Link<TypePromotion> typePromotions = typePromotionsMap[receiverElement];
       if (typePromotions != null) {
         while (!typePromotions.isEmpty) {
@@ -1057,7 +1069,10 @@
     } else if (element.isFunction) {
       // foo() where foo is a method in the same class.
       return createResolvedAccess(node, name, element);
-    } else if (element.isVariable || element.isParameter || element.isField) {
+    } else if (element.isVariable ||
+        element.isParameter ||
+        element.isField ||
+        element.isInitializingFormal) {
       // foo() where foo is a field in the same class.
       return createResolvedAccess(node, name, element);
     } else if (element.isGetter || element.isSetter) {
@@ -1075,7 +1090,10 @@
   }
 
   ElementAccess createPromotedAccess(Element element) {
-    if (element.isVariable || element.isParameter) {
+    if (element.isVariable ||
+        element.isParameter ||
+        (element.isInitializingFormal &&
+            compiler.options.enableInitializingFormalAccess)) {
       TypePromotion typePromotion = getKnownTypePromotion(element);
       if (typePromotion != null) {
         return new PromotedAccess(element, typePromotion.type);
@@ -1210,7 +1228,11 @@
           }
         }
 
-        if (variable != null && (variable.isVariable || variable.isParameter)) {
+        if (variable != null &&
+            (variable.isVariable ||
+                variable.isParameter ||
+                (variable.isInitializingFormal &&
+                    compiler.options.enableInitializingFormalAccess))) {
           DartType knownType = getKnownType(variable);
           if (!knownType.isDynamic) {
             DartType shownType = elements.getType(node.arguments.head);
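
The typechecker additions above treat initializing formals like other locals, including for type promotion, when `enableInitializingFormalAccess` is set. A small sketch of the constructor pattern that option is about:

class Point {
  final int x;
  final int y;

  // With initializing-formal access enabled, the parameter introduced by
  // `this.x` is in scope, so the initializer list can read it directly.
  Point(this.x) : y = x + 1;
}

main() {
  print(new Point(2).y); // 3
}
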
diff --git a/pkg/compiler/lib/src/types/constants.dart b/pkg/compiler/lib/src/types/constants.dart
index a3b2faa..d66a31c 100644
--- a/pkg/compiler/lib/src/types/constants.dart
+++ b/pkg/compiler/lib/src/types/constants.dart
@@ -105,6 +105,11 @@
   }
 
   @override
+  TypeMask visitNonConstant(NonConstantValue constant, Compiler compiler) {
+    return compiler.typesTask.nullType;
+  }
+
+  @override
   TypeMask visitString(StringConstantValue constant, Compiler compiler) {
     return compiler.typesTask.stringType;
   }
diff --git a/pkg/compiler/lib/src/util/util.dart b/pkg/compiler/lib/src/util/util.dart
index 102ca9e..d9cc16c 100644
--- a/pkg/compiler/lib/src/util/util.dart
+++ b/pkg/compiler/lib/src/util/util.dart
@@ -52,6 +52,16 @@
     return h;
   }
 
+  /// Mix the bits of the key/value hash codes from [map] with [existing],
+  /// independently of the iteration order of [map].
+  static int unorderedMapHash(Map map, [int existing = 0]) {
+    int h = 0;
+    for (var key in map.keys) {
+      h ^= objectHash(key, objectHash(map[key]));
+    }
+    return mixHashCodeBits(h, existing);
+  }
+
   /// Mix the bits of the key/value hash codes from [map] with [existing].
   static int mapHash(Map map, [int existing = 0]) {
     int h = existing;
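
`unorderedMapHash` above XORs the per-entry hashes before mixing, so the result does not depend on the map's iteration order, unlike `mapHash`, which folds entries in sequence. A simplified standalone illustration of that property (not using the `Hashing` helpers themselves):

// XOR is commutative and associative, so combining per-entry hashes with it
// makes the final value independent of iteration order.
int unorderedHash(Map map) {
  int h = 0;
  map.forEach((key, value) {
    h ^= key.hashCode ^ (value.hashCode * 31);
  });
  return h;
}

main() {
  // Same entries, different insertion (and thus iteration) order.
  Map a = {'x': 1, 'y': 2};
  Map b = {'y': 2, 'x': 1};
  print(unorderedHash(a) == unorderedHash(b)); // true
}
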
diff --git a/pkg/dart_messages/lib/shared_messages.dart b/pkg/dart_messages/lib/shared_messages.dart
index c2e73c7..ae91711 100644
--- a/pkg/dart_messages/lib/shared_messages.dart
+++ b/pkg/dart_messages/lib/shared_messages.dart
@@ -682,6 +682,8 @@
       template: "The setter '#{memberName}' is not defined for the "
           "class '#{className}'.",
       usedBy: [dart2js, analyzer],
+      // TODO(eernst): When this.x access is available, add examples here,
+      // e.g., "class A { var x; A(this.x) : x = 3; } main() => new A(2);"
       examples: const ["class A {} main() { new A().x = 499; }",]),
 
   'NO_SUCH_SUPER_MEMBER': new Message(
diff --git a/pkg/meta/CHANGELOG.md b/pkg/meta/CHANGELOG.md
index 474ddfe..bf761f1 100644
--- a/pkg/meta/CHANGELOG.md
+++ b/pkg/meta/CHANGELOG.md
@@ -1,3 +1,12 @@
+## 1.0.1
+* Updated `@factory` to allow statics and methods returning `null`.
+
+## 1.0.0
+* First stable API release.
+
+## 0.12.2
+* Updated `@protected` to include implemented interfaces (linter#252).
+
 ## 0.12.1
 * Fixed markdown in dartdocs.
 
diff --git a/pkg/meta/lib/meta.dart b/pkg/meta/lib/meta.dart
index ff0f9b0..99a3924 100644
--- a/pkg/meta/lib/meta.dart
+++ b/pkg/meta/lib/meta.dart
@@ -18,17 +18,16 @@
 /// in the language tour.
 library meta;
 
-/// Used to annotate an instance method `m`. Indicates that `m` must either be
-/// abstract or must return a newly allocated object. In addition, every method
-/// that either implements or overrides `m` is implicitly annotated with this
-/// same annotation.
+/// Used to annotate an instance or static method `m`. Indicates that `m` must
+/// either be abstract or must return a newly allocated object or `null`. In
+/// addition, every method that either implements or overrides `m` is implicitly
+/// annotated with this same annotation.
 ///
 /// Tools, such as the analyzer, can provide feedback if
 ///
-/// * the annotation is associated with anything other than an instance method,
-///   or
-/// * a method that has this annotation that can return anything other than a
-///   newly allocated object.
+/// * the annotation is associated with anything other than a method, or
+/// * the annotation is associated with a method that can return anything
+///   other than a newly allocated object or `null`.
 const _Factory factory = const _Factory();
 
 /// Used to annotate a const constructor `c`. Indicates that any invocation of
@@ -69,8 +68,9 @@
 /// field) `m` in a class `C`. If the annotation is on a field it applies to the
 /// getter, and setter if appropriate, that are induced by the field. Indicates
 /// that `m` should only be invoked from instance methods of `C` or classes that
-/// extend or mix in `C`, either directly or indirectly. Additionally indicates
-/// that `m` should only be invoked on `this`, whether explicitly or implicitly.
+/// extend, implement or mix in `C`, either directly or indirectly. Additionally
+/// indicates that `m` should only be invoked on `this`, whether explicitly or
+/// implicitly.
 ///
 /// Tools, such as the analyzer, can provide feedback if
 ///
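
For context, here is a short usage sketch of the two annotations whose documentation changes above; it assumes `package:meta` as published with this release.

import 'package:meta/meta.dart';

class Node {
  // Should only be called on `this` from Node or from classes that extend,
  // implement, or mix in Node.
  @protected
  void invalidate() {}

  // Must be abstract or return a newly allocated object (or `null`); statics
  // are also allowed as of this release.
  @factory
  Node createChild() => new Node();
}

class Element extends Node {
  void update() {
    invalidate(); // OK: invoked on `this` from a subclass.
  }
}
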
diff --git a/pkg/meta/pubspec.yaml b/pkg/meta/pubspec.yaml
index 82a1677..acab306 100644
--- a/pkg/meta/pubspec.yaml
+++ b/pkg/meta/pubspec.yaml
@@ -1,5 +1,5 @@
 name: meta
-version: 0.12.1
+version: 1.0.1
 author: Dart Team <misc@dartlang.org>
 homepage: http://www.dartlang.org
 description: >
diff --git a/pkg/pkg.status b/pkg/pkg.status
index 9fa4651..81fe4d1 100644
--- a/pkg/pkg.status
+++ b/pkg/pkg.status
@@ -27,96 +27,19 @@
 
 [ $runtime == vm && $system == windows]
 analysis_server/test/analysis/get_errors_test: Skip # runtime error, Issue 22180
+analysis_server/test/context_manager_test: RuntimeError # Issue 26828
 analysis_server/test/integration/analysis/analysis_options_test: RuntimeError # Issue 24796
 analyzer/test/generated/all_the_rest_test: Fail # Issue 21772
-analyzer_cli/test/driver_test: Fail # Issue 25471
+analyzer/test/generated/source_factory_test: RuntimeError # Issue 26828
+analyzer/test/src/context/builder_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/linker_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/prelinker_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/summarize_elements_strong_test: RuntimeError # Issue 26828
+analyzer/test/src/summary/summarize_elements_test: RuntimeError # Issue 26828
 
 [ $compiler == dart2js ]
+analysis_server/test/integration: SkipByDesign # Analysis server integration tests don't make sense to run under dart2js, since the code under test always runs in the Dart vm as a subprocess.
 analyzer_cli/test/*: SkipByDesign # Only meant to run on vm
-analysis_server/test/*: Skip # Issue 22161
-analysis_server/test/analysis_notification_highlights_test: Pass, Slow # 19756, 21628
-analysis_server/test/analysis_notification_navigation_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/analysis_notification_occurrences_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/analysis_notification_outline_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/domain_analysis_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/domain_completion_test: Pass, Slow
-analysis_server/test/edit/assists_test: Pass, Slow
-analysis_server/test/edit/format_test: Pass, Slow
-analysis_server/test/edit/refactoring_test: Pass, Slow # Issue 19756, 21628
-analysis_server/test/search/element_references_test: Pass, Slow
-analysis_server/test/search/top_level_declarations_test: Pass, Slow # 19756, 21628
-analysis_server/test/services/index/store/codec_test: Pass, Slow
-analysis_server/test/socket_server_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/context/declared_variables_test: Pass, Slow # Issue 21628
-analyzer/test/dart/element/element_test: Pass, Slow # Issue 24914
-analyzer/test/dart/ast/ast_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/dart/ast/utilities_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/dart/ast/visitor_test: Pass, Slow # Issue 19756, 21628
-analyzer/test/enum_test: Slow, Pass, Fail # Issue 21323
-analyzer/test/non_hint_code_test: Pass, Slow # Issue 21628
-analyzer/test/strong_mode_test: Pass, Slow # Issue 21628
-analyzer/test/generated/all_the_rest_test: Pass, Slow # Issue 21628
-analyzer/test/generated/checked_mode_compile_time_error_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/ast_test: Pass, Slow # Issue 21628
-analyzer/test/generated/checked_mode_compile_time_error_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/compile_time_error_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/constant_test: Pass, Slow # Issue 24914
-analyzer/test/generated/declaration_resolver_test: Pass, Slow # Issue 24914
-analyzer/test/generated/element_test: Pass, Slow # Issue 21628
-analyzer/test/generated/element_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/error_suppression_test: Pass, Slow # Issue 21628
-analyzer/test/generated/engine_test: SkipSlow
-analyzer/test/generated/hint_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/non_hint_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/incremental_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/incremental_scanner_test: Pass, Slow # Issue 21628
-analyzer/test/generated/inheritance_manager_test: Pass, Slow # Issue 21628
-analyzer/test/generated/non_error_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/parser_test: Pass, Slow # Issue 21628
-analyzer/test/generated/resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/scanner_test: Pass, Slow # Issue 21628
-analyzer/test/generated/simple_resolver_test: Pass, Slow # Issue 21628
-analyzer/test/generated/source_factory_test: Pass, Slow # Issue 21628
-analyzer/test/generated/static_type_analyzer_test: Pass, Slow # Issue 21628
-analyzer/test/generated/static_type_warning_code_test: Pass, Slow
-analyzer/test/generated/static_type_warning_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/static_warning_code_test: Pass, Slow # Issue 21628
-analyzer/test/generated/strong_mode_test: Pass, Slow # Issue 21628
-analyzer/test/generated/type_system_test: Pass, Slow # Issue 21628
-analyzer/test/generated/utilities_test: Pass, Slow # Issue 21628
-analyzer/test/src/context/cache_test: Pass, Slow # Issue 21628
-analyzer/test/src/context/context_test: Pass, Timeout # dartbug.com/23658
-analyzer/test/src/dart/ast/utilities_test: Pass, Slow # Issue 24914
-analyzer/test/src/dart/constant/evaluation_test: Pass, Slow # Issue 24914
-analyzer/test/src/dart/constant/value_test: Pass, Slow # Issue 24914
-analyzer/test/src/dart/element/element_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/incremental_cache_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/index_unit_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/linker_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/prelinker_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/resynthesize_ast_test: Pass, Slow
-analyzer/test/src/summary/resynthesize_strong_test: Pass, Slow
-analyzer/test/src/summary/resynthesize_test: Pass, Slow
-analyzer/test/src/summary/summary_sdk_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_ast_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_ast_strong_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_elements_strong_test: Pass, Slow # Issue 24914
-analyzer/test/src/summary/summarize_elements_test: Pass, Slow # Issue 24914
-analyzer/test/src/task/dart_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/dart_work_manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/driver_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/general_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/html_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/html_work_manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/incremental_element_builder_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/inputs_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/model_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/options_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/options_work_manager_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/strong/checker_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/strong/inferred_type_test: Pass, Slow # Issue 21628
-analyzer/test/src/task/yaml_test: Pass, Slow # Issue 21628
 collection/test/equality_test/01: Fail # Issue 1533
 collection/test/equality_test/02: Fail # Issue 1533
 collection/test/equality_test/03: Fail # Issue 1533
@@ -126,22 +49,14 @@
 lookup_map/test/version_check_test: SkipByDesign # Only meant to run in vm.
 typed_data/test/typed_buffers_test/01: Fail # Not supporting Int64List, Uint64List.
 
-# Analysis server integration tests don't make sense to run under
-# dart2js, since the code under test always runs in the Dart vm as a
-# subprocess.
-analysis_server/test/integration: Skip
+[ $compiler == dart2js && $builder_tag != dart2js_analyzer ]
+analyzer/test/*: Skip # Issue 26813
+analyzer/tool/*: Skip # Issue 26813
+analysis_server/test/*: Skip # Issue 26813
 
 [ $compiler == dart2js && $checked ]
 crypto/test/base64_test: Slow, Pass
 
-[ $runtime == d8 ]
-analysis_server/test/analysis_notification_overrides_test: Pass, Slow # Issue 19756
-analysis_server/test/analysis_notification_occurrences_test: Pass, Slow # Issue 19756
-analysis_server/test/analysis_notification_outline_test: Pass, Slow # Issue 19756
-analysis_server/test/domain_search_test: Pass, Slow # Issue 19756
-analysis_server/test/search/element_reference_test: Pass, Slow # Issue 19756
-analysis_server/index/store/codec_test: Pass, Slow # Issue 19756
-
 [ $runtime == jsshell ]
 async/test/stream_zip_test: RuntimeError, OK # Issue 26103. Timers are not supported.
 lookup_map/test/lookup_map_test: RuntimeError, OK # Issue 26103. Timers are not supported.
@@ -155,7 +70,7 @@
 
 [ $runtime == ie10 ]
 analyzer/test/generated/java_core_test: Pass, Timeout # Issue 19747
-typed_data/test/typed_buffers_test/none: Fail # Issue   17607 (I put this here explicitly, since this is not the same as on ie9)
+typed_data/test/typed_buffers_test/none: Fail # Issue 17607 (I put this here explicitly, since this is not the same as on ie9)
 
 [ $runtime == safarimobilesim ]
 # Unexplained errors only occuring on Safari 6.1 and earlier.
@@ -163,8 +78,6 @@
 
 [ $compiler == dart2analyzer ]
 compiler/samples/compile_loop/compile_loop: CompileTimeError  # Issue 16524
-lookup_map/test/version_check_test: StaticWarning # https://github.com/dart-lang/http_parser/issues/6
-lookup_map/test/lookup_map_test: StaticWarning # https://github.com/dart-lang/http_parser/issues/6
 
 [ $compiler == dart2js && $csp ]
 # This test cannot run under CSP because it is injecting a JavaScript polyfill
@@ -204,11 +117,6 @@
 [ $use_repository_packages ]
 analyzer/test/*: PubGetError
 
-[ $compiler == dart2js && $cps_ir && $host_checked == false ]
-analyzer/test/dart/element/element_test: Pass, Slow # Times out due to inlining, but see issue 24485
-analyzer/test/src/summary/resynthesize_test: Pass, Slow # Times out due to inlining, but see issue 24485
-analyzer/test/src/task/strong_mode_test: Pass, Slow # Times out due to inlining, but see issue 24485
-
 [ $compiler == dart2js && $cps_ir && $host_checked ]
 analyzer/test/dart/ast/ast_test: Crash # Issue 24485
 analyzer/test/dart/ast/visitor_test: Crash # Issue 24485
diff --git a/pkg/pkgbuild.status b/pkg/pkgbuild.status
index 1366f47..2f7a6bd 100644
--- a/pkg/pkgbuild.status
+++ b/pkg/pkgbuild.status
@@ -2,15 +2,10 @@
 # for details. All rights reserved. Use of this source code is governed by a
 # BSD-style license that can be found in the LICENSE file.
 
-samples/searchable_list: Pass, Slow
-
-[ $use_repository_packages ]
-pkg/analyzer: PubGetError
-samples/third_party/angular_todo: Fail # angular needs to be updated
+third_party/pkg/scheduled_test: Fail # Issue 26585
 
 [ $use_public_packages ]
 pkg/compiler: SkipByDesign # js_ast is not published
-samples/third_party/angular_todo: Pass, Slow
 
-[ $builder_tag == russian ]
-samples/third_party/angular_todo: Fail # Issue 16356
+[ ($use_repository_packages || $use_public_packages) && ($system == windows || $system == linux) ]
+third_party/pkg/*: Pass, PubGetError # Issue 26696
diff --git a/runtime/bin/bin.gypi b/runtime/bin/bin.gypi
index 3905ad7..2d04720 100644
--- a/runtime/bin/bin.gypi
+++ b/runtime/bin/bin.gypi
@@ -1058,6 +1058,20 @@
       ]
     },
     {
+      'target_name': 'fuchsia_test',
+      'type': 'executable',
+      'dependencies': [
+        'libdart_nosnapshot',
+      ],
+      'include_dirs': [
+        '..',
+        '../include',
+      ],
+      'sources': [
+        'fuchsia_test.cc',
+      ],
+    },
+    {
       # dart binary with a snapshot of corelibs built in.
       'target_name': 'dart',
       'type': 'executable',
diff --git a/runtime/bin/builtin.dart b/runtime/bin/builtin.dart
index 6b16d99..7780208 100644
--- a/runtime/bin/builtin.dart
+++ b/runtime/bin/builtin.dart
@@ -1,5 +1,5 @@
 // Copyright (c) 2012, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
+// for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
 library builtin;
@@ -10,6 +10,10 @@
 import 'dart:isolate';
 import 'dart:typed_data';
 
+// Embedder sets this to true if the --trace-loading flag was passed on the
+// command line.
+bool _traceLoading = false;
+
 
 // Before handling an embedder entrypoint we finalize the setup of the
 // dart:_builtin library.
@@ -52,40 +56,40 @@
 
 _getUriBaseClosure() => _uriBase;
 
-
 // Asynchronous loading of resources.
-// The embedder forwards most loading requests to this library.
-
-// See Dart_LibraryTag in dart_api.h
-const _Dart_kScriptTag = null;
-const _Dart_kImportTag = 0;
-const _Dart_kSourceTag = 1;
-const _Dart_kCanonicalizeUrl = 2;
-const _Dart_kResourceLoad = 3;
-
-// Embedder sets this to true if the --trace-loading flag was passed on the
-// command line.
-bool _traceLoading = false;
-
-// This is currently a build time flag only. We measure the time from the first
-// load request (opening the receive port) to completing the last load
-// request (closing the receive port). Future, deferred load operations will
-// add to this time.
-bool _timeLoading = false;
-Stopwatch _stopwatch;
+// The embedder forwards loading requests to the service isolate.
 
 // A port for communicating with the service isolate for I/O.
 SendPort _loadPort;
-// The receive port for a load request. Multiple sources can be fetched in
-// a single load request.
-RawReceivePort _dataPort;
-// A request id valid only for the current load cycle (while the number of
-// outstanding load requests is greater than 0). Can be reset when loading is
-// completed.
-int _reqId = 0;
-// An unordered hash map mapping from request id to a particular load request.
-// Once there are no outstanding load requests the current load has finished.
-HashMap _reqMap = new HashMap();
+
+// The isolateId used to communicate with the service isolate for I/O.
+int _isolateId;
+
+// Requests made to the service isolate over the load port.
+
+// Extra requests. Keep these in sync between loader.dart and builtin.dart.
+const _Dart_kInitLoader = 4;           // Initialize the loader.
+const _Dart_kResourceLoad = 5;         // Resource class support.
+const _Dart_kGetPackageRootUri = 6;    // Uri of the packages/ directory.
+const _Dart_kGetPackageConfigUri = 7;  // Uri of the .packages file.
+const _Dart_kResolvePackageUri = 8;    // Resolve a package: uri.
+
+// Make a request to the loader. The returned future completes with the
+// result, which is either a Uri or a List<int>.
+Future _makeLoaderRequest(int tag, String uri) {
+  assert(_isolateId != null);
+  assert(_loadPort != null);
+  Completer completer = new Completer();
+  RawReceivePort port = new RawReceivePort();
+  port.handler = (msg) {
+    // Close the port.
+    port.close();
+    completer.complete(msg);
+  };
+  _loadPort.send([_traceLoading, _isolateId, tag, port.sendPort, uri]);
+  return completer.future;
+}
+
 
 // The current working directory when the embedder was launched.
 Uri _workingDirectory;
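
`_makeLoaderRequest` above turns a one-shot port exchange into a `Future`: it opens a fresh `RawReceivePort`, sends its `sendPort` along with the request, and completes on the single reply. The same pattern in isolation, with a toy in-process echo handler standing in for the loader:

import 'dart:async';
import 'dart:isolate';

// Sends [message] to [target] and completes with the single reply.
Future request(SendPort target, Object message) {
  Completer completer = new Completer();
  RawReceivePort port = new RawReceivePort();
  port.handler = (reply) {
    port.close(); // One reply per request, so the port can be closed here.
    completer.complete(reply);
  };
  target.send([port.sendPort, message]);
  return completer.future;
}

main() {
  // Toy "loader": echoes the payload back on the supplied reply port.
  RawReceivePort loader = new RawReceivePort();
  loader.handler = (msg) {
    SendPort replyTo = msg[0];
    replyTo.send('loaded: ${msg[1]}');
  };
  request(loader.sendPort, 'package:foo/foo.dart').then((reply) {
    print(reply); // loaded: package:foo/foo.dart
    loader.close();
  });
}
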
@@ -93,29 +97,8 @@
 // package imports can be resolved relative to it. The root script is the basis
 // for the root library in the VM.
 Uri _rootScript;
-
-// Packages are either resolved looking up in a map or resolved from within a
-// package root.
-bool get _packagesReady =>
-    (_packageRoot != null) || (_packageMap != null) || (_packageError != null);
-// Error string set if there was an error resolving package configuration.
-// For example not finding a .packages file or packages/ directory, malformed
-// .packages file or any other related error.
-String _packageError = null;
-// The directory to look in to resolve "package:" scheme URIs. By detault it is
-// the 'packages' directory right next to the script.
-Uri _packageRoot = null; // Used to be _rootScript.resolve('packages/');
-// The map describing how certain package names are mapped to Uris.
-Uri _packageConfig = null;
-Map<String, Uri> _packageMap = null;
-
-// A list of pending packags which have been requested while resolving the
-// location of the package root or the contents of the package map.
-List<_LoadRequest> _pendingPackageLoads = [];
-
-// If we have outstanding loads or pending package loads waiting for resolution,
-// then we do have pending loads.
-bool _pendingLoads() => !_reqMap.isEmpty || !_pendingPackageLoads.isEmpty;
+// The package root set on the command line.
+Uri _packageRoot;
 
 // Special handling for Windows paths so that they are compatible with URI
 // handling.
@@ -128,47 +111,6 @@
   _print("* $_logId $msg");
 }
 
-// A class wrapping the load error message in an Error object.
-class _LoadError extends Error {
-  final _LoadRequest request;
-  final String message;
-  _LoadError(this.request, this.message);
-
-  String toString() {
-    var context = request._context;
-    if (context == null || context is! String) {
-      return 'Could not load "${request._uri}": $message';
-    } else {
-      return 'Could not import "${request._uri}" from "$context": $message';
-    }
-  }
-}
-
-// Class collecting all of the information about a particular load request.
-class _LoadRequest {
-  final int _id = _reqId++;
-  final int _tag;
-  final String _uri;
-  final Uri _resourceUri;
-  final _context;
-
-  _LoadRequest(this._tag, this._uri, this._resourceUri, this._context) {
-    assert(_reqMap[_id] == null);
-    _reqMap[_id] = this;
-  }
-
-  toString() => "LoadRequest($_id, $_tag, $_uri, $_resourceUri, $_context)";
-}
-
-
-// Native calls provided by the embedder.
-void _signalDoneLoading() native "Builtin_DoneLoading";
-void _loadScriptCallback(int tag, String uri, String libraryUri, Uint8List data)
-    native "Builtin_LoadSource";
-void _asyncLoadErrorCallback(uri, libraryUri, error)
-    native "Builtin_AsyncLoadError";
-
-
 _sanitizeWindowsPath(path) {
   // For Windows we need to massage the paths a bit according to
   // http://blogs.msdn.com/b/ie/archive/2006/12/06/file-uris-in-windows.aspx
@@ -193,7 +135,6 @@
   return fixedPath;
 }
 
-
 _trimWindowsPath(path) {
   // Convert /X:/ to X:/.
   if (_isWindows == false) {
@@ -211,7 +152,6 @@
   return path;
 }
 
-
 // Ensure we have a trailing slash character.
 _enforceTrailingSlash(uri) {
   if (!uri.endsWith('/')) {
@@ -220,7 +160,6 @@
   return uri;
 }
 
-
 // Embedder Entrypoint:
 // The embedder calls this method with the current working directory.
 void _setWorkingDirectory(cwd) {
@@ -236,7 +175,6 @@
   }
 }
 
-
 // Embedder Entrypoint:
 // The embedder calls this method with a custom package root.
 _setPackageRoot(String packageRoot) {
@@ -246,15 +184,15 @@
   if (_traceLoading) {
     _log('Setting package root: $packageRoot');
   }
-  packageRoot = _enforceTrailingSlash(packageRoot);
   if (packageRoot.startsWith('file:') ||
       packageRoot.startsWith('http:') ||
       packageRoot.startsWith('https:')) {
+    packageRoot = _enforceTrailingSlash(packageRoot);
     _packageRoot = _workingDirectory.resolve(packageRoot);
   } else {
     packageRoot = _sanitizeWindowsPath(packageRoot);
     packageRoot = _trimWindowsPath(packageRoot);
-    _packageRoot = _workingDirectory.resolveUri(new Uri.file(packageRoot));
+    _packageRoot = _workingDirectory.resolveUri(new Uri.directory(packageRoot));
   }
   // Now that we have determined the packageRoot value being used, set it
   // up for use in Platform.packageRoot. This is only set when the embedder
@@ -266,265 +204,8 @@
   }
 }
 
-
-// Given a uri with a 'package' scheme, return a Uri that is prefixed with
-// the package root.
-Uri _resolvePackageUri(Uri uri) {
-  assert(uri.scheme == "package");
-  assert(_packagesReady);
-
-  if (!uri.host.isEmpty) {
-    var path = '${uri.host}${uri.path}';
-    var right = 'package:$path';
-    var wrong = 'package://$path';
-
-    throw "URIs using the 'package:' scheme should look like "
-          "'$right', not '$wrong'.";
-  }
-
-  if (_traceLoading) {
-    _log('Resolving package with uri path: ${uri.path}');
-  }
-  var resolvedUri;
-  if (_packageError != null) {
-    if (_traceLoading) {
-      _log("Resolving package with pending resolution error: $_packageError");
-    }
-    throw _packageError;
-  } else if (_packageRoot != null) {
-    resolvedUri = _packageRoot.resolve(uri.path);
-  } else {
-    var packageName = uri.pathSegments[0];
-    var mapping = _packageMap[packageName];
-    if (_traceLoading) {
-      _log("Mapped '$packageName' package to '$mapping'");
-    }
-    if (mapping == null) {
-      throw "No mapping for '$packageName' package when resolving '$uri'.";
-    }
-    var path;
-    if (uri.path.length > packageName.length) {
-      path = uri.path.substring(packageName.length + 1);
-    } else {
-      // Handle naked package resolution to the default package name:
-      // package:foo is equivalent to package:foo/foo.dart
-      assert(uri.path.length == packageName.length);
-      path = "$packageName.dart";
-    }
-    if (_traceLoading) {
-      _log("Path to be resolved in package: $path");
-    }
-    resolvedUri = mapping.resolve(path);
-  }
-  if (_traceLoading) {
-    _log("Resolved '$uri' to '$resolvedUri'.");
-  }
-  return resolvedUri;
-}
-
-
-// Resolves the script uri in the current working directory iff the given uri
-// did not specify a scheme (e.g. a path to a script file on the command line).
-Uri _resolveScriptUri(String scriptName) {
-  if (_traceLoading) {
-    _log("Resolving script: $scriptName");
-  }
-  if (_workingDirectory == null) {
-    throw 'No current working directory set.';
-  }
-  scriptName = _sanitizeWindowsPath(scriptName);
-
-  var scriptUri = Uri.parse(scriptName);
-  if (scriptUri.scheme == '') {
-    // Script does not have a scheme, assume that it is a path,
-    // resolve it against the working directory.
-    scriptUri = _workingDirectory.resolveUri(scriptUri);
-  }
-
-  // Remember the root script URI so that we can resolve packages based on
-  // this location.
-  _rootScript = scriptUri;
-
-  if (_traceLoading) {
-    _log('Resolved entry point to: $_rootScript');
-  }
-  return scriptUri;
-}
-
-
-void _finishLoadRequest(_LoadRequest req) {
-  if (req != null) {
-    // Now that we are done with loading remove the request from the map.
-    var tmp = _reqMap.remove(req._id);
-    assert(tmp == req);
-    if (_traceLoading) {
-      _log("Loading of ${req._uri} finished: "
-           "${_reqMap.length} requests remaining, "
-           "${_pendingPackageLoads.length} packages pending.");
-    }
-  }
-
-  if (!_pendingLoads() && (_dataPort != null)) {
-    _stopwatch.stop();
-    // Close the _dataPort now that there are no more requests outstanding.
-    if (_traceLoading || _timeLoading) {
-      _log("Closing loading port: ${_stopwatch.elapsedMilliseconds} ms");
-    }
-    _dataPort.close();
-    _dataPort = null;
-    _reqId = 0;
-    _signalDoneLoading();
-  }
-}
-
-
-void _handleLoaderReply(msg) {
-  int id = msg[0];
-  var dataOrError = msg[1];
-  assert((id >= 0) && (id < _reqId));
-  var req = _reqMap[id];
-  try {
-    if (dataOrError is Uint8List) {
-      // Successfully loaded the data.
-      if (req._tag == _Dart_kResourceLoad) {
-        Completer c = req._context;
-        c.complete(dataOrError);
-      } else {
-        // TODO: Currently a compilation error while loading the script is
-        // fatal for the isolate. _loadScriptCallback() does not return and
-        // the number of requests remains out of sync.
-        _loadScriptCallback(req._tag, req._uri, req._context, dataOrError);
-      }
-      _finishLoadRequest(req);
-    } else {
-      assert(dataOrError is String);
-      var error = new _LoadError(req, dataOrError.toString());
-      _asyncLoadError(req, error, null);
-    }
-  } catch(e, s) {
-    // Wrap inside a _LoadError unless we are already propagating a
-    // previous _LoadError.
-    var error = (e is _LoadError) ? e : new _LoadError(req, e.toString());
-    assert(req != null);
-    _asyncLoadError(req, error, s);
-  }
-}
-
-
-void _startLoadRequest(int tag, String uri, Uri resourceUri, context) {
-  if (_dataPort == null) {
-    if (_traceLoading) {
-      _log("Initializing load port.");
-    }
-    // Allocate the Stopwatch if necessary.
-    if (_stopwatch == null) {
-      _stopwatch = new Stopwatch();
-    }
-    assert(_dataPort == null);
-    _dataPort = new RawReceivePort(_handleLoaderReply);
-    _stopwatch.start();
-  }
-  // Register the load request and send it to the VM service isolate.
-  var req = new _LoadRequest(tag, uri, resourceUri, context);
-
-  assert(_dataPort != null);
-  var msg = new List(4);
-  msg[0] = _dataPort.sendPort;
-  msg[1] = _traceLoading;
-  msg[2] = req._id;
-  msg[3] = resourceUri.toString();
-  _loadPort.send(msg);
-
-  if (_traceLoading) {
-    _log("Loading of $resourceUri for $uri started with id: ${req._id}. "
-         "${_reqMap.length} requests remaining, "
-         "${_pendingPackageLoads.length} packages pending.");
-  }
-}
-
-
-RawReceivePort _packagesPort;
-
-void _handlePackagesReply(msg) {
-  // Make sure to close the _packagePort before any other action.
-  _packagesPort.close();
-  _packagesPort = null;
-
-  if (_traceLoading) {
-    _log("Got packages reply: $msg");
-  }
-  if (msg is String) {
-    if (_traceLoading) {
-      _log("Got failure response on package port: '$msg'");
-    }
-    // Remember the error message.
-    _packageError = msg;
-  } else if (msg is List) {
-    if (msg.length == 1) {
-      if (_traceLoading) {
-        _log("Received package root: '${msg[0]}'");
-      }
-      _packageRoot = Uri.parse(msg[0]);
-    } else {
-      // First entry contains the location of the loaded .packages file.
-      assert((msg.length % 2) == 0);
-      assert(msg.length >= 2);
-      assert(msg[1] == null);
-      _packageConfig = Uri.parse(msg[0]);
-      _packageMap = new Map<String, Uri>();
-      for (var i = 2; i < msg.length; i+=2) {
-        // TODO(iposva): Complain about duplicate entries.
-        _packageMap[msg[i]] = Uri.parse(msg[i+1]);
-      }
-      if (_traceLoading) {
-        _log("Setup package map: $_packageMap");
-      }
-    }
-  } else {
-    _packageError = "Bad type of packages reply: ${msg.runtimeType}";
-    if (_traceLoading) {
-      _log(_packageError);
-    }
-  }
-
-  // Resolve all pending package loads now that we know how to resolve them.
-  while (_pendingPackageLoads.length > 0) {
-    // Order does not matter as we queue all of the requests up right now.
-    var req = _pendingPackageLoads.removeLast();
-    // Call the registered closure, to handle the delayed action.
-    req();
-  }
-  // Reset the pending package loads to empty. So that we eventually can
-  // finish loading.
-  _pendingPackageLoads = [];
-  // Make sure that the receive port is closed if no other loads are pending.
-  _finishLoadRequest(null);
-}
-
-
-void _requestPackagesMap() {
-  assert(_packagesPort == null);
-  assert(_rootScript != null);
-  // Create a port to receive the packages map on.
-  _packagesPort = new RawReceivePort(_handlePackagesReply);
-  var sp = _packagesPort.sendPort;
-
-  var msg = new List(4);
-  msg[0] = sp;
-  msg[1] = _traceLoading;
-  msg[2] = -1;
-  msg[3] = _rootScript.toString();
-  _loadPort.send(msg);
-
-  if (_traceLoading) {
-    _log("Requested packages map for '$_rootScript'.");
-  }
-}
-
-
 // Embedder Entrypoint:
-// Request the load of a particular packages map.
-void _loadPackagesMap(String packagesParam) {
+void _setPackagesMap(String packagesParam) {
   if (!_setupCompleted) {
     _setupHooks();
   }
@@ -550,257 +231,36 @@
   if (_traceLoading) {
     _log('Resolved packages map to: $packagesUri');
   }
+}
 
-  // Request the loading and parsing of the packages map at the specified URI.
-  // Create a port to receive the packages map on.
-  assert(_packagesPort == null);
-  _packagesPort = new RawReceivePort(_handlePackagesReply);
-  var sp = _packagesPort.sendPort;
+// Resolves the script uri against the current working directory iff the
+// given uri does not specify a scheme (e.g. a path on the command line).
+String _resolveScriptUri(String scriptName) {
+  if (_traceLoading) {
+    _log("Resolving script: $scriptName");
+  }
+  if (_workingDirectory == null) {
+    throw 'No current working directory set.';
+  }
+  scriptName = _sanitizeWindowsPath(scriptName);
 
-  var msg = new List(4);
-  msg[0] = sp;
-  msg[1] = _traceLoading;
-  msg[2] = -2;
-  msg[3] = packagesUriStr;
-  _loadPort.send(msg);
+  var scriptUri = Uri.parse(scriptName);
+  if (scriptUri.scheme == '') {
+    // Script does not have a scheme, assume that it is a path,
+    // resolve it against the working directory.
+    scriptUri = _workingDirectory.resolveUri(scriptUri);
+  }
 
-  // Signal that the resolution of the packages map has started. But in this
-  // case it is not tied to a particular request.
-  _pendingPackageLoads.add(() {
-    // Nothing to be done beyond registering that there is pending package
-    // resolution requested by having an empty entry.
-    if (_traceLoading) {
-      _log("Skipping dummy deferred request.");
-    }
-  });
+  // Remember the root script URI so that we can resolve packages based on
+  // this location.
+  _rootScript = scriptUri;
 
   if (_traceLoading) {
-    _log("Requested packages map at '$packagesUri'.");
+    _log('Resolved entry point to: $_rootScript');
   }
+  return scriptUri.toString();
 }
 
-
-void _asyncLoadError(_LoadRequest req, _LoadError error, StackTrace stack) {
-  if (_traceLoading) {
-    _log("_asyncLoadError(${req._uri}), error: $error\nstack: $stack");
-  }
-  if (req._tag == _Dart_kResourceLoad) {
-    Completer c = req._context;
-    c.completeError(error, stack);
-  } else {
-    String libraryUri = req._context;
-    if (req._tag == _Dart_kImportTag) {
-      // When importing a library, the libraryUri is the imported
-      // uri.
-      libraryUri = req._uri;
-    }
-    _asyncLoadErrorCallback(req._uri, libraryUri, error);
-  }
-  _finishLoadRequest(req);
-}
-
-
-_loadDataFromLoadPort(int tag, String uri, Uri resourceUri, context) {
-  try {
-    _startLoadRequest(tag, uri, resourceUri, context);
-  } catch (e, s) {
-    if (_traceLoading) {
-      _log("Exception when communicating with service isolate: $e");
-    }
-    // Register a dummy load request so we can fail to load it.
-    var req = new _LoadRequest(tag, uri, resourceUri, context);
-
-    // Wrap inside a _LoadError unless we are already propagating a previously
-    // seen _LoadError.
-    var error = (e is _LoadError) ? e : new _LoadError(req, e.toString());
-    _asyncLoadError(req, error, s);
-  }
-}
-
-
-// Loading a package URI needs to first map the package name to a loadable
-// URI.
-_loadPackage(int tag, String uri, Uri resourceUri, context) {
-  if (_packagesReady) {
-    var resolvedUri;
-    try {
-      resolvedUri = _resolvePackageUri(resourceUri);
-    } catch (e, s) {
-      if (_traceLoading) {
-        _log("Exception ($e) when resolving package URI: $resourceUri");
-      }
-      // Register a dummy load request so we can fail to load it.
-      var req = new _LoadRequest(tag, uri, resourceUri, context);
-
-      // Wrap inside a _LoadError unless we are already propagating a previously
-      // seen _LoadError.
-      var error = (e is _LoadError) ? e : new _LoadError(req, e.toString());
-      _asyncLoadError(req, error, s);
-    }
-    _loadData(tag, uri, resolvedUri, context);
-  } else {
-    if (_pendingPackageLoads.isEmpty) {
-      // Package resolution has not been setup yet, and this is the first
-      // request for package resolution & loading.
-      _requestPackagesMap();
-    }
-    // Register the action of loading this package once the package resolution
-    // is ready.
-    _pendingPackageLoads.add(() {
-      if (_traceLoading) {
-        _log("Handling deferred package request: "
-             "$tag, $uri, $resourceUri, $context");
-      }
-      _loadPackage(tag, uri, resourceUri, context);
-    });
-    if (_traceLoading) {
-      _log("Pending package load of '$uri': "
-           "${_pendingPackageLoads.length} pending");
-    }
-  }
-}
-
-
-// Load the data associated with the resourceUri.
-_loadData(int tag, String uri, Uri resourceUri, context) {
-  if (resourceUri.scheme == 'package') {
-    // package based uris need to be resolved to the correct loadable location.
-    // The logic of which is handled seperately, and then _loadData is called
-    // recursively.
-    _loadPackage(tag, uri, resourceUri, context);
-  } else {
-    _loadDataFromLoadPort(tag, uri, resourceUri, context);
-  }
-}
-
-
-// Embedder Entrypoint:
-// Asynchronously loads script data through a http[s] or file uri.
-_loadDataAsync(int tag, String uri, String libraryUri) {
-  if (!_setupCompleted) {
-    _setupHooks();
-  }
-  var resourceUri;
-  if (tag == _Dart_kScriptTag) {
-    resourceUri = _resolveScriptUri(uri);
-    uri = resourceUri.toString();
-  } else {
-    resourceUri = Uri.parse(uri);
-  }
-  _loadData(tag, uri, resourceUri, libraryUri);
-}
-
-
-// Embedder Entrypoint:
-// Function called by standalone embedder to resolve uris when the VM requests
-// Dart_kCanonicalizeUrl from the tag handler.
-String _resolveUri(String base, String userString) {
-  if (!_setupCompleted) {
-    _setupHooks();
-  }
-  if (_traceLoading) {
-    _log('Resolving: $userString from $base');
-  }
-  var baseUri = Uri.parse(base);
-  var result = baseUri.resolve(userString).toString();
-  if (_traceLoading) {
-    _log('Resolved $userString in $base to $result');
-  }
-  return result;
-}
-
-
-// Handling of access to the package root or package map from user code.
-_triggerPackageResolution(action) {
-  if (_packagesReady) {
-    // Packages are ready. Execute the action now.
-    action();
-  } else {
-    if (_pendingPackageLoads.isEmpty) {
-      // Package resolution has not been setup yet, and this is the first
-      // request for package resolution & loading.
-      _requestPackagesMap();
-    }
-    // Register the action for when the package resolution is ready.
-    _pendingPackageLoads.add(action);
-  }
-}
-
-
-Future<Uri> _getPackageRootFuture() {
-  if (_traceLoading) {
-    _log("Request for package root from user code.");
-  }
-  var completer = new Completer<Uri>();
-  _triggerPackageResolution(() {
-    completer.complete(_packageRoot);
-  });
-  return completer.future;
-}
-
-
-Future<Uri> _getPackageConfigFuture() {
-  if (_traceLoading) {
-    _log("Request for package config from user code.");
-  }
-  var completer = new Completer<Uri>();
-  _triggerPackageResolution(() {
-    completer.complete(_packageConfig);
-  });
-  return completer.future;
-}
-
-
-Future<Uri> _resolvePackageUriFuture(Uri packageUri) async {
-  if (_traceLoading) {
-    _log("Request for package Uri resolution from user code: $packageUri");
-  }
-  if (packageUri.scheme != "package") {
-    if (_traceLoading) {
-      _log("Non-package Uri, returning unmodified: $packageUri");
-    }
-    // Return the incoming parameter if not passed a package: URI.
-    return packageUri;
-  }
-
-  if (!_packagesReady) {
-    if (_traceLoading) {
-      _log("Trigger loading by requesting the package config.");
-    }
-    // Make sure to trigger package resolution.
-    var dummy = await _getPackageConfigFuture();
-  }
-  assert(_packagesReady);
-
-  var result;
-  try {
-    result = _resolvePackageUri(packageUri);
-  } catch (e, s) {
-    // Any error during resolution will resolve this package as not mapped,
-    // which is indicated by a null return.
-    if (_traceLoading) {
-      _log("Exception ($e) when resolving package URI: $packageUri");
-    }
-    result = null;
-  }
-  if (_traceLoading) {
-    _log("Resolved '$packageUri' to '$result'");
-  }
-  return result;
-}
-
-
-// Handling of Resource class by dispatching to the load port.
-Future<List<int>> _resourceReadAsBytes(Uri uri) {
-  var completer = new Completer<List<int>>();
-  // Request the load of the resource associating the completer as the context
-  // for the load.
-  _loadData(_Dart_kResourceLoad, uri.toString(), uri, completer);
-  // Return the future that will be triggered once the resource has been loaded.
-  return completer.future;
-}
-
-
 // Embedder Entrypoint (gen_snapshot):
 // Resolve relative paths relative to working directory.
 String _resolveInWorkingDirectory(String fileName) {
@@ -824,6 +284,11 @@
   return uri.toString();
 }
 
+// Only used by vm/cc unit tests.
+Uri _resolvePackageUri(Uri uri) {
+  assert(_packageRoot != null);
+  return _packageRoot.resolve(uri.path);
+}
 
 // Returns either a file path or a URI starting with http[s]:, as a String.
 String _filePathFromUri(String userUri) {
@@ -853,7 +318,6 @@
   }
 }
 
-
 // Embedder Entrypoint.
 _libraryFilePath(String libraryUri) {
   if (!_setupCompleted) {
@@ -869,7 +333,6 @@
   return _filePathFromUri(path);
 }
 
-
 // Register callbacks and hooks with the rest of the core libraries.
 _setupHooks() {
   _setupCompleted = true;
@@ -879,3 +342,54 @@
   VMLibraryHooks.packageConfigUriFuture = _getPackageConfigFuture;
   VMLibraryHooks.resolvePackageUriFuture = _resolvePackageUriFuture;
 }
+
+// Handling of Resource class by dispatching to the load port.
+Future<List<int>> _resourceReadAsBytes(Uri uri) async {
+  List response = await _makeLoaderRequest(_Dart_kResourceLoad, uri.toString());
+  if (response[3] is String) {
+    // Throw the error.
+    throw response[3];
+  } else {
+    return response[3];
+  }
+}
+
+Future<Uri> _getPackageRootFuture() {
+  if (_traceLoading) {
+    _log("Request for package root from user code.");
+  }
+  return _makeLoaderRequest(_Dart_kGetPackageRootUri, null);
+}
+
+Future<Uri> _getPackageConfigFuture() {
+  if (_traceLoading) {
+    _log("Request for package config from user code.");
+  }
+  assert(_loadPort != null);
+  return _makeLoaderRequest(_Dart_kGetPackageConfigUri, null);
+}
+
+Future<Uri> _resolvePackageUriFuture(Uri packageUri) async {
+  if (_traceLoading) {
+    _log("Request for package Uri resolution from user code: $packageUri");
+  }
+  if (packageUri.scheme != "package") {
+    if (_traceLoading) {
+      _log("Non-package Uri, returning unmodified: $packageUri");
+    }
+    // Return the incoming parameter if not passed a package: URI.
+    return packageUri;
+  }
+  var result = await _makeLoaderRequest(_Dart_kResolvePackageUri,
+                                        packageUri.toString());
+  if (result is! Uri) {
+    if (_traceLoading) {
+      _log("Exception when resolving package URI: $packageUri");
+    }
+    result = null;
+  }
+  if (_traceLoading) {
+    _log("Resolved '$packageUri' to '$result'");
+  }
+  return result;
+}
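
The hooks installed above route package-root, package-config, and package-URI
resolution requests through _makeLoaderRequest to the native loader port
instead of the old _pendingPackageLoads queue. A minimal sketch of how this
surfaces to user code, assuming the dart:isolate wrappers Isolate.packageConfig
and Isolate.resolvePackageUri are the public entry points backed by
VMLibraryHooks (the package name below is hypothetical):

  // Illustrative sketch, not part of the patch: package resolution from
  // user code. 'package:foo/foo.dart' is a hypothetical package URI.
  import 'dart:isolate';

  main() async {
    // Backed by _getPackageConfigFuture above.
    Uri config = await Isolate.packageConfig;
    print('package config: $config');

    // Backed by _resolvePackageUriFuture above; yields null for an
    // unmapped package.
    Uri resolved =
        await Isolate.resolvePackageUri(Uri.parse('package:foo/foo.dart'));
    print('package:foo/foo.dart -> $resolved');
  }
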
diff --git a/runtime/bin/builtin.h b/runtime/bin/builtin.h
index a4893d9..5029cf1 100644
--- a/runtime/bin/builtin.h
+++ b/runtime/bin/builtin.h
@@ -51,6 +51,11 @@
 
   static Dart_Handle SetLoadPort(Dart_Port port);
 
+  static Dart_Port LoadPort() {
+    ASSERT(load_port_ != ILLEGAL_PORT);
+    return load_port_;
+  }
+
  private:
   // Map specified URI to an actual file name from 'source_paths' and read
   // the file.
diff --git a/runtime/bin/builtin_common.cc b/runtime/bin/builtin_common.cc
index 678e48b..79713e1 100644
--- a/runtime/bin/builtin_common.cc
+++ b/runtime/bin/builtin_common.cc
@@ -23,13 +23,18 @@
 namespace bin {
 
 Dart_Handle Builtin::SetLoadPort(Dart_Port port) {
+  Dart_Handle builtin_lib =
+      Builtin::LoadAndCheckLibrary(Builtin::kBuiltinLibrary);
+  RETURN_IF_ERROR(builtin_lib);
+  // Set the _isolateId field.
+  Dart_Handle result = Dart_SetField(builtin_lib,
+                                     DartUtils::NewString("_isolateId"),
+                                     Dart_NewInteger(Dart_GetMainPortId()));
+  RETURN_IF_ERROR(result);
   load_port_ = port;
   ASSERT(load_port_ != ILLEGAL_PORT);
   Dart_Handle field_name = DartUtils::NewString("_loadPort");
   RETURN_IF_ERROR(field_name);
-  Dart_Handle builtin_lib =
-      Builtin::LoadAndCheckLibrary(Builtin::kBuiltinLibrary);
-  RETURN_IF_ERROR(builtin_lib);
   Dart_Handle send_port = Dart_GetField(builtin_lib, field_name);
   RETURN_IF_ERROR(send_port);
   if (!Dart_IsNull(send_port)) {
@@ -38,7 +43,7 @@
   }
   send_port = Dart_NewSendPort(load_port_);
   RETURN_IF_ERROR(send_port);
-  Dart_Handle result = Dart_SetField(builtin_lib, field_name, send_port);
+  result = Dart_SetField(builtin_lib, field_name, send_port);
   RETURN_IF_ERROR(result);
   return Dart_True();
 }
diff --git a/runtime/bin/builtin_impl_sources.gypi b/runtime/bin/builtin_impl_sources.gypi
index eb90ddf..f775ffc 100644
--- a/runtime/bin/builtin_impl_sources.gypi
+++ b/runtime/bin/builtin_impl_sources.gypi
@@ -47,6 +47,8 @@
     'io_buffer.cc',
     'io_buffer.h',
     'isolate_data.h',
+    'loader.cc',
+    'loader.h',
     'lockers.h',
     'thread.h',
     'thread_android.cc',
diff --git a/runtime/bin/dart_product_entries.txt b/runtime/bin/dart_product_entries.txt
index fcfda57..098d0a0 100644
--- a/runtime/bin/dart_product_entries.txt
+++ b/runtime/bin/dart_product_entries.txt
@@ -1,7 +1,6 @@
 dart:_builtin,::,_getMainClosure
 dart:_builtin,::,_getPrintClosure
 dart:_builtin,::,_getUriBaseClosure
-dart:_builtin,::,_resolveUri
 dart:_builtin,::,_setWorkingDirectory
 dart:_builtin,::,_setPackageRoot
 dart:_builtin,::,_loadPackagesMap
diff --git a/runtime/bin/dartutils.cc b/runtime/bin/dartutils.cc
index b7f418b..1b1f42f 100644
--- a/runtime/bin/dartutils.cc
+++ b/runtime/bin/dartutils.cc
@@ -32,6 +32,7 @@
 namespace bin {
 
 const char* DartUtils::original_working_directory = NULL;
+CommandLineOptions* DartUtils::url_mapping = NULL;
 const char* const DartUtils::kDartScheme = "dart:";
 const char* const DartUtils::kDartExtensionScheme = "dart-ext:";
 const char* const DartUtils::kAsyncLibURL = "dart:async";
@@ -47,6 +48,7 @@
 
 const uint8_t DartUtils::magic_number[] = { 0xf5, 0xf5, 0xdc, 0xdc };
 
+
 static bool IsWindowsHost() {
 #if defined(TARGET_OS_WINDOWS)
   return true;
@@ -56,8 +58,7 @@
 }
 
 
-const char* DartUtils::MapLibraryUrl(CommandLineOptions* url_mapping,
-                                     const char* url_string) {
+const char* DartUtils::MapLibraryUrl(const char* url_string) {
   ASSERT(url_mapping != NULL);
   // We need to check if the passed in url is found in the url_mapping array,
   // in that case use the mapped entry.
@@ -345,17 +346,6 @@
 }
 
 
-Dart_Handle DartUtils::FilePathFromUri(Dart_Handle script_uri) {
-  const int kNumArgs = 1;
-  Dart_Handle dart_args[kNumArgs];
-  dart_args[0] = script_uri;
-  return Dart_Invoke(DartUtils::BuiltinLib(),
-                     NewString("_filePathFromUri"),
-                     kNumArgs,
-                     dart_args);
-}
-
-
 Dart_Handle DartUtils::LibraryFilePath(Dart_Handle library_uri) {
   const int kNumArgs = 1;
   Dart_Handle dart_args[kNumArgs];
@@ -367,13 +357,12 @@
 }
 
 
-Dart_Handle DartUtils::ResolveUri(Dart_Handle library_url, Dart_Handle url) {
-  const int kNumArgs = 2;
+Dart_Handle DartUtils::ResolveScript(Dart_Handle url) {
+  const int kNumArgs = 1;
   Dart_Handle dart_args[kNumArgs];
-  dart_args[0] = library_url;
-  dart_args[1] = url;
+  dart_args[0] = url;
   return Dart_Invoke(DartUtils::BuiltinLib(),
-                     NewString("_resolveUri"),
+                     NewString("_resolveScriptUri"),
                      kNumArgs,
                      dart_args);
 }
@@ -397,8 +386,12 @@
 Dart_Handle DartUtils::LibraryTagHandler(Dart_LibraryTag tag,
                                          Dart_Handle library,
                                          Dart_Handle url) {
-  if (!Dart_IsLibrary(library)) {
-    return Dart_NewApiError("not a library");
+  Dart_Handle library_url = Dart_LibraryUrl(library);
+  if (Dart_IsError(library_url)) {
+    return library_url;
+  }
+  if (tag == Dart_kCanonicalizeUrl) {
+    return Dart_DefaultCanonicalizeUrl(library_url, url);
   }
   if (!Dart_IsString(url)) {
     return Dart_NewApiError("url is not a string");
@@ -408,7 +401,6 @@
   if (Dart_IsError(result)) {
     return result;
   }
-  Dart_Handle library_url = Dart_LibraryUrl(library);
   const char* library_url_string = NULL;
   result = Dart_StringToCString(library_url, &library_url_string);
   if (Dart_IsError(result)) {
@@ -420,10 +412,7 @@
 
   // Handle canonicalization, 'import' and 'part' of 'dart:' libraries.
   if (is_dart_scheme_url || is_dart_library) {
-    if (tag == Dart_kCanonicalizeUrl) {
-      // These will be handled internally.
-      return url;
-    } else if (tag == Dart_kImportTag) {
+    if (tag == Dart_kImportTag) {
       Builtin::BuiltinLibraryId id = Builtin::FindId(url_string);
       if (id == Builtin::kInvalidLibrary) {
         return NewError("The built-in library '%s' is not available"
@@ -452,11 +441,6 @@
     UNREACHABLE();
   }
 
-  if (tag == Dart_kCanonicalizeUrl) {
-    // Resolve the url within the context of the library's URL.
-    return ResolveUri(library_url, url);
-  }
-
   if (DartUtils::IsDartExtensionSchemeURL(url_string)) {
     // Load a native code shared library to use in a native extension
     if (tag != Dart_kImportTag) {
@@ -747,7 +731,7 @@
     Dart_Handle dart_args[kNumArgs];
     dart_args[0] = result;
     result = Dart_Invoke(DartUtils::BuiltinLib(),
-                         NewString("_loadPackagesMap"),
+                         NewString("_setPackagesMap"),
                          kNumArgs,
                          dart_args);
     RETURN_IF_ERROR(result);
@@ -960,6 +944,18 @@
 }
 
 
+Dart_Handle DartUtils::GetCanonicalizableWorkingDirectory() {
+  const char* str = DartUtils::original_working_directory;
+  intptr_t len = strlen(str);
+  if ((str[len - 1] == '/') || (IsWindowsHost() && (str[len - 1] == '\\'))) {
+    return Dart_NewStringFromCString(str);
+  }
+  char* new_str = reinterpret_cast<char*>(Dart_ScopeAllocate(len + 2));
+  snprintf(new_str, (len + 2), "%s%s", str, File::PathSeparator());
+  return Dart_NewStringFromCString(new_str);
+}
+
+
 // Statically allocated Dart_CObject instances for immutable
 // objects. As these will be used by different threads the use of
 // these depends on the fact that the marking internally in the
diff --git a/runtime/bin/dartutils.h b/runtime/bin/dartutils.h
index b416a93..b9fe476 100644
--- a/runtime/bin/dartutils.h
+++ b/runtime/bin/dartutils.h
@@ -193,13 +193,12 @@
   }
 
   static bool SetOriginalWorkingDirectory();
+  static Dart_Handle GetCanonicalizableWorkingDirectory();
 
-  static const char* MapLibraryUrl(CommandLineOptions* url_mapping,
-                                   const char* url_string);
+  static const char* MapLibraryUrl(const char* url_string);
 
   static Dart_Handle ResolveUriInWorkingDirectory(Dart_Handle script_uri);
-  static Dart_Handle FilePathFromUri(Dart_Handle script_uri);
-  static Dart_Handle ResolveUri(Dart_Handle library_url, Dart_Handle url);
+  static Dart_Handle ResolveScript(Dart_Handle url);
 
   // Sniffs the specified text_buffer to see if it contains the magic number
   // representing a script snapshot. If the text_buffer is a script snapshot
@@ -216,6 +215,9 @@
   // Global state that stores the original working directory..
   static const char* original_working_directory;
 
+  // Global state that captures the URL mappings specified on the command line.
+  static CommandLineOptions* url_mapping;
+
   static const char* const kDartScheme;
   static const char* const kDartExtensionScheme;
   static const char* const kAsyncLibURL;
@@ -231,9 +233,10 @@
 
   static const uint8_t magic_number[];
 
+  static Dart_Handle LibraryFilePath(Dart_Handle library_uri);
+
  private:
   static Dart_Handle SetWorkingDirectory();
-  static Dart_Handle LibraryFilePath(Dart_Handle library_uri);
   static Dart_Handle PrepareBuiltinLibrary(Dart_Handle builtin_lib,
                                            Dart_Handle internal_lib,
                                            bool is_service_isolate,
diff --git a/runtime/bin/extensions.cc b/runtime/bin/extensions.cc
index f4b921f..d319ce7 100644
--- a/runtime/bin/extensions.cc
+++ b/runtime/bin/extensions.cc
@@ -19,11 +19,6 @@
 Dart_Handle Extensions::LoadExtension(const char* extension_directory,
                                       const char* extension_name,
                                       Dart_Handle parent_library) {
-  if (strncmp(extension_directory, "http://", 7) == 0 ||
-      strncmp(extension_directory, "https://", 8) == 0) {
-    return Dart_NewApiError("Cannot load native extensions over http:");
-  }
-
   // For example on Linux: directory/libfoo-arm.so
   const char* library_strings[] = {
     extension_directory,  // directory/
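
The http/https restriction removed from Extensions::LoadExtension is not
dropped: it reappears in Loader::ProcessResultLocked (runtime/bin/loader.cc
below), which rejects remote locations when it services a
_Dart_kImportExtension request. For context, a hedged sketch of the Dart 1.x
native-extension import that exercises this path; the extension and symbol
names are hypothetical:

  // Illustrative sketch, not part of the patch: a dart-ext: import. The
  // loader refuses to fetch the shared library from an http: or https:
  // location.
  library sample_extension;

  import 'dart-ext:sample_extension';

  // Bound to a native entry registered by the shared library.
  int systemRand() native "SystemRand";
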
diff --git a/runtime/bin/file.h b/runtime/bin/file.h
index 2c153bc..6b22793 100644
--- a/runtime/bin/file.h
+++ b/runtime/bin/file.h
@@ -81,7 +81,9 @@
     kLockUnlock = 0,
     kLockShared = 1,
     kLockExclusive = 2,
-    kLockMax = 2
+    kLockBlockingShared = 3,
+    kLockBlockingExclusive = 4,
+    kLockMax = 4
   };
 
   intptr_t GetFD();
diff --git a/runtime/bin/file_android.cc b/runtime/bin/file_android.cc
index 8b86121..9bcf44c 100644
--- a/runtime/bin/file_android.cc
+++ b/runtime/bin/file_android.cc
@@ -139,9 +139,11 @@
       fl.l_type = F_UNLCK;
       break;
     case File::kLockShared:
+    case File::kLockBlockingShared:
       fl.l_type = F_RDLCK;
       break;
     case File::kLockExclusive:
+    case File::kLockBlockingExclusive:
       fl.l_type = F_WRLCK;
       break;
     default:
@@ -150,9 +152,12 @@
   fl.l_whence = SEEK_SET;
   fl.l_start = start;
   fl.l_len = end == -1 ? 0 : end - start;
-  // fcntl does not block, but fails if the lock cannot be acquired.
-  int rc = fcntl(handle_->fd(), F_SETLK, &fl);
-  return rc != -1;
+  int cmd = F_SETLK;
+  if ((lock == File::kLockBlockingShared) ||
+      (lock == File::kLockBlockingExclusive)) {
+    cmd = F_SETLKW;
+  }
+  return TEMP_FAILURE_RETRY(fcntl(handle_->fd(), cmd, &fl)) != -1;
 }
 
 
diff --git a/runtime/bin/file_linux.cc b/runtime/bin/file_linux.cc
index 2e47d74..eab92d5 100644
--- a/runtime/bin/file_linux.cc
+++ b/runtime/bin/file_linux.cc
@@ -136,9 +136,11 @@
       fl.l_type = F_UNLCK;
       break;
     case File::kLockShared:
+    case File::kLockBlockingShared:
       fl.l_type = F_RDLCK;
       break;
     case File::kLockExclusive:
+    case File::kLockBlockingExclusive:
       fl.l_type = F_WRLCK;
       break;
     default:
@@ -147,9 +149,12 @@
   fl.l_whence = SEEK_SET;
   fl.l_start = start;
   fl.l_len = end == -1 ? 0 : end - start;
-  // fcntl does not block, but fails if the lock cannot be acquired.
-  int rc = fcntl(handle_->fd(), F_SETLK, &fl);
-  return rc != -1;
+  int cmd = F_SETLK;
+  if ((lock == File::kLockBlockingShared) ||
+      (lock == File::kLockBlockingExclusive)) {
+    cmd = F_SETLKW;
+  }
+  return TEMP_FAILURE_RETRY(fcntl(handle_->fd(), cmd, &fl)) != -1;
 }
 
 
diff --git a/runtime/bin/file_macos.cc b/runtime/bin/file_macos.cc
index 941d419..a43b14e 100644
--- a/runtime/bin/file_macos.cc
+++ b/runtime/bin/file_macos.cc
@@ -139,9 +139,11 @@
       fl.l_type = F_UNLCK;
       break;
     case File::kLockShared:
+    case File::kLockBlockingShared:
       fl.l_type = F_RDLCK;
       break;
     case File::kLockExclusive:
+    case File::kLockBlockingExclusive:
       fl.l_type = F_WRLCK;
       break;
     default:
@@ -150,9 +152,12 @@
   fl.l_whence = SEEK_SET;
   fl.l_start = start;
   fl.l_len = end == -1 ? 0 : end - start;
-  // fcntl does not block, but fails if the lock cannot be acquired.
-  int rc = fcntl(handle_->fd(), F_SETLK, &fl);
-  return rc != -1;
+  int cmd = F_SETLK;
+  if ((lock == File::kLockBlockingShared) ||
+      (lock == File::kLockBlockingExclusive)) {
+    cmd = F_SETLKW;
+  }
+  return TEMP_FAILURE_RETRY(fcntl(handle_->fd(), cmd, &fl)) != -1;
 }
 
 
diff --git a/runtime/bin/file_win.cc b/runtime/bin/file_win.cc
index db3aef8..8d45a17 100644
--- a/runtime/bin/file_win.cc
+++ b/runtime/bin/file_win.cc
@@ -138,9 +138,15 @@
       rc = UnlockFileEx(handle, 0, length_low, length_high, &overlapped);
       break;
     case File::kLockShared:
-    case File::kLockExclusive: {
-      DWORD flags = LOCKFILE_FAIL_IMMEDIATELY;
-      if (lock == File::kLockExclusive) {
+    case File::kLockExclusive:
+    case File::kLockBlockingShared:
+    case File::kLockBlockingExclusive: {
+      DWORD flags = 0;
+      if ((lock == File::kLockShared) || (lock == File::kLockExclusive)) {
+        flags |= LOCKFILE_FAIL_IMMEDIATELY;
+      }
+      if ((lock == File::kLockExclusive) ||
+          (lock == File::kLockBlockingExclusive)) {
         flags |= LOCKFILE_EXCLUSIVE_LOCK;
       }
       rc = LockFileEx(handle, flags, 0,
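
On Android, Linux, and Mac OS the new kLockBlockingShared and
kLockBlockingExclusive modes switch fcntl from F_SETLK to F_SETLKW, and on
Windows they call LockFileEx without LOCKFILE_FAIL_IMMEDIATELY, so the lock
call waits instead of failing when another process holds a conflicting lock.
A minimal dart:io sketch, assuming these modes surface as
FileLock.BLOCKING_SHARED and FileLock.BLOCKING_EXCLUSIVE on
RandomAccessFile.lock (the file name is hypothetical):

  // Illustrative sketch, not part of the patch: a blocking exclusive lock.
  // Unlike FileLock.EXCLUSIVE, the call below waits for a conflicting lock
  // to be released rather than throwing.
  import 'dart:io';

  main() async {
    var raf = await new File('app.lock').open(mode: FileMode.WRITE);
    await raf.lock(FileLock.BLOCKING_EXCLUSIVE);
    try {
      await raf.writeString('holding the lock\n');
    } finally {
      await raf.unlock();
      await raf.close();
    }
  }
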
diff --git a/runtime/bin/fuchsia_test.cc b/runtime/bin/fuchsia_test.cc
new file mode 100644
index 0000000..802ff31
--- /dev/null
+++ b/runtime/bin/fuchsia_test.cc
@@ -0,0 +1,39 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include <stdio.h>
+#include <stdlib.h>
+
+#include <dart_api.h>
+
+int main(void) {
+  fprintf(stderr, "Calling Dart_SetVMFlags\n");
+  fflush(stderr);
+  if (!Dart_SetVMFlags(0, NULL)) {
+    fprintf(stderr, "Failed to set flags\n");
+    fflush(stderr);
+    return -1;
+  }
+  fprintf(stderr, "Calling Dart_Initialize\n");
+  fflush(stderr);
+  char* error = Dart_Initialize(
+      NULL, NULL, NULL,
+      NULL, NULL, NULL, NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL,
+      NULL);
+  if (error != NULL) {
+    fprintf(stderr, "VM initialization failed: %s\n", error);
+    fflush(stderr);
+    free(error);
+    return -1;
+  }
+  fprintf(stderr, "Success!\n");
+  fflush(stderr);
+  return 0;
+}
diff --git a/runtime/bin/gen_snapshot.cc b/runtime/bin/gen_snapshot.cc
index 3e18248..cc8bb06 100644
--- a/runtime/bin/gen_snapshot.cc
+++ b/runtime/bin/gen_snapshot.cc
@@ -15,6 +15,7 @@
 #include "bin/dartutils.h"
 #include "bin/eventhandler.h"
 #include "bin/file.h"
+#include "bin/loader.h"
 #include "bin/log.h"
 #include "bin/thread.h"
 #include "bin/utils.h"
@@ -63,7 +64,17 @@
 static const char* assembly_filename = NULL;
 static const char* instructions_blob_filename = NULL;
 static const char* rodata_blob_filename = NULL;
-static const char* package_root = NULL;
+
+
+// Value of the --package-root flag.
+// (This pointer points into an argv buffer and does not need to be
+// free'd.)
+static const char* commandline_package_root = NULL;
+
+// Value of the --packages flag.
+// (This pointer points into an argv buffer and does not need to be
+// free'd.)
+static const char* commandline_packages_file = NULL;
 
 
 // Global state which contains a pointer to the script name for which
@@ -71,10 +82,6 @@
 // of a generic snapshot that contains only the corelibs).
 static char* app_script_name = NULL;
 
-
-// Global state that captures the URL mappings specified on the command line.
-static CommandLineOptions* url_mapping = NULL;
-
 // Global state that captures the entry point manifest files specified on the
 // command line.
 static CommandLineOptions* entry_points_files = NULL;
@@ -249,7 +256,17 @@
     name = ProcessOption(option, "--package-root=");
   }
   if (name != NULL) {
-    package_root = name;
+    commandline_package_root = name;
+    return true;
+  }
+  return false;
+}
+
+
+static bool ProcessPackagesOption(const char* option) {
+  const char* name = ProcessOption(option, "--packages=");
+  if (name != NULL) {
+    commandline_packages_file = name;
     return true;
   }
   return false;
@@ -262,7 +279,7 @@
     mapping = ProcessOption(option, "--url-mapping=");
   }
   if (mapping != NULL) {
-    url_mapping->AddArgument(mapping);
+    DartUtils::url_mapping->AddArgument(mapping);
     return true;
   }
   return false;
@@ -296,6 +313,7 @@
         ProcessEmbedderEntryPointsManifestOption(argv[i]) ||
         ProcessURLmappingOption(argv[i]) ||
         ProcessPackageRootOption(argv[i]) ||
+        ProcessPackagesOption(argv[i]) ||
         ProcessEnvironmentOption(argv[i])) {
       i += 1;
       continue;
@@ -312,6 +330,14 @@
     *script_name = NULL;
   }
 
+  // Verify consistency of arguments.
+  if ((commandline_package_root != NULL) &&
+      (commandline_packages_file != NULL)) {
+    Log::PrintErr("Specifying both a packages directory and a packages "
+                  "file is invalid.\n");
+    return -1;
+  }
+
   if (vm_isolate_snapshot_filename == NULL) {
     Log::PrintErr("No vm isolate snapshot output file specified.\n\n");
     return -1;
@@ -389,9 +415,42 @@
   DISALLOW_COPY_AND_ASSIGN(UriResolverIsolateScope);
 };
 
+
 Dart_Isolate UriResolverIsolateScope::isolate = NULL;
 
 
+static Dart_Handle LoadUrlContents(const char* uri_string) {
+  bool failed = false;
+  char* result_string = NULL;
+  uint8_t* payload = NULL;
+  intptr_t payload_length = 0;
+  // Switch to the UriResolver Isolate and load the script.
+  {
+    UriResolverIsolateScope scope;
+
+    Dart_Handle resolved_uri = Dart_NewStringFromCString(uri_string);
+    Dart_Handle result =  Loader::LoadUrlContents(resolved_uri,
+                                                  &payload,
+                                                  &payload_length);
+    if (Dart_IsError(result)) {
+      failed = true;
+      result_string = strdup(Dart_GetError(result));
+    }
+  }
+  // Switch back to the isolate from which we generate the snapshot and
+  // create the source string for the specified uri.
+  Dart_Handle result;
+  if (!failed) {
+    result = Dart_NewStringFromUTF8(payload, payload_length);
+    free(payload);
+  } else {
+    result = DartUtils::NewString(result_string);
+    free(result_string);
+  }
+  return result;
+}
+
+
 static Dart_Handle ResolveUriInWorkingDirectory(const char* script_uri) {
   bool failed = false;
   char* result_string = NULL;
@@ -418,53 +477,24 @@
 }
 
 
-static Dart_Handle FilePathFromUri(const char* script_uri) {
-  bool failed = false;
-  char* result_string = NULL;
-
-  {
-    UriResolverIsolateScope scope;
-
-    // Run DartUtils::FilePathFromUri in context of uri resolver isolate.
-    Dart_Handle result = DartUtils::FilePathFromUri(
-        DartUtils::NewString(script_uri));
-    if (Dart_IsError(result)) {
-      failed = true;
-      result_string = strdup(Dart_GetError(result));
-    } else {
-      result_string = strdup(DartUtils::GetStringValue(result));
-    }
+static Dart_Handle LoadSnapshotCreationScript(const char* script_name) {
+  // First resolve the specified script uri with respect to the original
+  // working directory.
+  Dart_Handle resolved_uri = ResolveUriInWorkingDirectory(script_name);
+  if (Dart_IsError(resolved_uri)) {
+    return resolved_uri;
   }
-
-  Dart_Handle result = failed ? Dart_NewApiError(result_string) :
-                                DartUtils::NewString(result_string);
-  free(result_string);
-  return result;
-}
-
-
-static Dart_Handle ResolveUri(const char* library_uri, const char* uri) {
-  bool failed = false;
-  char* result_string = NULL;
-
-  {
-    UriResolverIsolateScope scope;
-
-    // Run DartUtils::ResolveUri in context of uri resolver isolate.
-    Dart_Handle result = DartUtils::ResolveUri(
-        DartUtils::NewString(library_uri), DartUtils::NewString(uri));
-    if (Dart_IsError(result)) {
-      failed = true;
-      result_string = strdup(Dart_GetError(result));
-    } else {
-      result_string = strdup(DartUtils::GetStringValue(result));
-    }
+  // Now load the contents of the specified uri.
+  const char* resolved_uri_string = DartUtils::GetStringValue(resolved_uri);
+  Dart_Handle source =  LoadUrlContents(resolved_uri_string);
+  if (Dart_IsError(source)) {
+    return source;
   }
-
-  Dart_Handle result = failed ? Dart_NewApiError(result_string) :
-                                DartUtils::NewString(result_string);
-  free(result_string);
-  return result;
+  if (IsSnapshottingForPrecompilation()) {
+    return Dart_LoadScript(resolved_uri, source, 0, 0);
+  } else {
+    return Dart_LoadLibrary(resolved_uri, source, 0, 0);
+  }
 }
 
 
@@ -491,7 +521,7 @@
   }
   const char* library_url_string = DartUtils::GetStringValue(library_url);
   const char* mapped_library_url_string = DartUtils::MapLibraryUrl(
-      url_mapping, library_url_string);
+      library_url_string);
   if (mapped_library_url_string != NULL) {
     library_url = ResolveUriInWorkingDirectory(mapped_library_url_string);
     library_url_string = DartUtils::GetStringValue(library_url);
@@ -501,8 +531,7 @@
     return Dart_NewApiError("url is not a string");
   }
   const char* url_string = DartUtils::GetStringValue(url);
-  const char* mapped_url_string = DartUtils::MapLibraryUrl(url_mapping,
-                                                           url_string);
+  const char* mapped_url_string = DartUtils::MapLibraryUrl(url_string);
 
   Builtin::BuiltinLibraryId libraryBuiltinId = BuiltinId(library_url_string);
   if (tag == Dart_kCanonicalizeUrl) {
@@ -513,7 +542,7 @@
     if (libraryBuiltinId != Builtin::kInvalidLibrary) {
       return url;
     }
-    return ResolveUri(library_url_string, url_string);
+    return Dart_DefaultCanonicalizeUrl(library_url, url);
   }
 
   Builtin::BuiltinLibraryId builtinId = BuiltinId(url_string);
@@ -544,15 +573,8 @@
       return resolved_url;
     }
   }
-
-  // Get the file path out of the url.
-  Dart_Handle file_path = FilePathFromUri(
-      DartUtils::GetStringValue(resolved_url));
-  if (Dart_IsError(file_path)) {
-    return file_path;
-  }
-  const char* raw_path = DartUtils::GetStringValue(file_path);
-  Dart_Handle source = DartUtils::ReadStringFromFile(raw_path);
+  const char* resolved_uri_string = DartUtils::GetStringValue(resolved_url);
+  Dart_Handle source =  LoadUrlContents(resolved_uri_string);
   if (Dart_IsError(source)) {
     return source;
   }
@@ -565,29 +587,6 @@
 }
 
 
-static Dart_Handle LoadSnapshotCreationScript(const char* script_name) {
-  Dart_Handle resolved_script_uri = ResolveUriInWorkingDirectory(script_name);
-  if (Dart_IsError(resolved_script_uri)) {
-    return resolved_script_uri;
-  }
-  Dart_Handle script_path = FilePathFromUri(
-      DartUtils::GetStringValue(resolved_script_uri));
-  if (Dart_IsError(script_path)) {
-    return script_path;
-  }
-  Dart_Handle source = DartUtils::ReadStringFromFile(
-      DartUtils::GetStringValue(script_path));
-  if (Dart_IsError(source)) {
-    return source;
-  }
-  if (IsSnapshottingForPrecompilation()) {
-    return Dart_LoadScript(resolved_script_uri, source, 0, 0);
-  } else {
-    return Dart_LoadLibrary(resolved_script_uri, source, 0, 0);
-  }
-}
-
-
 static Dart_Handle LoadGenericSnapshotCreationScript(
     Builtin::BuiltinLibraryId id) {
   Dart_Handle source = Builtin::Source(id);
@@ -645,6 +644,8 @@
 "    --package_root=<path>             Where to find packages, that is,      \n"
 "                                      package:...  imports.                 \n"
 "                                                                            \n"
+"    --packages=<packages_file>        Where to find a package spec file     \n"
+"                                                                            \n"
 "    --url_mapping=<mapping>           Uses the URL mapping(s) specified on  \n"
 "                                      the command line to load the          \n"
 "                                      libraries.                            \n"
@@ -1099,7 +1100,7 @@
 
 static void SetupForUriResolution() {
   // Set up the library tag handler for this isolate.
-  Dart_Handle result = Dart_SetLibraryTagHandler(DartUtils::LibraryTagHandler);
+  Dart_Handle result = Dart_SetLibraryTagHandler(Loader::LibraryTagHandler);
   if (Dart_IsError(result)) {
     Log::PrintErr("%s", Dart_GetError(result));
     Dart_ExitScope();
@@ -1157,7 +1158,7 @@
     Log::PrintErr("Error: We only expect to create the service isolate");
     return NULL;
   }
-  Dart_Handle result = Dart_SetLibraryTagHandler(DartUtils::LibraryTagHandler);
+  Dart_Handle result = Dart_SetLibraryTagHandler(Loader::LibraryTagHandler);
   // Setup the native resolver.
   Builtin::LoadAndCheckLibrary(Builtin::kBuiltinLibrary);
   Builtin::LoadAndCheckLibrary(Builtin::kIOLibrary);
@@ -1183,8 +1184,8 @@
   CommandLineOptions vm_options(argc + EXTRA_VM_ARGUMENTS);
 
   // Initialize the URL mapping array.
-  CommandLineOptions url_mapping_array(argc);
-  url_mapping = &url_mapping_array;
+  CommandLineOptions cmdline_url_mapping(argc);
+  DartUtils::url_mapping = &cmdline_url_mapping;
 
   // Initialize the entrypoints array.
   CommandLineOptions entry_points_files_array(argc);
@@ -1246,7 +1247,9 @@
     return 255;
   }
 
-  IsolateData* isolate_data = new IsolateData(NULL, NULL, NULL);
+  IsolateData* isolate_data = new IsolateData(NULL,
+                                              commandline_package_root,
+                                              commandline_packages_file);
   Dart_Isolate isolate = Dart_CreateIsolate(
       NULL, NULL, NULL, NULL, isolate_data, &error);
   if (isolate == NULL) {
@@ -1270,7 +1273,6 @@
     // create a full snapshot. The current isolate is set up so that we can
     // invoke the dart uri resolution code like _resolveURI. App script is
     // loaded into a separate isolate.
-
     SetupForUriResolution();
 
     // Prepare builtin and its dependent libraries for use to resolve URIs.
@@ -1285,14 +1287,14 @@
     CHECK_RESULT(result);
 
     // Setup package root if specified.
-    result = DartUtils::SetupPackageRoot(package_root, NULL);
+    result = DartUtils::SetupPackageRoot(commandline_package_root,
+                                         commandline_packages_file);
     CHECK_RESULT(result);
 
+    UriResolverIsolateScope::isolate = isolate;
     Dart_ExitScope();
     Dart_ExitIsolate();
 
-    UriResolverIsolateScope::isolate = isolate;
-
     // Now we create an isolate into which we load all the code that needs to
     // be in the snapshot.
     isolate_data = new IsolateData(NULL, NULL, NULL);
diff --git a/runtime/bin/isolate_data.h b/runtime/bin/isolate_data.h
index 6532fbc..a5e8bfb 100644
--- a/runtime/bin/isolate_data.h
+++ b/runtime/bin/isolate_data.h
@@ -14,6 +14,7 @@
 
 // Forward declaration.
 class EventHandler;
+class Loader;
 
 // Data associated with every isolate in the standalone VM
 // embedding. This is used to free external resources for each isolate
@@ -27,7 +28,8 @@
         package_root(NULL),
         packages_file(NULL),
         udp_receive_buffer(NULL),
-        builtin_lib_(NULL) {
+        builtin_lib_(NULL),
+        loader_(NULL) {
     if (package_root != NULL) {
       ASSERT(packages_file == NULL);
       this->package_root = strdup(package_root);
@@ -67,8 +69,20 @@
   char* packages_file;
   uint8_t* udp_receive_buffer;
 
+  // While loading, a loader is associated with the isolate.
+  bool HasLoader() const { return loader_ != NULL; }
+  Loader* loader() const {
+    ASSERT(loader_ != NULL);
+    return loader_;
+  }
+  void set_loader(Loader* loader) {
+    ASSERT((loader_ == NULL) || (loader == NULL));
+    loader_ = loader;
+  }
+
  private:
   Dart_Handle builtin_lib_;
+  Loader* loader_;
 
   DISALLOW_COPY_AND_ASSIGN(IsolateData);
 };
diff --git a/runtime/bin/loader.cc b/runtime/bin/loader.cc
new file mode 100644
index 0000000..6a99a4e
--- /dev/null
+++ b/runtime/bin/loader.cc
@@ -0,0 +1,752 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+
+#include "bin/loader.h"
+
+#include "bin/builtin.h"
+#include "bin/dartutils.h"
+#include "bin/extensions.h"
+#include "bin/lockers.h"
+#include "bin/utils.h"
+
+namespace dart {
+namespace bin {
+
+// Development flag.
+static bool trace_loader = false;
+// Keep in sync with loader.dart.
+static const intptr_t _Dart_kImportExtension = 9;
+
+Loader::Loader(IsolateData* isolate_data)
+    : port_(ILLEGAL_PORT),
+      isolate_data_(isolate_data),
+      error_(Dart_Null()),
+      monitor_(NULL),
+      pending_operations_(0),
+      results_(NULL),
+      results_length_(0),
+      results_capacity_(0),
+      payload_(NULL),
+      payload_length_(0) {
+  monitor_ = new Monitor();
+  ASSERT(isolate_data_ != NULL);
+  port_ = Dart_NewNativePort("Loader",
+                             Loader::NativeMessageHandler,
+                             false);
+  isolate_data_->set_loader(this);
+  AddLoader(port_, isolate_data_);
+}
+
+
+Loader::~Loader() {
+  ASSERT(port_ != ILLEGAL_PORT);
+  // Enter the monitor while we close the Dart port. After the Dart port is
+  // closed, no more results can be queued.
+  monitor_->Enter();
+  Dart_CloseNativePort(port_);
+  monitor_->Exit();
+  RemoveLoader(port_);
+  port_ = ILLEGAL_PORT;
+  isolate_data_->set_loader(NULL);
+  isolate_data_ = NULL;
+  delete monitor_;
+  monitor_ = NULL;
+  for (intptr_t i = 0; i < results_length_; i++) {
+    results_[i].Cleanup();
+  }
+  free(results_);
+  results_ = NULL;
+  payload_ = NULL;
+  payload_length_ = 0;
+}
+
+
+// Copy the contents of |message| into an |IOResult|.
+void Loader::IOResult::Setup(Dart_CObject* message) {
+  ASSERT(message->type == Dart_CObject_kArray);
+  ASSERT(message->value.as_array.length == 4);
+  Dart_CObject* tag_message = message->value.as_array.values[0];
+  ASSERT(tag_message != NULL);
+  Dart_CObject* uri_message = message->value.as_array.values[1];
+  ASSERT(uri_message != NULL);
+  Dart_CObject* library_uri_message = message->value.as_array.values[2];
+  ASSERT(library_uri_message != NULL);
+  Dart_CObject* payload_message = message->value.as_array.values[3];
+  ASSERT(payload_message != NULL);
+
+  // Grab the tag.
+  ASSERT(tag_message->type == Dart_CObject_kInt32);
+  tag = tag_message->value.as_int32;
+
+  // Grab the uri id.
+  ASSERT(uri_message->type == Dart_CObject_kString);
+  uri = strdup(uri_message->value.as_string);
+
+  // Grab the library uri if one is present.
+  if (library_uri_message->type != Dart_CObject_kNull) {
+    ASSERT(library_uri_message->type == Dart_CObject_kString);
+    library_uri = strdup(library_uri_message->value.as_string);
+  } else {
+    library_uri = NULL;
+  }
+
+  // Grab the payload.
+  if (payload_message->type == Dart_CObject_kString) {
+    // Payload is an error message.
+    payload_length = strlen(payload_message->value.as_string);
+    payload =
+        reinterpret_cast<uint8_t*>(strdup(payload_message->value.as_string));
+  } else {
+    // Payload is the contents of a file.
+    ASSERT(payload_message->type == Dart_CObject_kTypedData);
+    ASSERT(payload_message->value.as_typed_data.type == Dart_TypedData_kUint8);
+    payload_length = payload_message->value.as_typed_data.length;
+    payload = reinterpret_cast<uint8_t*>(malloc(payload_length));
+    memmove(payload,
+            payload_message->value.as_typed_data.values,
+            payload_length);
+  }
+}
+
+
+void Loader::IOResult::Cleanup() {
+  free(uri);
+  free(library_uri);
+  free(payload);
+}
+
+
+// Send the Loader Initialization message to the service isolate. This
+// message is sent the first time a loader is constructed for an isolate and
+// seeds the service isolate with some initial state about this isolate.
+void Loader::Init(const char* package_root,
+                  const char* packages_file,
+                  const char* working_directory,
+                  const char* root_script_uri) {
+  // This port delivers loading messages to the service isolate.
+  Dart_Port loader_port = Builtin::LoadPort();
+  ASSERT(loader_port != ILLEGAL_PORT);
+
+  // Keep in sync with loader.dart.
+  const intptr_t _Dart_kInitLoader = 4;
+
+  Dart_Handle request = Dart_NewList(8);
+  Dart_ListSetAt(request, 0, trace_loader ? Dart_True() : Dart_False());
+  Dart_ListSetAt(request, 1, Dart_NewInteger(Dart_GetMainPortId()));
+  Dart_ListSetAt(request, 2, Dart_NewInteger(_Dart_kInitLoader));
+  Dart_ListSetAt(request, 3, Dart_NewSendPort(port_));
+  Dart_ListSetAt(request, 4,
+                 (package_root == NULL) ? Dart_Null() :
+                      Dart_NewStringFromCString(package_root));
+  Dart_ListSetAt(request, 5,
+                 (packages_file == NULL) ? Dart_Null() :
+                      Dart_NewStringFromCString(packages_file));
+  Dart_ListSetAt(request, 6,
+                      Dart_NewStringFromCString(working_directory));
+  Dart_ListSetAt(request, 7,
+                 (root_script_uri == NULL) ? Dart_Null() :
+                      Dart_NewStringFromCString(root_script_uri));
+
+  bool success = Dart_Post(loader_port, request);
+  ASSERT(success);
+}
+
+
+void Loader::SendImportExtensionRequest(Dart_Handle url,
+                                        Dart_Handle library_url) {
+  // This port delivers loading messages to the service isolate.
+  Dart_Port loader_port = Builtin::LoadPort();
+  ASSERT(loader_port != ILLEGAL_PORT);
+
+
+  Dart_Handle request = Dart_NewList(6);
+  Dart_ListSetAt(request, 0, trace_loader ? Dart_True() : Dart_False());
+  Dart_ListSetAt(request, 1, Dart_NewInteger(Dart_GetMainPortId()));
+  Dart_ListSetAt(request, 2, Dart_NewInteger(_Dart_kImportExtension));
+  Dart_ListSetAt(request, 3, Dart_NewSendPort(port_));
+
+  Dart_ListSetAt(request, 4, url);
+  Dart_ListSetAt(request, 5, library_url);
+
+  if (Dart_Post(loader_port, request)) {
+    MonitorLocker ml(monitor_);
+    pending_operations_++;
+  }
+}
+
+
+// Forward a request from the tag handler to the service isolate.
+void Loader::SendRequest(Dart_LibraryTag tag,
+                         Dart_Handle url,
+                         Dart_Handle library_url) {
+  // This port delivers loading messages to the service isolate.
+  Dart_Port loader_port = Builtin::LoadPort();
+  ASSERT(loader_port != ILLEGAL_PORT);
+
+  Dart_Handle request = Dart_NewList(6);
+  Dart_ListSetAt(request, 0, trace_loader ? Dart_True() : Dart_False());
+  Dart_ListSetAt(request, 1, Dart_NewInteger(Dart_GetMainPortId()));
+  Dart_ListSetAt(request, 2, Dart_NewInteger(tag));
+  Dart_ListSetAt(request, 3, Dart_NewSendPort(port_));
+
+  Dart_ListSetAt(request, 4, url);
+  Dart_ListSetAt(request, 5, library_url);
+
+  if (Dart_Post(loader_port, request)) {
+    MonitorLocker ml(monitor_);
+    pending_operations_++;
+  }
+}
+
+
+void Loader::QueueMessage(Dart_CObject* message) {
+  MonitorLocker ml(monitor_);
+  if (results_length_ == results_capacity_) {
+    // Grow to an initial capacity or double in size.
+    results_capacity_ = (results_capacity_ == 0) ? 4 : results_capacity_ * 2;
+    results_ =
+        reinterpret_cast<IOResult*>(
+            realloc(results_,
+                    sizeof(IOResult) * results_capacity_));
+    ASSERT(results_ != NULL);
+  }
+  ASSERT(results_ != NULL);
+  ASSERT(results_length_ < results_capacity_);
+  results_[results_length_].Setup(message);
+  results_length_++;
+  ml.Notify();
+}
+
+
+void Loader::BlockUntilComplete(ProcessResult process_result) {
+  MonitorLocker ml(monitor_);
+
+  while (true) {
+    // If |ProcessQueueLocked| returns false, we've hit an error and should
+    // stop loading.
+    if (!ProcessQueueLocked(process_result)) {
+      break;
+    }
+
+    // When |pending_operations_| hits 0, we are done loading.
+    if (pending_operations_ == 0) {
+      break;
+    }
+
+    // Wait to be notified about new I/O results.
+    ml.Wait();
+  }
+}
+
+
+static bool LibraryHandleError(Dart_Handle library, Dart_Handle error) {
+  if (!Dart_IsNull(library) && !Dart_IsError(library)) {
+    ASSERT(Dart_IsLibrary(library));
+    Dart_Handle res = Dart_LibraryHandleError(library, error);
+    if (Dart_IsNull(res)) {
+      // Error was handled by library.
+      return true;
+    }
+  }
+  return false;
+}
+
+
+static bool IsWindowsHost() {
+#if defined(TARGET_OS_WINDOWS)
+  return true;
+#else  // defined(TARGET_OS_WINDOWS)
+  return false;
+#endif  // defined(TARGET_OS_WINDOWS)
+}
+
+
+bool Loader::ProcessResultLocked(Loader* loader, Loader::IOResult* result) {
+  // We have to copy everything we care about out of |result| because after
+  // dropping the lock below, |result| may no longer be valid.
+  Dart_Handle uri =
+      Dart_NewStringFromCString(reinterpret_cast<char*>(result->uri));
+  Dart_Handle library_uri = Dart_Null();
+  if (result->library_uri != NULL) {
+    library_uri =
+        Dart_NewStringFromCString(reinterpret_cast<char*>(result->library_uri));
+  }
+
+  // A negative result tag indicates a loading error occurred in the service
+  // isolate. The payload is a C string of the error message.
+  if (result->tag < 0) {
+    Dart_Handle library = Dart_LookupLibrary(uri);
+    Dart_Handle error = Dart_NewStringFromUTF8(result->payload,
+                                               result->payload_length);
+    // If a library with the given uri exists, give it a chance to handle
+    // the error. If the load request stems from a deferred library load,
+    // an IO error is not fatal.
+    if (LibraryHandleError(library, error)) {
+      return true;
+    }
+    // Fall through
+    loader->error_ = Dart_NewUnhandledExceptionError(error);
+    return false;
+  }
+
+
+  if (result->tag == _Dart_kImportExtension) {
+    ASSERT(library_uri != Dart_Null());
+    Dart_Handle library = Dart_LookupLibrary(library_uri);
+    ASSERT(!Dart_IsError(library));
+    const char* lib_uri = reinterpret_cast<const char*>(result->payload);
+    if (strncmp(lib_uri, "http://", 7) == 0 ||
+        strncmp(lib_uri, "https://", 8) == 0) {
+      loader->error_ = Dart_NewApiError(
+          "Cannot load native extensions over http: or https:");
+      return false;
+    }
+    const char* extension_uri = reinterpret_cast<const char*>(result->uri);
+    const char* lib_path = NULL;
+    if (strncmp(lib_uri, "file://", 7) == 0) {
+      lib_path = DartUtils::RemoveScheme(lib_uri);
+    } else {
+      lib_path = lib_uri;
+    }
+    const char* extension_path = DartUtils::RemoveScheme(extension_uri);
+    if (strchr(extension_path, '/') != NULL ||
+        (IsWindowsHost() && strchr(extension_path, '\\') != NULL)) {
+      loader->error_ = DartUtils::NewError(
+          "Relative paths for dart extensions are not supported: '%s'",
+          extension_path);
+      return false;
+    }
+    Dart_Handle result = Extensions::LoadExtension(lib_path,
+                                                   extension_path,
+                                                   library);
+    if (Dart_IsError(result)) {
+      loader->error_ = result;
+      return false;
+    }
+    return true;
+  }
+
+  // Check for payload and load accordingly.
+  bool is_snapshot = false;
+  const uint8_t* payload = result->payload;
+  intptr_t payload_length = result->payload_length;
+  payload =
+      DartUtils::SniffForMagicNumber(payload,
+                                     &payload_length,
+                                     &is_snapshot);
+  Dart_Handle source = Dart_Null();
+  if (!is_snapshot) {
+    source = Dart_NewStringFromUTF8(result->payload,
+                                    result->payload_length);
+    if (Dart_IsError(source)) {
+      loader->error_ = DartUtils::NewError(
+          "%s is not a valid UTF-8 script",
+          reinterpret_cast<char*>(result->uri));
+      return false;
+    }
+  }
+  intptr_t tag = result->tag;
+
+  // Do not access |result| below this point.
+  result = NULL;
+
+  // We must drop the lock here because the tag handler may be recursively
+  // invoked and it will attempt to acquire the lock to queue more work.
+  loader->monitor_->Exit();
+
+  Dart_Handle dart_result = Dart_Null();
+
+  switch (tag) {
+    case Dart_kImportTag:
+      dart_result = Dart_LoadLibrary(uri, source, 0, 0);
+    break;
+    case Dart_kSourceTag: {
+      ASSERT(library_uri != Dart_Null());
+      Dart_Handle library = Dart_LookupLibrary(library_uri);
+      ASSERT(!Dart_IsError(library));
+      dart_result = Dart_LoadSource(library, uri, source, 0, 0);
+    }
+    break;
+    case Dart_kScriptTag:
+      if (is_snapshot) {
+        dart_result = Dart_LoadScriptFromSnapshot(payload, payload_length);
+      } else {
+        dart_result = Dart_LoadScript(uri, source, 0, 0);
+      }
+    break;
+    default:
+      UNREACHABLE();
+  }
+
+  // Re-acquire the lock before exiting the function (it was held on entry).
+  loader->monitor_->Enter();
+  if (Dart_IsError(dart_result)) {
+    // Remember the error if we encountered one.
+    loader->error_ = dart_result;
+    return false;
+  }
+
+  return true;
+}
+
+
+bool Loader::ProcessUrlLoadResultLocked(Loader* loader,
+                                        Loader::IOResult* result) {
+  // A negative result tag indicates a loading error occurred in the service
+  // isolate. The payload is a C string of the error message.
+  if (result->tag < 0) {
+    Dart_Handle error = Dart_NewStringFromUTF8(result->payload,
+                                               result->payload_length);
+    loader->error_ = Dart_NewUnhandledExceptionError(error);
+    return false;
+  }
+  loader->payload_length_ = result->payload_length;
+  loader->payload_ =
+      reinterpret_cast<uint8_t*>(::malloc(loader->payload_length_));
+  memmove(loader->payload_, result->payload, loader->payload_length_);
+  return true;
+}
+
+
+bool Loader::ProcessQueueLocked(ProcessResult process_result) {
+  bool hit_error = false;
+  for (intptr_t i = 0; i < results_length(); i++) {
+    if (!hit_error) {
+      hit_error = !(*process_result)(this, &results_[i]);
+    }
+    pending_operations_--;
+    ASSERT(hit_error || (pending_operations_ >= 0));
+    results_[i].Cleanup();
+  }
+  results_length_ = 0;
+  return !hit_error;
+}
+
+
+void Loader::InitForSnapshot(const char* snapshot_uri) {
+  IsolateData* isolate_data =
+      reinterpret_cast<IsolateData*>(Dart_CurrentIsolateData());
+  ASSERT(isolate_data != NULL);
+  ASSERT(!isolate_data->HasLoader());
+  // Set up a loader. The constructor does a bunch of leg work.
+  Loader* loader = new Loader(isolate_data);
+  // Send the init message.
+  loader->Init(isolate_data->package_root,
+               isolate_data->packages_file,
+               DartUtils::original_working_directory,
+               snapshot_uri);
+  // Destroy the loader. The destructor does a bunch of leg work.
+  delete loader;
+}
+
+
+Dart_Handle Loader::LoadUrlContents(Dart_Handle url,
+                                    uint8_t** payload,
+                                    intptr_t* payload_length) {
+  IsolateData* isolate_data =
+      reinterpret_cast<IsolateData*>(Dart_CurrentIsolateData());
+  ASSERT(isolate_data != NULL);
+  ASSERT(!isolate_data->HasLoader());
+  Loader* loader = NULL;
+
+  // Set up the loader. The constructor does a bunch of leg work.
+  loader = new Loader(isolate_data);
+  loader->Init(isolate_data->package_root,
+               isolate_data->packages_file,
+               DartUtils::original_working_directory,
+               NULL);
+  ASSERT(loader != NULL);
+  ASSERT(isolate_data->HasLoader());
+
+  // Now send a load request to the service isolate.
+  loader->SendRequest(Dart_kScriptTag, url, Dart_Null());
+
+  // Wait for a reply to the load request.
+  loader->BlockUntilComplete(ProcessUrlLoadResultLocked);
+
+  // Copy fields from the loader before deleting it.
+  // The payload array itself which was malloced above is freed by
+  // the caller of LoadUrlContents.
+  Dart_Handle error = loader->error();
+  *payload = loader->payload_;
+  *payload_length = loader->payload_length_;
+
+  // Destroy the loader. The destructor does a bunch of leg work.
+  delete loader;
+
+  // An error occurred during loading.
+  if (!Dart_IsNull(error)) {
+    return error;
+  }
+  return Dart_Null();
+}
+
+
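+// The library tag handler registered with the VM via
+// Dart_SetLibraryTagHandler. Canonicalization and 'dart:' URIs are handled
+// locally; all other requests are forwarded to the service isolate through
+// this isolate's loader, and the outermost invocation blocks until every
+// queued load has completed and loading is finalized.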
+Dart_Handle Loader::LibraryTagHandler(Dart_LibraryTag tag,
+                                      Dart_Handle library,
+                                      Dart_Handle url) {
+  if (tag == Dart_kCanonicalizeUrl) {
+    Dart_Handle library_url = Dart_LibraryUrl(library);
+    if (Dart_IsError(library_url)) {
+      return library_url;
+    }
+    return Dart_DefaultCanonicalizeUrl(library_url, url);
+  }
+  const char* url_string = NULL;
+  Dart_Handle result = Dart_StringToCString(url, &url_string);
+  if (Dart_IsError(result)) {
+    return result;
+  }
+
+  // Special case for handling dart: imports and parts.
+  if (tag != Dart_kScriptTag) {
+    // Grab the library's url.
+    Dart_Handle library_url = Dart_LibraryUrl(library);
+    if (Dart_IsError(library_url)) {
+      return library_url;
+    }
+    const char* library_url_string = NULL;
+    result = Dart_StringToCString(library_url, &library_url_string);
+    if (Dart_IsError(result)) {
+      return result;
+    }
+
+    bool is_dart_scheme_url = DartUtils::IsDartSchemeURL(url_string);
+    bool is_dart_library = DartUtils::IsDartSchemeURL(library_url_string);
+
+    if (is_dart_scheme_url || is_dart_library) {
+      return DartColonLibraryTagHandler(tag,
+                                        library,
+                                        url,
+                                        library_url_string,
+                                        url_string);
+    }
+  }
+
+  if (DartUtils::IsDartExtensionSchemeURL(url_string)) {
+    // Handle early error cases for dart-ext: imports.
+    if (tag != Dart_kImportTag) {
+      return DartUtils::NewError("Dart extensions must use import: '%s'",
+                                 url_string);
+    }
+    Dart_Handle library_url = Dart_LibraryUrl(library);
+    if (Dart_IsError(library_url)) {
+      return library_url;
+    }
+  }
+
+  IsolateData* isolate_data =
+      reinterpret_cast<IsolateData*>(Dart_CurrentIsolateData());
+  ASSERT(isolate_data != NULL);
+
+  // Grab this isolate's loader.
+  Loader* loader = NULL;
+
+  // Whether this is the outer invocation of the tag handler for this isolate.
+  // The outer invocation blocks while any nested invocations operate in
+  // parallel.
+  const bool blocking_call = !isolate_data->HasLoader();
+
+  // If we are the outer invocation of the tag handler and the tag is an import
+  // this means that we are starting a deferred library load.
+  const bool is_deferred_import = blocking_call && (tag == Dart_kImportTag);
+  if (!isolate_data->HasLoader()) {
+    // The isolate doesn't have a loader -- this is the outer invocation which
+    // will block.
+
+    // Set up the loader. The constructor does a bunch of leg work.
+    loader = new Loader(isolate_data);
+    loader->Init(isolate_data->package_root,
+                 isolate_data->packages_file,
+                 DartUtils::original_working_directory,
+                 (tag == Dart_kScriptTag) ? url_string : NULL);
+  } else {
+    ASSERT(tag != Dart_kScriptTag);
+    // The isolate has a loader -- this is an inner invocation that will queue
+    // work with the service isolate.
+    // Use the existing loader.
+    loader = isolate_data->loader();
+  }
+  ASSERT(loader != NULL);
+  ASSERT(isolate_data->HasLoader());
+
+  if (DartUtils::IsDartExtensionSchemeURL(url_string)) {
+    loader->SendImportExtensionRequest(url, Dart_LibraryUrl(library));
+  } else {
+    loader->SendRequest(tag,
+                        url,
+                        (library != Dart_Null()) ?
+                            Dart_LibraryUrl(library) : Dart_Null());
+  }
+
+  if (blocking_call) {
+    // The outer invocation of the tag handler will block here until all nested
+    // invocations complete.
+    loader->BlockUntilComplete(ProcessResultLocked);
+
+    // Remember the error (if any).
+    Dart_Handle error = loader->error();
+    // Destroy the loader. The destructor does a bunch of leg work.
+    delete loader;
+
+    // An error occurred during loading.
+    if (!Dart_IsNull(error)) {
+      if (false && is_deferred_import) {
+        // This block handles transitive load errors caused by a deferred
+        // import. Non-transitive load errors are handled above (see callers of
+        // |LibraryHandleError|). To handle the transitive case, we give the
+        // originating deferred library an opportunity to handle it.
+        Dart_Handle deferred_library = Dart_LookupLibrary(url);
+        if (!LibraryHandleError(deferred_library, error)) {
+          // Library did not handle it, return to caller as an unhandled
+          // exception.
+          return Dart_NewUnhandledExceptionError(error);
+        }
+      } else {
+        // We got an error during loading but we aren't loading a deferred
+        // library, return the error to the caller.
+        return error;
+      }
+    }
+
+    // Finalize loading. This will complete any futures for completed deferred
+    // loads.
+    error = Dart_FinalizeLoading(true);
+    if (Dart_IsError(error)) {
+      return error;
+    }
+  }
+  return Dart_Null();
+}
+
+
+Dart_Handle Loader::DartColonLibraryTagHandler(Dart_LibraryTag tag,
+                                               Dart_Handle library,
+                                               Dart_Handle url,
+                                               const char* library_url_string,
+                                               const char* url_string) {
+  // Handle canonicalization, 'import' and 'part' of 'dart:' libraries.
+  if (tag == Dart_kCanonicalizeUrl) {
+    // These will be handled internally.
+    return url;
+  } else if (tag == Dart_kImportTag) {
+    Builtin::BuiltinLibraryId id = Builtin::FindId(url_string);
+    if (id == Builtin::kInvalidLibrary) {
+      return DartUtils::NewError("The built-in library '%s' is not available"
+                                 " on the stand-alone VM.\n", url_string);
+    }
+    return Builtin::LoadLibrary(url, id);
+  } else {
+    ASSERT(tag == Dart_kSourceTag);
+    Builtin::BuiltinLibraryId id = Builtin::FindId(library_url_string);
+    if (id == Builtin::kInvalidLibrary) {
+      return DartUtils::NewError("The built-in library '%s' is not available"
+                                 " on the stand-alone VM. Trying to load"
+                                 " '%s'.\n", library_url_string, url_string);
+    }
+    // Prepend the library URI to form a unique script URI for the part.
+    intptr_t len = snprintf(NULL, 0, "%s/%s", library_url_string, url_string);
+    char* part_uri = reinterpret_cast<char*>(malloc(len + 1));
+    snprintf(part_uri, len + 1, "%s/%s", library_url_string, url_string);
+    Dart_Handle part_uri_obj = DartUtils::NewString(part_uri);
+    free(part_uri);
+    return Dart_LoadSource(library,
+                           part_uri_obj,
+                           Builtin::PartSource(id, url_string), 0, 0);
+  }
+  // All cases should have been handled above.
+  UNREACHABLE();
+}
+
+
+Mutex Loader::loader_infos_lock_;
+Loader::LoaderInfo* Loader::loader_infos_ = NULL;
+intptr_t Loader::loader_infos_length_ = 0;
+intptr_t Loader::loader_infos_capacity_ = 0;
+
+
+// Add a mapping from |port| to |isolate_data| (really the loader). When a
+// native message arrives, we use this map to report the I/O result to the
+// correct loader.
+// This happens whenever an isolate begins loading.
+void Loader::AddLoader(Dart_Port port, IsolateData* isolate_data) {
+  MutexLocker ml(&loader_infos_lock_);
+  ASSERT(LoaderForLocked(port) == NULL);
+  if (loader_infos_length_ == loader_infos_capacity_) {
+    // Grow to an initial capacity or double in size.
+    loader_infos_capacity_ =
+        (loader_infos_capacity_ == 0) ? 4 : loader_infos_capacity_ * 2;
+    loader_infos_ =
+        reinterpret_cast<Loader::LoaderInfo*>(
+            realloc(loader_infos_,
+                    sizeof(Loader::LoaderInfo) * loader_infos_capacity_));
+    ASSERT(loader_infos_ != NULL);
+    // Initialize new entries.
+    for (intptr_t i = loader_infos_length_; i < loader_infos_capacity_; i++) {
+      loader_infos_[i].port = ILLEGAL_PORT;
+      loader_infos_[i].isolate_data = NULL;
+    }
+  }
+  ASSERT(loader_infos_length_ < loader_infos_capacity_);
+  loader_infos_[loader_infos_length_].port = port;
+  loader_infos_[loader_infos_length_].isolate_data = isolate_data;
+  loader_infos_length_++;
+  ASSERT(LoaderForLocked(port) != NULL);
+}
+
+
+// Remove |port| from the map.
+// This happens once an isolate has finished loading.
+void Loader::RemoveLoader(Dart_Port port) {
+  MutexLocker ml(&loader_infos_lock_);
+  const intptr_t index = LoaderIndexFor(port);
+  ASSERT(index >= 0);
+  const intptr_t last = loader_infos_length_ - 1;
+  ASSERT(last >= 0);
+  if (index != last) {
+    // Swap with the tail.
+    loader_infos_[index] = loader_infos_[last];
+  }
+  loader_infos_length_--;
+}
+
+
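+// Returns the index of |port| in the loader map, or -1 if it is not present.
+// Assumes loader_infos_lock_ is held by the caller.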
+intptr_t Loader::LoaderIndexFor(Dart_Port port) {
+  for (intptr_t i = 0; i < loader_infos_length_; i++) {
+    if (loader_infos_[i].port == port) {
+      return i;
+    }
+  }
+  return -1;
+}
+
+
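+// Returns the loader registered for |port|, or NULL if there is none.
+// Assumes loader_infos_lock_ is held by the caller.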
+Loader* Loader::LoaderForLocked(Dart_Port port) {
+  intptr_t index = LoaderIndexFor(port);
+  if (index < 0) {
+    return NULL;
+  }
+  return loader_infos_[index].isolate_data->loader();
+}
+
+
+Loader* Loader::LoaderFor(Dart_Port port) {
+  MutexLocker ml(&loader_infos_lock_);
+  return LoaderForLocked(port);
+}
+
+
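+// The native message handler callback shared by all loaders. Routes each
+// incoming message to the loader registered for |dest_port_id|, dropping the
+// message if that loader has already been removed.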
+void Loader::NativeMessageHandler(Dart_Port dest_port_id,
+                                  Dart_CObject* message) {
+  MutexLocker ml(&loader_infos_lock_);
+  Loader* loader = LoaderForLocked(dest_port_id);
+  if (loader == NULL) {
+    return;
+  }
+  loader->QueueMessage(message);
+}
+
+}  // namespace bin
+}  // namespace dart
diff --git a/runtime/bin/loader.h b/runtime/bin/loader.h
new file mode 100644
index 0000000..034138e
--- /dev/null
+++ b/runtime/bin/loader.h
@@ -0,0 +1,146 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef BIN_LOADER_H_
+#define BIN_LOADER_H_
+
+#include "bin/isolate_data.h"
+#include "include/dart_api.h"
+#include "include/dart_native_api.h"
+#include "platform/assert.h"
+#include "platform/globals.h"
+#include "bin/thread.h"
+
+namespace dart {
+namespace bin {
+
+class Loader {
+ public:
+  explicit Loader(IsolateData* isolate_data);
+  ~Loader();
+
+  static void InitForSnapshot(const char* snapshot_uri);
+
+  // Loads contents of the specified url.
+  static Dart_Handle LoadUrlContents(Dart_Handle url,
+                                     uint8_t** payload,
+                                     intptr_t* payload_length);
+
+
+  // A static tag handler that hides all usage of a loader for an isolate.
+  static Dart_Handle LibraryTagHandler(Dart_LibraryTag tag,
+                                       Dart_Handle library,
+                                       Dart_Handle url);
+
+  Dart_Handle error() const {
+    return error_;
+  }
+
+ private:
+  // The port assigned to our native message handler.
+  Dart_Port port_;
+  // Each Loader is associated with an Isolate via its IsolateData.
+  IsolateData* isolate_data_;
+  // Remember the first error that occurs during loading.
+  Dart_Handle error_;
+  // This monitor is used to protect the pending operations count and the
+  // I/O result queue.
+  Monitor* monitor_;
+
+  // The number of operations dispatched to the service isolate for loading.
+  // Must be accessed with monitor_ held.
+  intptr_t pending_operations_;
+
+  // The result of an I/O request to the service isolate. Payload is either
+  // a UInt8Array or a C string containing an error message.
+  struct IOResult {
+    uint8_t* payload;
+    intptr_t payload_length;
+    char* library_uri;
+    char* uri;
+    int8_t tag;
+
+    void Setup(Dart_CObject* message);
+    void Cleanup();
+  };
+  // An array of I/O results queued from the service isolate.
+  IOResult* results_;
+  intptr_t results_length_;
+  intptr_t results_capacity_;
+  uint8_t* payload_;
+  intptr_t payload_length_;
+  typedef bool (*ProcessResult)(Loader* loader, IOResult* result);
+
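+  // Reads results_length_ through a volatile pointer so the compiler cannot
+  // cache the value across reads.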
+  intptr_t results_length() {
+    return *static_cast<volatile intptr_t*>(&results_length_);
+  }
+
+  // Send the loader init request to the service isolate.
+  void Init(const char* package_root,
+            const char* packages_file,
+            const char* working_directory,
+            const char* root_script_uri);
+
+  // Send a request for a dart-ext: import to the service isolate.
+  void SendImportExtensionRequest(Dart_Handle url,
+                                  Dart_Handle library_url);
+
+  // Send a request from the tag handler to the service isolate.
+  void SendRequest(Dart_LibraryTag tag,
+                   Dart_Handle url,
+                   Dart_Handle library_url);
+
+  /// Queue |message| and notify the loader that a message is available.
+  void QueueMessage(Dart_CObject* message);
+
+  /// Blocks the caller until the loader is finished.
+  void BlockUntilComplete(ProcessResult process_result);
+
+  /// Returns false if |result| is an error and the loader should quit.
+  static bool ProcessResultLocked(Loader* loader, IOResult* result);
+
+  /// Returns false if |result| is an error and the loader should quit.
+  static bool ProcessUrlLoadResultLocked(Loader* loader, IOResult* result);
+
+  /// Returns false if an error occurred and the loader should quit.
+  bool ProcessQueueLocked(ProcessResult process_result);
+
+  // Special inner tag handler for dart: uris.
+  static Dart_Handle DartColonLibraryTagHandler(Dart_LibraryTag tag,
+                                                Dart_Handle library,
+                                                Dart_Handle url,
+                                                const char* library_url_string,
+                                                const char* url_string);
+
+  // We use one native message handler callback for N loaders. The native
+  // message handler callback provides us with the Dart_Port which we use as a
+  // key into our map of active loaders from |port| to |isolate_data|.
+
+  // Static information to map Dart_Port back to the isolate in question.
+  struct LoaderInfo {
+    Dart_Port port;
+    IsolateData* isolate_data;
+  };
+
+  // The map of active loaders.
+  static Mutex loader_infos_lock_;
+  static LoaderInfo* loader_infos_;
+  static intptr_t loader_infos_length_;
+  static intptr_t loader_infos_capacity_;
+
+  static void AddLoader(Dart_Port port, IsolateData* data);
+  static void RemoveLoader(Dart_Port port);
+  static intptr_t LoaderIndexFor(Dart_Port port);
+  static Loader* LoaderFor(Dart_Port port);
+  static Loader* LoaderForLocked(Dart_Port port);
+
+  // This is the global callback for the native message handlers.
+  static void NativeMessageHandler(Dart_Port dest_port_id,
+                                   Dart_CObject* message);
+};
+
+}  // namespace bin
+}  // namespace dart
+
+#endif  // BIN_LOADER_H_
diff --git a/runtime/bin/main.cc b/runtime/bin/main.cc
index 8823eda..a1f859a 100644
--- a/runtime/bin/main.cc
+++ b/runtime/bin/main.cc
@@ -17,6 +17,7 @@
 #include "bin/extensions.h"
 #include "bin/file.h"
 #include "bin/isolate_data.h"
+#include "bin/loader.h"
 #include "bin/log.h"
 #include "bin/platform.h"
 #include "bin/process.h"
@@ -206,7 +207,7 @@
 static bool ProcessPackageRootOption(const char* arg,
                                      CommandLineOptions* vm_options) {
   ASSERT(arg != NULL);
-  if (*arg == '\0' || *arg == '-') {
+  if (*arg == '-') {
     return false;
   }
   commandline_package_root = arg;
@@ -215,9 +216,9 @@
 
 
 static bool ProcessPackagesOption(const char* arg,
-                                     CommandLineOptions* vm_options) {
+                                  CommandLineOptions* vm_options) {
   ASSERT(arg != NULL);
-  if ((*arg == '\0') || (*arg == '-')) {
+  if (*arg == '-') {
     return false;
   }
   commandline_packages_file = arg;
@@ -424,6 +425,26 @@
 }
 
 
+static bool ProcessHotReloadTestModeOption(const char* arg,
+                                           CommandLineOptions* vm_options) {
+  if (*arg != '\0') {
+    return false;
+  }
+
+  // Identity reload.
+  vm_options->AddArgument("--identity_reload");
+  // Start reloading quickly.
+  vm_options->AddArgument("--reload_every=50");
+  // Reload from optimized and unoptimized code.
+  vm_options->AddArgument("--reload_every_optimized=false");
+  // Reload less frequently as time goes on.
+  vm_options->AddArgument("--reload_every_back_off");
+  // Ensure that an isolate has reloaded once.
+  vm_options->AddArgument("--check_reloaded");
+
+  return true;
+}
+
 
 static bool ProcessShutdownOption(const char* arg,
                                   CommandLineOptions* vm_options) {
@@ -476,6 +497,7 @@
   { "--run-app-snapshot=", ProcessRunAppSnapshotOption },
   { "--use-blobs", ProcessUseBlobsOption },
   { "--trace-loading", ProcessTraceLoadingOption },
+  { "--hot-reload-test-mode", ProcessHotReloadTestModeOption },
   { NULL, NULL }
 };
 
@@ -591,6 +613,16 @@
                   "file is invalid.\n");
     return -1;
   }
+  if ((commandline_package_root != NULL) &&
+      (strlen(commandline_package_root) == 0)) {
+    Log::PrintErr("Empty package root specified.\n");
+    return -1;
+  }
+  if ((commandline_packages_file != NULL) &&
+      (strlen(commandline_packages_file) == 0)) {
+    Log::PrintErr("Empty package file name specified.\n");
+    return -1;
+  }
   if (is_noopt && gen_snapshot_kind != kNone) {
     Log::PrintErr("Generating a snapshot with dart_noopt is invalid.\n");
     return -1;
@@ -690,7 +722,7 @@
                                                 int* exit_code) {
   ASSERT(script_uri != NULL);
 
-  const bool needs_load_port = !run_app_snapshot;
+  const bool needs_load_port = true;
 #if defined(PRODUCT)
   const bool run_service_isolate = needs_load_port;
 #else
@@ -726,7 +758,7 @@
   }
 
   // Set up the library tag handler for this isolate.
-  Dart_Handle result = Dart_SetLibraryTagHandler(DartUtils::LibraryTagHandler);
+  Dart_Handle result = Dart_SetLibraryTagHandler(Loader::LibraryTagHandler);
   CHECK_RESULT(result);
 
   if (Dart_IsServiceIsolate(isolate)) {
@@ -770,13 +802,15 @@
   if (run_app_snapshot) {
     result = DartUtils::SetupIOLibrary(script_uri);
     CHECK_RESULT(result);
+    Loader::InitForSnapshot(script_uri);
   } else {
     // Load the specified application script into the newly created isolate.
-    result = DartUtils::LoadScript(script_uri);
-    CHECK_RESULT(result);
-
-    // Run event-loop and wait for script loading to complete.
-    result = Dart_RunLoop();
+    Dart_Handle uri =
+        DartUtils::ResolveScript(Dart_NewStringFromCString(script_uri));
+    CHECK_RESULT(uri);
+    result = Loader::LibraryTagHandler(Dart_kScriptTag,
+                                       Dart_Null(),
+                                       uri);
     CHECK_RESULT(result);
 
     Dart_TimelineEvent("LoadScript",
@@ -932,9 +966,11 @@
 }
 
 
-static const char* ServiceRequestError(Dart_Handle error) {
+static const char* InternalJsonRpcError(Dart_Handle error) {
   TextBuffer buffer(128);
-  buffer.Printf("{\"type\":\"Error\",\"text\":\"Internal error %s\"}",
+  buffer.Printf("{\"code\":-32603,"
+                "\"message\":\"Internal error\","
+                "\"details\": \"%s\"}",
                 Dart_GetError(error));
   return buffer.Steal();
 }
@@ -947,28 +983,32 @@
 };
 
 
-static const char* ServiceGetIOHandler(
+static bool ServiceGetIOHandler(
     const char* method,
     const char** param_keys,
     const char** param_values,
     intptr_t num_params,
-    void* user_data) {
+    void* user_data,
+    const char** response) {
   DartScope scope;
   // TODO(ajohnsen): Store the library/function in isolate data or user_data.
   Dart_Handle dart_io_str = Dart_NewStringFromCString("dart:io");
   if (Dart_IsError(dart_io_str)) {
-    return ServiceRequestError(dart_io_str);
+    *response = InternalJsonRpcError(dart_io_str);
+    return false;
   }
 
   Dart_Handle io_lib = Dart_LookupLibrary(dart_io_str);
   if (Dart_IsError(io_lib)) {
-    return ServiceRequestError(io_lib);
+    *response = InternalJsonRpcError(io_lib);
+    return false;
   }
 
   Dart_Handle handler_function_name =
       Dart_NewStringFromCString("_serviceObjectHandler");
   if (Dart_IsError(handler_function_name)) {
-    return ServiceRequestError(handler_function_name);
+    *response = InternalJsonRpcError(handler_function_name);
+    return false;
   }
 
   // TODO(johnmccutchan): paths is no longer used.  Update the io
@@ -983,15 +1023,18 @@
   Dart_Handle args[] = {paths, keys, values};
   Dart_Handle result = Dart_Invoke(io_lib, handler_function_name, 3, args);
   if (Dart_IsError(result)) {
-    return ServiceRequestError(result);
+    *response = InternalJsonRpcError(result);
+    return false;
   }
 
   const char *json;
   result = Dart_StringToCString(result, &json);
   if (Dart_IsError(result)) {
-    return ServiceRequestError(result);
+    *response = InternalJsonRpcError(result);
+    return false;
   }
-  return strdup(json);
+  *response = strdup(json);
+  return true;
 }
 
 
@@ -1359,7 +1402,6 @@
         reinterpret_cast<IsolateData*>(Dart_IsolateData(isolate));
     result = Dart_LibraryImportLibrary(
         isolate_data->builtin_lib(), root_lib, Dart_Null());
-#if !defined(PRODUCT)
     if (is_noopt ||
         (gen_snapshot_kind == kAppAfterRun) ||
         (gen_snapshot_kind == kAppAOT) ||
@@ -1374,7 +1416,6 @@
         exit(kErrorExitCode);
       }
     }
-#endif  // PRODUCT
 
     if (compile_all) {
       result = Dart_CompileAll();
@@ -1386,11 +1427,10 @@
         { "dart:_builtin", "::", "_getMainClosure" },
         { "dart:_builtin", "::", "_getPrintClosure" },
         { "dart:_builtin", "::", "_getUriBaseClosure" },
-        { "dart:_builtin", "::", "_resolveUri" },
+        { "dart:_builtin", "::", "_resolveInWorkingDirectory" },
         { "dart:_builtin", "::", "_setWorkingDirectory" },
         { "dart:_builtin", "::", "_setPackageRoot" },
-        { "dart:_builtin", "::", "_loadPackagesMap" },
-        { "dart:_builtin", "::", "_loadDataAsync" },
+        { "dart:_builtin", "::", "_libraryFilePath" },
         { "dart:io", "::", "_makeUint8ListView" },
         { "dart:io", "::", "_makeDatagram" },
         { "dart:io", "::", "_setupHooks" },
@@ -1410,9 +1450,7 @@
         { "dart:io", "_ProcessStartStatus", "set:_errorMessage" },
         { "dart:io", "_SecureFilterImpl", "get:ENCRYPTED_SIZE" },
         { "dart:io", "_SecureFilterImpl", "get:SIZE" },
-#if !defined(PRODUCT)
         { "dart:vmservice_io", "::", "main" },
-#endif  // !PRODUCT
         { NULL, NULL, NULL }  // Must be terminated with NULL entries.
       };
 
diff --git a/runtime/bin/vmservice/loader.dart b/runtime/bin/vmservice/loader.dart
index 7bec4a1..7686f1c 100644
--- a/runtime/bin/vmservice/loader.dart
+++ b/runtime/bin/vmservice/loader.dart
@@ -4,6 +4,311 @@
 
 part of vmservice_io;
 
+_sanitizeWindowsPath(path) {
+  // For Windows we need to massage the paths a bit according to
+  // http://blogs.msdn.com/b/ie/archive/2006/12/06/file-uris-in-windows.aspx
+  //
+  // Convert
+  // C:\one\two\three
+  // to
+  // /C:/one/two/three
+
+  if (_isWindows == false) {
+    // Do nothing when not running Windows.
+    return path;
+  }
+
+  var fixedPath = "${path.replaceAll('\\', '/')}";
+
+  if ((path.length > 2) && (path[1] == ':')) {
+    // Path begins with a drive letter.
+    return '/$fixedPath';
+  }
+
+  return fixedPath;
+}
+
+_trimWindowsPath(path) {
+  // Convert /X:/ to X:/.
+  if (_isWindows == false) {
+    // Do nothing when not running on Windows.
+    return path;
+  }
+  if (!path.startsWith('/') || (path.length < 3)) {
+    return path;
+  }
+  // Match '/?:'.
+  if ((path[0] == '/') && (path[2] == ':')) {
+    // Remove leading '/'.
+    return path.substring(1);
+  }
+  return path;
+}
+
+// Ensure we have a trailing slash character.
+_enforceTrailingSlash(uri) {
+  if (!uri.endsWith('/')) {
+    return '$uri/';
+  }
+  return uri;
+}
+
+// State associated with the isolate that is used for loading.
+class IsolateLoaderState extends IsolateEmbedderData {
+  IsolateLoaderState(this.isolateId);
+
+  final int isolateId;
+
+  SendPort sp;
+
+  void init(String packageRootFlag,
+            String packagesConfigFlag,
+            String workingDirectory,
+            String rootScript) {
+    // _workingDirectory must be set first.
+    _workingDirectory = new Uri.directory(workingDirectory);
+    if (rootScript != null) {
+      _rootScript = Uri.parse(rootScript);
+    }
+    // If the --package-root flag was passed.
+    if (packageRootFlag != null) {
+      _setPackageRoot(packageRootFlag);
+    }
+    // If the --packages flag was passed.
+    if (packagesConfigFlag != null) {
+      _setPackagesConfig(packagesConfigFlag);
+    }
+  }
+
+  void cleanup() {
+    if (_packagesPort != null) {
+      _packagesPort.close();
+      _packagesPort = null;
+    }
+  }
+
+  // The working directory when the embedder started.
+  Uri _workingDirectory;
+
+  // The root script's uri.
+  Uri _rootScript;
+
+  bool _traceLoading = false;
+
+  // Packages are resolved either by looking them up in a map or from within a
+  // package root.
+  bool get _packagesReady => (_packageRoot != null) ||
+                             (_packageMap != null) ||
+                             (_packageError != null);
+
+  // Error string set if there was an error resolving the package
+  // configuration, for example a missing .packages file or packages/
+  // directory, a malformed .packages file, or any other related error.
+  String _packageError = null;
+
+  // The directory to look in to resolve "package:" scheme URIs. By default it
+  // is the 'packages' directory right next to the script.
+  Uri _packageRoot = null;
+
+  // The map describing how certain package names are mapped to Uris.
+  Uri _packageConfig = null;
+  Map<String, Uri> _packageMap = null;
+
+  _setPackageRoot(String packageRoot) {
+    packageRoot = _sanitizeWindowsPath(packageRoot);
+    if (packageRoot.startsWith('file:') ||
+        packageRoot.startsWith('http:') ||
+        packageRoot.startsWith('https:')) {
+      packageRoot = _enforceTrailingSlash(packageRoot);
+      _packageRoot = _workingDirectory.resolve(packageRoot);
+    } else {
+      packageRoot = _sanitizeWindowsPath(packageRoot);
+      packageRoot = _trimWindowsPath(packageRoot);
+      _packageRoot =
+          _workingDirectory.resolveUri(new Uri.directory(packageRoot));
+    }
+  }
+
+  _setPackagesConfig(String packagesParam) {
+    var packagesName = _sanitizeWindowsPath(packagesParam);
+    var packagesUri = Uri.parse(packagesName);
+    if (packagesUri.scheme == '') {
+      // The packages URI has no scheme; assume it is a file path and
+      // resolve it against the working directory.
+      packagesUri = _workingDirectory.resolveUri(packagesUri);
+    }
+    _requestPackagesMap(packagesUri);
+    _pendingPackageLoads.add(() {
+      // Dummy action.
+    });
+  }
+
+  // Handling of access to the package root or package map from user code.
+  _triggerPackageResolution(action) {
+    if (_packagesReady) {
+      // Packages are ready. Execute the action now.
+      action();
+    } else {
+      if (_pendingPackageLoads.isEmpty) {
+        // Package resolution has not been set up yet, and this is the first
+        // request for package resolution & loading.
+        _requestPackagesMap();
+      }
+      // Register the action for when the package resolution is ready.
+      _pendingPackageLoads.add(action);
+    }
+  }
+
+  // A list of callbacks which should be invoked after the package map has been
+  // loaded.
+  List<Function> _pendingPackageLoads = [];
+
+  // Given a uri with a 'package' scheme, return a Uri that is prefixed with
+  // the package root or resolved relative to the package configuration.
+  Uri _resolvePackageUri(Uri uri) {
+    assert(uri.scheme == "package");
+    assert(_packagesReady);
+
+    if (uri.host.isNotEmpty) {
+      var path = '${uri.host}${uri.path}';
+      var right = 'package:$path';
+      var wrong = 'package://$path';
+
+      throw "URIs using the 'package:' scheme should look like "
+            "'$right', not '$wrong'.";
+    }
+
+    var packageNameEnd = uri.path.indexOf('/');
+    if (packageNameEnd == 0) {
+      // Package URIs must have a non-empty package name (not start with "/").
+      throw "URIS using the 'package:' scheme should look like "
+            "'package:packageName${uri.path}', not 'package:${uri.path}'";
+    }
+    if (_traceLoading) {
+      _log('Resolving package with uri path: ${uri.path}');
+    }
+    var resolvedUri;
+    if (_packageError != null) {
+      if (_traceLoading) {
+        _log("Resolving package with pending resolution error: $_packageError");
+      }
+      throw _packageError;
+    } else if (_packageRoot != null) {
+      resolvedUri = _packageRoot.resolve(uri.path);
+    } else {
+      if (packageNameEnd < 0) {
+        // Package URIs must have a path after the package name, even if it's
+        // just "/".
+        throw "URIS using the 'package:' scheme should look like "
+              "'package:${uri.path}/', not 'package:${uri.path}'";
+      }
+      var packageName = uri.path.substring(0, packageNameEnd);
+      var mapping = _packageMap[packageName];
+      if (_traceLoading) {
+        _log("Mapped '$packageName' package to '$mapping'");
+      }
+      if (mapping == null) {
+        throw "No mapping for '$packageName' package when resolving '$uri'.";
+      }
+      var path;
+      assert(uri.path.length > packageName.length);
+      path = uri.path.substring(packageName.length + 1);
+      if (_traceLoading) {
+        _log("Path to be resolved in package: $path");
+      }
+      resolvedUri = mapping.resolve(path);
+    }
+    if (_traceLoading) {
+      _log("Resolved '$uri' to '$resolvedUri'.");
+    }
+    return resolvedUri;
+  }
+
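+  // Port used to receive the packages map (or package root) reply; created in
+  // _requestPackagesMap and closed again in _handlePackagesReply.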
+  RawReceivePort _packagesPort;
+
+  void _requestPackagesMap([Uri packageConfig]) {
+    assert(_packagesPort == null);
+    assert(_rootScript != null);
+    // Create a port to receive the packages map on.
+    _packagesPort = new RawReceivePort(_handlePackagesReply);
+    var sp = _packagesPort.sendPort;
+
+    if (packageConfig != null) {
+      // Explicitly specified .packages path.
+      _handlePackagesRequest(sp,
+                             _traceLoading,
+                             -2,
+                             packageConfig);
+    } else {
+      // Search for .packages or packages/ starting at the root script.
+      _handlePackagesRequest(sp,
+                             _traceLoading,
+                             -1,
+                             _rootScript);
+    }
+
+    if (_traceLoading) {
+      _log("Requested packages map for '$_rootScript'.");
+    }
+  }
+
+  void _handlePackagesReply(msg) {
+    assert(_packagesPort != null);
+    // Make sure to close the _packagesPort before any other action.
+    _packagesPort.close();
+    _packagesPort = null;
+
+    if (_traceLoading) {
+      _log("Got packages reply: $msg");
+    }
+    if (msg is String) {
+      if (_traceLoading) {
+        _log("Got failure response on package port: '$msg'");
+      }
+      // Remember the error message.
+      _packageError = msg;
+    } else if (msg is List) {
+      if (msg.length == 1) {
+        if (_traceLoading) {
+          _log("Received package root: '${msg[0]}'");
+        }
+        _packageRoot = Uri.parse(msg[0]);
+      } else {
+        // First entry contains the location of the loaded .packages file.
+        assert((msg.length % 2) == 0);
+        assert(msg.length >= 2);
+        assert(msg[1] == null);
+        _packageConfig = Uri.parse(msg[0]);
+        _packageMap = new Map<String, Uri>();
+        for (var i = 2; i < msg.length; i+=2) {
+          // TODO(iposva): Complain about duplicate entries.
+          _packageMap[msg[i]] = Uri.parse(msg[i+1]);
+        }
+        if (_traceLoading) {
+          _log("Setup package map: $_packageMap");
+        }
+      }
+    } else {
+      _packageError = "Bad type of packages reply: ${msg.runtimeType}";
+      if (_traceLoading) {
+        _log(_packageError);
+      }
+    }
+
+    // Resolve all pending package loads now that we know how to resolve them.
+    while (_pendingPackageLoads.length > 0) {
+      // Order does not matter as we queue all of the requests up right now.
+      var req = _pendingPackageLoads.removeLast();
+      // Call the registered closure, to handle the delayed action.
+      req();
+    }
+    // Reset the pending package loads to empty so that loading can eventually
+    // finish.
+    _pendingPackageLoads = [];
+  }
+
+}
+
 _log(msg) {
   print("% $msg");
 }
@@ -11,19 +316,57 @@
 var _httpClient;
 
 // Send a response to the requesting isolate.
-void _sendResourceResponse(SendPort sp, int id, dynamic data) {
+void _sendResourceResponse(SendPort sp,
+                           int tag,
+                           Uri uri,
+                           String libraryUrl,
+                           dynamic data) {
   assert((data is List<int>) || (data is String));
-  var msg = new List(2);
-  msg[0] = id;
-  msg[1] = data;
+  var msg = new List(4);
+  if (data is String) {
+    // We encountered an error, flip the sign of the tag to indicate that.
+    tag = -tag;
+    if (libraryUrl == null) {
+      data = 'Could not load "$uri": $data';
+    } else {
+      data = 'Could not import "$uri" from "$libraryUrl": $data';
+    }
+  }
+  msg[0] = tag;
+  msg[1] = uri.toString();
+  msg[2] = libraryUrl;
+  msg[3] = data;
   sp.send(msg);
 }
 
-void _loadHttp(SendPort sp, int id, Uri uri) {
+// Send a response to the requesting isolate.
+void _sendExtensionImportResponse(SendPort sp,
+                                  Uri uri,
+                                  String libraryUrl,
+                                  String resolvedUri) {
+  var msg = new List(4);
+  int tag = _Dart_kImportExtension;
+  if (resolvedUri == null) {
+    // We could not resolve the dart-ext: uri.
+    tag = -tag;
+    resolvedUri = 'Could not resolve "$uri" from "$libraryUrl"';
+  }
+  msg[0] = tag;
+  msg[1] = uri.toString();
+  msg[2] = libraryUrl;
+  msg[3] = resolvedUri;
+  sp.send(msg);
+}
+
+void _loadHttp(SendPort sp,
+               int tag,
+               Uri uri,
+               Uri resolvedUri,
+               String libraryUrl) {
   if (_httpClient == null) {
     _httpClient = new HttpClient()..maxConnectionsPerHost = 6;
   }
-  _httpClient.getUrl(uri)
+  _httpClient.getUrl(resolvedUri)
     .then((HttpClientRequest request) => request.close())
     .then((HttpClientResponse response) {
       var builder = new BytesBuilder(copy: false);
@@ -31,37 +374,46 @@
           builder.add,
           onDone: () {
             if (response.statusCode != 200) {
-              var msg = "Failure getting $uri:\n"
+              var msg = "Failure getting $resolvedUri:\n"
                         "  ${response.statusCode} ${response.reasonPhrase}";
-              _sendResourceResponse(sp, id, msg);
+              _sendResourceResponse(sp, tag, uri, libraryUrl, msg);
             } else {
-              _sendResourceResponse(sp, id, builder.takeBytes());
+              _sendResourceResponse(sp, tag, uri, libraryUrl,
+                                    builder.takeBytes());
             }
           },
           onError: (e) {
-            _sendResourceResponse(sp, id, e.toString());
+            _sendResourceResponse(sp, tag, uri, libraryUrl, e.toString());
           });
     })
     .catchError((e) {
-      _sendResourceResponse(sp, id, e.toString());
+      _sendResourceResponse(sp, tag, uri, libraryUrl, e.toString());
     });
   // It's just here to push an event on the event loop so that we invoke the
   // scheduled microtasks.
   Timer.run(() {});
 }
 
-void _loadFile(SendPort sp, int id, Uri uri) {
-  var path = uri.toFilePath();
+void _loadFile(SendPort sp,
+               int tag,
+               Uri uri,
+               Uri resolvedUri,
+               String libraryUrl) {
+  var path = resolvedUri.toFilePath();
   var sourceFile = new File(path);
   sourceFile.readAsBytes().then((data) {
-    _sendResourceResponse(sp, id, data);
+    _sendResourceResponse(sp, tag, uri, libraryUrl, data);
   },
   onError: (e) {
-    _sendResourceResponse(sp, id, e.toString());
+    _sendResourceResponse(sp, tag, uri, libraryUrl, e.toString());
   });
 }
 
-void _loadDataUri(SendPort sp, int id, Uri uri) {
+void _loadDataUri(SendPort sp,
+                  int tag,
+                  Uri uri,
+                  Uri resolvedUri,
+                  String libraryUrl) {
   try {
     var mime = uri.data.mimeType;
     if ((mime != "application/dart") &&
@@ -74,25 +426,102 @@
       // The C++ portion of the embedder assumes UTF-8.
       throw "Only utf-8 or US-ASCII encodings are supported: $charset given.";
     }
-    _sendResourceResponse(sp, id, uri.data.contentAsBytes());
+    _sendResourceResponse(sp, tag, uri, libraryUrl, uri.data.contentAsBytes());
   } catch (e) {
-    _sendResourceResponse(sp, id, "Invalid data uri ($uri):\n  $e");
+    _sendResourceResponse(sp, tag, uri, libraryUrl,
+                          "Invalid data uri ($uri):\n  $e");
   }
 }
 
-_handleResourceRequest(SendPort sp, bool traceLoading, int id, Uri resource) {
-  if (resource.scheme == 'file') {
-    _loadFile(sp, id, resource);
-  } else if ((resource.scheme == 'http') || (resource.scheme == 'https')) {
-    _loadHttp(sp, id, resource);
-  } else if ((resource.scheme == 'data')) {
-    _loadDataUri(sp, id, resource);
+// Loading a package URI first requires mapping the package name to a loadable
+// URI.
+_loadPackage(IsolateLoaderState loaderState,
+             SendPort sp,
+             bool traceLoading,
+             int tag,
+             Uri uri,
+             Uri resolvedUri,
+             String libraryUrl) {
+  if (loaderState._packagesReady) {
+    var resolvedUri;
+    try {
+      resolvedUri = loaderState._resolvePackageUri(uri);
+    } catch (e, s) {
+      if (traceLoading) {
+        _log("Exception ($e) when resolving package URI: $uri");
+      }
+      // Report error.
+      _sendResourceResponse(sp,
+                            tag,
+                            uri,
+                            libraryUrl,
+                            e.toString());
+      return;
+    }
+    // Recursively call with the new resolved uri.
+    _handleResourceRequest(loaderState,
+                           sp,
+                           traceLoading,
+                           tag,
+                           uri,
+                           resolvedUri,
+                           libraryUrl);
   } else {
-    _sendResourceResponse(sp, id,
-                          'Unknown scheme (${resource.scheme}) for $resource');
+    if (loaderState._pendingPackageLoads.isEmpty) {
+      // Package resolution has not been set up yet, and this is the first
+      // request for package resolution & loading.
+      loaderState._requestPackagesMap();
+    }
+    // Register the action of loading this package once the package resolution
+    // is ready.
+    loaderState._pendingPackageLoads.add(() {
+      _handleResourceRequest(loaderState,
+                             sp,
+                             traceLoading,
+                             tag,
+                             uri,
+                             uri,
+                             libraryUrl);
+    });
+    if (traceLoading) {
+      _log("Pending package load of '$uri': "
+           "${loaderState._pendingPackageLoads.length} pending");
+    }
   }
 }
 
+// TODO(johnmccutchan): This and most other top level functions in this file
+// should be turned into methods on the IsolateLoaderState class.
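+// Dispatches a load request to the handler for the resolved URI's scheme
+// (file, http/https, data, or package).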
+_handleResourceRequest(IsolateLoaderState loaderState,
+                       SendPort sp,
+                       bool traceLoading,
+                       int tag,
+                       Uri uri,
+                       Uri resolvedUri,
+                       String libraryUrl) {
+  if (resolvedUri.scheme == '' || resolvedUri.scheme == 'file') {
+    _loadFile(sp, tag, uri, resolvedUri, libraryUrl);
+  } else if ((resolvedUri.scheme == 'http') ||
+             (resolvedUri.scheme == 'https')) {
+    _loadHttp(sp, tag, uri, resolvedUri, libraryUrl);
+  } else if ((resolvedUri.scheme == 'data')) {
+    _loadDataUri(sp, tag, uri, resolvedUri, libraryUrl);
+  } else if ((resolvedUri.scheme == 'package')) {
+    _loadPackage(loaderState,
+                 sp,
+                 traceLoading,
+                 tag,
+                 uri,
+                 resolvedUri,
+                 libraryUrl);
+  } else {
+    _sendResourceResponse(sp, tag,
+                          uri,
+                          libraryUrl,
+                          'Unknown scheme (${resolvedUri.scheme}) for '
+                          '$resolvedUri');
+  }
+}
 
 // Handling of packages requests. Finding and parsing of .packages file or
 // packages/ directories.
@@ -329,7 +758,6 @@
   }
 }
 
-
 Future<bool> _loadHttpPackagesFile(SendPort sp,
                                    bool traceLoading,
                                    Uri resource) async {
@@ -386,14 +814,17 @@
   }
 }
 
-
+// This code used to exist in a second isolate, so it uses a SendPort to
+// report its return value. This could be refactored so that it returns its
+// value directly and the caller awaits the future rather than waiting for a
+// message on a SendPort.
 _handlePackagesRequest(SendPort sp,
                        bool traceLoading,
-                       int id,
+                       int tag,
                        Uri resource) async {
   try {
-    if (id == -1) {
-      if (resource.scheme == 'file') {
+    if (tag == -1) {
+      if (resource.scheme == '' || resource.scheme == 'file') {
         _findPackagesFile(sp, traceLoading, resource);
       } else if ((resource.scheme == 'http') || (resource.scheme == 'https')) {
         // Try to load the .packages file next to the resource.
@@ -409,11 +840,11 @@
         sp.send("Unsupported scheme used to locate .packages file: "
                 "'$resource'.");
       }
-    } else if (id == -2) {
+    } else if (tag == -2) {
       if (traceLoading) {
         _log("Handling load of packages map: '$resource'.");
       }
-      if (resource.scheme == 'file') {
+      if (resource.scheme == '' || resource.scheme == 'file') {
         var exists = await new File.fromUri(resource).exists();
         if (exists) {
           _loadPackagesFile(sp, traceLoading, resource);
@@ -432,7 +863,7 @@
                 "'$resource'.");
       }
     } else {
-      sp.send("Unknown packages request id: $id for '$resource'.");
+      sp.send("Unknown packages request tag: $tag for '$resource'.");
     }
   } catch (e, s) {
     if (traceLoading) {
@@ -442,21 +873,195 @@
   }
 }
 
+// Shut down all active loaders by sending an error message.
+void shutdownLoaders() {
+  String message = 'Service shutdown';
+  if (_httpClient != null) {
+    _httpClient.close(force: true);
+    _httpClient = null;
+  }
+  isolateEmbedderData.values.toList().forEach((IsolateLoaderState ils) {
+    ils.cleanup();
+    assert(ils.sp != null);
+    _sendResourceResponse(ils.sp, 1, null, null, message);
+  });
+}
+
+// See Dart_LibraryTag in dart_api.h
+const _Dart_kCanonicalizeUrl = 0;      // Canonicalize the URL.
+const _Dart_kScriptTag = 1;            // Load the root script.
+const _Dart_kSourceTag = 2;            // Load a part source.
+const _Dart_kImportTag = 3;            // Import a library.
+
+// Extra requests. Keep these in sync between loader.dart and builtin.dart.
+const _Dart_kInitLoader = 4;           // Initialize the loader.
+const _Dart_kResourceLoad = 5;         // Resource class support.
+const _Dart_kGetPackageRootUri = 6;    // Uri of the packages/ directory.
+const _Dart_kGetPackageConfigUri = 7;  // Uri of the .packages file.
+const _Dart_kResolvePackageUri = 8;    // Resolve a package: uri.
+const _Dart_kImportExtension = 9;      // Import a dart-ext: file.
 
 // External entry point for loader requests.
 _processLoadRequest(request) {
-  SendPort sp = request[0];
-  assert(sp != null);
-  bool traceLoading = request[1];
-  assert(traceLoading != null);
-  int id = request[2];
-  assert(id != null);
-  String resource = request[3];
-  assert(resource != null);
-  var uri = Uri.parse(resource);
-  if (id >= 0) {
-    _handleResourceRequest(sp, traceLoading, id, uri);
-  } else {
-    _handlePackagesRequest(sp, traceLoading, id, uri);
+  assert(request is List);
+  assert(request.length > 4);
+
+  // Should we trace loading?
+  bool traceLoading = request[0];
+
+  // This is the sending isolate's Dart_GetMainPortId().
+  int isolateId = request[1];
+
+  // The tag describing the operation.
+  int tag = request[2];
+
+  // The send port to send the response on.
+  SendPort sp = request[3];
+
+  // Grab the loader state for the requesting isolate.
+  IsolateLoaderState loaderState = isolateEmbedderData[isolateId];
+
+  // We are either about to initialize the loader, or we already have.
+  assert((tag == _Dart_kInitLoader) || (loaderState != null));
+
+  // Handle the request specified in the tag.
+  switch (tag) {
+    case _Dart_kScriptTag: {
+      Uri uri = Uri.parse(request[4]);
+      // Remember the root script.
+      loaderState._rootScript = uri;
+      _handleResourceRequest(loaderState,
+                             sp,
+                             traceLoading,
+                             tag,
+                             uri,
+                             uri,
+                             null);
+    }
+    break;
+    case _Dart_kSourceTag:
+    case _Dart_kImportTag: {
+      // The url of the file being loaded.
+      var uri = Uri.parse(request[4]);
+      // The library that is importing/parting the file.
+      String libraryUrl = request[5];
+      _handleResourceRequest(loaderState,
+                             sp,
+                             traceLoading,
+                             tag,
+                             uri,
+                             uri,
+                             libraryUrl);
+    }
+    break;
+    case _Dart_kInitLoader: {
+      String packageRoot = request[4];
+      String packagesFile = request[5];
+      String workingDirectory = request[6];
+      String rootScript = request[7];
+      if (loaderState == null) {
+        loaderState = new IsolateLoaderState(isolateId);
+        isolateEmbedderData[isolateId] = loaderState;
+        loaderState.init(packageRoot,
+                         packagesFile,
+                         workingDirectory,
+                         rootScript);
+      }
+      loaderState.sp = sp;
+      assert(isolateEmbedderData[isolateId] == loaderState);
+    }
+    break;
+    case _Dart_kResourceLoad: {
+      Uri uri = Uri.parse(request[4]);
+      _handleResourceRequest(loaderState,
+                             sp,
+                             traceLoading,
+                             tag,
+                             uri,
+                             uri,
+                             null);
+    }
+    break;
+    case _Dart_kGetPackageRootUri:
+      loaderState._triggerPackageResolution(() {
+        // Respond with the package root (if any) after package resolution.
+        sp.send(loaderState._packageRoot);
+      });
+    break;
+    case _Dart_kGetPackageConfigUri:
+      loaderState._triggerPackageResolution(() {
+        // Respond with the packages config (if any) after package resolution.
+        sp.send(loaderState._packageConfig);
+      });
+    break;
+    case _Dart_kResolvePackageUri:
+      Uri uri = Uri.parse(request[4]);
+      loaderState._triggerPackageResolution(() {
+        // Respond with the resolved package uri after package resolution.
+        Uri resolvedUri;
+        try {
+          resolvedUri = loaderState._resolvePackageUri(uri);
+        } catch (e, s) {
+          if (traceLoading) {
+            _log("Exception ($e) when resolving package URI: $uri");
+          }
+          resolvedUri = null;
+        }
+        sp.send(resolvedUri);
+      });
+    break;
+    case _Dart_kImportExtension:
+      Uri uri = Uri.parse(request[4]);
+      String libraryUri = request[5];
+      // Strip any filename off of the libraryUri's path.
+      int index = libraryUri.lastIndexOf('/');
+      var path;
+      if (index == -1) {
+        path = './';
+      } else {
+        path = libraryUri.substring(0, index + 1);
+      }
+      var pathUri = Uri.parse(path);
+      switch (pathUri.scheme) {
+        case '':
+        case 'file':
+          _sendExtensionImportResponse(sp, uri, libraryUri,
+                                       pathUri.toFilePath());
+        break;
+        case 'data':
+        case 'http':
+        case 'https':
+          _sendExtensionImportResponse(sp, uri, libraryUri,
+                                       pathUri.toString());
+        break;
+        case 'package':
+          // Start package resolution.
+          loaderState._triggerPackageResolution(() {
+            // Attempt to find the fully resolved uri of [path].
+            Uri resolvedUri;
+            try {
+              resolvedUri = loaderState._resolvePackageUri(pathUri);
+            } catch (e, s) {
+              if (traceLoading) {
+                _log("Exception ($e) when resolving package URI: $uri");
+              }
+              resolvedUri = null;
+            }
+            _sendExtensionImportResponse(sp,
+                                         uri,
+                                         libraryUri,
+                                         resolvedUri.toString());
+          });
+        break;
+        default:
+          if (traceLoading) {
+            _log('Unknown scheme (${pathUri.scheme}) in $pathUri.');
+          }
+          _sendExtensionImportResponse(sp, uri, libraryUri, null);
+        break;
+      }
+    break;
+    default:
+      _log('Unknown loader request tag=$tag from $isolateId');
   }
 }
diff --git a/runtime/bin/vmservice/vmservice_io.dart b/runtime/bin/vmservice/vmservice_io.dart
index ba82441..3e1bee4 100644
--- a/runtime/bin/vmservice/vmservice_io.dart
+++ b/runtime/bin/vmservice/vmservice_io.dart
@@ -39,6 +39,7 @@
 }
 
 Future cleanupCallback() async {
+  shutdownLoaders();
   // Cancel the sigquit subscription.
   if (_signalSubscription != null) {
     await _signalSubscription.cancel();
@@ -51,10 +52,54 @@
       print("Error in vm-service shutdown: $e\n$st\n");
     }
   }
+  if (_registerSignalHandlerTimer != null) {
+    _registerSignalHandlerTimer.cancel();
+    _registerSignalHandlerTimer = null;
+  }
   // Call out to embedder's shutdown callback.
   _shutdown();
 }
 
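+// File system callbacks registered as VMServiceEmbedderHooks in main() below;
+// they let the VM service create temporary directories and read, write, list,
+// and delete files.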
+Future<Uri> createTempDirCallback(String base) async {
+  Directory temp = await Directory.systemTemp.createTemp(base);
+  return temp.uri;
+}
+
+Future deleteDirCallback(Uri path) async {
+  Directory dir = new Directory.fromUri(path);
+  await dir.delete(recursive: true);
+}
+
+Future writeFileCallback(Uri path, List<int> bytes) async {
+  var file = await new File.fromUri(path);
+  await file.writeAsBytes(bytes);
+}
+
+Future<List<int>> readFileCallback(Uri path) async {
+  var file = await new File.fromUri(path);
+  return await file.readAsBytes();
+}
+
+Future<List<Map<String,String>>> listFilesCallback(Uri dirPath) async {
+  var dir = new Directory.fromUri(dirPath);
+  var dirPathStr = dirPath.path;
+  var stream = dir.list(recursive: true);
+  var result = [];
+  await for (var fileEntity in stream) {
+    var filePath = new Uri.file(fileEntity.path).path;
+    var stat = await fileEntity.stat();
+    if (stat.type == FileSystemEntityType.FILE &&
+        filePath.startsWith(dirPathStr)) {
+      var map = {};
+      map['name'] = '/' + filePath.substring(dirPathStr.length);
+      map['size'] = stat.size;
+      map['modified'] = stat.modified.millisecondsSinceEpoch;
+      result.add(map);
+    }
+  }
+  return result;
+}
+
 _clearFuture(_) {
   serverFuture = null;
 }
@@ -73,7 +118,10 @@
   }
 }
 
+Timer _registerSignalHandlerTimer;
+
 _registerSignalHandler() {
+  _registerSignalHandlerTimer = null;
   if (_signalWatch == null) {
     // Cannot register for signals.
     return;
@@ -88,6 +136,14 @@
 main() {
   // Set embedder hooks.
   VMServiceEmbedderHooks.cleanup = cleanupCallback;
+  VMServiceEmbedderHooks.createTempDir = createTempDirCallback;
+  VMServiceEmbedderHooks.deleteDir = deleteDirCallback;
+  VMServiceEmbedderHooks.writeFile = writeFileCallback;
+  VMServiceEmbedderHooks.readFile = readFileCallback;
+  VMServiceEmbedderHooks.listFiles = listFilesCallback;
+  // Always instantiate the vmservice object so that the exit message
+  // can be delivered and waiting loaders can be cancelled.
+  var service = new VMService();
   if (_autoStart) {
     _lazyServerBoot();
     server.startup();
@@ -95,12 +151,10 @@
     // scheduled microtasks.
     Timer.run(() {});
   }
-  // TODO(johnmccutchan): Fixup service isolate shutdown in the general case.
-  // See ServiceIsolate::KillServiceIsolate and ServiceIsolate::Shutdown.
   scriptLoadPort.handler = _processLoadRequest;
   // Register signal handler after a small delay to avoid stalling main
   // isolate startup.
-  new Timer(shortDelay, _registerSignalHandler);
+  _registerSignalHandlerTimer = new Timer(shortDelay, _registerSignalHandler);
   return scriptLoadPort;
 }
 
diff --git a/runtime/bin/vmservice_impl.cc b/runtime/bin/vmservice_impl.cc
index 2736408..b7f4df4 100644
--- a/runtime/bin/vmservice_impl.cc
+++ b/runtime/bin/vmservice_impl.cc
@@ -342,8 +342,8 @@
     return result;
   }
   if (tag == Dart_kImportTag) {
-    // Embedder handles all requests for external libraries.
-    return DartUtils::LibraryTagHandler(tag, library, url);
+    UNREACHABLE();
+    return Dart_Null();
   }
   ASSERT((tag == Dart_kSourceTag) || (tag == Dart_kCanonicalizeUrl));
   if (tag == Dart_kCanonicalizeUrl) {
diff --git a/runtime/include/dart_api.h b/runtime/include/dart_api.h
index d019d4f..123064c 100755
--- a/runtime/include/dart_api.h
+++ b/runtime/include/dart_api.h
@@ -2656,10 +2656,10 @@
 /* TODO(turnidge): Finish documenting this section. */
 
 typedef enum {
-  Dart_kImportTag = 0,
-  Dart_kSourceTag,
-  Dart_kCanonicalizeUrl,
+  Dart_kCanonicalizeUrl = 0,
   Dart_kScriptTag,
+  Dart_kSourceTag,
+  Dart_kImportTag,
 } Dart_LibraryTag;
 
 /* TODO(turnidge): Document. */
@@ -2684,15 +2684,35 @@
     Dart_LibraryTagHandler handler);
 
 /**
+ * Canonicalizes a url with respect to some library.
+ *
+ * The url is resolved with respect to the library's url and some url
+ * normalizations are performed.
+ *
+ * This canonicalization function should be sufficient for most
+ * embedders to implement the Dart_kCanonicalizeUrl tag.
+ *
+ * \param base_url The base url relative to which the url is
+ *                being resolved.
+ * \param url The url being resolved and canonicalized.  This
+ *            parameter is a string handle.
+ *
+ * \return If no error occurs, a String object is returned.  Otherwise
+ *   an error handle is returned.
+ */
+DART_EXPORT Dart_Handle Dart_DefaultCanonicalizeUrl(Dart_Handle base_url,
+                                                    Dart_Handle url);
+
+/**
  * Loads the root script for the current isolate. The script can be
  * embedded in another file, for example in an html file.
  *
  * TODO(turnidge): Document.
  *
- * \line_offset is the number of text lines before the
+ * \param line_offset is the number of text lines before the
  *   first line of the Dart script in the containing file.
  *
- * \col_offset is the number of characters before the first character
+ * \param col_offset is the number of characters before the first character
  *   in the first line of the Dart script.
  */
 DART_EXPORT Dart_Handle Dart_LoadScript(Dart_Handle url,
diff --git a/runtime/include/dart_tools_api.h b/runtime/include/dart_tools_api.h
index da43d94..961c444 100644
--- a/runtime/include/dart_tools_api.h
+++ b/runtime/include/dart_tools_api.h
@@ -752,22 +752,44 @@
  * a service request it can't handle and the service request command name
  * matches one of the embedder registered handlers.
  *
+ * The return value of the callback indicates whether the response
+ * should be used as a regular result or an error result.
+ * Specifically, if the callback returns true, a regular JSON-RPC
+ * response is built in the following way:
+ *
+ * {
+ *   "jsonrpc": "2.0",
+ *   "result": <json_object>,
+ *   "id": <some sequence id>,
+ * }
+ *
+ * If the callback returns false, a JSON-RPC error is built like this:
+ *
+ * {
+ *   "jsonrpc": "2.0",
+ *   "error": <json_object>,
+ *   "id": <some sequence id>,
+ * }
+ *
  * \param method The rpc method name.
  * \param param_keys Service requests can have key-value pair parameters. The
  *   keys and values are flattened and stored in arrays.
  * \param param_values The values associated with the keys.
  * \param num_params The length of the param_keys and param_values arrays.
  * \param user_data The user_data pointer registered with this handler.
+ * \param json_object A C string containing a valid JSON object. The
+ *   returned pointer will be freed by the VM by calling free.
  *
- * \return Returns a C string containing a valid JSON object. The returned
- * pointer will be freed by the VM by calling free.
+ * \return True if the result is a regular JSON-RPC response, false if the
+ *   result is a JSON-RPC error.
  */
-typedef const char* (*Dart_ServiceRequestCallback)(
+typedef bool (*Dart_ServiceRequestCallback)(
     const char* method,
     const char** param_keys,
     const char** param_values,
     intptr_t num_params,
-    void* user_data);
+    void* user_data,
+    const char** json_object);
 
 
 /**
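A rough sketch, not the VM's implementation, of the wrapping described above: the boolean returned by the callback decides whether the JSON fragment handed back through `json_object` lands under `"result"` or `"error"` (an integer sequence id is assumed here).

```dart
import 'dart:convert';

// Sketch of the JSON-RPC envelope construction documented above. The VM's
// actual implementation is C++; this only illustrates the shape.
String wrapResponse(bool isResult, String jsonObject, int id) {
  var response = {'jsonrpc': '2.0', 'id': id};
  response[isResult ? 'result' : 'error'] = JSON.decode(jsonObject);
  return JSON.encode(response);
}

main() {
  print(wrapResponse(true, '{"type": "Success"}', 1));
  print(wrapResponse(false, '{"code": 100, "message": "Feature is disabled"}', 2));
}
```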
diff --git a/runtime/lib/bigint.dart b/runtime/lib/bigint.dart
index bc9b8f4..c21b04a 100644
--- a/runtime/lib/bigint.dart
+++ b/runtime/lib/bigint.dart
@@ -1336,6 +1336,8 @@
     return str;
   }
 
+  int _bitAndFromSmi(int other) => _bitAndFromInteger(other);
+
   int _bitAndFromInteger(int other) {
     return other._toBigint()._and(this)._toValidInt();
   }
diff --git a/runtime/lib/double.cc b/runtime/lib/double.cc
index 2706269..c28e1b1 100644
--- a/runtime/lib/double.cc
+++ b/runtime/lib/double.cc
@@ -232,9 +232,7 @@
 
 DEFINE_NATIVE_ENTRY(Double_toString, 1) {
   const Number& number = Number::CheckedHandle(arguments->NativeArgAt(0));
-  Heap::Space space = isolate->heap()->ShouldPretenure(kOneByteStringCid) ?
-      Heap::kPretenured : Heap::kNew;
-  return number.ToString(space);
+  return number.ToString(Heap::kNew);
 }
 
 
diff --git a/runtime/lib/errors.cc b/runtime/lib/errors.cc
index 0387f80..28d4ef4 100644
--- a/runtime/lib/errors.cc
+++ b/runtime/lib/errors.cc
@@ -26,6 +26,7 @@
 
   DartFrameIterator iterator;
   iterator.NextFrame();  // Skip native call.
+  iterator.NextFrame();  // Skip the _AssertionError._checkAssertion frame.
   const Script& script = Script::Handle(Exceptions::GetCallerScript(&iterator));
 
   // Initialize argument 'failed_assertion' with source snippet.
diff --git a/runtime/lib/errors_patch.dart b/runtime/lib/errors_patch.dart
index 98814f8..faf342b 100644
--- a/runtime/lib/errors_patch.dart
+++ b/runtime/lib/errors_patch.dart
@@ -26,6 +26,15 @@
   static _throwNew(int assertionStart, int assertionEnd)
       native "AssertionError_throwNew";
 
+  static void _checkAssertion(condition, int start, int end) {
+    if (condition is Function) {
+      condition = condition();
+    }
+    if (!condition) {
+      _throwNew(start, end);
+    }
+  }
+
   String toString() {
     if (_url == null) {
       return _failedAssertion;
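For reference, a minimal sketch of the two assert forms this `_checkAssertion` helper supports in Dart 1.x when run in checked mode (e.g. `dart --checked`); the invariant function is made up:

```dart
bool expensiveInvariantHolds(List<int> xs) => xs.every((x) => x >= 0);

main() {
  var xs = [1, 2, 3];
  assert(xs.isNotEmpty);                      // plain boolean condition
  assert(() => expensiveInvariantHolds(xs));  // closure, evaluated lazily
  print('asserts passed (or assertions are disabled)');
}
```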
diff --git a/runtime/lib/integers.cc b/runtime/lib/integers.cc
index 351cbbf..efb12af 100644
--- a/runtime/lib/integers.cc
+++ b/runtime/lib/integers.cc
@@ -146,7 +146,7 @@
   const Integer& right_int = Integer::CheckedHandle(arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(Integer, left_int, arguments->NativeArgAt(1));
   ASSERT(CheckInteger(right_int));
-  ASSERT(CheckInteger(right_int));
+  ASSERT(CheckInteger(left_int));
   if (FLAG_trace_intrinsified_natives) {
     OS::Print("Integer_moduloFromInteger %s mod %s\n",
         left_int.ToCString(), right_int.ToCString());
@@ -285,6 +285,19 @@
 }
 
 
+DEFINE_NATIVE_ENTRY(Smi_bitAndFromSmi, 2) {
+  const Smi& left = Smi::CheckedHandle(arguments->NativeArgAt(0));
+  GET_NON_NULL_NATIVE_ARGUMENT(Smi, right, arguments->NativeArgAt(1));
+  if (FLAG_trace_intrinsified_natives) {
+    OS::Print("Smi_bitAndFromSmi %s & %s\n",
+        left.ToCString(), right.ToCString());
+  }
+  const Smi& left_value = Smi::Cast(left);
+  const Smi& right_value = Smi::Cast(right);
+  return Smi::New(left_value.Value() & right_value.Value());
+}
+
+
 DEFINE_NATIVE_ENTRY(Smi_shrFromInt, 2) {
   const Smi& amount = Smi::CheckedHandle(arguments->NativeArgAt(0));
   GET_NON_NULL_NATIVE_ARGUMENT(Integer, value, arguments->NativeArgAt(1));
diff --git a/runtime/lib/integers.dart b/runtime/lib/integers.dart
index a7722b0..b86b71c 100644
--- a/runtime/lib/integers.dart
+++ b/runtime/lib/integers.dart
@@ -63,6 +63,7 @@
   num remainder(num other) {
     return other._remainderFromInteger(this);
   }
+  int _bitAndFromSmi(int other) native "Integer_bitAndFromInteger";
   int _bitAndFromInteger(int other) native "Integer_bitAndFromInteger";
   int _bitOrFromInteger(int other) native "Integer_bitOrFromInteger";
   int _bitXorFromInteger(int other) native "Integer_bitXorFromInteger";
@@ -412,6 +413,9 @@
   int operator ~() native "Smi_bitNegate";
   int get bitLength native "Smi_bitLength";
 
+  int operator &(int other) => other._bitAndFromSmi(this);
+
+  int _bitAndFromSmi(int other) native "Smi_bitAndFromSmi";
   int _shrFromInt(int other) native "Smi_shrFromInt";
   int _shlFromInt(int other) native "Smi_shlFromInt";
 
@@ -609,6 +613,8 @@
   int operator ~() native "Mint_bitNegate";
   int get bitLength native "Mint_bitLength";
 
+  int _bitAndFromSmi(int other) => _bitAndFromInteger(other);
+
   // Shift by mint exceeds range that can be handled by the VM.
   int _shrFromInt(int other) {
     if (other < 0) {
diff --git a/runtime/lib/isolate_patch.dart b/runtime/lib/isolate_patch.dart
index 09ae531..4dd89c0 100644
--- a/runtime/lib/isolate_patch.dart
+++ b/runtime/lib/isolate_patch.dart
@@ -312,13 +312,12 @@
     try {
       // The VM will invoke [_startIsolate] with entryPoint as argument.
       readyPort = new RawReceivePort();
-      var packageRoot = null;
-      var packageConfig = null;
-      if (Isolate._packageSupported()) {
-        packageRoot = (await Isolate.packageRoot)?.toString();
-        packageConfig = (await Isolate.packageConfig)?.toString();
-      }
 
+      // We do not inherit the package root or package config settings
+      // from the parent isolate; instead, we use the values that were
+      // set on the command line.
+      var packageRoot = VMLibraryHooks.packageRootString;
+      var packageConfig = VMLibraryHooks.packageConfigString;
       var script = VMLibraryHooks.platformScript;
       if (script == null) {
         // We do not have enough information to support spawning the new
diff --git a/runtime/lib/lib_prefix.dart b/runtime/lib/lib_prefix.dart
index 4f509da..f457b02 100644
--- a/runtime/lib/lib_prefix.dart
+++ b/runtime/lib/lib_prefix.dart
@@ -8,7 +8,7 @@
 // This type corresponds to the VM-internal class LibraryPrefix.
 class _LibraryPrefix {
   bool _load() native "LibraryPrefix_load";
-  Error _loadError() native "LibraryPrefix_loadError";
+  Object _loadError() native "LibraryPrefix_loadError";
   bool isLoaded() native "LibraryPrefix_isLoaded";
   bool _invalidateDependentCode()
       native "LibraryPrefix_invalidateDependentCode";
@@ -32,7 +32,7 @@
       // prefix. If that is the case, we must invalidate the dependent
       // code and complete the future now since there will be no callback
       // from the VM.
-      if (hasCompleted) {
+      if (hasCompleted && !completer.isCompleted) {
         _invalidateDependentCode();
         completer.complete(true);
         _outstandingLoadRequests.remove(pair);
@@ -46,19 +46,35 @@
 // second element is the Completer for the load request.
 var _outstandingLoadRequests = new List<List>();
 
-// Called from the VM when all outstanding load requests have
-// finished.
+// Called from the VM when an outstanding load request has finished.
 _completeDeferredLoads() {
-  for (int i = 0; i < _outstandingLoadRequests.length; i++) {
-    var prefix = _outstandingLoadRequests[i][0];
-    var completer = _outstandingLoadRequests[i][1];
+  // Determine which outstanding load requests have completed and complete
+  // their completer (with an error or true). For outstanding load requests
+  // which have not completed, remember them for next time in
+  // stillOutstandingLoadRequests.
+  var stillOutstandingLoadRequests = new List<List>();
+  var completedLoadRequests = new List<List>();
+
+  // Make a copy of the outstandingRequests because the call to _load below
+  // may recursively trigger another call to |_completeDeferredLoads|, which
+  // can cause |_outstandingLoadRequests| to be modified.
+  var outstandingRequests = _outstandingLoadRequests.toList();
+  for (int i = 0; i < outstandingRequests.length; i++) {
+    var prefix = outstandingRequests[i][0];
+    var completer = outstandingRequests[i][1];
     var error = prefix._loadError();
+    if (completer.isCompleted) {
+      // Already completed. Skip.
+      continue;
+    }
     if (error != null) {
       completer.completeError(error);
-    } else {
+    } else if (prefix._load()) {
       prefix._invalidateDependentCode();
       completer.complete(true);
+    } else {
+      stillOutstandingLoadRequests.add(outstandingRequests[i]);
     }
   }
-  _outstandingLoadRequests.clear();
+  _outstandingLoadRequests = stillOutstandingLoadRequests;
 }
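For context, the user-facing feature this bookkeeping serves is `deferred` imports. A small sketch follows; `heavy_feature.dart` and `feature.run()` are hypothetical names, not part of this change:

```dart
// `heavy_feature.dart` is a hypothetical application library.
import 'heavy_feature.dart' deferred as feature;

main() async {
  // loadLibrary() returns a Future; the completers tracked in
  // _outstandingLoadRequests above are what eventually complete it.
  await feature.loadLibrary();
  feature.run();  // Hypothetical entry point, callable only after the await.
}
```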
diff --git a/runtime/lib/stacktrace.cc b/runtime/lib/stacktrace.cc
index 9d9a307..a63d66d 100644
--- a/runtime/lib/stacktrace.cc
+++ b/runtime/lib/stacktrace.cc
@@ -61,6 +61,16 @@
   OS::PrintErr("=== Current Trace:\n%s===\n", stacktrace.ToCString());
 }
 
+// Like _printCurrentStacktrace, but works in a NoSafepointScope.
+void _printCurrentStacktraceNoSafepoint() {
+  StackFrameIterator frames(StackFrameIterator::kDontValidateFrames);
+  StackFrame* frame = frames.NextFrame();
+  while (frame != NULL) {
+    OS::Print("%s\n", frame->ToCString());
+    frame = frames.NextFrame();
+  }
+}
+
 DEFINE_NATIVE_ENTRY(StackTrace_current, 0) {
   const Stacktrace& stacktrace = GetCurrentStacktrace(1);
   return stacktrace.raw();
diff --git a/runtime/lib/string.cc b/runtime/lib/string.cc
index 01f6ad0..d4008a3 100644
--- a/runtime/lib/string.cc
+++ b/runtime/lib/string.cc
@@ -300,8 +300,7 @@
 
 DEFINE_NATIVE_ENTRY(OneByteString_allocate, 1) {
   GET_NON_NULL_NATIVE_ARGUMENT(Smi, length_obj, arguments->NativeArgAt(0));
-  Heap::Space space = isolate->heap()->SpaceForAllocation(kOneByteStringCid);
-  return OneByteString::New(length_obj.Value(), space);
+  return OneByteString::New(length_obj.Value(), Heap::kNew);
 }
 
 
@@ -326,7 +325,7 @@
   }
   ASSERT(length >= 0);
 
-  Heap::Space space = isolate->heap()->SpaceForAllocation(kOneByteStringCid);
+  Heap::Space space = Heap::kNew;
   if (list.IsTypedData()) {
     const TypedData& array = TypedData::Cast(list);
     if (end > array.LengthInBytes()) {
@@ -425,7 +424,7 @@
     Exceptions::ThrowArgumentError(end_obj);
   }
 
-  Heap::Space space = isolate->heap()->SpaceForAllocation(kTwoByteStringCid);
+  Heap::Space space = Heap::kNew;
   if (list.IsTypedData()) {
     const TypedData& array = TypedData::Cast(list);
     if (array.ElementType() != kUint16ArrayElement) {
diff --git a/runtime/lib/symbol_patch.dart b/runtime/lib/symbol_patch.dart
index fdb6527..1a1b2bc 100644
--- a/runtime/lib/symbol_patch.dart
+++ b/runtime/lib/symbol_patch.dart
@@ -51,4 +51,9 @@
     }
     return result.toString();
   }
+
+  /* patch */ int get hashCode {
+    const arbitraryPrime = 664597;
+    return 0x1fffffff & (arbitraryPrime * _name.hashCode);
+  }
 }
diff --git a/runtime/lib/timeline.cc b/runtime/lib/timeline.cc
index b5b397d..6923e6f 100644
--- a/runtime/lib/timeline.cc
+++ b/runtime/lib/timeline.cc
@@ -54,6 +54,7 @@
 
 
 DEFINE_NATIVE_ENTRY(Timeline_reportTaskEvent, 6) {
+#ifndef PRODUCT
   if (!FLAG_support_timeline) {
     return Object::null();
   }
@@ -114,12 +115,13 @@
 
   // json was allocated in the zone and a copy will be stored in event.
   event->CompleteWithPreSerializedJSON(json);
-
+#endif
   return Object::null();
 }
 
 
 DEFINE_NATIVE_ENTRY(Timeline_reportCompleteEvent, 5) {
+#ifndef PRODUCT
   if (!FLAG_support_timeline) {
     return Object::null();
   }
@@ -185,12 +187,13 @@
                   end_cpu);
   // json was allocated in the zone and a copy will be stored in event.
   event->CompleteWithPreSerializedJSON(json);
-
+#endif
   return Object::null();
 }
 
 
 DEFINE_NATIVE_ENTRY(Timeline_reportInstantEvent, 4) {
+#ifndef PRODUCT
   if (!FLAG_support_timeline) {
     return Object::null();
   }
@@ -228,7 +231,7 @@
   event->Instant("", start.AsInt64Value());
   // json was allocated in the zone and a copy will be stored in event.
   event->CompleteWithPreSerializedJSON(json);
-
+#endif
   return Object::null();
 }
 
diff --git a/runtime/lib/typed_data.dart b/runtime/lib/typed_data.dart
index ad0d89a..f37cacb 100644
--- a/runtime/lib/typed_data.dart
+++ b/runtime/lib/typed_data.dart
@@ -2886,12 +2886,10 @@
   Int32x4 withFlagY(bool y) native "Int32x4_setFlagY";
   Int32x4 withFlagZ(bool z) native "Int32x4_setFlagZ";
   Int32x4 withFlagW(bool w) native "Int32x4_setFlagW";
-  Float32x4 select(Float32x4 trueValue,
-                          Float32x4 falseValue) {
+  Float32x4 select(Float32x4 trueValue, Float32x4 falseValue) {
     return _select(trueValue, falseValue);
   }
-  Float32x4 _select(Float32x4 trueValue,
-                           Float32x4 falseValue)
+  Float32x4 _select(Float32x4 trueValue, Float32x4 falseValue)
       native "Int32x4_select";
 
   /// Mask passed to [shuffle] or [shuffleMix].
diff --git a/runtime/lib/vmservice.cc b/runtime/lib/vmservice.cc
index 502f510..831b9b8 100644
--- a/runtime/lib/vmservice.cc
+++ b/runtime/lib/vmservice.cc
@@ -9,6 +9,7 @@
 #include "vm/flags.h"
 #include "vm/growable_array.h"
 #include "vm/message.h"
+#include "vm/message_handler.h"
 #include "vm/native_entry.h"
 #include "vm/object.h"
 #include "vm/port.h"
@@ -115,6 +116,15 @@
 }
 
 
+DEFINE_NATIVE_ENTRY(VMService_SendObjectRootServiceMessage, 1) {
+  GET_NON_NULL_NATIVE_ARGUMENT(Array, message, arguments->NativeArgAt(0));
+  if (FLAG_support_service) {
+    Service::HandleObjectRootMessage(message);
+  }
+  return Object::null();
+}
+
+
 DEFINE_NATIVE_ENTRY(VMService_OnStart, 0) {
   if (FLAG_trace_service) {
     OS::Print("vm-service: Booting dart:vmservice library.\n");
@@ -139,6 +149,9 @@
 DEFINE_NATIVE_ENTRY(VMService_OnExit, 0) {
   if (FLAG_trace_service) {
     OS::Print("vm-service: processed exit message.\n");
+    MessageHandler* message_handler = isolate->message_handler();
+    OS::Print("vm-service: live ports = %" Pd "\n",
+              message_handler->live_ports());
   }
   return Object::null();
 }
diff --git a/runtime/lib/vmservice_patch.dart b/runtime/lib/vmservice_patch.dart
index f26b93d..e0a85ae 100644
--- a/runtime/lib/vmservice_patch.dart
+++ b/runtime/lib/vmservice_patch.dart
@@ -25,6 +25,8 @@
     native "VMService_SendIsolateServiceMessage";
 patch void sendRootServiceMessage(List m)
     native "VMService_SendRootServiceMessage";
+patch void sendObjectRootServiceMessage(List m)
+    native "VMService_SendObjectRootServiceMessage";
 patch void _onStart() native "VMService_OnStart";
 patch void _onExit() native "VMService_OnExit";
 patch void onServerAddressChange(String address)
diff --git a/runtime/observatory/.analysis_options b/runtime/observatory/.analysis_options
index fd7cfbf..fc9ab29 100644
--- a/runtime/observatory/.analysis_options
+++ b/runtime/observatory/.analysis_options
@@ -1,9 +1,6 @@
 analyzer:
   exclude:
-# Remove once Dart 1.13-dev is released
+    - tests/service/get_isolate_after_language_error_test.dart
     - tests/service/developer_extension_test.dart
-# Remove once Dart 1.13-dev is released
-    - tests/service/logging_test.dart
-    - tests/ui/log.dart
-# Remove once Dart 1.13-dev is released
-    - tests/service/get_stack_rpc_test.dart
+    - tests/service/address_mapper_test.dart
+    - tests/service/pause_on_unhandled_exceptions_test.dart
diff --git a/runtime/observatory/HACKING.md b/runtime/observatory/HACKING.md
new file mode 100644
index 0000000..af74bda
--- /dev/null
+++ b/runtime/observatory/HACKING.md
@@ -0,0 +1,193 @@
+# Hacking Observatory
+
+These instructions will guide you through the Observatory development and
+testing workflow.
+
+## SDK Setup & Build
+Getting ready to start.
+
+Before you start hacking on Observatory, follow the [instructions][build_sdk] to
+set up a working environment in which you can build and test the Dart SDK.
+
+### Develop with Dartium ~ Suggested
+If you want to avoid triggering a new compilation to JavaScript for each edit
+you make, you can use a modified version of Chromium, named Dartium, that
+interprets your Dart code directly.
+
+You can obtain Dartium in two different ways:
+1. [Download][download_dartium] the binaries
+2. [Build][build_dartium] Dartium from the source code
+
+
+## Run existing tests
+Before hacking on Observatory, first run the existing Observatory tests.
+We suggest running all the tests in __debug__ mode.
+
+First, build the sdk in debug mode:
+```
+$ ./tools/build.py --mode debug create_sdk
+```
+
+From the root of the sdk repository run:
+```
+$ ./tools/test.py -mdebug service
+```
+
+## Serve Observatory
+Observatory is built as part of building the sdk, but when working on
+Observatory we recommend that you use __pub serve__ so you can avoid the
+overhead of building the sdk for each change.
+
+Use __pub__ to __serve__ Observatory:
+```
+[...]/runtime/observatory$ pub serve
+```
+
+## Open Observatory
+You can open the development version of Observatory from
+Chrome/Chromium/__Dartium__ by navigating to [localhost:8080][open_observatory]
+
+Every change you make to the Observatory source code will be visible by simply
+__refreshing__ the page in the browser.
+
+## Connect to a VM
+Start a Dart VM with the ``--observe`` flag (as explained in the
+[get started guide][observatory_get_started]) and connect your Observatory
+instance to that VM.
+
+Example script (file name ```clock.dart```):
+```dart
+import 'dart:async' show Timer, Duration;
+
+main() {
+  bool tick = true;
+  new Timer.periodic(const Duration(seconds: 1), (Timer t) {
+    print(tick ? 'tick' : 'tock');
+    tick = !tick;
+  });
+}
+```
+Start the script:
+```
+$ dart --observe clock.dart
+```
+
+## Code Reviews
+The development workflow of Dart (and Observatory) is based on code reviews.
+
+Follow the code review [instructions][code_review] to be able to successfully
+submit your code.
+
+The main reviewers for Observatory related CLs are:
+  - turnidge
+  - johnmccutchan
+  - rmacnak
+
+## Write a new service test
+All the service tests are located in the ```tests/service``` folder.
+Test file names follow the convention ```<description>_test.dart```
+(e.g. ```a_brief_description_test.dart```).
+
+A test is generally structured in the following way:
+```dart
+import 'package:test/test.dart';
+
+main() {
+  // Some code that you need to test.
+  var a = 1 + 2;
+
+  // Some assertions to check the results.
+  expect(a, equals(3));
+}
+```
+See the official [test library][test_library] instructions.
+
+The ```test_helper.dart``` file exposes functions that let you run part of the
+code in another __VM__.
+
+To test synchronous operations:
+```dart
+import 'test_helper.dart';
+
+code() {
+  // Write the code you want to be executed in another VM.
+}
+
+var tests = [
+  // A series of tests that you want to run against the above code.
+  (Isolate isolate) async {
+    await isolate.reload();
+    // Use the isolate to communicate to the VM.
+  }
+];
+
+main(args) => runIsolateTestsSynchronous(args,
+                                        tests,
+                                        testeeConcurrent: code);
+```
+
+To test asynchronous operations:
+```dart
+import 'test_helper.dart';
+
+code() async {
+  // Write the asynchronous code you want to be executed in another VM.
+}
+
+var tests = [
+  // A series of tests that you want to run against the above code.
+  (Isolate isolate) async {
+    await isolate.reload();
+    // Use the isolate to communicate to the VM.
+  }
+];
+
+main(args) async => runIsolateTests(args,
+                                    tests,
+                                    testeeConcurrent: code);
+```
+
+Both ```runIsolateTests``` and ```runIsolateTestsSynchronous``` accept the
+following named parameters (a usage sketch follows the list):
+ - __testeeBefore__ (void()) a function that is going to be executed before
+the test
+ - __testeeConcurrent__ (void()) test that is going to be executed
+ - __pause_on_start__ (bool, default: false) pause the Isolate before the first
+instruction
+ - __pause_on_exit__ (bool, default: false) pause the Isolate after the last
+instruction
+ - __pause_on_unhandled_exceptions__ (bool, default: false) pause the Isolate at
+an unhandled exception
+ - __trace_service__ (bool, default: false) trace VM service requests
+ - __trace_compiler__ (bool, default: false) trace compiler operations
+ - __verbose_vm__ (bool, default: false) verbose logging
+
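+A possible combination of these parameters (sketch only, test body
+illustrative):
+```dart
+import 'test_helper.dart';
+
+testeeSetup() {
+  // Runs in the testee VM before the tests below are executed.
+  print('testee is ready');
+}
+
+var tests = [
+  (Isolate isolate) async {
+    await isolate.reload();
+  }
+];
+
+main(args) => runIsolateTests(args,
+                              tests,
+                              testeeBefore: testeeSetup,
+                              pause_on_exit: true,
+                              trace_service: true);
+```
+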
+
+Some common and reusable tests are available from ```service_test_common.dart```:
+ - hasPausedFor
+   - hasStoppedAtBreakpoint
+   - hasStoppedWithUnhandledException
+   - hasStoppedAtExit
+   - hasPausedAtStart
+and utility functions (a short example follows the list):
+ - subscribeToStream
+ - cancelStreamSubscription
+ - asyncStepOver
+ - setBreakpointAtLine
+ - resumeIsolate
+ - resumeAndAwaitEvent
+ - resumeIsolateAndAwaitEvent
+ - stepOver
+ - getClassFromRootLib
+ - rootLibraryFieldValue
+
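+For example, a sketch in the style of the existing service tests (helper
+signatures as used throughout ```tests/service```):
+```dart
+import 'dart:developer';
+import 'service_test_common.dart';
+import 'test_helper.dart';
+
+testMain() {
+  debugger();  // Pause the testee so the tests below can inspect it.
+  print('resumed');
+}
+
+var tests = [
+  hasStoppedAtBreakpoint,
+  resumeIsolate,
+];
+
+main(args) => runIsolateTestsSynchronous(args,
+                                         tests,
+                                         testeeConcurrent: testMain);
+```
+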
+## Run your tests
+See: __Run existing tests__
+
+[build_sdk]: https://github.com/dart-lang/sdk/wiki/Building "Building the Dart SDK"
+[download_dartium]: https://www.dartlang.org/tools/dartium/ "Download Dartium"
+[build_dartium]: https://github.com/dart-lang/sdk/wiki/Building-Dartium "Build Dartium"
+[open_observatory]: http://localhost:8080/ "Open Observatory"
+[observatory_get_started]: https://dart-lang.github.io/observatory/get-started.html "Observatory get started"
+[code_review]: https://github.com/dart-lang/sdk/wiki/Code-review-workflow-with-GitHub-and-reitveld "Code Review"
+[test_library]: https://pub.dartlang.org/packages/test "Test Library"
diff --git a/runtime/observatory/lib/elements.dart b/runtime/observatory/lib/elements.dart
index 4863c2c..9ee5cf2 100644
--- a/runtime/observatory/lib/elements.dart
+++ b/runtime/observatory/lib/elements.dart
@@ -43,6 +43,7 @@
 export 'package:observatory/src/elements/object_common.dart';
 export 'package:observatory/src/elements/object_view.dart';
 export 'package:observatory/src/elements/objectpool_view.dart';
+export 'package:observatory/src/elements/objectstore_view.dart';
 export 'package:observatory/src/elements/observatory_application.dart';
 export 'package:observatory/src/elements/observatory_element.dart';
 export 'package:observatory/src/elements/persistent_handles.dart';
diff --git a/runtime/observatory/lib/elements.html b/runtime/observatory/lib/elements.html
index 4e84fec..52b59b4 100644
--- a/runtime/observatory/lib/elements.html
+++ b/runtime/observatory/lib/elements.html
@@ -36,6 +36,7 @@
 <link rel="import" href="src/elements/object_common.html">
 <link rel="import" href="src/elements/object_view.html">
 <link rel="import" href="src/elements/objectpool_view.html">
+<link rel="import" href="src/elements/objectstore_view.html">
 <link rel="import" href="src/elements/observatory_application.html">
 <link rel="import" href="src/elements/observatory_element.html">
 <link rel="import" href="src/elements/persistent_handles.html">
diff --git a/runtime/observatory/lib/src/app/application.dart b/runtime/observatory/lib/src/app/application.dart
index 0ffe385..be95803 100644
--- a/runtime/observatory/lib/src/app/application.dart
+++ b/runtime/observatory/lib/src/app/application.dart
@@ -136,6 +136,7 @@
     _pageRegistry.add(new InspectPage(this));
     _pageRegistry.add(new ClassTreePage(this));
     _pageRegistry.add(new DebuggerPage(this));
+    _pageRegistry.add(new ObjectStorePage(this));
     _pageRegistry.add(new CpuProfilerPage(this));
     _pageRegistry.add(new TableCpuProfilerPage(this));
     _pageRegistry.add(new AllocationProfilerPage(this));
@@ -184,7 +185,7 @@
   void _installPage(Page page) {
     assert(page != null);
     if (currentPage == page) {
-      // Already isntalled.
+      // Already installed.
       return;
     }
     if (currentPage != null) {
diff --git a/runtime/observatory/lib/src/app/page.dart b/runtime/observatory/lib/src/app/page.dart
index b2216e4..7f51a8e 100644
--- a/runtime/observatory/lib/src/app/page.dart
+++ b/runtime/observatory/lib/src/app/page.dart
@@ -199,6 +199,24 @@
   }
 }
 
+
+class ObjectStorePage extends SimplePage {
+  ObjectStorePage(app) : super('object-store', 'objectstore-view', app);
+
+  void _visit(Uri uri) {
+    super._visit(uri);
+    getIsolate(uri).then((isolate) {
+      isolate.getObjectStore().then((objectStore) {
+        if (element != null) {
+          /// Update the page.
+          ObjectStoreViewElement page = element;
+          page.objectStore = objectStore;
+        }
+      });
+    });
+  }
+}
+
 class CpuProfilerPage extends SimplePage {
   CpuProfilerPage(app) : super('profiler', 'cpu-profile', app);
 
diff --git a/runtime/observatory/lib/src/debugger/debugger_location.dart b/runtime/observatory/lib/src/debugger/debugger_location.dart
index 7688712..ac3a9bf 100644
--- a/runtime/observatory/lib/src/debugger/debugger_location.dart
+++ b/runtime/observatory/lib/src/debugger/debugger_location.dart
@@ -256,7 +256,6 @@
         return new DebuggerLocation.error(
             "Function '${match.group(0)}' is ambiguous");
       }
-      return new DebuggerLocation.error('foo');
     });
   }
 
diff --git a/runtime/observatory/lib/src/elements/isolate_view.html b/runtime/observatory/lib/src/elements/isolate_view.html
index 69c30ed..e7777a7 100644
--- a/runtime/observatory/lib/src/elements/isolate_view.html
+++ b/runtime/observatory/lib/src/elements/isolate_view.html
@@ -94,6 +94,12 @@
               <div class="memberName">service protocol extensions</div>
               <div class="memberValue">{{ isolate.extensionRPCs }}</div>
             </div>
+            <div class="memberItem">
+              <div class="memberName">object store</div>
+              <div class="memberValue">
+                <a on-click="{{ goto }}" _href="{{ gotoLink('/object-store', isolate) }}">object store</a>
+              </div>
+            </div>
 
             <div class="memberItem">
               <div class="memberName">
diff --git a/runtime/observatory/lib/src/elements/objectstore_view.dart b/runtime/observatory/lib/src/elements/objectstore_view.dart
new file mode 100644
index 0000000..c5ff3bc
--- /dev/null
+++ b/runtime/observatory/lib/src/elements/objectstore_view.dart
@@ -0,0 +1,24 @@
+// Copyright (c) 2013, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library objectstore_view_element;
+
+import 'dart:async';
+import 'observatory_element.dart';
+import 'package:observatory/service.dart';
+import 'package:polymer/polymer.dart';
+
+
+@CustomTag('objectstore-view')
+class ObjectStoreViewElement extends ObservatoryElement {
+  @published ObjectStore objectStore;
+
+  ObjectStoreViewElement.created() : super.created();
+
+  Future refresh() {
+    return objectStore.isolate.getObjectStore().then((newObjectStore) {
+      objectStore = newObjectStore;
+    });
+  }
+}
diff --git a/runtime/observatory/lib/src/elements/objectstore_view.html b/runtime/observatory/lib/src/elements/objectstore_view.html
new file mode 100644
index 0000000..a8d4d02
--- /dev/null
+++ b/runtime/observatory/lib/src/elements/objectstore_view.html
@@ -0,0 +1,46 @@
+<link rel="import" href="../../../../packages/polymer/polymer.html">
+<link rel="import" href="class_ref.html">
+<link rel="import" href="curly_block.html">
+<link rel="import" href="eval_box.html">
+<link rel="import" href="field_ref.html">
+<link rel="import" href="function_ref.html">
+<link rel="import" href="instance_ref.html">
+<link rel="import" href="observatory_element.html">
+<link rel="import" href="library_ref.html">
+<link rel="import" href="nav_bar.html">
+<link rel="import" href="script_ref.html">
+<link rel="import" href="view_footer.html">
+
+<polymer-element name="objectstore-view" extends="observatory-element">
+  <template>
+    <link rel="stylesheet" href="css/shared.css">
+
+    <nav-bar>
+      <top-nav-menu></top-nav-menu>
+      <vm-nav-menu vm="{{ objectStore.isolate.vm }}"></vm-nav-menu>
+      <isolate-nav-menu isolate="{{ objectStore.isolate }}" last="{{ true }}"></isolate-nav-menu>
+      <nav-refresh callback="{{ refresh }}"></nav-refresh>
+    </nav-bar>
+
+    <div class="content-centered-big">
+      <h1>
+        object store ({{ objectStore.fields.length }})
+      </h1>
+
+      <hr>
+
+      <div class="memberList">
+        <template repeat="{{ field in objectStore.fields }}">
+          <div class="memberItem">
+            <div class="memberName">{{ field.name }}</div>
+            <div class="memberValue"><any-service-ref ref="{{ field.value }}"></any-service-ref></div>
+          </div>
+        </template>
+      </div>
+    </div>
+
+    <view-footer></view-footer>
+ </template>
+</polymer-element>
+
+<script type="application/dart" src="objectstore_view.dart"></script>
diff --git a/runtime/observatory/lib/src/service/object.dart b/runtime/observatory/lib/src/service/object.dart
index b0be812..18d29bf 100644
--- a/runtime/observatory/lib/src/service/object.dart
+++ b/runtime/observatory/lib/src/service/object.dart
@@ -42,7 +42,11 @@
   static const kStreamNotSubscribed     = 104;
   static const kIsolateMustBeRunnable   = 105;
   static const kIsolateMustBePaused     = 106;
-  static const kIsolateIsReloading      = 107;
+  static const kIsolateIsReloading      = 1000;
+  static const kFileSystemAlreadyExists = 1001;
+  static const kFileSystemDoesNotExist  = 1002;
+  static const kFileDoesNotExist        = 1003;
+  static const kIsolateReloadFailed     = 1004;
 
   int code;
   Map data;
@@ -1636,6 +1640,14 @@
     return invokeRpc('getStack', {});
   }
 
+  Future<ObjectStore> getObjectStore() {
+    return invokeRpcNoUpgrade('_getObjectStore', {}).then((map) {
+      ObjectStore objectStore = new ObjectStore._empty(this);
+      objectStore._update(map, false);
+      return objectStore;
+    });
+  }
+
   Future<ServiceObject> _eval(ServiceObject target,
                               String expression) {
     Map params = {
@@ -1744,6 +1756,36 @@
   String toString() => "Isolate($name)";
 }
 
+
+class NamedField {
+  final String name;
+  final ServiceObject value;
+  NamedField(this.name, this.value);
+}
+
+
+class ObjectStore extends ServiceObject {
+  @observable List<NamedField> fields = new List<NamedField>();
+
+  ObjectStore._empty(ServiceObjectOwner owner) : super._empty(owner);
+
+  void _update(ObservableMap map, bool mapIsRef) {
+    // Extract full properties.
+    _upgradeCollection(map, isolate);
+
+    if (mapIsRef) {
+      return;
+    }
+
+    fields.clear();
+    map['fields'].forEach((key, value) {
+      fields.add(new NamedField(key, value));
+    });
+    _loaded = true;
+  }
+}
+
+
 /// A [ServiceObject] which implements [ObservableMap].
 class ServiceMap extends ServiceObject implements ObservableMap {
   final ObservableMap _map = new ObservableMap();
diff --git a/runtime/observatory/observatory_sources.gypi b/runtime/observatory/observatory_sources.gypi
index 9c9baec..c2084e1 100644
--- a/runtime/observatory/observatory_sources.gypi
+++ b/runtime/observatory/observatory_sources.gypi
@@ -105,6 +105,8 @@
     'lib/src/elements/library_ref.html',
     'lib/src/elements/library_view.dart',
     'lib/src/elements/library_view.html',
+    'lib/src/elements/objectstore_view.dart',
+    'lib/src/elements/objectstore_view.html',
     'lib/src/elements/logging.dart',
     'lib/src/elements/logging.html',
     'lib/src/elements/megamorphiccache_view.dart',
diff --git a/runtime/observatory/pubspec.yaml b/runtime/observatory/pubspec.yaml
index b00ab07..e71be36 100644
--- a/runtime/observatory/pubspec.yaml
+++ b/runtime/observatory/pubspec.yaml
@@ -18,6 +18,8 @@
   charted: ^0.3.0
   polymer: ^0.16.3
   unittest: < 0.12.0
+  js: ^0.6.0
+  js_util: ^0.2.0
   usage: any
 dependency_overrides:
   analyzer:
@@ -52,6 +54,10 @@
     path: ../../third_party/observatory_pub_packages/packages/initialize
   intl:
     path: ../../third_party/observatory_pub_packages/packages/intl
+  js:
+    path: ../../third_party/observatory_pub_packages/packages/js
+  js_util:
+    path: ../../third_party/observatory_pub_packages/packages/js_util
   logging:
     path: ../../third_party/observatory_pub_packages/packages/logging
   matcher:
@@ -76,6 +82,8 @@
     path: ../../third_party/observatory_pub_packages/packages/pool
   quiver:
     path: ../../third_party/observatory_pub_packages/packages/quiver
+  quiver_iterables:
+    path: ../../third_party/observatory_pub_packages/packages/quiver_iterables
   smoke:
     path: ../../third_party/observatory_pub_packages/packages/smoke
   source_maps:
diff --git a/runtime/observatory/tests/observatory_ui/observatory_ui.status b/runtime/observatory/tests/observatory_ui/observatory_ui.status
new file mode 100644
index 0000000..4d08c8a
--- /dev/null
+++ b/runtime/observatory/tests/observatory_ui/observatory_ui.status
@@ -0,0 +1,6 @@
+# Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+# for details. All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+[ $browser == false ]
+*: SkipByDesign
diff --git a/runtime/observatory/tests/service/dev_fs_test.dart b/runtime/observatory/tests/service/dev_fs_test.dart
new file mode 100644
index 0000000..08e5a5e
--- /dev/null
+++ b/runtime/observatory/tests/service/dev_fs_test.dart
@@ -0,0 +1,158 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+// VMOptions=--error_on_bad_type --error_on_bad_override
+
+import 'dart:convert';
+import 'package:observatory/service_io.dart';
+import 'package:unittest/unittest.dart';
+import 'test_helper.dart';
+
+var tests = [
+  (VM vm) async {
+    var result = await vm.invokeRpcNoUpgrade('_listDevFS', {});
+    expect(result['type'], equals('FileSystemList'));
+    expect(result['fsNames'].toString(), equals("[]"));
+
+    var params = {
+      'fsName': 'alpha'
+    };
+    result = await vm.invokeRpcNoUpgrade('_createDevFS', params);
+    expect(result['type'], equals('FileSystem'));
+    expect(result['name'], equals('alpha'));
+    expect(result['uri'], new isInstanceOf<String>());
+
+    result = await vm.invokeRpcNoUpgrade('_listDevFS', {});
+    expect(result['type'], equals('FileSystemList'));
+    expect(result['fsNames'].toString(), equals('[alpha]'));
+
+    bool caughtException;
+    try {
+      await vm.invokeRpcNoUpgrade('_createDevFS', params);
+      expect(false, isTrue, reason:'Unreachable');
+    } on ServerRpcException catch(e) {
+      caughtException = true;
+      expect(e.code, equals(ServerRpcException.kFileSystemAlreadyExists));
+      expect(e.message, "_createDevFS: file system 'alpha' already exists");
+    }
+    expect(caughtException, isTrue);
+
+    result = await vm.invokeRpcNoUpgrade('_deleteDevFS', params);
+    expect(result['type'], equals('Success'));
+
+    result = await vm.invokeRpcNoUpgrade('_listDevFS', {});
+    expect(result['type'], equals('FileSystemList'));
+    expect(result['fsNames'].toString(), equals("[]"));
+
+    caughtException = false;
+    try {
+      await vm.invokeRpcNoUpgrade('_deleteDevFS', params);
+      expect(false, isTrue, reason:'Unreachable');
+    } on ServerRpcException catch(e) {
+      caughtException = true;
+      expect(e.code, equals(ServerRpcException.kFileSystemDoesNotExist));
+      expect(e.message, "_deleteDevFS: file system 'alpha' does not exist");
+    }
+    expect(caughtException, isTrue);
+  },
+
+  (VM vm) async {
+    var fsId = 'banana';
+    var filePath = '/foobar.dat';
+    var fileContents = BASE64.encode(UTF8.encode('fileContents'));
+
+    var result;
+    // Create DevFS.
+    result = await vm.invokeRpcNoUpgrade('_createDevFS', { 'fsName': fsId });
+    expect(result['type'], equals('FileSystem'));
+    expect(result['name'], equals(fsId));
+    expect(result['uri'], new isInstanceOf<String>());
+
+    bool caughtException = false;
+    try {
+      await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': filePath,
+      });
+      expect(false, isTrue, reason:'Unreachable');
+    } on ServerRpcException catch(e) {
+      caughtException = true;
+      expect(e.code, equals(ServerRpcException.kFileDoesNotExist));
+      expect(e.message, startsWith("_readDevFSFile: FileSystemException: "));
+    }
+    expect(caughtException, isTrue);
+
+    // Write a file.
+    result = await vm.invokeRpcNoUpgrade('_writeDevFSFile', {
+        'fsName': fsId,
+        'path': filePath,
+        'fileContents': fileContents
+    });
+    expect(result['type'], equals('Success'));
+
+    // Read the file back.
+    result = await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': filePath,
+    });
+    expect(result['type'], equals('FSFile'));
+    expect(result['fileContents'], equals(fileContents));
+
+    // The leading '/' is optional.
+    result = await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': filePath.substring(1),
+    });
+    expect(result['type'], equals('FSFile'));
+    expect(result['fileContents'], equals(fileContents));
+
+    // Read a file outside of the fs.
+    caughtException = false;
+    try {
+      await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': '../foo',
+      });
+      expect(false, isTrue, reason:'Unreachable');
+    } on ServerRpcException catch(e) {
+      caughtException = true;
+      expect(e.code, equals(ServerRpcException.kInvalidParams));
+      expect(e.message, "_readDevFSFile: invalid 'path' parameter: ../foo");
+    }
+    expect(caughtException, isTrue);
+
+    // Write a set of files.
+    result = await vm.invokeRpcNoUpgrade('_writeDevFSFiles', {
+        'fsName': fsId,
+        'files': [
+          ['/a', BASE64.encode(UTF8.encode('a_contents'))],
+          ['/b', BASE64.encode(UTF8.encode('b_contents'))]
+        ]
+    });
+    expect(result['type'], equals('Success'));
+
+    // Read one of the files back.
+    result = await vm.invokeRpcNoUpgrade('_readDevFSFile', {
+        'fsName': fsId,
+        'path': '/b',
+    });
+    expect(result['type'], equals('FSFile'));
+    expect(result['fileContents'],
+           equals(BASE64.encode(UTF8.encode('b_contents'))));
+
+    // List all the files in the file system.
+    result = await vm.invokeRpcNoUpgrade('_listDevFSFiles', {
+        'fsName': fsId,
+    });
+    expect(result['type'], equals('FSFileList'));
+    expect(result['files'].length, equals(3));
+
+    // Delete DevFS.
+    result = await vm.invokeRpcNoUpgrade('_deleteDevFS', {
+        'fsName': fsId,
+    });
+    expect(result['type'], equals('Success'));
+  },
+];
+
+main(args) async => runVMTests(args, tests);
diff --git a/runtime/observatory/tests/service/evaluate_activation_in_method_class_other.dart b/runtime/observatory/tests/service/evaluate_activation_in_method_class_other.dart
index 6cc5879..bf070a1 100644
--- a/runtime/observatory/tests/service/evaluate_activation_in_method_class_other.dart
+++ b/runtime/observatory/tests/service/evaluate_activation_in_method_class_other.dart
@@ -11,6 +11,7 @@
   var instVar = 'Superclass';
   method() => 'Superclass';
   static staticMethod() => 'Superclass';
+  suppress_warning() => _instVar;
 }
 
 class Klass extends Superclass {
@@ -22,5 +23,7 @@
   test() {
     var _local = 'Klass';
     debugger();
+    // Suppress unused variable warning.
+    print(_local);
   }
 }
diff --git a/runtime/observatory/tests/service/evaluate_activation_in_method_class_test.dart b/runtime/observatory/tests/service/evaluate_activation_in_method_class_test.dart
index cfe4315..d07b832 100644
--- a/runtime/observatory/tests/service/evaluate_activation_in_method_class_test.dart
+++ b/runtime/observatory/tests/service/evaluate_activation_in_method_class_test.dart
@@ -20,6 +20,7 @@
   var instVar = 'Subclass';
   method() => 'Subclass';
   static staticMethod() => 'Subclass';
+  suppress_warning() => _instVar;
 }
 
 testeeDo() {
@@ -70,7 +71,7 @@
   result = await isolate.evalFrame(topFrame, 'staticMethod()');
   print(result);
   expect(result.valueAsString, equals('Klass'));
-  
+
   // function.Owner versus function.Origin
   // The mixin of Superclass is in _other.dart and the mixin
   // application is in _test.dart.
diff --git a/runtime/observatory/tests/service/get_isolate_after_language_error_test.dart b/runtime/observatory/tests/service/get_isolate_after_language_error_test.dart
index dc763a0..ccb00fe 100644
--- a/runtime/observatory/tests/service/get_isolate_after_language_error_test.dart
+++ b/runtime/observatory/tests/service/get_isolate_after_language_error_test.dart
@@ -3,7 +3,6 @@
 // BSD-style license that can be found in the LICENSE file.
 // VMOptions=--error_on_bad_type --error_on_bad_override
 
-import 'dart:async';
 import 'package:observatory/service_io.dart';
 import 'package:unittest/unittest.dart';
 import 'test_helper.dart';
@@ -29,4 +28,4 @@
 main(args) => runIsolateTestsSynchronous(args,
                                          tests,
                                          pause_on_exit: true,
-                                         testeeConcurrent: doThrow);
\ No newline at end of file
+                                         testeeConcurrent: doThrow);
diff --git a/runtime/observatory/tests/service/get_isolate_after_stack_overflow_error_test.dart b/runtime/observatory/tests/service/get_isolate_after_stack_overflow_error_test.dart
new file mode 100644
index 0000000..0223218
--- /dev/null
+++ b/runtime/observatory/tests/service/get_isolate_after_stack_overflow_error_test.dart
@@ -0,0 +1,33 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+// VMOptions=--error_on_bad_type --error_on_bad_override
+
+import 'package:observatory/service_io.dart';
+import 'package:unittest/unittest.dart';
+import 'test_helper.dart';
+import 'service_test_common.dart';
+
+// Non-tailable recursive function that should trigger a Stack Overflow.
+num factorialGrowth([num n = 1]) {
+  return factorialGrowth(n + 1) * n;
+}
+
+void nonTailableRecursion() {
+  factorialGrowth();
+}
+
+var tests = [
+  hasStoppedAtExit,
+
+  (Isolate isolate) async {
+    await isolate.reload();
+    expect(isolate.error, isNotNull);
+    expect(isolate.error.message.contains('Stack Overflow'), isTrue);
+  }
+];
+
+main(args) async => runIsolateTests(args,
+                              tests,
+                              pause_on_exit: true,
+                              testeeConcurrent: nonTailableRecursion);
diff --git a/runtime/observatory/tests/service/get_object_store_rpc_test.dart b/runtime/observatory/tests/service/get_object_store_rpc_test.dart
new file mode 100644
index 0000000..fb7f7f6
--- /dev/null
+++ b/runtime/observatory/tests/service/get_object_store_rpc_test.dart
@@ -0,0 +1,46 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+// VMOptions=--error_on_bad_type --error_on_bad_override
+
+import 'package:observatory/service_io.dart';
+import 'package:unittest/unittest.dart';
+import 'service_test_common.dart';
+import 'test_helper.dart';
+import 'dart:developer' as developer;
+
+void doDebugger() {
+  developer.debugger(message: "foo", when: true);
+}
+
+bool isClosureFunctionsList(NamedField field) {
+  return field.name == 'closure_functions_';
+}
+
+var tests = [
+
+// Initial data fetch and verify we've hit the breakpoint.
+(Isolate isolate) async {
+  await isolate.rootLibrary.load();
+  var script = isolate.rootLibrary.scripts[0];
+  await script.load();
+  await hasStoppedAtBreakpoint(isolate);
+  // Sanity check.
+  expect(isolate.pauseEvent.kind, equals(ServiceEvent.kPauseBreakpoint));
+},
+
+// Get object_store.
+(Isolate isolate) async {
+  var object_store = await isolate.getObjectStore();
+  expect(object_store.runtimeType, equals(ObjectStore));
+  // Sanity check.
+  expect(object_store.fields.length, greaterThanOrEqualTo(1));
+  // Checking Closures.
+  expect(object_store.fields.singleWhere(isClosureFunctionsList), isNotNull);
+  expect(object_store.fields.singleWhere(isClosureFunctionsList).value.isList, isTrue);
+}
+
+];
+
+main(args) => runIsolateTestsSynchronous(args, tests,
+                                          testeeConcurrent: doDebugger);
diff --git a/runtime/observatory/tests/service/instance_field_order_rpc_test.dart b/runtime/observatory/tests/service/instance_field_order_rpc_test.dart
index 0323f8c..f39d324 100644
--- a/runtime/observatory/tests/service/instance_field_order_rpc_test.dart
+++ b/runtime/observatory/tests/service/instance_field_order_rpc_test.dart
@@ -5,7 +5,6 @@
 
 library get_object_rpc_test;
 
-import 'dart:typed_data';
 import 'package:observatory/service_io.dart';
 import 'package:unittest/unittest.dart';
 import 'test_helper.dart';
diff --git a/runtime/observatory/tests/service/pause_idle_isolate_test.dart b/runtime/observatory/tests/service/pause_idle_isolate_test.dart
index a75dcd1..859291f 100644
--- a/runtime/observatory/tests/service/pause_idle_isolate_test.dart
+++ b/runtime/observatory/tests/service/pause_idle_isolate_test.dart
@@ -3,7 +3,6 @@
 // BSD-style license that can be found in the LICENSE file.
 // VMOptions=--error_on_bad_type --error_on_bad_override
 
-import 'dart:async';
 import 'dart:developer';
 import 'dart:io';
 import 'dart:isolate' show ReceivePort;
diff --git a/runtime/observatory/tests/service/pause_on_exceptions_test.dart b/runtime/observatory/tests/service/pause_on_exceptions_test.dart
index 03d4731..da2e250 100644
--- a/runtime/observatory/tests/service/pause_on_exceptions_test.dart
+++ b/runtime/observatory/tests/service/pause_on_exceptions_test.dart
@@ -10,7 +10,6 @@
 
 doThrow() {
   throw "TheException"; // Line 13.
-  return "end of doThrow";
 }
 
 doCaught() {
diff --git a/runtime/observatory/tests/service/service.status b/runtime/observatory/tests/service/service.status
index 31ffa21..b18b6ce 100644
--- a/runtime/observatory/tests/service/service.status
+++ b/runtime/observatory/tests/service/service.status
@@ -54,3 +54,19 @@
 # TODO(vegorov) re-enable when debugger, coverage and profiling is completely
 # fixed for SIMDBC.
 *: Skip
+
+[ $hot_reload ]
+add_breakpoint_rpc_test: Pass, Timeout, Fail, Crash
+code_test: Fail, Crash
+debugger_location_test: Timeout, Fail, Crash
+debugging_inlined_finally_test: Pass, Timeout, Fail, Crash
+dominator_tree_test: Timeout, Crash
+eval_test: Timeout, Fail, Crash
+evaluate_in_frame_rpc_test: Timeout, Fail, Crash
+get_cpu_profile_timeline_rpc_test: Pass, Timeout, Crash
+get_heap_map_rpc_test: Pass, Timeout, Fail, Crash
+get_vm_timeline_rpc_test: Timeout, Fail, Crash
+graph_test: Pass, Timeout, Fail, Crash
+smart_next_test: Pass, Timeout, Fail, Crash
+step_over_await_test: Pass, Timeout, Fail, Crash
+vm_timeline_events_test: Timeout, Fail, Crash
diff --git a/runtime/observatory/tests/service/set_name_rpc_test.dart b/runtime/observatory/tests/service/set_name_rpc_test.dart
new file mode 100644
index 0000000..bb3d8c8
--- /dev/null
+++ b/runtime/observatory/tests/service/set_name_rpc_test.dart
@@ -0,0 +1,36 @@
+// Copyright (c) 2015, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+// VMOptions=--error_on_bad_type --error_on_bad_override --vm-name=Walter
+
+import 'package:observatory/service_io.dart';
+import 'package:unittest/unittest.dart';
+import 'test_helper.dart';
+import 'dart:async';
+
+var tests = [
+  (Isolate isolate) async {
+    expect(isolate.name, equals('set_name_rpc_test.dart\$main'));
+
+    Completer completer = new Completer();
+    var stream = await isolate.vm.getEventStream(VM.kIsolateStream);
+    var subscription;
+    subscription = stream.listen((ServiceEvent event) {
+      if (event.kind == ServiceEvent.kIsolateUpdate) {
+        expect(event.owner.type, equals('Isolate'));
+        expect(event.owner.name, equals('Barbara'));
+        subscription.cancel();
+        completer.complete();
+      }
+    });
+
+    var result = await isolate.setName('Barbara');
+    expect(result.type, equals('Success'));
+
+    await completer.future;
+    expect(isolate.name, equals('Barbara'));
+
+  }
+];
+
+main(args) async => runIsolateTests(args, tests);
diff --git a/runtime/observatory/tests/service/string_escaping_test.dart b/runtime/observatory/tests/service/string_escaping_test.dart
index fd99cee..0f5dee4 100644
--- a/runtime/observatory/tests/service/string_escaping_test.dart
+++ b/runtime/observatory/tests/service/string_escaping_test.dart
@@ -48,44 +48,47 @@
   malformedWithTrailSurrogate = "before" + "𝄞"[1] + "after";
 }
 
+testStrings(Isolate isolate) async {
+  Library lib = isolate.rootLibrary;
+  await lib.load();
+  for (var variable in lib.variables) {
+    await variable.load();
+  }
+
+  expectFullString(String varName, String varValueAsString) {
+    Field field = lib.variables.singleWhere((v) => v.name == varName);
+    Instance value = field.staticValue;
+    expect(value.valueAsString, equals(varValueAsString));
+    expect(value.valueAsStringIsTruncated, isFalse);
+  }
+
+  expectTruncatedString(String varName, String varValueAsString) {
+    Field field = lib.variables.singleWhere((v) => v.name == varName);
+    Instance value = field.staticValue;
+    print(value.valueAsString);
+    expect(varValueAsString, startsWith(value.valueAsString));
+    expect(value.valueAsStringIsTruncated, isTrue);
+  }
+
+  script();  // Need to initialize variables in the testing isolate.
+  expectFullString('ascii', ascii);
+  expectFullString('latin1', latin1);
+  expectFullString('unicode', unicode);
+  expectFullString('hebrew', hebrew);
+  expectFullString('singleQuotes', singleQuotes);
+  expectFullString('doubleQuotes', doubleQuotes);
+  expectFullString('newLines', newLines);
+  expectFullString('tabs', tabs);
+  expectFullString('suggrogatePairs', suggrogatePairs);
+  expectFullString('nullInTheMiddle', nullInTheMiddle);
+  expectTruncatedString('longStringEven', longStringEven);
+  expectTruncatedString('longStringOdd', longStringOdd);
+  expectFullString('malformedWithLeadSurrogate', malformedWithLeadSurrogate);
+  expectFullString('malformedWithTrailSurrogate', malformedWithTrailSurrogate);
+}
+
 var tests = [
-
-(Isolate isolate) =>
-  isolate.rootLibrary.load().then((Library lib) {
-    expectFullString(String varName, String varValueAsString) {
-      Field field = lib.variables.singleWhere((v) => v.name == varName);
-      field.load().then((_) {
-        Instance value = field.staticValue;
-        expect(value.valueAsString, equals(varValueAsString));
-        expect(value.valueAsStringIsTruncated, isFalse);
-      });
-    }
-    expectTruncatedString(String varName, String varValueAsString) {
-      Field field = lib.variables.singleWhere((v) => v.name == varName);
-      field.load().then((_) {
-        Instance value = field.staticValue;
-        expect(varValueAsString, startsWith(value.valueAsString));
-        expect(value.valueAsStringIsTruncated, isTrue);
-      });
-    }
-
-    script();  // Need to initialize variables in the testing isolate.
-    expectFullString('ascii', ascii);
-    expectFullString('latin1', latin1);
-    expectFullString('unicode', unicode);
-    expectFullString('hebrew', hebrew);
-    expectFullString('singleQuotes', singleQuotes);
-    expectFullString('doubleQuotes', doubleQuotes);
-    expectFullString('newLines', newLines);
-    expectFullString('tabs', tabs);
-    expectFullString('suggrogatePairs', suggrogatePairs);
-    expectFullString('nullInTheMiddle', nullInTheMiddle);
-    expectTruncatedString('longStringEven', longStringEven);
-    expectTruncatedString('longStringOdd', longStringOdd);
-    expectFullString('malformedWithLeadSurrogate', malformedWithLeadSurrogate);
-    expectFullString('malformedWithTrailSurrogate', malformedWithTrailSurrogate);
-  }),
-
+  testStrings,
 ];
 
 main(args) => runIsolateTests(args, tests, testeeBefore: script);
diff --git a/runtime/observatory/tests/service/test_helper.dart b/runtime/observatory/tests/service/test_helper.dart
index e22cbb2..0e8b6e9 100644
--- a/runtime/observatory/tests/service/test_helper.dart
+++ b/runtime/observatory/tests/service/test_helper.dart
@@ -36,7 +36,7 @@
   return Platform.environment[_SKY_SHELL_ENV_KEY];
 }
 
-class _SerivceTesteeRunner {
+class _ServiceTesteeRunner {
   Future run({testeeBefore(): null,
               testeeConcurrent(): null,
               bool pause_on_start: false,
@@ -310,7 +310,14 @@
         await process.requestExit();
       }, onError: (e, st) {
         process.requestExit();
-        if (!_isWebSocketDisconnect(e)) {
+        // TODO: remove this workaround.
+        // This is necessary because of non-awaited operations,
+        // e.g. object.dart (398~402).
+        // When an exception is thrown inside a test (directly or via await),
+        // the stack trace is non-null and shows where the exception was thrown.
+        // Conversely, when the exception comes from an error in a non-awaited
+        // Future, the stack trace is null.
+        if (st != null || !_isWebSocketDisconnect(e)) {
           print('Unexpected exception in service tests: $e $st');
           throw e;
         }
@@ -335,7 +342,7 @@
                         bool pause_on_unhandled_exceptions: false}) async {
   assert(!pause_on_start || testeeBefore == null);
   if (_isTestee()) {
-    new _SerivceTesteeRunner().run(testeeBefore: testeeBefore,
+    new _ServiceTesteeRunner().run(testeeBefore: testeeBefore,
                                    testeeConcurrent: testeeConcurrent,
                                    pause_on_start: pause_on_start,
                                    pause_on_exit: pause_on_exit);
@@ -373,7 +380,7 @@
                                  bool pause_on_unhandled_exceptions: false}) {
   assert(!pause_on_start || testeeBefore == null);
   if (_isTestee()) {
-    new _SerivceTesteeRunner().runSync(testeeBeforeSync: testeeBefore,
+    new _ServiceTesteeRunner().runSync(testeeBeforeSync: testeeBefore,
                                        testeeConcurrentSync: testeeConcurrent,
                                        pause_on_start: pause_on_start,
                                        pause_on_exit: pause_on_exit);
@@ -406,7 +413,7 @@
                    bool verbose_vm: false,
                    bool pause_on_unhandled_exceptions: false}) async {
   if (_isTestee()) {
-    new _SerivceTesteeRunner().run(testeeBefore: testeeBefore,
+    new _ServiceTesteeRunner().run(testeeBefore: testeeBefore,
                                    testeeConcurrent: testeeConcurrent,
                                    pause_on_start: pause_on_start,
                                    pause_on_exit: pause_on_exit);
diff --git a/runtime/platform/assert.cc b/runtime/platform/assert.cc
index e9d1c4f..c5b91e7 100644
--- a/runtime/platform/assert.cc
+++ b/runtime/platform/assert.cc
@@ -4,9 +4,6 @@
 
 #include "platform/assert.h"
 
-#include <sstream>
-#include <string>
-
 #include "platform/globals.h"
 #include "vm/os.h"
 
@@ -18,19 +15,27 @@
 }
 
 void DynamicAssertionHelper::Fail(const char* format, ...) {
-  std::ostringstream stream;
-  stream << file_ << ":" << line_ << ": error: ";
+  // Take only the last 1KB of the file name if it is longer.
+  const intptr_t file_len = strlen(file_);
+  const intptr_t file_offset = (file_len > (1 * KB)) ? file_len - (1 * KB) : 0;
+  const char* file = file_ + file_offset;
 
+  // Print the file and line number into the buffer.
+  char buffer[4 * KB];
+  intptr_t file_and_line_length =
+      snprintf(buffer, sizeof(buffer), "%s: %d: error: ", file, line_);
+
+  // Print the error message into the buffer.
   va_list arguments;
   va_start(arguments, format);
-  char buffer[4 * KB];
-  vsnprintf(buffer, sizeof(buffer), format, arguments);
+  vsnprintf(buffer + file_and_line_length,
+            sizeof(buffer) - file_and_line_length,
+            format,
+            arguments);
   va_end(arguments);
-  stream << buffer << std::endl;
 
-  // Get the message from the string stream and dump it on stderr.
-  std::string message = stream.str();
-  fprintf(stderr, "%s", message.c_str());
+  // Print the buffer on stderr.
+  fprintf(stderr, "%s\n", buffer);
   fflush(stderr);
 
   // In case of failed assertions, abort right away. Otherwise, wait
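The rewritten Fail() above replaces the ostringstream with a fixed 4 KB buffer: snprintf writes the "file: line: error: " prefix and returns its length, and vsnprintf then appends the formatted message at that offset. A minimal standalone sketch of the same pattern follows; report_error and its bounds check are illustrative additions, not VM code.

    // Sketch only: "snprintf a prefix, then vsnprintf the rest at the returned offset".
    #include <cstdarg>
    #include <cstdio>

    static void report_error(const char* file, int line, const char* format, ...) {
      char buffer[4 * 1024];
      // Write "file: line: error: " and remember how many characters were used.
      int prefix = snprintf(buffer, sizeof(buffer), "%s: %d: error: ", file, line);
      if (prefix < 0 || prefix >= static_cast<int>(sizeof(buffer))) return;
      // Append the formatted message directly after the prefix.
      va_list args;
      va_start(args, format);
      vsnprintf(buffer + prefix, sizeof(buffer) - prefix, format, args);
      va_end(args);
      fprintf(stderr, "%s\n", buffer);
      fflush(stderr);
    }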
diff --git a/runtime/platform/globals.h b/runtime/platform/globals.h
index e3d1a34..5e3b011 100644
--- a/runtime/platform/globals.h
+++ b/runtime/platform/globals.h
@@ -113,7 +113,7 @@
 // Windows, both 32- and 64-bit, regardless of the check for _WIN32.
 #define TARGET_OS_WINDOWS 1
 
-#else
+#elif !defined(TARGET_OS_FUCHSIA)
 #error Automatic target os detection failed.
 #endif
 
diff --git a/runtime/platform/math.h b/runtime/platform/math.h
index 039153e..0dc34008 100644
--- a/runtime/platform/math.h
+++ b/runtime/platform/math.h
@@ -8,11 +8,16 @@
 // We must take these math functions from the C++ header file as long as we
 // are using the STL. Otherwise the Android build will break due to confusion
 // between C++ and C headers when math.h is also included.
+#if !defined(TARGET_OS_FUCHSIA)
 #include <cmath>
 
 #define isinf(val) std::isinf(val)
 #define isnan(val) std::isnan(val)
 #define signbit(val) std::signbit(val)
 #define isfinite(val) std::isfinite(val)
+#else
+// TODO(zra): When Fuchsia has STL, do the same thing as above.
+#include <math.h>
+#endif
 
 #endif  // PLATFORM_MATH_H_
diff --git a/runtime/platform/text_buffer.cc b/runtime/platform/text_buffer.cc
index 1536dac..9cbb577 100644
--- a/runtime/platform/text_buffer.cc
+++ b/runtime/platform/text_buffer.cc
@@ -8,6 +8,7 @@
 #include "platform/globals.h"
 #include "platform/utils.h"
 #include "vm/os.h"
+#include "vm/unicode.h"
 
 namespace dart {
 
@@ -78,60 +79,52 @@
   return len;
 }
 
-
-// Write a UTF-16 code unit so it can be read by a JSON parser in a string
-// literal. Use escape sequences for characters other than printable ASCII.
+// Write a UTF-32 code unit so it can be read by a JSON parser in a string
+// literal. Use the official encoding from the JSON specification: http://json.org/
 void TextBuffer::EscapeAndAddCodeUnit(uint32_t codeunit) {
   switch (codeunit) {
     case '"':
-      Printf("%s", "\\\"");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\\""), 2);
       break;
     case '\\':
-      Printf("%s", "\\\\");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\\\"), 2);
       break;
     case '/':
-      Printf("%s", "\\/");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\/"), 2);
       break;
     case '\b':
-      Printf("%s", "\\b");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\b"), 2);
       break;
     case '\f':
-      Printf("%s", "\\f");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\f"), 2);
       break;
     case '\n':
-      Printf("%s", "\\n");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\n"), 2);
       break;
     case '\r':
-      Printf("%s", "\\r");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\r"), 2);
       break;
     case '\t':
-      Printf("%s", "\\t");
+      AddRaw(reinterpret_cast<uint8_t const*>("\\t"), 2);
       break;
     default:
       if (codeunit < 0x20) {
-        // Encode character as \u00HH.
-        uint32_t digit2 = (codeunit >> 4) & 0xf;
-        uint32_t digit3 = (codeunit & 0xf);
-        Printf("\\u00%c%c",
-               digit2 > 9 ? 'A' + (digit2 - 10) : '0' + digit2,
-               digit3 > 9 ? 'A' + (digit3 - 10) : '0' + digit3);
-      } else if (codeunit > 127) {
-        // Encode character as \uHHHH.
-        uint32_t digit0 = (codeunit >> 12) & 0xf;
-        uint32_t digit1 = (codeunit >> 8) & 0xf;
-        uint32_t digit2 = (codeunit >> 4) & 0xf;
-        uint32_t digit3 = (codeunit & 0xf);
-        Printf("\\u%c%c%c%c",
-               digit0 > 9 ? 'A' + (digit0 - 10) : '0' + digit0,
-               digit1 > 9 ? 'A' + (digit1 - 10) : '0' + digit1,
-               digit2 > 9 ? 'A' + (digit2 - 10) : '0' + digit2,
-               digit3 > 9 ? 'A' + (digit3 - 10) : '0' + digit3);
+        EscapeAndAddUTF16CodeUnit(codeunit);
       } else {
-        AddChar(codeunit);
+        char encoded[6];
+        intptr_t length = Utf8::Length(codeunit);
+        Utf8::Encode(codeunit, encoded);
+        AddRaw(reinterpret_cast<uint8_t const*>(encoded), length);
       }
   }
 }
 
+// Write an incomplete UTF-16 code unit so it can be read by a JSON parser in a
+// string literal.
+void TextBuffer::EscapeAndAddUTF16CodeUnit(uint16_t codeunit) {
+  Printf("\\u%04X", codeunit);
+}
+
 
 void TextBuffer::AddString(const char* s) {
   Printf("%s", s);
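For reference, the escaping rules the new EscapeAndAddCodeUnit follows are: named escapes for the characters JSON requires, \u00XX for the remaining control characters (now delegated to EscapeAndAddUTF16CodeUnit), and raw UTF-8 bytes for everything else. Below is a self-contained sketch of the same rules using std::string and hand-rolled UTF-8 encoding instead of TextBuffer and Utf8::Encode; it is illustrative only.

    #include <cstdint>
    #include <cstdio>
    #include <string>

    // Append one code point to 'out' using the JSON escaping rules above.
    static void EscapeAndAppend(uint32_t cp, std::string* out) {
      switch (cp) {
        case '"':  *out += "\\\""; return;
        case '\\': *out += "\\\\"; return;
        case '/':  *out += "\\/";  return;
        case '\b': *out += "\\b";  return;
        case '\f': *out += "\\f";  return;
        case '\n': *out += "\\n";  return;
        case '\r': *out += "\\r";  return;
        case '\t': *out += "\\t";  return;
      }
      if (cp < 0x20) {
        char buf[8];
        snprintf(buf, sizeof(buf), "\\u%04X", static_cast<unsigned>(cp));
        *out += buf;
      } else if (cp < 0x80) {          // 1-byte UTF-8.
        *out += static_cast<char>(cp);
      } else if (cp < 0x800) {         // 2-byte UTF-8.
        *out += static_cast<char>(0xC0 | (cp >> 6));
        *out += static_cast<char>(0x80 | (cp & 0x3F));
      } else if (cp < 0x10000) {       // 3-byte UTF-8.
        *out += static_cast<char>(0xE0 | (cp >> 12));
        *out += static_cast<char>(0x80 | ((cp >> 6) & 0x3F));
        *out += static_cast<char>(0x80 | (cp & 0x3F));
      } else {                         // 4-byte UTF-8.
        *out += static_cast<char>(0xF0 | (cp >> 18));
        *out += static_cast<char>(0x80 | ((cp >> 12) & 0x3F));
        *out += static_cast<char>(0x80 | ((cp >> 6) & 0x3F));
        *out += static_cast<char>(0x80 | (cp & 0x3F));
      }
    }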
diff --git a/runtime/platform/text_buffer.h b/runtime/platform/text_buffer.h
index 1908ecb..6155cc4 100644
--- a/runtime/platform/text_buffer.h
+++ b/runtime/platform/text_buffer.h
@@ -20,6 +20,7 @@
 
   intptr_t Printf(const char* format, ...) PRINTF_ATTRIBUTE(2, 3);
   void AddChar(char ch);
+  void EscapeAndAddUTF16CodeUnit(uint16_t cu);
   void EscapeAndAddCodeUnit(uint32_t cu);
   void AddString(const char* s);
   void AddEscapedString(const char* s);
diff --git a/runtime/platform/utils.h b/runtime/platform/utils.h
index 508e0e7..65bc14f 100644
--- a/runtime/platform/utils.h
+++ b/runtime/platform/utils.h
@@ -215,6 +215,8 @@
 
 #if defined(TARGET_OS_ANDROID)
 #include "platform/utils_android.h"
+#elif defined(TARGET_OS_FUCHSIA)
+#include "platform/utils_fuchsia.h"
 #elif defined(TARGET_OS_LINUX)
 #include "platform/utils_linux.h"
 #elif defined(TARGET_OS_MACOS)
diff --git a/runtime/platform/utils_fuchsia.h b/runtime/platform/utils_fuchsia.h
new file mode 100644
index 0000000..7054225
--- /dev/null
+++ b/runtime/platform/utils_fuchsia.h
@@ -0,0 +1,67 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef PLATFORM_UTILS_FUCHSIA_H_
+#define PLATFORM_UTILS_FUCHSIA_H_
+
+#include "platform/assert.h"
+
+namespace dart {
+
+inline int Utils::CountLeadingZeros(uword x) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline int Utils::CountTrailingZeros(uword x) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint16_t Utils::HostToBigEndian16(uint16_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint32_t Utils::HostToBigEndian32(uint32_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint64_t Utils::HostToBigEndian64(uint64_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint16_t Utils::HostToLittleEndian16(uint16_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint32_t Utils::HostToLittleEndian32(uint32_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint64_t Utils::HostToLittleEndian64(uint64_t value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline char* Utils::StrError(int err, char* buffer, size_t bufsize) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+}  // namespace dart
+
+#endif  // PLATFORM_UTILS_FUCHSIA_H_
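The new utils_fuchsia.h above intentionally leaves every helper UNIMPLEMENTED() until the port matures. As a rough, hypothetical sketch of how other ports typically back these helpers with GCC/Clang builtins (not what the Fuchsia port does):

    #include <stdint.h>

    static inline int CountLeadingZeros64(uint64_t x) {
      return (x == 0) ? 64 : __builtin_clzll(x);   // GCC/Clang builtin.
    }

    static inline int CountTrailingZeros64(uint64_t x) {
      return (x == 0) ? 64 : __builtin_ctzll(x);
    }

    static inline uint32_t HostToBigEndian32(uint32_t value) {
    #if defined(__BYTE_ORDER__) && (__BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__)
      return __builtin_bswap32(value);             // Swap on little-endian hosts.
    #else
      return value;                                // Already big-endian.
    #endif
    }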
diff --git a/runtime/tests/vm/vm.status b/runtime/tests/vm/vm.status
index e34859d..1faa41c 100644
--- a/runtime/tests/vm/vm.status
+++ b/runtime/tests/vm/vm.status
@@ -13,6 +13,9 @@
 cc/ArrayNew_Overflow_Crash: Crash, Timeout
 cc/AllocGeneric_Overflow: Crash, Timeout
 cc/CodeImmutability: Crash
+cc/Fail0: Fail
+cc/Fail1: Fail
+cc/Fail2: Fail
 
 cc/SNPrint_BadArgs: Skip
 
@@ -85,12 +88,8 @@
 [ $builder_tag == asan ]
 cc/CodeImmutability: Fail,OK # Address Sanitizer turns a crash into a failure.
 
-[ $arch == ia32 && $builder_tag == asan ]
-cc/Dart2JSCompileAll: Crash # Issue 26487 - stack overflow
-cc/Dart2JSCompilerStats: Crash # Issue 26487 - stack overflow
-
-[ $noopt ]
-dart/byte_array_test: Crash # Incompatible flag --disable_alloc_stubs_after_gc
+[ $noopt || $compiler == precompiler ]
+dart/byte_array_test: Skip # Incompatible flag --disable_alloc_stubs_after_gc
 
 [ $noopt || $compiler == precompiler || $mode == product ]
 dart/redirection_type_shuffling_test: SkipByDesign # Imports dart:mirrors
@@ -145,6 +144,7 @@
 cc/SourceReport_CallSites_SimpleCall: Skip
 cc/SourceReport_Coverage_AllFunctions: Skip
 cc/SourceReport_Coverage_ForceCompile: Skip
+cc/SourceReport_Coverage_AllFunctions_ForceCompile: Skip
 cc/SourceReport_Coverage_NestedFunctions: Skip
 cc/SourceReport_Coverage_SimpleCall: Skip
 cc/SourceReport_Coverage_UnusedClass_NoForceCompile: Skip
diff --git a/runtime/tools/create_snapshot_bin.py b/runtime/tools/create_snapshot_bin.py
index 78213cf..65e56de 100755
--- a/runtime/tools/create_snapshot_bin.py
+++ b/runtime/tools/create_snapshot_bin.py
@@ -45,6 +45,9 @@
   result.add_option("--package_root",
       action="store", type="string",
       help="path used to resolve package: imports.")
+  result.add_option("--packages",
+      action="store", type="string",
+      help="package config file used to reasolve package: imports.")
   result.add_option("--url_mapping",
       default=[],
       action="append",
@@ -112,6 +115,10 @@
   if options.package_root:
     script_args.append(''.join([ "--package_root=", options.package_root]))
 
+  # Pass along the packages if there is one.
+  if options.packages:
+    script_args.append(''.join([ "--packages=", options.packages]))
+
   # First setup the vm isolate and regular isolate snapshot output filename.
   script_args.append(''.join([ "--vm_isolate_snapshot=",
                                options.vm_output_bin ]))
diff --git a/runtime/vm/aot_optimizer.cc b/runtime/vm/aot_optimizer.cc
index 8b66e49..cdc4753 100644
--- a/runtime/vm/aot_optimizer.cc
+++ b/runtime/vm/aot_optimizer.cc
@@ -76,7 +76,7 @@
           const ICData& ic_data = ICData::ZoneHandle(zone(), ICData::New(
               function(), call->function_name(),
               arguments_descriptor, call->deopt_id(),
-              call->checked_argument_count()));
+              call->checked_argument_count(), false));
           call->set_ic_data(&ic_data);
         }
       }
@@ -108,8 +108,6 @@
             VisitInstanceCall(call);
           }
         }
-      } else if (instr->IsPolymorphicInstanceCall()) {
-        SpecializePolymorphicInstanceCall(instr->AsPolymorphicInstanceCall());
       }
     }
     current_iterator_ = NULL;
@@ -258,7 +256,7 @@
         String::Handle(Z, ic_data.target_name()),
         Object::empty_array(),  // Dummy argument descriptor.
         ic_data.deopt_id(),
-        ic_data.NumArgsTested()));
+        ic_data.NumArgsTested(), false));
     new_ic_data.SetDeoptReasons(ic_data.DeoptReasons());
     new_ic_data.AddReceiverCheck(cid, function);
     return new_ic_data;
@@ -268,37 +266,6 @@
 }
 
 
-void AotOptimizer::SpecializePolymorphicInstanceCall(
-    PolymorphicInstanceCallInstr* call) {
-  if (!FLAG_polymorphic_with_deopt) {
-    // Specialization adds receiver checks which can lead to deoptimization.
-    return;
-  }
-  if (!call->with_checks()) {
-    return;  // Already specialized.
-  }
-
-  const intptr_t receiver_cid =
-      call->PushArgumentAt(0)->value()->Type()->ToCid();
-  if (receiver_cid == kDynamicCid) {
-    return;  // No information about receiver was infered.
-  }
-
-  const ICData& ic_data = TrySpecializeICData(call->ic_data(), receiver_cid);
-  if (ic_data.raw() == call->ic_data().raw()) {
-    // No specialization.
-    return;
-  }
-
-  PolymorphicInstanceCallInstr* specialized =
-      new(Z) PolymorphicInstanceCallInstr(call->instance_call(),
-                                          ic_data,
-                                          /* with_checks = */ false,
-                                          /* complete = */ false);
-  call->ReplaceWith(specialized, current_iterator());
-}
-
-
 static BinarySmiOpInstr* AsSmiShiftLeftInstruction(Definition* d) {
   BinarySmiOpInstr* instr = d->AsBinarySmiOp();
   if ((instr != NULL) && (instr->op_kind() == Token::kSHL)) {
@@ -779,7 +746,8 @@
   if (ic_data.NumberOfChecks() != 1) {
     return false;
   }
-  return TryReplaceInstanceCallWithInline(call);
+  return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+      flow_graph_, current_iterator(), call);
 }
 
 
@@ -1662,50 +1630,6 @@
 }
 
 
-bool AotOptimizer::TryReplaceInstanceCallWithInline(
-    InstanceCallInstr* call) {
-  Function& target = Function::Handle(Z);
-  GrowableArray<intptr_t> class_ids;
-  call->ic_data()->GetCheckAt(0, &class_ids, &target);
-  const intptr_t receiver_cid = class_ids[0];
-
-  TargetEntryInstr* entry;
-  Definition* last;
-  if (!FlowGraphInliner::TryInlineRecognizedMethod(flow_graph_,
-                                                   receiver_cid,
-                                                   target,
-                                                   call,
-                                                   call->ArgumentAt(0),
-                                                   call->token_pos(),
-                                                   *call->ic_data(),
-                                                   &entry, &last)) {
-    return false;
-  }
-
-  // Insert receiver class check.
-  AddReceiverCheck(call);
-  // Remove the original push arguments.
-  for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
-    PushArgumentInstr* push = call->PushArgumentAt(i);
-    push->ReplaceUsesWith(push->value()->definition());
-    push->RemoveFromGraph();
-  }
-  // Replace all uses of this definition with the result.
-  call->ReplaceUsesWith(last);
-  // Finally insert the sequence other definition in place of this one in the
-  // graph.
-  call->previous()->LinkTo(entry->next());
-  entry->UnuseAllInputs();  // Entry block is not in the graph.
-  last->LinkTo(call);
-  // Remove through the iterator.
-  ASSERT(current_iterator()->Current() == call);
-  current_iterator()->RemoveCurrentFromGraph();
-  call->set_previous(NULL);
-  call->set_next(NULL);
-  return true;
-}
-
-
 void AotOptimizer::ReplaceWithMathCFunction(
     InstanceCallInstr* call,
     MethodRecognizer::Kind recognized_kind) {
@@ -1761,49 +1685,15 @@
   MethodRecognizer::Kind recognized_kind =
       MethodRecognizer::RecognizeKind(target);
 
-  if ((recognized_kind == MethodRecognizer::kGrowableArraySetData) &&
-      (ic_data.NumberOfChecks() == 1) &&
-      (class_ids[0] == kGrowableObjectArrayCid)) {
-    // This is an internal method, no need to check argument types.
-    Definition* array = call->ArgumentAt(0);
-    Definition* value = call->ArgumentAt(1);
-    StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr(
-        GrowableObjectArray::data_offset(),
-        new(Z) Value(array),
-        new(Z) Value(value),
-        kEmitStoreBarrier,
-        call->token_pos());
-    ReplaceCall(call, store);
-    return true;
-  }
-
-  if ((recognized_kind == MethodRecognizer::kGrowableArraySetLength) &&
-      (ic_data.NumberOfChecks() == 1) &&
-      (class_ids[0] == kGrowableObjectArrayCid)) {
-    // This is an internal method, no need to check argument types nor
-    // range.
-    Definition* array = call->ArgumentAt(0);
-    Definition* value = call->ArgumentAt(1);
-    StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr(
-        GrowableObjectArray::length_offset(),
-        new(Z) Value(array),
-        new(Z) Value(value),
-        kNoStoreBarrier,
-        call->token_pos());
-    ReplaceCall(call, store);
-    return true;
-  }
-
   if ((recognized_kind == MethodRecognizer::kOneByteStringCodeUnitAt) ||
       (recognized_kind == MethodRecognizer::kTwoByteStringCodeUnitAt) ||
       (recognized_kind == MethodRecognizer::kExternalOneByteStringCodeUnitAt) ||
-      (recognized_kind == MethodRecognizer::kExternalTwoByteStringCodeUnitAt)) {
+      (recognized_kind == MethodRecognizer::kExternalTwoByteStringCodeUnitAt) ||
+      (recognized_kind == MethodRecognizer::kGrowableArraySetData) ||
+      (recognized_kind == MethodRecognizer::kGrowableArraySetLength)) {
       ASSERT(ic_data.NumberOfChecks() == 1);
-      ASSERT((class_ids[0] == kOneByteStringCid) ||
-             (class_ids[0] == kTwoByteStringCid) ||
-             (class_ids[0] == kExternalOneByteStringCid) ||
-             (class_ids[0] == kExternalTwoByteStringCid));
-    return TryReplaceInstanceCallWithInline(call);
+    return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+        flow_graph_, current_iterator(), call);
   }
 
   if ((recognized_kind == MethodRecognizer::kStringBaseCharAt) &&
@@ -1812,7 +1702,8 @@
              (class_ids[0] == kTwoByteStringCid) ||
              (class_ids[0] == kExternalOneByteStringCid) ||
              (class_ids[0] == kExternalTwoByteStringCid));
-    return TryReplaceInstanceCallWithInline(call);
+    return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+        flow_graph_, current_iterator(), call);
   }
 
   if ((class_ids[0] == kOneByteStringCid) && (ic_data.NumberOfChecks() == 1)) {
@@ -1900,7 +1791,8 @@
       case MethodRecognizer::kDoubleSub:
       case MethodRecognizer::kDoubleMul:
       case MethodRecognizer::kDoubleDiv:
-        return TryReplaceInstanceCallWithInline(call);
+        return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+            flow_graph_, current_iterator(), call);
       default:
         // Unsupported method.
         return false;
@@ -1909,7 +1801,8 @@
 
   if (IsSupportedByteArrayViewCid(class_ids[0]) &&
       (ic_data.NumberOfChecks() == 1)) {
-    return TryReplaceInstanceCallWithInline(call);
+    return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+        flow_graph_, current_iterator(), call);
   }
 
   if ((class_ids[0] == kFloat32x4Cid) && (ic_data.NumberOfChecks() == 1)) {
@@ -1982,7 +1875,8 @@
 // If type tests specified by 'ic_data' do not depend on type arguments,
 // return mapping cid->result in 'results' (i : cid; i + 1: result).
 // If all tests yield the same result, return it otherwise return Bool::null.
-// If no mapping is possible, 'results' is empty.
+// If no mapping is possible, 'results' has fewer than
+// (ic_data.NumberOfChecks() * 2) entries.
 // An instance-of test returning all same results can be converted to a class
 // check.
 RawBool* AotOptimizer::InstanceOfAsBool(
@@ -2111,7 +2005,7 @@
 // TODO(srdjan): Do also for other than 'int' type.
 static bool TryExpandTestCidsResult(ZoneGrowableArray<intptr_t>* results,
                                     const AbstractType& type) {
-  ASSERT(results->length() >= 2);  // At least on eentry.
+  ASSERT(results->length() >= 2);  // At least on entry.
   const ClassTable& class_table = *Isolate::Current()->class_table();
   if ((*results)[0] != kSmiCid) {
     const Class& cls = Class::Handle(class_table.At(kSmiCid));
@@ -2133,14 +2027,26 @@
 
   ASSERT(type.IsInstantiated() && !type.IsMalformedOrMalbounded());
   ASSERT(results->length() >= 2);
-  if (type.IsIntType()) {
+  if (type.IsSmiType()) {
+    ASSERT((*results)[0] == kSmiCid);
+    return false;
+  } else if (type.IsIntType()) {
     ASSERT((*results)[0] == kSmiCid);
     TryAddTest(results, kMintCid, true);
     TryAddTest(results, kBigintCid, true);
     // Cannot deoptimize since all tests returning true have been added.
     return false;
+  } else if (type.IsNumberType()) {
+    ASSERT((*results)[0] == kSmiCid);
+    TryAddTest(results, kMintCid, true);
+    TryAddTest(results, kBigintCid, true);
+    TryAddTest(results, kDoubleCid, true);
+    return false;
+  } else if (type.IsDoubleType()) {
+    ASSERT((*results)[0] == kSmiCid);
+    TryAddTest(results, kDoubleCid, true);
+    return false;
   }
-
   return true;  // May deoptimize since we have not identified all 'true' tests.
 }
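To make the shape of 'results' in the functions above concrete: it is a flat array of (cid, test result) pairs, which is why the assertions check length() >= 2 and the callers compare against NumberOfChecks() * 2. Here is a small sketch of the append helper these functions rely on, with std::vector standing in for ZoneGrowableArray and its de-duplication behavior assumed from the call sites.

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Add the (cid, result) pair unless the cid already has an entry.
    static void TryAddTest(std::vector<intptr_t>* results, intptr_t cid, bool result) {
      for (std::size_t i = 0; i < results->size(); i += 2) {
        if ((*results)[i] == cid) return;  // Already covered by an earlier check.
      }
      results->push_back(cid);
      results->push_back(result ? 1 : 0);
    }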
 
@@ -2180,46 +2086,6 @@
     negate = Bool::Cast(call->ArgumentAt(3)->OriginalDefinition()
         ->AsConstant()->value()).value();
   }
-  const ICData& unary_checks =
-      ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks());
-  if ((unary_checks.NumberOfChecks() > 0) &&
-      (unary_checks.NumberOfChecks() <= FLAG_max_polymorphic_checks)) {
-    ZoneGrowableArray<intptr_t>* results =
-        new(Z) ZoneGrowableArray<intptr_t>(unary_checks.NumberOfChecks() * 2);
-    Bool& as_bool =
-        Bool::ZoneHandle(Z, InstanceOfAsBool(unary_checks, type, results));
-    if (as_bool.IsNull()) {
-      if (results->length() == unary_checks.NumberOfChecks() * 2) {
-        const bool can_deopt = TryExpandTestCidsResult(results, type);
-        TestCidsInstr* test_cids = new(Z) TestCidsInstr(
-            call->token_pos(),
-            negate ? Token::kISNOT : Token::kIS,
-            new(Z) Value(left),
-            *results,
-            can_deopt ? call->deopt_id() : Thread::kNoDeoptId);
-        // Remove type.
-        ReplaceCall(call, test_cids);
-        return;
-      }
-    } else {
-      // TODO(srdjan): Use TestCidsInstr also for this case.
-      // One result only.
-      AddReceiverCheck(call);
-      if (negate) {
-        as_bool = Bool::Get(!as_bool.value()).raw();
-      }
-      ConstantInstr* bool_const = flow_graph()->GetConstant(as_bool);
-      for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
-        PushArgumentInstr* push = call->PushArgumentAt(i);
-        push->ReplaceUsesWith(push->value()->definition());
-        push->RemoveFromGraph();
-      }
-      call->ReplaceUsesWith(bool_const);
-      ASSERT(current_iterator()->Current() == call);
-      current_iterator()->RemoveCurrentFromGraph();
-      return;
-    }
-  }
 
   if (TypeCheckAsClassEquality(type)) {
     LoadClassIdInstr* left_cid = new(Z) LoadClassIdInstr(new(Z) Value(left));
@@ -2242,6 +2108,31 @@
     return;
   }
 
+  const ICData& unary_checks =
+      ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks());
+  if ((unary_checks.NumberOfChecks() > 0) &&
+      (unary_checks.NumberOfChecks() <= FLAG_max_polymorphic_checks)) {
+    ZoneGrowableArray<intptr_t>* results =
+        new(Z) ZoneGrowableArray<intptr_t>(unary_checks.NumberOfChecks() * 2);
+    InstanceOfAsBool(unary_checks, type, results);
+    if (results->length() == unary_checks.NumberOfChecks() * 2) {
+      const bool can_deopt = TryExpandTestCidsResult(results, type);
+      if (can_deopt && !IsAllowedForInlining(call->deopt_id())) {
+        // Guard against repeated speculative inlining.
+        return;
+      }
+      TestCidsInstr* test_cids = new(Z) TestCidsInstr(
+          call->token_pos(),
+          negate ? Token::kISNOT : Token::kIS,
+          new(Z) Value(left),
+          *results,
+          can_deopt ? call->deopt_id() : Thread::kNoDeoptId);
+      // Remove type.
+      ReplaceCall(call, test_cids);
+      return;
+    }
+  }
+
   InstanceOfInstr* instance_of =
       new(Z) InstanceOfInstr(call->token_pos(),
                              new(Z) Value(left),
@@ -2270,6 +2161,10 @@
     const Bool& as_bool = Bool::ZoneHandle(Z,
         InstanceOfAsBool(unary_checks, type, results));
     if (as_bool.raw() == Bool::True().raw()) {
+      // Guard against repeated speculative inlining.
+      if (!IsAllowedForInlining(call->deopt_id())) {
+        return;
+      }
       AddReceiverCheck(call);
       // Remove the original push arguments.
       for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
@@ -2295,11 +2190,12 @@
 }
 
 
-bool AotOptimizer::IsBlackListedForInlining(intptr_t call_deopt_id) {
+bool AotOptimizer::IsAllowedForInlining(intptr_t call_deopt_id) {
+  if (!use_speculative_inlining_) return false;
   for (intptr_t i = 0; i < inlining_black_list_->length(); ++i) {
-    if ((*inlining_black_list_)[i] == call_deopt_id) return true;
+    if ((*inlining_black_list_)[i] == call_deopt_id) return false;
   }
-  return false;
+  return true;
 }
 
 
@@ -2322,6 +2218,24 @@
 }
 
 
+bool AotOptimizer::TryInlineFieldAccess(InstanceCallInstr* call) {
+  const Token::Kind op_kind = call->token_kind();
+  if ((op_kind == Token::kGET) && TryInlineInstanceGetter(call)) {
+    return true;
+  }
+
+  const ICData& unary_checks =
+      ICData::Handle(Z, call->ic_data()->AsUnaryClassChecks());
+  if ((unary_checks.NumberOfChecks() > 0) &&
+      (op_kind == Token::kSET) &&
+      TryInlineInstanceSetter(call, unary_checks)) {
+    return true;
+  }
+
+  return false;
+}
+
+
 // Tries to optimize instance call by replacing it with a faster instruction
 // (e.g, binary op, field load, ..).
 void AotOptimizer::VisitInstanceCall(InstanceCallInstr* instr) {
@@ -2340,20 +2254,13 @@
     return;
   }
 
-  if ((op_kind == Token::kGET) &&
-      TryInlineInstanceGetter(instr)) {
-    return;
-  }
-  const ICData& unary_checks =
-      ICData::ZoneHandle(Z, instr->ic_data()->AsUnaryClassChecks());
-  if ((unary_checks.NumberOfChecks() > 0) &&
-      (op_kind == Token::kSET) &&
-      TryInlineInstanceSetter(instr, unary_checks)) {
+  if (TryInlineFieldAccess(instr)) {
     return;
   }
 
-  if (use_speculative_inlining_ &&
-      !IsBlackListedForInlining(instr->deopt_id()) &&
+  const ICData& unary_checks =
+      ICData::ZoneHandle(Z, instr->ic_data()->AsUnaryClassChecks());
+  if (IsAllowedForInlining(instr->deopt_id()) &&
       (unary_checks.NumberOfChecks() > 0)) {
     if ((op_kind == Token::kINDEX) && TryReplaceWithIndexedOp(instr)) {
       return;
@@ -2378,6 +2285,10 @@
         TryReplaceWithUnaryOp(instr, op_kind)) {
       return;
     }
+
+    if (TryInlineInstanceMethod(instr)) {
+      return;
+    }
   }
 
   bool has_one_target =
@@ -2435,9 +2346,7 @@
       break;
   }
 
-  // No IC data checks. Try resolve target using the propagated type.
-  // If the propagated type has a method with the target name and there are
-  // no overrides with that name according to CHA, call the method directly.
+  // No IC data checks. Try to resolve the target using the propagated cid.
   const intptr_t receiver_cid =
       instr->PushArgumentAt(0)->value()->Type()->ToCid();
   if (receiver_cid != kDynamicCid) {
@@ -2454,31 +2363,20 @@
             instr->function_name(),
             args_desc));
     if (!function.IsNull()) {
-      intptr_t subclasses = 0;
-      if (!thread()->cha()->HasOverride(receiver_class,
-                                        instr->function_name(),
-                                        &subclasses)) {
-        if (FLAG_trace_cha) {
-          THR_Print("  **(CHA) Instance call needs no check, "
-              "no overrides of '%s' '%s'\n",
-              instr->function_name().ToCString(), receiver_class.ToCString());
-        }
-
-        // Create fake IC data with the resolved target.
-        const ICData& ic_data = ICData::Handle(
-            ICData::New(flow_graph_->function(),
-                        instr->function_name(),
-                        args_desc_array,
-                        Thread::kNoDeoptId,
-                        /* args_tested = */ 1));
-        ic_data.AddReceiverCheck(receiver_class.id(), function);
-        PolymorphicInstanceCallInstr* call =
-            new(Z) PolymorphicInstanceCallInstr(instr, ic_data,
-                                                /* with_checks = */ false,
-                                                /* complete = */ true);
-        instr->ReplaceWith(call, current_iterator());
-        return;
-      }
+      const ICData& ic_data = ICData::Handle(
+          ICData::New(flow_graph_->function(),
+                      instr->function_name(),
+                      args_desc_array,
+                      Thread::kNoDeoptId,
+                      /* args_tested = */ 1,
+                      false));
+      ic_data.AddReceiverCheck(receiver_class.id(), function);
+      PolymorphicInstanceCallInstr* call =
+          new(Z) PolymorphicInstanceCallInstr(instr, ic_data,
+                                              /* with_checks = */ false,
+                                              /* complete = */ true);
+      instr->ReplaceWith(call, current_iterator());
+      return;
     }
   }
 
@@ -2488,58 +2386,124 @@
       flow_graph_->IsReceiver(callee_receiver)) {
     // Call receiver is method receiver.
     Class& receiver_class = Class::Handle(Z, function.Owner());
+
     GrowableArray<intptr_t> class_ids(6);
     if (thread()->cha()->ConcreteSubclasses(receiver_class, &class_ids)) {
-      if (class_ids.length() <= FLAG_max_exhaustive_polymorphic_checks) {
-        if (FLAG_trace_cha) {
-          THR_Print("  **(CHA) Only %" Pd " concrete subclasses of %s for %s\n",
-                    class_ids.length(),
-                    receiver_class.ToCString(),
-                    instr->function_name().ToCString());
+      // First check if all subclasses end up calling the same method.
+      // If this is the case, we will replace the instance call with a direct
+      // static call.
+      // Otherwise, we will try to create an ICData that contains all possible
+      // targets with appropriate checks.
+      Function& single_target = Function::Handle(Z);
+      ICData& ic_data = ICData::Handle(Z);
+
+      const Array& args_desc_array = Array::Handle(Z,
+          ArgumentsDescriptor::New(instr->ArgumentCount(),
+                                   instr->argument_names()));
+      ArgumentsDescriptor args_desc(args_desc_array);
+
+      Function& target = Function::Handle(Z);
+      Class& cls = Class::Handle(Z);
+      for (intptr_t i = 0; i < class_ids.length(); i++) {
+        const intptr_t cid = class_ids[i];
+        cls = isolate()->class_table()->At(cid);
+        target = Resolver::ResolveDynamicForReceiverClass(
+            cls,
+            instr->function_name(),
+            args_desc);
+
+        if (target.IsNull()) {
+          // Can't resolve the target. It might be a noSuchMethod, a call
+          // through a getter, or a closurization.
+          single_target = Function::null();
+          ic_data = ICData::null();
+          break;
+        } else if (ic_data.IsNull()) {
+          // First we are trying to compute a single target for all subclasses.
+          if (single_target.IsNull()) {
+            ASSERT(i == 0);
+            single_target = target.raw();
+            continue;
+          } else if (single_target.raw() == target.raw()) {
+            continue;
+          }
+
+          // The call does not resolve to a single target within the hierarchy.
+          // If we have too many subclasses, abort the optimization.
+          if (class_ids.length() > FLAG_max_exhaustive_polymorphic_checks) {
+            single_target = Function::null();
+            break;
+          }
+
+          // Create an ICData and map all previously seen classes (< i) to
+          // the computed single_target.
+          ic_data = ICData::New(function,
+                                instr->function_name(),
+                                args_desc_array,
+                                Thread::kNoDeoptId,
+                                /* args_tested = */ 1, false);
+          for (intptr_t j = 0; j < i; j++) {
+            ic_data.AddReceiverCheck(class_ids[j], single_target);
+          }
+
+          single_target = Function::null();
         }
 
-        const Array& args_desc_array = Array::Handle(Z,
-            ArgumentsDescriptor::New(instr->ArgumentCount(),
-                                     instr->argument_names()));
-        ArgumentsDescriptor args_desc(args_desc_array);
+        ASSERT(ic_data.raw() != ICData::null());
+        ASSERT(single_target.raw() == Function::null());
+        ic_data.AddReceiverCheck(cid, target);
+      }
 
-        const ICData& ic_data = ICData::Handle(
-            ICData::New(function,
-                        instr->function_name(),
-                        args_desc_array,
-                        Thread::kNoDeoptId,
-                        /* args_tested = */ 1));
+      if (single_target.raw() != Function::null()) {
+        // If this is a getter or setter invocation try inlining it right away
+        // instead of replacing it with a static call.
+        if ((op_kind == Token::kGET) || (op_kind == Token::kSET)) {
+          // Create fake IC data with the resolved target.
+          const ICData& ic_data = ICData::Handle(
+              ICData::New(flow_graph_->function(),
+                          instr->function_name(),
+                          args_desc_array,
+                          Thread::kNoDeoptId,
+                          /* args_tested = */ 1,
+                          false));
+          cls = single_target.Owner();
+          ic_data.AddReceiverCheck(cls.id(), single_target);
+          instr->set_ic_data(&ic_data);
 
-        Function& target = Function::Handle(Z);
-        Class& cls = Class::Handle(Z);
-        bool includes_dispatcher_case = false;
-        for (intptr_t i = 0; i < class_ids.length(); i++) {
-          intptr_t cid = class_ids[i];
-          cls = isolate()->class_table()->At(cid);
-          target = Resolver::ResolveDynamicForReceiverClass(
-              cls,
-              instr->function_name(),
-              args_desc);
-          if (target.IsNull()) {
-            // noSuchMethod, call through getter or closurization
-            includes_dispatcher_case = true;
-          } else {
-            ic_data.AddReceiverCheck(cid, target);
+          if (TryInlineFieldAccess(instr)) {
+            return;
           }
         }
-        if (!includes_dispatcher_case && (ic_data.NumberOfChecks() > 0)) {
-          PolymorphicInstanceCallInstr* call =
-              new(Z) PolymorphicInstanceCallInstr(instr, ic_data,
-                                                  /* with_checks = */ true,
-                                                  /* complete = */ true);
-          instr->ReplaceWith(call, current_iterator());
-          return;
+
+        // We have computed that there is only a single target for this call
+        // within the whole hierarchy. Replace InstanceCall with StaticCall.
+        ZoneGrowableArray<PushArgumentInstr*>* args =
+            new (Z) ZoneGrowableArray<PushArgumentInstr*>(
+                instr->ArgumentCount());
+        for (intptr_t i = 0; i < instr->ArgumentCount(); i++) {
+          args->Add(instr->PushArgumentAt(i));
         }
+        StaticCallInstr* call = new (Z) StaticCallInstr(
+            instr->token_pos(),
+            Function::ZoneHandle(Z, single_target.raw()),
+            instr->argument_names(),
+            args,
+            instr->deopt_id());
+        instr->ReplaceWith(call, current_iterator());
+        return;
+      } else if ((ic_data.raw() != ICData::null()) &&
+                 (ic_data.NumberOfChecks() > 0)) {
+        PolymorphicInstanceCallInstr* call =
+            new(Z) PolymorphicInstanceCallInstr(instr, ic_data,
+                                                /* with_checks = */ true,
+                                                /* complete = */ true);
+        instr->ReplaceWith(call, current_iterator());
+        return;
       }
     }
   }
 
-  // More than one targets. Generate generic polymorphic call without
+  // More than one target. Generate generic polymorphic call without
   // deoptimization.
   if (instr->ic_data()->NumberOfUsedChecks() > 0) {
     ASSERT(!FLAG_polymorphic_with_deopt);
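The replacement hunk above for VisitInstanceCall packs a fair amount of control flow into the loop over concrete subclasses. Below is a condensed, illustrative model of that decision; plain integer ids stand in for the VM's Function handles, and the names here are not VM API.

    #include <cstddef>
    #include <utility>
    #include <vector>

    // target_of_class[i] is the method that class i resolves to; 0 means the
    // lookup failed (noSuchMethod, call through getter, or closurization).
    struct Decision {
      int single_target = 0;                     // Non-zero: emit a StaticCall.
      std::vector<std::pair<int, int>> checks;   // Non-empty: complete polymorphic call.
    };

    static Decision Decide(const std::vector<int>& target_of_class,
                           std::size_t max_checks) {
      Decision d;
      for (std::size_t i = 0; i < target_of_class.size(); i++) {
        const int target = target_of_class[i];
        if (target == 0) return Decision();      // Give up: keep the generic call.
        if (d.checks.empty()) {
          if (d.single_target == 0) { d.single_target = target; continue; }
          if (d.single_target == target) continue;
          // First disagreement; with too many subclasses, skip the optimization.
          if (target_of_class.size() > max_checks) return Decision();
          // Map every class seen so far to the previous single target.
          for (std::size_t j = 0; j < i; j++)
            d.checks.push_back(std::make_pair(static_cast<int>(j), d.single_target));
          d.single_target = 0;
        }
        d.checks.push_back(std::make_pair(static_cast<int>(i), target));
      }
      return d;
    }

In this model, a non-zero single_target corresponds to the new StaticCallInstr path (or the TryInlineFieldAccess shortcut for getters and setters), a non-empty checks list to the "complete" PolymorphicInstanceCallInstr, and an empty Decision to falling through to the generic call.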
diff --git a/runtime/vm/aot_optimizer.h b/runtime/vm/aot_optimizer.h
index 97c996a..1afdadd 100644
--- a/runtime/vm/aot_optimizer.h
+++ b/runtime/vm/aot_optimizer.h
@@ -62,8 +62,6 @@
   bool TryCreateICData(InstanceCallInstr* call);
   const ICData& TrySpecializeICData(const ICData& ic_data, intptr_t cid);
 
-  void SpecializePolymorphicInstanceCall(PolymorphicInstanceCallInstr* call);
-
   bool TryReplaceWithIndexedOp(InstanceCallInstr* call);
 
   bool TryReplaceWithBinaryOp(InstanceCallInstr* call, Token::Kind op_kind);
@@ -72,6 +70,7 @@
   bool TryReplaceWithEqualityOp(InstanceCallInstr* call, Token::Kind op_kind);
   bool TryReplaceWithRelationalOp(InstanceCallInstr* call, Token::Kind op_kind);
 
+  bool TryInlineFieldAccess(InstanceCallInstr* call);
   bool TryInlineInstanceGetter(InstanceCallInstr* call);
   bool TryInlineInstanceSetter(InstanceCallInstr* call,
                                const ICData& unary_ic_data);
@@ -165,7 +164,7 @@
 
   const Function& function() const { return flow_graph_->function(); }
 
-  bool IsBlackListedForInlining(intptr_t deopt_id);
+  bool IsAllowedForInlining(intptr_t deopt_id);
 
   FlowGraph* flow_graph_;
 
diff --git a/runtime/vm/assembler.cc b/runtime/vm/assembler.cc
index fe775bf..8bb29ca 100644
--- a/runtime/vm/assembler.cc
+++ b/runtime/vm/assembler.cc
@@ -275,7 +275,7 @@
   // If the object is not patchable, check if we've already got it in the
   // object pool.
   if (patchable == kNotPatchable) {
-    intptr_t idx = object_pool_index_table_.Lookup(entry);
+    intptr_t idx = object_pool_index_table_.LookupValue(entry);
     if (idx != ObjIndexPair::kNoIndex) {
       return idx;
     }
diff --git a/runtime/vm/assembler_arm.cc b/runtime/vm/assembler_arm.cc
index db3314d..2e39e22 100644
--- a/runtime/vm/assembler_arm.cc
+++ b/runtime/vm/assembler_arm.cc
@@ -1584,8 +1584,6 @@
                                  Register pp) {
   ASSERT(!object.IsICData() || ICData::Cast(object).IsOriginal());
   ASSERT(!object.IsField() || Field::Cast(object).IsOriginal());
-  // Load common VM constants from the thread. This works also in places where
-  // no constant pool is set up (e.g. intrinsic code).
   if (Thread::CanLoadFromThread(object)) {
     // Load common VM constants from the thread. This works also in places where
     // no constant pool is set up (e.g. intrinsic code).
@@ -1689,49 +1687,6 @@
 }
 
 
-Operand Assembler::GetVerifiedMemoryShadow() {
-  Operand offset;
-  if (!Operand::CanHold(VerifiedMemory::offset(), &offset)) {
-    FATAL1("Offset 0x%" Px " not representable", VerifiedMemory::offset());
-  }
-  return offset;
-}
-
-
-void Assembler::WriteShadowedField(Register base,
-                                   intptr_t offset,
-                                   Register value,
-                                   Condition cond) {
-  if (VerifiedMemory::enabled()) {
-    ASSERT(base != value);
-    Operand shadow(GetVerifiedMemoryShadow());
-    add(base, base, shadow, cond);
-    str(value, Address(base, offset), cond);
-    sub(base, base, shadow, cond);
-  }
-  str(value, Address(base, offset), cond);
-}
-
-
-void Assembler::WriteShadowedFieldPair(Register base,
-                                       intptr_t offset,
-                                       Register value_even,
-                                       Register value_odd,
-                                       Condition cond) {
-  ASSERT(value_odd == value_even + 1);
-  ASSERT(value_even % 2 == 0);
-  if (VerifiedMemory::enabled()) {
-    ASSERT(base != value_even);
-    ASSERT(base != value_odd);
-    Operand shadow(GetVerifiedMemoryShadow());
-    add(base, base, shadow, cond);
-    strd(value_even, value_odd, base, offset, cond);
-    sub(base, base, shadow, cond);
-  }
-  strd(value_even, value_odd, base, offset, cond);
-}
-
-
 Register UseRegister(Register reg, RegList* used) {
   ASSERT(reg != THR);
   ASSERT(reg != SP);
@@ -1751,89 +1706,12 @@
 }
 
 
-void Assembler::VerifiedWrite(Register object,
-                              const Address& address,
-                              Register new_value,
-                              FieldContent old_content) {
-#if defined(DEBUG)
-  ASSERT(address.mode() == Address::Offset ||
-         address.mode() == Address::NegOffset);
-  // Allocate temporary registers (and check for register collisions).
-  RegList used = 0;
-  UseRegister(new_value, &used);
-  Register base = UseRegister(address.rn(), &used);
-  if ((object != base) && (object != kNoRegister)) {
-    UseRegister(object, &used);
-  }
-  if (address.rm() != kNoRegister) {
-    UseRegister(address.rm(), &used);
-  }
-  Register old_value = AllocateRegister(&used);
-  Register temp = AllocateRegister(&used);
-  PushList(used);
-  ldr(old_value, address);
-  // First check that 'old_value' contains 'old_content'.
-  // Smi test.
-  tst(old_value, Operand(kHeapObjectTag));
-  Label ok;
-  switch (old_content) {
-    case kOnlySmi:
-      b(&ok, EQ);  // Smi is OK.
-      Stop("Expected smi.");
-      break;
-    case kHeapObjectOrSmi:
-      b(&ok, EQ);  // Smi is OK.
-      // Non-smi case: Verify object pointer is word-aligned when untagged.
-      COMPILE_ASSERT(kHeapObjectTag == 1);
-      tst(old_value, Operand((kWordSize - 1) - kHeapObjectTag));
-      b(&ok, EQ);
-      Stop("Expected heap object or Smi");
-      break;
-    case kEmptyOrSmiOrNull:
-      b(&ok, EQ);  // Smi is OK.
-      // Non-smi case: Check for the special zap word or null.
-      // Note: Cannot use CompareImmediate, since IP may be in use.
-      LoadImmediate(temp, Heap::kZap32Bits);
-      cmp(old_value, Operand(temp));
-      b(&ok, EQ);
-      LoadObject(temp, Object::null_object());
-      cmp(old_value, Operand(temp));
-      b(&ok, EQ);
-      Stop("Expected zapped, Smi or null");
-      break;
-    default:
-      UNREACHABLE();
-  }
-  Bind(&ok);
-  if (VerifiedMemory::enabled()) {
-    Operand shadow_offset(GetVerifiedMemoryShadow());
-    // Adjust the address to shadow.
-    add(base, base, shadow_offset);
-    ldr(temp, address);
-    cmp(old_value, Operand(temp));
-    Label match;
-    b(&match, EQ);
-    Stop("Write barrier verification failed");
-    Bind(&match);
-    // Write new value in shadow.
-    str(new_value, address);
-    // Restore original address.
-    sub(base, base, shadow_offset);
-  }
-  str(new_value, address);
-  PopList(used);
-#else
-  str(new_value, address);
-#endif  // DEBUG
-}
-
-
 void Assembler::StoreIntoObject(Register object,
                                 const Address& dest,
                                 Register value,
                                 bool can_value_be_smi) {
   ASSERT(object != value);
-  VerifiedWrite(object, dest, value, kHeapObjectOrSmi);
+  str(value, dest);
   Label done;
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
@@ -1874,9 +1752,8 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         Register value,
-                                         FieldContent old_content) {
-  VerifiedWrite(object, dest, value, old_content);
+                                         Register value) {
+  str(value, dest);
 #if defined(DEBUG)
   Label done;
   StoreIntoObjectFilter(object, value, &done);
@@ -1887,50 +1764,48 @@
 }
 
 
-void Assembler::StoreIntoObjectNoBarrierOffset(Register object,
-                                               int32_t offset,
-                                               Register value,
-                                               FieldContent old_content) {
-  int32_t ignored = 0;
-  if (Address::CanHoldStoreOffset(kWord, offset - kHeapObjectTag, &ignored)) {
-    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value,
-                             old_content);
-  } else {
-    AddImmediate(IP, object, offset - kHeapObjectTag);
-    StoreIntoObjectNoBarrier(object, Address(IP), value, old_content);
-  }
-}
-
-
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         const Object& value,
-                                         FieldContent old_content) {
+                                         const Object& value) {
   ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
   ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
   ASSERT(value.IsSmi() || value.InVMHeap() ||
          (value.IsOld() && value.IsNotTemporaryScopedHandle()));
   // No store buffer update.
   LoadObject(IP, value);
-  VerifiedWrite(object, dest, IP, old_content);
+  str(IP, dest);
 }
 
 
 void Assembler::StoreIntoObjectNoBarrierOffset(Register object,
                                                int32_t offset,
-                                               const Object& value,
-                                               FieldContent old_content) {
-  ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
-  ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
+                                               Register value) {
   int32_t ignored = 0;
   if (Address::CanHoldStoreOffset(kWord, offset - kHeapObjectTag, &ignored)) {
-    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value,
-                             old_content);
+    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value);
   } else {
     Register base = object == R9 ? R8 : R9;
     Push(base);
     AddImmediate(base, object, offset - kHeapObjectTag);
-    StoreIntoObjectNoBarrier(object, Address(base), value, old_content);
+    StoreIntoObjectNoBarrier(object, Address(base), value);
+    Pop(base);
+  }
+}
+
+
+void Assembler::StoreIntoObjectNoBarrierOffset(Register object,
+                                               int32_t offset,
+                                               const Object& value) {
+  ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
+  ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
+  int32_t ignored = 0;
+  if (Address::CanHoldStoreOffset(kWord, offset - kHeapObjectTag, &ignored)) {
+    StoreIntoObjectNoBarrier(object, FieldAddress(object, offset), value);
+  } else {
+    Register base = object == R9 ? R8 : R9;
+    Push(base);
+    AddImmediate(base, object, offset - kHeapObjectTag);
+    StoreIntoObjectNoBarrier(object, Address(base), value);
     Pop(base);
   }
 }
@@ -1946,9 +1821,9 @@
   Bind(&init_loop);
   AddImmediate(begin, 2 * kWordSize);
   cmp(begin, Operand(end));
-  WriteShadowedFieldPair(begin, -2 * kWordSize, value_even, value_odd, LS);
+  strd(value_even, value_odd, begin, -2 * kWordSize, LS);
   b(&init_loop, CC);
-  WriteShadowedField(begin, -2 * kWordSize, value_even, HI);
+  str(value_even, Address(begin, -2 * kWordSize), HI);
 #if defined(DEBUG)
   Label done;
   StoreIntoObjectFilter(object, value_even, &done);
@@ -1969,11 +1844,11 @@
   ASSERT(value_odd == value_even + 1);
   intptr_t current_offset = begin_offset;
   while (current_offset + kWordSize < end_offset) {
-    WriteShadowedFieldPair(base, current_offset, value_even, value_odd);
+    strd(value_even, value_odd, base, current_offset);
     current_offset += 2*kWordSize;
   }
   while (current_offset < end_offset) {
-    WriteShadowedField(base, current_offset, value_even);
+    str(value_even, Address(base, current_offset));
     current_offset += kWordSize;
   }
 #if defined(DEBUG)
@@ -1995,7 +1870,7 @@
   Stop("New value must be Smi.");
   Bind(&done);
 #endif  // defined(DEBUG)
-  VerifiedWrite(kNoRegister, dest, value, kOnlySmi);
+  str(value, dest);
 }
 
 
@@ -3479,8 +3354,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure);
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    NOT_IN_PRODUCT(
+      MaybeTraceAllocation(cls.id(), temp_reg, failure));
+    Heap::Space space = Heap::kNew;
     ldr(temp_reg, Address(THR, Thread::heap_offset()));
     ldr(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     // TODO(koda): Protect against unsigned overflow here.
@@ -3496,7 +3372,7 @@
     // next object start and store the class in the class field of object.
     str(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
 
-    LoadAllocationStatsAddress(temp_reg, cls.id());
+    NOT_IN_PRODUCT(LoadAllocationStatsAddress(temp_reg, cls.id()));
 
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
@@ -3508,7 +3384,7 @@
     LoadImmediate(IP, tags);
     str(IP, FieldAddress(instance_reg, Object::tags_offset()));
 
-    IncrementAllocationStats(temp_reg, cls.id(), space);
+    NOT_IN_PRODUCT(IncrementAllocationStats(temp_reg, cls.id(), space));
   } else {
     b(failure);
   }
@@ -3526,8 +3402,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp1, failure);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure));
+    Heap::Space space = Heap::kNew;
     ldr(temp1, Address(THR, Thread::heap_offset()));
     // Potential new object start.
     ldr(instance, Address(temp1, Heap::TopOffset(space)));
@@ -3541,7 +3417,7 @@
     cmp(end_address, Operand(temp2));
     b(failure, CS);
 
-    LoadAllocationStatsAddress(temp2, cid);
+    NOT_IN_PRODUCT(LoadAllocationStatsAddress(temp2, cid));
 
     // Successfully allocated the object(s), now update top to point to
     // next object start and initialize the object.
@@ -3557,7 +3433,7 @@
     str(temp1, FieldAddress(instance, Array::tags_offset()));  // Store tags.
 
     LoadImmediate(temp1, instance_size);
-    IncrementAllocationStatsWithSize(temp2, temp1, space);
+    NOT_IN_PRODUCT(IncrementAllocationStatsWithSize(temp2, temp1, space));
   } else {
     b(failure);
   }
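Allocation tracing and statistics updates above are now wrapped in NOT_IN_PRODUCT so they vanish from product builds. The usual shape of such a macro is sketched below; this is inferred from how it is used here, not a quote of the VM's definition.

    #if defined(PRODUCT)
    #define NOT_IN_PRODUCT(code)        // Expands to nothing in product mode.
    #else
    #define NOT_IN_PRODUCT(code) code   // Keeps the wrapped code otherwise.
    #endif

With this shape, NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure)); compiles to the plain call in debug and release modes and to an empty statement in product mode.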
diff --git a/runtime/vm/assembler_arm.h b/runtime/vm/assembler_arm.h
index 66e7c13..cac760b 100644
--- a/runtime/vm/assembler_arm.h
+++ b/runtime/vm/assembler_arm.h
@@ -744,14 +744,6 @@
   void PushObject(const Object& object);
   void CompareObject(Register rn, const Object& object);
 
-  // When storing into a heap object field, knowledge of the previous content
-  // is expressed through these constants.
-  enum FieldContent {
-    kEmptyOrSmiOrNull,  // Empty = garbage/zapped in release/debug mode.
-    kHeapObjectOrSmi,
-    kOnlySmi,
-  };
-
   void StoreIntoObject(Register object,  // Object we are storing into.
                        const Address& dest,  // Where we are storing into.
                        Register value,  // Value we are storing.
@@ -763,27 +755,16 @@
 
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                Register value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                Register value) {
-    StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
-  void StoreIntoObjectNoBarrierOffset(
-      Register object,
-      int32_t offset,
-      Register value,
-      FieldContent old_content = kHeapObjectOrSmi);
+                                Register value);
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                const Object& value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void StoreIntoObjectNoBarrierOffset(
-      Register object,
-      int32_t offset,
-      const Object& value,
-      FieldContent old_content = kHeapObjectOrSmi);
+                                const Object& value);
+  void StoreIntoObjectNoBarrierOffset(Register object,
+                                      int32_t offset,
+                                      Register value);
+  void StoreIntoObjectNoBarrierOffset(Register object,
+                                      int32_t offset,
+                                      const Object& value);
 
   // Store value_even, value_odd, value_even, ... into the words in the address
   // range [begin, end), assumed to be uninitialized fields in object (tagged).
@@ -1193,27 +1174,6 @@
                                   Register value,
                                   Label* no_update);
 
-  // Helpers for write-barrier verification.
-
-  // Returns VerifiedMemory::offset() as an Operand.
-  Operand GetVerifiedMemoryShadow();
-  // Writes value to [base + offset] and also its shadow location, if enabled.
-  void WriteShadowedField(Register base,
-                          intptr_t offset,
-                          Register value,
-                          Condition cond = AL);
-  void WriteShadowedFieldPair(Register base,
-                              intptr_t offset,
-                              Register value_even,
-                              Register value_odd,
-                              Condition cond = AL);
-  // Writes new_value to address and its shadow location, if enabled, after
-  // verifying that its old value matches its shadow.
-  void VerifiedWrite(Register object,
-                     const Address& address,
-                     Register new_value,
-                     FieldContent old_content);
-
   DISALLOW_ALLOCATION();
   DISALLOW_COPY_AND_ASSIGN(Assembler);
 };
diff --git a/runtime/vm/assembler_arm64.cc b/runtime/vm/assembler_arm64.cc
index eeb831d..26c0b4a 100644
--- a/runtime/vm/assembler_arm64.cc
+++ b/runtime/vm/assembler_arm64.cc
@@ -1126,7 +1126,33 @@
 }
 
 
+void Assembler::SetupDartSP() {
+  mov(SP, CSP);
+}
+
+
+void Assembler::RestoreCSP() {
+  mov(CSP, SP);
+}
+
+
 void Assembler::EnterFrame(intptr_t frame_size) {
+  // The ARM64 ABI requires at all times
+  //   - stack limit < CSP <= stack base
+  //   - CSP mod 16 = 0
+  //   - we do not access stack memory below CSP
+  // Practically, this means we need to keep the C stack pointer ahead of the
+  // Dart stack pointer and 16-byte aligned for signal handlers. If we knew the
+  // real stack limit, we could just set CSP to a value near it during
+  // SetupDartSP, but we do not know the real stack limit for the initial
+  // thread or threads created by the embedder.
+  // TODO(26472): It would be safer to use CSP as the Dart stack pointer, but
+  // this requires adjustments to stack handling to maintain the 16-byte
+  // alignment.
+  const intptr_t kMaxDartFrameSize = 4096;
+  sub(TMP, SP, Operand(kMaxDartFrameSize));
+  andi(CSP, TMP, Immediate(~15));
+
   PushPair(LR, FP);
   mov(FP, SP);
 
@@ -1328,9 +1354,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), temp_reg, failure));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     ldr(temp_reg, Address(THR, Thread::heap_offset()));
     ldr(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     // TODO(koda): Protect against unsigned overflow here.
@@ -1349,7 +1375,7 @@
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(
         instance_reg, instance_reg, -instance_size + kHeapObjectTag);
-    UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), space));
 
     uword tags = 0;
     tags = RawObject::SizeTag::update(instance_size, tags);
@@ -1374,8 +1400,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp1, failure);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure));
+    Heap::Space space = Heap::kNew;
     ldr(temp1, Address(THR, Thread::heap_offset()));
     // Potential new object start.
     ldr(instance, Address(temp1, Heap::TopOffset(space)));
@@ -1394,7 +1420,7 @@
     str(end_address, Address(temp1, Heap::TopOffset(space)));
     add(instance, instance, Operand(kHeapObjectTag));
     LoadImmediate(temp2, instance_size);
-    UpdateAllocationStatsWithSize(cid, temp2, space);
+    NOT_IN_PRODUCT(UpdateAllocationStatsWithSize(cid, temp2, space));
 
     // Initialize the tags.
     // instance: new object start as a tagged pointer.
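The EnterFrame comment above boils down to keeping CSP a fixed distance below the Dart SP and rounding it down to a 16-byte boundary. A tiny sketch of that arithmetic follows; the helper is illustrative, not VM code.

    #include <cstdint>

    // Mirrors "sub(TMP, SP, Operand(kMaxDartFrameSize)); andi(CSP, TMP, Immediate(~15))".
    static uint64_t ComputeCSP(uint64_t dart_sp) {
      const uint64_t kMaxDartFrameSize = 4096;
      const uint64_t csp = dart_sp - kMaxDartFrameSize;
      return csp & ~static_cast<uint64_t>(15);  // Round down to a 16-byte boundary.
    }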
diff --git a/runtime/vm/assembler_arm64.h b/runtime/vm/assembler_arm64.h
index 735e07e..8080ab8 100644
--- a/runtime/vm/assembler_arm64.h
+++ b/runtime/vm/assembler_arm64.h
@@ -1349,18 +1349,12 @@
                            Register scratch2,
                            Label* miss);
 
+  void SetupDartSP();
+  void RestoreCSP();
+
   void EnterFrame(intptr_t frame_size);
   void LeaveFrame();
 
-  // When entering Dart code from C++, we copy the system stack pointer (CSP)
-  // to the Dart stack pointer (SP), and reserve a little space for the stack
-  // to grow.
-  void SetupDartSP(intptr_t reserved_space) {
-    ASSERT(Utils::IsAligned(reserved_space, 16));
-    mov(SP, CSP);
-    sub(CSP, CSP, Operand(reserved_space));
-  }
-
   void CheckCodePointer();
   void RestoreCodePointer();
 
diff --git a/runtime/vm/assembler_arm64_test.cc b/runtime/vm/assembler_arm64_test.cc
index ba0d646..7c7a620 100644
--- a/runtime/vm/assembler_arm64_test.cc
+++ b/runtime/vm/assembler_arm64_test.cc
@@ -390,12 +390,12 @@
 
 // Loads and Stores.
 ASSEMBLER_TEST_GENERATE(SimpleLoadStore, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(43), 0);
   __ movz(R1, Immediate(42), 0);
   __ str(R1, Address(SP, -1*kWordSize, Address::PreIndex));
   __ ldr(R0, Address(SP, 1*kWordSize, Address::PostIndex));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -407,13 +407,13 @@
 
 
 ASSEMBLER_TEST_GENERATE(SimpleLoadStoreHeapTag, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(43), 0);
   __ movz(R1, Immediate(42), 0);
   __ add(R2, SP, Operand(1));
   __ str(R1, Address(R2, -1));
   __ ldr(R0, Address(R2, -1));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -425,7 +425,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadStoreLargeIndex, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(43), 0);
   __ movz(R1, Immediate(42), 0);
   // Largest negative offset that can fit in the signed 9-bit immediate field.
@@ -434,7 +434,7 @@
   __ ldr(R0, Address(SP, 31*kWordSize, Address::PostIndex));
   // Correction.
   __ add(SP, SP, Operand(kWordSize));  // Restore SP.
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -446,14 +446,14 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadStoreLargeOffset, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(43), 0);
   __ movz(R1, Immediate(42), 0);
   __ sub(SP, SP, Operand(512*kWordSize));
   __ str(R1, Address(SP, 512*kWordSize, Address::Offset));
   __ add(SP, SP, Operand(512*kWordSize));
   __ ldr(R0, Address(SP));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -465,7 +465,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadStoreExtReg, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(43), 0);
   __ movz(R1, Immediate(42), 0);
   __ movz(R2, Immediate(0xfff8), 0);
@@ -476,7 +476,7 @@
   __ sub(SP, SP, Operand(kWordSize));
   __ ldr(R0, Address(SP));
   __ add(SP, SP, Operand(kWordSize));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -488,7 +488,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadStoreScaledReg, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(43), 0);
   __ movz(R1, Immediate(42), 0);
   __ movz(R2, Immediate(10), 0);
@@ -497,7 +497,7 @@
   __ str(R1, Address(SP, R2, UXTX, Address::Scaled));
   __ ldr(R0, Address(SP, R2, UXTX, Address::Scaled));
   __ add(SP, SP, Operand(10*kWordSize));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -509,12 +509,12 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadSigned32Bit, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadImmediate(R1, 0xffffffff);
   __ str(R1, Address(SP, -4, Address::PreIndex, kWord), kWord);
   __ ldr(R0, Address(SP), kWord);
   __ ldr(R1, Address(SP, 4, Address::PostIndex, kWord), kWord);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -526,13 +526,13 @@
 
 
 ASSEMBLER_TEST_GENERATE(SimpleLoadStorePair, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadImmediate(R2, 43);
   __ LoadImmediate(R3, 42);
   __ stp(R2, R3, Address(SP, -2*kWordSize, Address::PairPreIndex));
   __ ldp(R0, R1, Address(SP, 2*kWordSize, Address::PairPostIndex));
   __ sub(R0, R0, Operand(R1));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -544,7 +544,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadStorePairOffset, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadImmediate(R2, 43);
   __ LoadImmediate(R3, 42);
   __ sub(SP, SP, Operand(4 * kWordSize));
@@ -552,7 +552,7 @@
   __ ldp(R0, R1, Address::Pair(SP, 2 * kWordSize));
   __ add(SP, SP, Operand(4 * kWordSize));
   __ sub(R0, R0, Operand(R1));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -564,7 +564,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(Semaphore, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(40), 0);
   __ movz(R1, Immediate(42), 0);
   __ Push(R0);
@@ -575,7 +575,7 @@
   __ cmp(TMP, Operand(0));
   __ b(&retry, NE);  // NE if context switch occurred between ldrex and strex.
   __ Pop(R0);  // 42
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -588,7 +588,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(FailedSemaphore, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ movz(R0, Immediate(40), 0);
   __ movz(R1, Immediate(42), 0);
   __ Push(R0);
@@ -597,7 +597,7 @@
   __ stxr(TMP, R1, SP);  // IP == 1, failure
   __ Pop(R0);  // 40
   __ add(R0, R0, Operand(TMP));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -723,6 +723,24 @@
 }
 
 
+ASSEMBLER_TEST_GENERATE(AndImmCsp, assembler) {
+  // Note we must maintain the ARM64 ABI invariants on CSP here.
+  __ mov(TMP, CSP);
+  __ sub(TMP2, CSP, Operand(31));
+  __ andi(CSP, TMP2, Immediate(~15));
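+  // With a 16-byte aligned incoming CSP, (CSP - 31) & ~15 == CSP - 32, so
+  // the difference checked below is 32.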
+  __ mov(R0, CSP);
+  __ sub(R0, TMP, Operand(R0));
+  __ mov(CSP, TMP);
+  __ ret();
+}
+
+
+ASSEMBLER_TEST_RUN(AndImmCsp, test) {
+  typedef int64_t (*Int64Return)() DART_UNUSED;
+  EXPECT_EQ(32, EXECUTE_TEST_CODE_INT64(Int64Return, test->entry()));
+}
+
+
 ASSEMBLER_TEST_GENERATE(AndOneImm, assembler) {
   __ movz(R1, Immediate(43), 0);
   __ andi(R0, R1, Immediate(1));
@@ -1689,11 +1707,11 @@
 
 // Loading immediate values with the object pool.
 ASSEMBLER_TEST_GENERATE(LoadImmediatePPSmall, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadImmediate(R0, 42);
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1704,11 +1722,11 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadImmediatePPMed, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadImmediate(R0, 0xf1234123);
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1719,11 +1737,11 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadImmediatePPMed2, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadImmediate(R0, 0x4321f1234124);
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1734,11 +1752,11 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadImmediatePPLarge, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadImmediate(R0, 0x9287436598237465);
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1751,11 +1769,11 @@
 
 // LoadObject null.
 ASSEMBLER_TEST_GENERATE(LoadObjectNull, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadObject(R0, Object::null_object());
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1766,11 +1784,11 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadObjectTrue, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadObject(R0, Bool::True());
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1781,11 +1799,11 @@
 
 
 ASSEMBLER_TEST_GENERATE(LoadObjectFalse, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   __ LoadObject(R0, Bool::False());
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1940,11 +1958,11 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrdFstrdPrePostIndex, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V1, 42.0);
   __ fstrd(V1, Address(SP, -1*kWordSize, Address::PreIndex));
   __ fldrd(V0, Address(SP, 1*kWordSize, Address::PostIndex));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1956,13 +1974,13 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrsFstrsPrePostIndex, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V1, 42.0);
   __ fcvtsd(V2, V1);
   __ fstrs(V2, Address(SP, -1*kWordSize, Address::PreIndex));
   __ fldrs(V3, Address(SP, 1*kWordSize, Address::PostIndex));
   __ fcvtds(V0, V3);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -1974,7 +1992,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrqFstrqPrePostIndex, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V1, 21.0);
   __ LoadDImmediate(V2, 21.0);
   __ LoadImmediate(R1, 42);
@@ -1987,7 +2005,7 @@
   __ PopDouble(V0);
   __ PopDouble(V1);
   __ faddd(V0, V0, V1);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2160,7 +2178,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrdFstrdHeapTag, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 43.0);
   __ LoadDImmediate(V1, 42.0);
   __ AddImmediate(SP, SP, -1 * kWordSize);
@@ -2168,7 +2186,7 @@
   __ fstrd(V1, Address(R2, -1));
   __ fldrd(V0, Address(R2, -1));
   __ AddImmediate(SP, SP, 1 * kWordSize);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2180,7 +2198,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrdFstrdLargeIndex, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 43.0);
   __ LoadDImmediate(V1, 42.0);
   // Largest negative offset that can fit in the signed 9-bit immediate field.
@@ -2189,7 +2207,7 @@
   __ fldrd(V0, Address(SP, 31*kWordSize, Address::PostIndex));
   // Correction.
   __ add(SP, SP, Operand(kWordSize));  // Restore SP.
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2201,14 +2219,14 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrdFstrdLargeOffset, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 43.0);
   __ LoadDImmediate(V1, 42.0);
   __ sub(SP, SP, Operand(512*kWordSize));
   __ fstrd(V1, Address(SP, 512*kWordSize, Address::Offset));
   __ add(SP, SP, Operand(512*kWordSize));
   __ fldrd(V0, Address(SP));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2220,7 +2238,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrdFstrdExtReg, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 43.0);
   __ LoadDImmediate(V1, 42.0);
   __ movz(R2, Immediate(0xfff8), 0);
@@ -2231,7 +2249,7 @@
   __ sub(SP, SP, Operand(kWordSize));
   __ fldrd(V0, Address(SP));
   __ add(SP, SP, Operand(kWordSize));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2243,7 +2261,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(FldrdFstrdScaledReg, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 43.0);
   __ LoadDImmediate(V1, 42.0);
   __ movz(R2, Immediate(10), 0);
@@ -2252,7 +2270,7 @@
   __ fstrd(V1, Address(SP, R2, UXTX, Address::Scaled));
   __ fldrd(V0, Address(SP, R2, UXTX, Address::Scaled));
   __ add(SP, SP, Operand(10*kWordSize));
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2707,7 +2725,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(Vdupd, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 21.0);
   __ vdupd(V1, V0, 0);
 
@@ -2719,7 +2737,7 @@
   __ fldrd(V3, Address(SP, 1 * dword_bytes, Address::PostIndex));
 
   __ faddd(V0, V2, V3);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2731,7 +2749,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(Vdups, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 21.0);
   __ fcvtsd(V0, V0);
   __ vdups(V1, V0, 0);
@@ -2753,7 +2771,7 @@
   __ faddd(V0, V1, V1);
   __ faddd(V0, V0, V2);
   __ faddd(V0, V0, V3);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2765,7 +2783,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(Vinsd, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V5, 42.0);
   __ vinsd(V1, 1, V5, 0);  // V1[1] <- V0[0].
 
@@ -2777,7 +2795,7 @@
   __ fldrd(V3, Address(SP, 1 * dword_bytes, Address::PostIndex));
 
   __ fmovdd(V0, V3);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -2789,7 +2807,7 @@
 
 
 ASSEMBLER_TEST_GENERATE(Vinss, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ LoadDImmediate(V0, 21.0);
   __ fcvtsd(V0, V0);
   __ vinss(V1, 3, V0, 0);
@@ -2812,7 +2830,7 @@
   __ faddd(V0, V0, V1);
   __ faddd(V0, V0, V2);
   __ faddd(V0, V0, V3);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -3601,7 +3619,7 @@
 // R1: growable array.
 // R2: current thread.
 ASSEMBLER_TEST_GENERATE(StoreIntoObject, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   __ Push(CODE_REG);
   __ Push(THR);
   __ Push(LR);
@@ -3612,13 +3630,13 @@
   __ Pop(LR);
   __ Pop(THR);
   __ Pop(CODE_REG);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
 
 ASSEMBLER_TEST_GENERATE(ComputeRange, assembler) {
-  __ SetupDartSP(kTestStackSpace);
+  __ SetupDartSP();
   EnterTestFrame(assembler);
   Label miss, done;
   __ ComputeRange(R0, R2, R3, &miss);
@@ -3629,7 +3647,7 @@
 
   __ Bind(&done);
   LeaveTestFrame(assembler);
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
diff --git a/runtime/vm/assembler_dbc_test.cc b/runtime/vm/assembler_dbc_test.cc
index 11e579e..180437e 100644
--- a/runtime/vm/assembler_dbc_test.cc
+++ b/runtime/vm/assembler_dbc_test.cc
@@ -32,6 +32,82 @@
 #define __ assembler->
 
 
+static RawClass* CreateDummyClass(const String& class_name,
+                                  const Script& script) {
+  const Class& cls = Class::Handle(Class::New(
+      Library::Handle(), class_name, script, TokenPosition::kNoSource));
+  cls.set_is_synthesized_class();  // Dummy class for testing.
+  return cls.raw();
+}
+
+
+static RawLibrary* CreateDummyLibrary(const String& library_name) {
+  return Library::New(library_name);
+}
+
+
+static RawFunction* CreateFunction(const char* name) {
+  Thread* thread = Thread::Current();
+  const String& class_name = String::Handle(Symbols::New(thread, "ownerClass"));
+  const String& lib_name = String::Handle(Symbols::New(thread, "ownerLibrary"));
+  const Script& script = Script::Handle();
+  const Class& owner_class =
+      Class::Handle(CreateDummyClass(class_name, script));
+  const Library& owner_library =
+      Library::Handle(CreateDummyLibrary(lib_name));
+  owner_class.set_library(owner_library);
+  const String& function_name = String::ZoneHandle(Symbols::New(thread, name));
+  return Function::New(function_name, RawFunction::kRegularFunction,
+                       true, false, false, false, false, owner_class,
+                       TokenPosition::kMinSource);
+}
+
+
+static void GenerateDummyCode(Assembler* assembler, const Object& result) {
+  __ PushConstant(result);
+  __ ReturnTOS();
+}
+
+
+static void MakeDummyInstanceCall(Assembler* assembler, const Object& result) {
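+  // Emits an instance call whose target just pushes 'result' and returns, so
+  // tests can observe whether the slot after an arithmetic op was skipped.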
+  // Make a dummy function.
+  Assembler _assembler_;
+  GenerateDummyCode(&_assembler_, result);
+  const char* dummy_function_name = "dummy_instance_function";
+  const Function& dummy_instance_function =
+      Function::Handle(CreateFunction(dummy_function_name));
+  Code& code =
+      Code::Handle(Code::FinalizeCode(dummy_instance_function, &_assembler_));
+  dummy_instance_function.AttachCode(code);
+
+  // Make a dummy ICData.
+  const Array& dummy_arguments_descriptor =
+      Array::Handle(ArgumentsDescriptor::New(2));
+  const ICData& ic_data = ICData::Handle(ICData::New(
+      dummy_instance_function,
+      String::Handle(dummy_instance_function.name()),
+      dummy_arguments_descriptor,
+      Thread::kNoDeoptId,
+      2,
+      /* is_static_call= */ false));
+
+  // Wire up the Function in the ICData.
+  GrowableArray<intptr_t> cids(2);
+  cids.Add(kSmiCid);
+  cids.Add(kSmiCid);
+  ic_data.AddCheck(cids, dummy_instance_function);
+
+  // For the non-Smi tests.
+  cids[0] = kBigintCid;
+  ic_data.AddCheck(cids, dummy_instance_function);
+  ICData* call_ic_data = &ICData::ZoneHandle(ic_data.Original());
+
+  // Generate the instance call.
+  const intptr_t call_ic_data_kidx = __ AddConstant(*call_ic_data);
+  __ InstanceCall2(2, call_ic_data_kidx);
+}
+
+
 ASSEMBLER_TEST_GENERATE(Simple, assembler) {
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
@@ -43,6 +119,22 @@
 }
 
 
+ASSEMBLER_TEST_GENERATE(Nop, assembler) {
+  __ PushConstant(Smi::Handle(Smi::New(42)));
+  __ Nop(0);
+  __ Nop(0);
+  __ Nop(0);
+  __ Nop(0);
+  __ Nop(0);
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(Nop, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
 // Called from assembler_test.cc.
 // FP[-kParamEndSlotFromFp - 1]: growable array
 // FP[-kParamEndSlotFromFp - 2]: value
@@ -55,6 +147,55 @@
 }
 
 
+//  - OneByteStringFromCharCode rA, rX
+//
+//    Load the one-character symbol with the char code given by the Smi
+//    in FP[rX] into FP[rA].
+ASSEMBLER_TEST_GENERATE(OneByteStringFromCharCode, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::ZoneHandle(Smi::New(65)));
+  __ OneByteStringFromCharCode(1, 0);
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(OneByteStringFromCharCode, test) {
+  EXPECT_EQ(Symbols::New(Thread::Current(), "A"),
+            EXECUTE_TEST_CODE_OBJECT(test->code()).raw());
+}
+
+
+//  - StringToCharCode rA, rX
+//
+//    Load and smi-encode the single char code of the string in FP[rX] into
+//    FP[rA]. If the string's length is not 1, load smi -1 instead.
+//
+ASSEMBLER_TEST_GENERATE(StringToCharCode, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, String::ZoneHandle(String::New("A", Heap::kOld)));
+  __ StringToCharCode(1, 0);
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(StringToCharCode, test) {
+  EXPECT_EQ(65, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(StringToCharCodeIllegalLength, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, String::ZoneHandle(String::New("AAA", Heap::kOld)));
+  __ StringToCharCode(1, 0);
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(StringToCharCodeIllegalLength, test) {
+  EXPECT_EQ(-1, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
 //  - AddTOS; SubTOS; MulTOS; BitOrTOS; BitAndTOS; EqualTOS; LessThanTOS;
 //    GreaterThanTOS;
 //
@@ -66,7 +207,8 @@
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ PushConstant(Smi::Handle(Smi::New(84)));
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -80,7 +222,8 @@
   __ PushConstant(Smi::Handle(Smi::New(Smi::kMaxValue)));
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -96,7 +239,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -110,7 +254,8 @@
   __ PushConstant(Smi::Handle(Smi::New(30)));
   __ PushConstant(Smi::Handle(Smi::New(-12)));
   __ SubTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -124,7 +269,8 @@
   __ PushConstant(Smi::Handle(Smi::New(Smi::kMinValue)));
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ SubTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -140,7 +286,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ SubTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -154,7 +301,8 @@
   __ PushConstant(Smi::Handle(Smi::New(-6)));
   __ PushConstant(Smi::Handle(Smi::New(-7)));
   __ MulTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -168,7 +316,8 @@
   __ PushConstant(Smi::Handle(Smi::New(Smi::kMaxValue)));
   __ PushConstant(Smi::Handle(Smi::New(-8)));
   __ MulTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -184,7 +333,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ MulTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -198,7 +348,8 @@
   __ PushConstant(Smi::Handle(Smi::New(0x22)));
   __ PushConstant(Smi::Handle(Smi::New(0x08)));
   __ BitOrTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -214,7 +365,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(0x08)));
   __ BitOrTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -228,7 +380,8 @@
   __ PushConstant(Smi::Handle(Smi::New(0x2a)));
   __ PushConstant(Smi::Handle(Smi::New(0xaa)));
   __ BitAndTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -244,7 +397,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(0x08)));
   __ BitAndTOS();
-  __ PushConstant(Smi::Handle(Smi::New(42)));  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(42)));
   __ ReturnTOS();
 }
 
@@ -258,7 +412,8 @@
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ EqualTOS();
-  __ PushConstant(Bool::False());  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Bool::False());
   __ ReturnTOS();
 }
 
@@ -272,7 +427,8 @@
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ EqualTOS();
-  __ PushConstant(Bool::True());  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Bool::True());
   __ ReturnTOS();
 }
 
@@ -288,7 +444,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ EqualTOS();
-  __ PushConstant(Bool::True());  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Bool::True());
   __ ReturnTOS();
 }
 
@@ -302,7 +459,8 @@
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ LessThanTOS();
-  __ PushConstant(Bool::False());  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Bool::False());
   __ ReturnTOS();
 }
 
@@ -316,7 +474,8 @@
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ LessThanTOS();
-  __ PushConstant(Bool::False());  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Bool::False());
   __ ReturnTOS();
 }
 
@@ -332,7 +491,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ LessThanTOS();
-  __ PushConstant(Bool::True());  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Bool::True());
   __ ReturnTOS();
 }
 
@@ -346,7 +506,8 @@
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ GreaterThanTOS();
-  __ PushConstant(Bool::False());  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Bool::False());
   __ ReturnTOS();
 }
 
@@ -360,7 +521,8 @@
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ PushConstant(Smi::Handle(Smi::New(42)));
   __ GreaterThanTOS();
-  __ PushConstant(Bool::False());  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Bool::False());
   __ ReturnTOS();
 }
 
@@ -376,7 +538,8 @@
   __ PushConstant(Integer::Handle(Integer::New(numstr, Heap::kOld)));
   __ PushConstant(Smi::Handle(Smi::New(-42)));
   __ GreaterThanTOS();
-  __ PushConstant(Bool::True());  // Shouldn't be skipped.
+  // Shouldn't be skipped.
+  MakeDummyInstanceCall(assembler, Bool::True());
   __ ReturnTOS();
 }
 
@@ -386,6 +549,413 @@
 }
 
 
+//  - Add, Sub, Mul, Div, Mod, Shl, Shr rA, rB, rC
+//
+//    Arithmetic operations on Smis. FP[rA] <- FP[rB] op FP[rC].
+//    If these instructions can trigger a deoptimization, the following
+//    instruction should be Deopt. If no deoptimization should be triggered,
+//    the immediately following instruction is skipped.
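+//
+//    In the tests below, the slot after the arithmetic op holds a
+//    LoadConstant sentinel: it is skipped when the operation succeeds and
+//    executed when the operation would deoptimize (overflow, division by
+//    zero, negative shift).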
+ASSEMBLER_TEST_GENERATE(AddNoOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(20)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(22)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Add(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(AddNoOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(AddOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(Smi::kMaxValue)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Add(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(AddOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(SubNoOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(64)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(22)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Sub(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(SubNoOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(SubOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(Smi::kMinValue)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Sub(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(SubOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(MulNoOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-6)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-7)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Mul(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(MulNoOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(MulOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(Smi::kMaxValue)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-8)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Mul(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(MulOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(DivNoDeopt, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(27)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(3)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Div(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(DivNoDeopt, test) {
+  EXPECT_EQ(9, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(DivZero, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(3)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(0)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Div(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(DivZero, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(DivCornerCase, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(Smi::kMinValue)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Div(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(DivCornerCase, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ModPosPos, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(42)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(4)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Mod(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ModPosPos, test) {
+  EXPECT_EQ(2, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ModNegPos, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-42)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(4)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Mod(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ModNegPos, test) {
+  EXPECT_EQ(2, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ModPosNeg, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(42)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-4)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Mod(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ModPosNeg, test) {
+  EXPECT_EQ(2, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ModZero, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(3)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(0)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Mod(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ModZero, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ShlNoDeopt, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(21)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Shl(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ShlNoDeopt, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ShlOverflow, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(Smi::kMaxValue)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Shl(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ShlOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ShlNegShift, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(21)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Shl(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ShlNegShift, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ShrNoDeopt, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(84)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Shr(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(-42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ShrNoDeopt, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(ShrNegShift, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(21)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ Shr(2, 0, 1);
+  __ LoadConstant(2, Smi::Handle(Smi::New(42)));
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(ShrNegShift, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+//  - Neg rA, rD
+//
+//    FP[rA] <- -FP[rD]. Assumes FP[rD] is a Smi. If there is no overflow the
+//    immediately following instruction is skipped.
+ASSEMBLER_TEST_GENERATE(NegPos, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(42)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Neg(1, 0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(NegPos, test) {
+  EXPECT_EQ(-42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(NegNeg, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-42)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Neg(1, 0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(NegNeg, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(NegOverflow, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(Smi::kMinValue)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Neg(1, 0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(NegOverflow, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+//  - BitOr, BitAnd, BitXor rA, rB, rC
+//
+//    FP[rA] <- FP[rB] op FP[rC]
+ASSEMBLER_TEST_GENERATE(BitOr, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(0x2)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(0x28)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ BitOr(2, 0, 1);
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(BitOr, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(BitAnd, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(0x2b)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(0x6a)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ BitAnd(2, 0, 1);
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(BitAnd, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(BitXor, assembler) {
+  __ Frame(3);
+  __ LoadConstant(0, Smi::Handle(Smi::New(0x37)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(0x1d)));
+  __ LoadConstant(2, Smi::Handle(Smi::New(-1)));
+  __ BitXor(2, 0, 1);
+  __ Return(2);
+}
+
+
+ASSEMBLER_TEST_RUN(BitXor, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+//  - BitNot rA, rD
+//
+//    FP[rA] <- ~FP[rD]. As above, assumes FP[rD] is a Smi.
+ASSEMBLER_TEST_GENERATE(BitNot, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(~42)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ BitNot(1, 0);
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(BitNot, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
 //  - IfNeStrictTOS; IfEqStrictTOS; IfNeStrictNumTOS; IfEqStrictNumTOS
 //
 //    Skips the next instruction unless the given condition holds. 'Num'
@@ -777,7 +1347,7 @@
   __ PushConstant(Smi::Handle(Smi::New(41)));
   __ DropR(11);
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
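+  // Should be skipped.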
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -840,11 +1410,13 @@
 //    Push FP[rX] to the stack.
 ASSEMBLER_TEST_GENERATE(StoreLocalPush, assembler) {
   __ Frame(1);
+  __ PushConstant(Smi::Handle(Smi::New(37)));
   __ PushConstant(Smi::Handle(Smi::New(21)));
   __ StoreLocal(0);
   __ Push(0);
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -861,7 +1433,8 @@
   __ Push(0);
   __ Push(0);
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -877,7 +1450,8 @@
   __ Push(0);
   __ Push(0);
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -900,7 +1474,8 @@
   __ Push(0);
   __ Push(1);
   __ AddTOS();
-  __ PushConstant(Smi::Handle(Smi::New(0)));  // Should be skipped.
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(0)));
   __ ReturnTOS();
 }
 
@@ -951,14 +1526,16 @@
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ Push(1);
   __ AddTOS();
-  __ Jump(&error);
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(-1)));
   __ PopLocal(1);
 
   // Subtract 1 from FP[0].
   __ Push(0);
   __ PushConstant(Smi::Handle(Smi::New(1)));
   __ SubTOS();
-  __ Jump(&error);
+  // Should be skipped.
+  MakeDummyInstanceCall(assembler, Smi::Handle(Smi::New(-1)));
 
   // Jump to loop_entry if FP[0] != 0.
   __ StoreLocal(0);
@@ -1028,6 +1605,193 @@
   EXPECT_EQ(10, array.Length());
 }
 
+
+//  - TestSmi rA, rD
+//
+//    If FP[rA] & FP[rD] != 0, then skip the next instruction. FP[rA] and FP[rD]
+//    must be Smis.
+ASSEMBLER_TEST_GENERATE(TestSmiTrue, assembler) {
+  Label branch_taken;
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(7)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(3)));
+  __ TestSmi(0, 1);
+  __ Jump(&branch_taken);
+  __ PushConstant(Bool::True());
+  __ ReturnTOS();
+  __ Bind(&branch_taken);
+  __ PushConstant(Bool::False());
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(TestSmiTrue, test) {
+  EXPECT(EXECUTE_TEST_CODE_BOOL(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(TestSmiFalse, assembler) {
+  Label branch_taken;
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(8)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(4)));
+  __ TestSmi(0, 1);
+  __ Jump(&branch_taken);
+  __ PushConstant(Bool::True());
+  __ ReturnTOS();
+  __ Bind(&branch_taken);
+  __ PushConstant(Bool::False());
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(TestSmiFalse, test) {
+  EXPECT(!EXECUTE_TEST_CODE_BOOL(test->code()));
+}
+
+
+//  - CheckSmi rA
+//
+//    If FP[rA] is a Smi, then skip the next instruction.
+ASSEMBLER_TEST_GENERATE(CheckSmiPass, assembler) {
+  __ Frame(1);
+  __ PushConstant(Smi::Handle(Smi::New(42)));
+  __ LoadConstant(0, Smi::Handle(Smi::New(0)));
+  __ CheckSmi(0);
+  __ PushConstant(Smi::Handle(Smi::New(-1)));
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(CheckSmiPass, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(CheckSmiFail, assembler) {
+  __ Frame(1);
+  __ PushConstant(Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(0, Bool::True());
+  __ CheckSmi(0);
+  __ PushConstant(Smi::Handle(Smi::New(42)));
+  __ ReturnTOS();
+}
+
+
+ASSEMBLER_TEST_RUN(CheckSmiFail, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+//  - CheckClassId rA, D
+//
+//    If the object at FP[rA]'s class id matches the class id in PP[D], then
+//    skip the following instruction.
+ASSEMBLER_TEST_GENERATE(CheckClassIdSmiPass, assembler) {
+  __ Frame(1);
+  __ LoadConstant(0, Smi::Handle(Smi::New(42)));
+  __ CheckClassId(0, kSmiCid);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
+  __ Return(0);
+}
+
+
+ASSEMBLER_TEST_RUN(CheckClassIdSmiPass, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(CheckClassIdNonSmiPass, assembler) {
+  __ Frame(1);
+  __ LoadConstant(0, Bool::True());
+  __ CheckClassId(0, kBoolCid);
+  __ LoadConstant(0, Bool::False());
+  __ Return(0);
+}
+
+
+ASSEMBLER_TEST_RUN(CheckClassIdNonSmiPass, test) {
+  EXPECT(EXECUTE_TEST_CODE_BOOL(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(CheckClassIdFail, assembler) {
+  __ Frame(1);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
+  __ CheckClassId(0, kBoolCid);
+  __ LoadConstant(0, Smi::Handle(Smi::New(42)));
+  __ Return(0);
+}
+
+
+ASSEMBLER_TEST_RUN(CheckClassIdFail, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+//  - If<Cond>Null rA
+//
+//    Cond is Eq or Ne. Skips the next instruction unless the given condition
+//    holds.
+ASSEMBLER_TEST_GENERATE(IfEqNullNotNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ IfEqNull(0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfEqNullNotNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(IfEqNullIsNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Object::null_object());
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ IfEqNull(0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfEqNullIsNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(IfNeNullIsNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Object::null_object());
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ IfNeNull(0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfNeNullIsNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
+
+ASSEMBLER_TEST_GENERATE(IfNeNullNotNull, assembler) {
+  __ Frame(2);
+  __ LoadConstant(0, Smi::Handle(Smi::New(-1)));
+  __ LoadConstant(1, Smi::Handle(Smi::New(-1)));
+  __ IfNeNull(0);
+  __ LoadConstant(1, Smi::Handle(Smi::New(42)));
+  __ Return(1);
+}
+
+
+ASSEMBLER_TEST_RUN(IfNeNullNotNull, test) {
+  EXPECT_EQ(42, EXECUTE_TEST_CODE_INTPTR(test->code()));
+}
+
 }  // namespace dart
 
 #endif  // defined(TARGET_ARCH_DBC)
diff --git a/runtime/vm/assembler_ia32.cc b/runtime/vm/assembler_ia32.cc
index e8851ac..1bc37d8 100644
--- a/runtime/vm/assembler_ia32.cc
+++ b/runtime/vm/assembler_ia32.cc
@@ -14,7 +14,6 @@
 #include "vm/runtime_entry.h"
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -1998,9 +1997,6 @@
 
 void Assembler::j(Condition condition, Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 6;
@@ -2043,9 +2039,6 @@
 
 void Assembler::jmp(Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 5;
@@ -2266,105 +2259,13 @@
 }
 
 
-void Assembler::VerifyHeapWord(const Address& address,
-                               FieldContent old_content) {
-#if defined(DEBUG)
-  switch (old_content) {
-    case kEmptyOrSmiOrNull:
-      VerifyUninitialized(address);
-      break;
-    case kHeapObjectOrSmi:
-      VerifyObjectOrSmi(address);
-      break;
-    case kOnlySmi:
-      VerifySmi(address);
-      break;
-  }
-#endif  // DEBUG
-  if (VerifiedMemory::enabled()) {
-    Register addr_reg = EDX;
-    Register value = EBX;
-    // Preserve registers.
-    pushl(addr_reg);
-    pushl(value);
-    leal(addr_reg, address);
-    // ASSERT(*address == *(address + offset))
-    movl(value, Address(addr_reg, 0));
-    cmpl(value, Address(addr_reg, VerifiedMemory::offset()));
-    Label ok;
-    j(EQUAL, &ok, Assembler::kNearJump);
-    Stop("Write barrier verification failed");
-    Bind(&ok);
-    popl(value);
-    popl(addr_reg);
-  }
-}
-
-
-void Assembler::VerifiedWrite(const Address& dest,
-                              Register value,
-                              FieldContent old_content) {
-  VerifyHeapWord(dest, old_content);
-  movl(dest, value);
-  if (VerifiedMemory::enabled()) {
-    Register temp = (value == EDX) ? ECX : EDX;
-    pushl(temp);
-    leal(temp, dest);
-    movl(Address(temp, VerifiedMemory::offset()), value);
-    popl(temp);
-  }
-}
-
-
-#if defined(DEBUG)
-void Assembler::VerifyObjectOrSmi(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Verify object pointer is word-aligned when untagged.
-  COMPILE_ASSERT(kHeapObjectTag == 1);
-  testb(dest, Immediate((kWordSize - 1) - kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  Stop("Expected heap object or Smi");
-  Bind(&ok);
-}
-
-
-void Assembler::VerifyUninitialized(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Check for the special zap word or null.
-#if defined(DEBUG)
-  cmpl(dest, Immediate(Heap::kZap32Bits));
-  j(EQUAL, &ok, Assembler::kNearJump);
-#else
-#error Only supported in DEBUG mode
-#endif
-  cmpl(dest, Immediate(reinterpret_cast<uint32_t>(Object::null())));
-  j(EQUAL, &ok, Assembler::kNearJump);
-  Stop("Expected zapped, Smi or null");
-  Bind(&ok);
-}
-
-
-void Assembler::VerifySmi(const Address& dest, const char* stop_msg) {
-  Label done;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &done, Assembler::kNearJump);
-  Stop(stop_msg);
-  Bind(&done);
-}
-#endif  // defined(DEBUG)
-
-
 // Destroys the value register.
 void Assembler::StoreIntoObject(Register object,
                                 const Address& dest,
                                 Register value,
                                 bool can_value_be_smi) {
   ASSERT(object != value);
-  VerifiedWrite(dest, value, kHeapObjectOrSmi);
+  movl(dest, value);
   Label done;
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
@@ -2388,9 +2289,8 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         Register value,
-                                         FieldContent old_content) {
-  VerifiedWrite(dest, value, old_content);
+                                         Register value) {
+  movl(dest, value);
 #if defined(DEBUG)
   Label done;
   pushl(value);
@@ -2418,30 +2318,14 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         const Object& value,
-                                         FieldContent old_content) {
+                                         const Object& value) {
   ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
   ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
-  VerifyHeapWord(dest, old_content);
   if (value.IsSmi() || value.InVMHeap()) {
     Immediate imm_value(reinterpret_cast<int32_t>(value.raw()));
     movl(dest, imm_value);
-    if (VerifiedMemory::enabled()) {
-      Register temp = ECX;
-      pushl(temp);
-      leal(temp, dest);
-      movl(Address(temp, VerifiedMemory::offset()), imm_value);
-      popl(temp);
-    }
   } else {
     UnverifiedStoreOldObject(dest, value);
-    if (VerifiedMemory::enabled()) {
-      Register temp = EDX;
-      pushl(temp);
-      leal(temp, dest);
-      UnverifiedStoreOldObject(Address(temp, VerifiedMemory::offset()), value);
-      popl(temp);
-    }
   }
   // No store buffer update.
 }
@@ -2455,37 +2339,21 @@
   Stop("New value must be Smi.");
   Bind(&done);
 #endif  // defined(DEBUG)
-  VerifiedWrite(dest, value, kOnlySmi);
+  movl(dest, value);
 }
 
 
 void Assembler::ZeroInitSmiField(const Address& dest) {
-  VerifyHeapWord(dest, kEmptyOrSmiOrNull);
   Immediate zero(Smi::RawValue(0));
   movl(dest, zero);
-  if (VerifiedMemory::enabled()) {
-    Register temp = ECX;
-    pushl(temp);
-    leal(temp, dest);
-    movl(Address(temp, VerifiedMemory::offset()), zero);
-    popl(temp);
-  }
 }
 
 
 void Assembler::IncrementSmiField(const Address& dest, int32_t increment) {
   // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
   // the length of this instruction sequence.
-  VerifyHeapWord(dest, kOnlySmi);
   Immediate inc_imm(Smi::RawValue(increment));
   addl(dest, inc_imm);
-  if (VerifiedMemory::enabled()) {
-    Register temp = ECX;
-    pushl(temp);
-    leal(temp, dest);
-    addl(Address(temp, VerifiedMemory::offset()), inc_imm);
-    popl(temp);
-  }
 }
 
 
@@ -2771,9 +2639,10 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure, near_jump);
+    NOT_IN_PRODUCT(
+      MaybeTraceAllocation(cls.id(), temp_reg, failure, near_jump));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     movl(temp_reg, Address(THR, Thread::heap_offset()));
     movl(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     addl(instance_reg, Immediate(instance_size));
@@ -2783,7 +2652,7 @@
     // Successfully allocated the object, now update top to point to
     // next object start and store the class in the class field of object.
     movl(Address(temp_reg, Heap::TopOffset(space)), instance_reg);
-    UpdateAllocationStats(cls.id(), temp_reg, space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), temp_reg, space));
     ASSERT(instance_size >= kHeapObjectTag);
     subl(instance_reg, Immediate(instance_size - kHeapObjectTag));
     uword tags = 0;
@@ -2810,8 +2679,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp_reg, failure, near_jump);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp_reg, failure, near_jump));
+    Heap::Space space = Heap::kNew;
     movl(temp_reg, Address(THR, Thread::heap_offset()));
     movl(instance, Address(temp_reg, Heap::TopOffset(space)));
     movl(end_address, instance);
@@ -2829,7 +2698,8 @@
     // next object start and initialize the object.
     movl(Address(temp_reg, Heap::TopOffset(space)), end_address);
     addl(instance, Immediate(kHeapObjectTag));
-    UpdateAllocationStatsWithSize(cid, instance_size, temp_reg, space);
+    NOT_IN_PRODUCT(
+        UpdateAllocationStatsWithSize(cid, instance_size, temp_reg, space));
 
     // Initialize the tags.
     uword tags = 0;
diff --git a/runtime/vm/assembler_ia32.h b/runtime/vm/assembler_ia32.h
index 5753792..77f77a6 100644
--- a/runtime/vm/assembler_ia32.h
+++ b/runtime/vm/assembler_ia32.h
@@ -636,12 +636,10 @@
     return 0xCCCCCCCC;
   }
 
-  // Note: verified_mem mode forces far jumps.
   void j(Condition condition, Label* label, bool near = kFarJump);
   void j(Condition condition, const ExternalLabel* label);
 
   void jmp(Register reg);
-  // Note: verified_mem mode forces far jumps.
   void jmp(Label* label, bool near = kFarJump);
   void jmp(const ExternalLabel* label);
 
@@ -677,14 +675,6 @@
   void CompareObject(Register reg, const Object& object);
   void LoadDoubleConstant(XmmRegister dst, double value);
 
-  // When storing into a heap object field, knowledge of the previous content
-  // is expressed through these constants.
-  enum FieldContent {
-    kEmptyOrSmiOrNull,  // Empty = garbage/zapped in release/debug mode.
-    kHeapObjectOrSmi,
-    kOnlySmi,
-  };
-
   void StoreIntoObject(Register object,  // Object we are storing into.
                        const Address& dest,  // Where we are storing into.
                        Register value,  // Value we are storing.
@@ -692,22 +682,10 @@
 
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                Register value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                Register value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                Register value);
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                const Object& value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                const Object& value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                const Object& value);
 
   // Stores a Smi value into a heap object field that always contains a Smi.
   void StoreIntoSmiField(const Address& dest, Register value);
@@ -1009,17 +987,6 @@
   void StoreIntoObjectFilterNoSmi(Register object,
                                   Register value,
                                   Label* no_update);
-#if defined(DEBUG)
-  void VerifyUninitialized(const Address& address);
-  void VerifyObjectOrSmi(const Address& address);
-  void VerifySmi(const Address& address, const char* stop_msg = "Expected Smi");
-#endif  // DEBUG
-  // Like VerifiedMemory::Verify(address, kWordSize) and ::Write, but also,
-  // in DEBUG mode, verifies that 'address' has content of type 'old_content'.
-  void VerifyHeapWord(const Address& address, FieldContent old_content);
-  void VerifiedWrite(const Address& dest,
-                     Register value,
-                     FieldContent old_content);
   void UnverifiedStoreOldObject(const Address& dest, const Object& value);
 
   int32_t jit_cookie();
diff --git a/runtime/vm/assembler_mips.cc b/runtime/vm/assembler_mips.cc
index c9c6549..ffd927d 100644
--- a/runtime/vm/assembler_mips.cc
+++ b/runtime/vm/assembler_mips.cc
@@ -981,9 +981,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), temp_reg, failure);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), temp_reg, failure));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     lw(temp_reg, Address(THR, Thread::heap_offset()));
     lw(instance_reg, Address(temp_reg, Heap::TopOffset(space)));
     // TODO(koda): Protect against unsigned overflow here.
@@ -1000,7 +1000,7 @@
 
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, -instance_size + kHeapObjectTag);
-    UpdateAllocationStats(cls.id(), temp_reg, space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), temp_reg, space));
     uword tags = 0;
     tags = RawObject::SizeTag::update(instance_size, tags);
     ASSERT(cls.id() != kIllegalCid);
@@ -1024,10 +1024,10 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, temp1, failure);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, temp1, failure));
     Isolate* isolate = Isolate::Current();
     Heap* heap = isolate->heap();
-    Heap::Space space = heap->SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     lw(temp1, Address(THR, Thread::heap_offset()));
     // Potential new object start.
     lw(instance, Address(temp1, heap->TopOffset(space)));
@@ -1047,7 +1047,7 @@
     sw(end_address, Address(temp1, Heap::TopOffset(space)));
     addiu(instance, instance, Immediate(kHeapObjectTag));
     LoadImmediate(temp1, instance_size);
-    UpdateAllocationStatsWithSize(cid, temp1, temp2, space);
+    NOT_IN_PRODUCT(UpdateAllocationStatsWithSize(cid, temp1, temp2, space));
 
     // Initialize the tags.
     // instance: new object start as a tagged pointer.
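
The MaybeTraceAllocation, UpdateAllocationStats and UpdateAllocationStatsWithSize calls in this hunk (and in the x64 hunks below) are now wrapped in NOT_IN_PRODUCT so that allocation tracing and class-heap statistics are compiled out of product builds. As a minimal sketch, assuming such a macro keys off a PRODUCT define (illustrative only; the VM's actual definition lives in its platform headers and may differ), the pattern looks like this, with UpdateStatsExample as a hypothetical stand-in:

// Sketch of a NOT_IN_PRODUCT-style conditional-compilation macro.
#include <cstdio>

#if defined(PRODUCT)
#define NOT_IN_PRODUCT(code)       // Expands to nothing in product builds.
#else
#define NOT_IN_PRODUCT(code) code  // Keeps the code in debug/release builds.
#endif

// Hypothetical stand-in for an allocation-stats update.
static void UpdateStatsExample() {
  std::printf("allocation stats updated\n");
}

int main() {
  // Disappears entirely when PRODUCT is defined; runs normally otherwise.
  NOT_IN_PRODUCT(UpdateStatsExample());
  return 0;
}
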
diff --git a/runtime/vm/assembler_x64.cc b/runtime/vm/assembler_x64.cc
index 67bc237..c266bf6 100644
--- a/runtime/vm/assembler_x64.cc
+++ b/runtime/vm/assembler_x64.cc
@@ -2489,9 +2489,6 @@
 
 void Assembler::j(Condition condition, Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 6;
@@ -2546,9 +2543,6 @@
 
 void Assembler::jmp(Label* label, bool near) {
   AssemblerBuffer::EnsureCapacity ensured(&buffer_);
-  if (VerifiedMemory::enabled()) {
-    near = Assembler::kFarJump;
-  }
   if (label->IsBound()) {
     static const int kShortSize = 2;
     static const int kLongSize = 5;
@@ -2944,109 +2938,12 @@
 }
 
 
-void Assembler::VerifyHeapWord(const Address& address,
-                               FieldContent old_content) {
-#if defined(DEBUG)
-  switch (old_content) {
-    case kEmptyOrSmiOrNull:
-      VerifyUninitialized(address);
-      break;
-    case kHeapObjectOrSmi:
-      VerifyObjectOrSmi(address);
-      break;
-    case kOnlySmi:
-      VerifySmi(address);
-      break;
-  }
-#endif  // DEBUG
-  if (VerifiedMemory::enabled()) {
-    Register addr_reg = RDX;
-    Register value = RBX;
-    // Preserve registers.
-    pushq(addr_reg);
-    pushq(value);
-    leaq(addr_reg, address);
-    // ASSERT(*address == *(address + offset))
-    movq(value, Address(addr_reg, 0));
-    cmpq(value, Address(addr_reg, VerifiedMemory::offset()));
-    Label ok;
-    j(EQUAL, &ok);
-    static const bool kFixedLengthEncoding = true;
-    Stop("Write barrier verification failed", kFixedLengthEncoding);
-    Bind(&ok);
-    popq(value);
-    popq(addr_reg);
-  }
-}
-
-
-void Assembler::VerifiedWrite(const Address& dest,
-                              Register value,
-                              FieldContent old_content) {
-  VerifyHeapWord(dest, old_content);
-  movq(dest, value);
-  if (VerifiedMemory::enabled()) {
-    Register temp = (value == RDX) ? RCX : RDX;
-    pushq(temp);
-    leaq(temp, dest);
-    movq(Address(temp, VerifiedMemory::offset()), value);
-    popq(temp);
-  }
-}
-
-
-#if defined(DEBUG)
-void Assembler::VerifyObjectOrSmi(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Verify object pointer is word-aligned when untagged.
-  COMPILE_ASSERT(kHeapObjectTag == 1);
-  testb(dest, Immediate((kWordSize - 1) - kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  static const bool kFixedLengthEncoding = true;
-  Stop("Expected heap object or Smi", kFixedLengthEncoding);
-  Bind(&ok);
-}
-
-
-void Assembler::VerifyUninitialized(const Address& dest) {
-  Label ok;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &ok, Assembler::kNearJump);
-  // Non-smi case: Check for the special zap word or null.
-#if defined(DEBUG)
-  cmpq(dest, Immediate(Heap::kZap64Bits));
-  j(EQUAL, &ok, Assembler::kNearJump);
-#else
-#error Only supported in DEBUG mode
-#endif
-  LoadObject(TMP, Object::null_object());
-  cmpq(dest, TMP);
-  j(EQUAL, &ok, Assembler::kNearJump);
-  static const bool kFixedLengthEncoding = true;
-  Stop("Expected zapped, Smi or null", kFixedLengthEncoding);
-  Bind(&ok);
-}
-
-
-void Assembler::VerifySmi(const Address& dest, const char* stop_msg) {
-  Label done;
-  testb(dest, Immediate(kHeapObjectTag));
-  j(ZERO, &done, Assembler::kNearJump);
-  static const bool kFixedLengthEncoding = true;
-  Stop(stop_msg, kFixedLengthEncoding);
-  Bind(&done);
-}
-#endif  // defined(DEBUG)
-
-
 void Assembler::StoreIntoObject(Register object,
                                 const Address& dest,
                                 Register value,
                                 bool can_value_be_smi) {
   ASSERT(object != value);
-  VerifiedWrite(dest, value, kHeapObjectOrSmi);
+  movq(dest, value);
   Label done;
   if (can_value_be_smi) {
     StoreIntoObjectFilter(object, value, &done);
@@ -3071,9 +2968,8 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         Register value,
-                                         FieldContent old_content) {
-  VerifiedWrite(dest, value, old_content);
+                                         Register value) {
+  movq(dest, value);
 #if defined(DEBUG)
   Label done;
   pushq(value);
@@ -3088,22 +2984,10 @@
 
 void Assembler::StoreIntoObjectNoBarrier(Register object,
                                          const Address& dest,
-                                         const Object& value,
-                                         FieldContent old_content) {
+                                         const Object& value) {
   ASSERT(!value.IsICData() || ICData::Cast(value).IsOriginal());
   ASSERT(!value.IsField() || Field::Cast(value).IsOriginal());
-  VerifyHeapWord(dest, old_content);
-  if (VerifiedMemory::enabled()) {
-    const Register temp = RCX;
-    pushq(temp);
-    leaq(temp, dest);
-    StoreObject(Address(temp, 0), value);
-    StoreObject(Address(temp, VerifiedMemory::offset()), value);
-    popq(temp);
-  } else {
-    StoreObject(dest, value);
-  }
-  // TODO(koda): Use 'object', verify that generational barrier's not needed.
+  StoreObject(dest, value);
 }
 
 
@@ -3115,39 +2999,21 @@
   Stop("New value must be Smi.");
   Bind(&done);
 #endif  // defined(DEBUG)
-  VerifiedWrite(dest, value, kOnlySmi);
+  movq(dest, value);
 }
 
 
 void Assembler::ZeroInitSmiField(const Address& dest) {
-  // TODO(koda): Add VerifySmi once we distinguish initalization.
-  VerifyHeapWord(dest, kEmptyOrSmiOrNull);
   Immediate zero(Smi::RawValue(0));
   movq(dest, zero);
-  if (VerifiedMemory::enabled()) {
-    Register temp = RCX;
-    pushq(temp);
-    leaq(temp, dest);
-    movq(Address(temp, VerifiedMemory::offset()), zero);
-    popq(temp);
-  }
 }
 
 
 void Assembler::IncrementSmiField(const Address& dest, int64_t increment) {
   // Note: FlowGraphCompiler::EdgeCounterIncrementSizeInBytes depends on
   // the length of this instruction sequence.
-  // TODO(koda): Add VerifySmi once we distinguish initalization.
-  VerifyHeapWord(dest, kOnlySmi);
   Immediate inc_imm(Smi::RawValue(increment));
   addq(dest, inc_imm);
-  if (VerifiedMemory::enabled()) {
-    Register temp = RCX;
-    pushq(temp);
-    leaq(temp, dest);
-    addq(Address(temp, VerifiedMemory::offset()), inc_imm);
-    popq(temp);
-  }
 }
 
 
@@ -3521,9 +3387,9 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cls.id(), failure, near_jump);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cls.id(), failure, near_jump));
     const intptr_t instance_size = cls.instance_size();
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     movq(temp, Address(THR, Thread::heap_offset()));
     movq(instance_reg, Address(temp, Heap::TopOffset(space)));
     addq(instance_reg, Immediate(instance_size));
@@ -3533,7 +3399,7 @@
     // Successfully allocated the object, now update top to point to
     // next object start and store the class in the class field of object.
     movq(Address(temp, Heap::TopOffset(space)), instance_reg);
-    UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(UpdateAllocationStats(cls.id(), space));
     ASSERT(instance_size >= kHeapObjectTag);
     AddImmediate(instance_reg, Immediate(kHeapObjectTag - instance_size));
     uword tags = 0;
@@ -3560,8 +3426,8 @@
     // If this allocation is traced, program will jump to failure path
     // (i.e. the allocation stub) which will allocate the object and trace the
     // allocation call site.
-    MaybeTraceAllocation(cid, failure, near_jump);
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    NOT_IN_PRODUCT(MaybeTraceAllocation(cid, failure, near_jump));
+    Heap::Space space = Heap::kNew;
     movq(temp, Address(THR, Thread::heap_offset()));
     movq(instance, Address(temp, Heap::TopOffset(space)));
     movq(end_address, instance);
@@ -3579,7 +3445,7 @@
     // next object start and initialize the object.
     movq(Address(temp, Heap::TopOffset(space)), end_address);
     addq(instance, Immediate(kHeapObjectTag));
-    UpdateAllocationStatsWithSize(cid, instance_size, space);
+    NOT_IN_PRODUCT(UpdateAllocationStatsWithSize(cid, instance_size, space));
 
     // Initialize the tags.
     // instance: new object start as a tagged pointer.
diff --git a/runtime/vm/assembler_x64.h b/runtime/vm/assembler_x64.h
index 5f5aea4..f31d37b 100644
--- a/runtime/vm/assembler_x64.h
+++ b/runtime/vm/assembler_x64.h
@@ -686,12 +686,10 @@
     return 0xCCCCCCCCCCCCCCCC;
   }
 
-  // Note: verified_mem mode forces far jumps.
   void j(Condition condition, Label* label, bool near = kFarJump);
 
   void jmp(Register reg);
   void jmp(const Address& address);
-  // Note: verified_mem mode forces far jumps.
   void jmp(Label* label, bool near = kFarJump);
   void jmp(const ExternalLabel* label);
   void jmp(const StubEntry& stub_entry);
@@ -784,14 +782,6 @@
   void PushObject(const Object& object);
   void CompareObject(Register reg, const Object& object);
 
-  // When storing into a heap object field, knowledge of the previous content
-  // is expressed through these constants.
-  enum FieldContent {
-    kEmptyOrSmiOrNull,  // Empty = garbage/zapped in release/debug mode.
-    kHeapObjectOrSmi,
-    kOnlySmi,
-  };
-
   // Destroys value.
   void StoreIntoObject(Register object,  // Object we are storing into.
                        const Address& dest,  // Where we are storing into.
@@ -800,22 +790,10 @@
 
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                Register value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                Register value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                Register value);
   void StoreIntoObjectNoBarrier(Register object,
                                 const Address& dest,
-                                const Object& value,
-                                FieldContent old_content = kHeapObjectOrSmi);
-  void InitializeFieldNoBarrier(Register object,
-                                const Address& dest,
-                                const Object& value) {
-    return StoreIntoObjectNoBarrier(object, dest, value, kEmptyOrSmiOrNull);
-  }
+                                const Object& value);
 
   // Stores a Smi value into a heap object field that always contains a Smi.
   void StoreIntoSmiField(const Address& dest, Register value);
@@ -1107,17 +1085,6 @@
   void StoreIntoObjectFilterNoSmi(Register object,
                                   Register value,
                                   Label* no_update);
-#if defined(DEBUG)
-  void VerifyUninitialized(const Address& address);
-  void VerifyObjectOrSmi(const Address& address);
-  void VerifySmi(const Address& address, const char* stop_msg = "Expected Smi");
-#endif  // DEBUG
-  // Like VerifiedMemory::Verify(address, kWordSize) and ::Write, but also,
-  // in DEBUG mode, verifies that 'address' has content of type 'old_content'.
-  void VerifyHeapWord(const Address& address, FieldContent old_content);
-  void VerifiedWrite(const Address& dest,
-                     Register value,
-                     FieldContent old_content);
   // Unaware of write barrier (use StoreInto* methods for storing to objects).
   void MoveImmediate(const Address& dst, const Immediate& imm);
 
diff --git a/runtime/vm/assert_test.cc b/runtime/vm/assert_test.cc
index 625f23b..dc935f1 100644
--- a/runtime/vm/assert_test.cc
+++ b/runtime/vm/assert_test.cc
@@ -46,3 +46,18 @@
   EXPECT_FLOAT_EQ(15.43, 15.44, 0.01);
   EXPECT_FLOAT_EQ(1.43, 1.43, 0.00);
 }
+
+
+UNIT_TEST_CASE(Fail0) {
+  FAIL("This test fails");
+}
+
+
+UNIT_TEST_CASE(Fail1) {
+  FAIL1("This test fails with one argument: %d", 4);
+}
+
+
+UNIT_TEST_CASE(Fail2) {
+  FAIL2("This test fails with two arguments: %d, %d", -100, 42);
+}
diff --git a/runtime/vm/ast.h b/runtime/vm/ast.h
index 790e94c..972263c 100644
--- a/runtime/vm/ast.h
+++ b/runtime/vm/ast.h
@@ -540,15 +540,16 @@
   ClosureNode(TokenPosition token_pos,
               const Function& function,
               AstNode* receiver,  // Non-null for implicit instance closures.
-              LocalScope* scope)  // Null for implicit closures.
+              LocalScope* scope)  // Null for implicit closures or functions
+                                  // that already have a ContextScope because
+                                  // they were compiled before.
       : AstNode(token_pos),
         function_(function),
         receiver_(receiver),
         scope_(scope),
         is_deferred_reference_(false) {
     ASSERT(function_.IsZoneHandle());
-    ASSERT((function_.IsNonImplicitClosureFunction() &&
-            (receiver_ == NULL) && (scope_ != NULL)) ||
+    ASSERT((function_.IsNonImplicitClosureFunction() && (receiver_ == NULL)) ||
            (function_.IsImplicitInstanceClosureFunction() &&
             (receiver_ != NULL) && (scope_ == NULL)) ||
            (function_.IsImplicitStaticClosureFunction() &&
@@ -1242,11 +1243,13 @@
   StoreInstanceFieldNode(TokenPosition token_pos,
                          AstNode* instance,
                          const Field& field,
-                         AstNode* value)
+                         AstNode* value,
+                         bool is_initializer)
       : AstNode(token_pos),
         instance_(instance),
         field_(*MayCloneField(field)),
-        value_(value) {
+        value_(value),
+        is_initializer_(is_initializer) {
     ASSERT(instance_ != NULL);
     ASSERT(field_.IsZoneHandle());
     ASSERT(value_ != NULL);
@@ -1255,6 +1258,7 @@
   AstNode* instance() const { return instance_; }
   const Field& field() const { return field_; }
   AstNode* value() const { return value_; }
+  bool is_initializer() const { return is_initializer_; }
 
   virtual void VisitChildren(AstNodeVisitor* visitor) const {
     instance()->Visit(visitor);
@@ -1267,6 +1271,7 @@
   AstNode* instance_;
   const Field& field_;
   AstNode* value_;
+  const bool is_initializer_;
 
   DISALLOW_IMPLICIT_CONSTRUCTORS(StoreInstanceFieldNode);
 };
diff --git a/runtime/vm/atomic.h b/runtime/vm/atomic.h
index c182a84..b6e5f28 100644
--- a/runtime/vm/atomic.h
+++ b/runtime/vm/atomic.h
@@ -77,6 +77,8 @@
 
 #if defined(TARGET_OS_ANDROID)
 #include "vm/atomic_android.h"
+#elif defined(TARGET_OS_FUCHSIA)
+#include "vm/atomic_fuchsia.h"
 #elif defined(TARGET_OS_LINUX)
 #include "vm/atomic_linux.h"
 #elif defined(TARGET_OS_MACOS)
diff --git a/runtime/vm/atomic_fuchsia.h b/runtime/vm/atomic_fuchsia.h
new file mode 100644
index 0000000..d6c0f57
--- /dev/null
+++ b/runtime/vm/atomic_fuchsia.h
@@ -0,0 +1,66 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_ATOMIC_FUCHSIA_H_
+#define VM_ATOMIC_FUCHSIA_H_
+
+#if !defined VM_ATOMIC_H_
+#error Do not include atomic_fuchsia.h directly. Use atomic.h instead.
+#endif
+
+#if !defined(TARGET_OS_FUCHSIA)
+#error This file should only be included on Fuchsia builds.
+#endif
+
+#include "platform/assert.h"
+
+namespace dart {
+
+inline uintptr_t AtomicOperations::FetchAndIncrement(uintptr_t* p) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline void AtomicOperations::IncrementBy(intptr_t* p, intptr_t value) {
+  UNIMPLEMENTED();
+}
+
+
+inline void AtomicOperations::IncrementInt64By(int64_t* p, int64_t value) {
+  UNIMPLEMENTED();
+}
+
+
+inline uintptr_t AtomicOperations::FetchAndDecrement(uintptr_t* p) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline void AtomicOperations::DecrementBy(intptr_t* p, intptr_t value) {
+  UNIMPLEMENTED();
+}
+
+
+#if !defined(USING_SIMULATOR_ATOMICS)
+inline uword AtomicOperations::CompareAndSwapWord(uword* ptr,
+                                                  uword old_value,
+                                                  uword new_value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+inline uint32_t AtomicOperations::CompareAndSwapUint32(uint32_t* ptr,
+                                                       uint32_t old_value,
+                                                       uint32_t new_value) {
+  UNIMPLEMENTED();
+  return 0;
+}
+#endif  // !defined(USING_SIMULATOR_ATOMICS)
+
+}  // namespace dart
+
+#endif  // VM_ATOMIC_FUCHSIA_H_
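
Every operation in this new Fuchsia port is stubbed out with UNIMPLEMENTED(). For illustration only, and assuming a GCC/Clang toolchain, the same operations can be written with the compiler's __sync builtins; this standalone sketch is not part of the patch and is not claimed to match the other platform ports:

// Hypothetical atomic helpers using GCC/Clang __sync builtins.
#include <cassert>
#include <cstdint>

static uintptr_t FetchAndIncrement(uintptr_t* p) {
  // Atomically adds 1 and returns the value observed before the add.
  return __sync_fetch_and_add(p, static_cast<uintptr_t>(1));
}

static uintptr_t CompareAndSwapWord(uintptr_t* ptr,
                                    uintptr_t old_value,
                                    uintptr_t new_value) {
  // Returns the value observed at *ptr; the swap happened iff it equals
  // old_value.
  return __sync_val_compare_and_swap(ptr, old_value, new_value);
}

int main() {
  uintptr_t counter = 41;
  assert(FetchAndIncrement(&counter) == 41);
  assert(counter == 42);
  assert(CompareAndSwapWord(&counter, 42, 0) == 42);
  assert(counter == 0);
  return 0;
}
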
diff --git a/runtime/vm/become.cc b/runtime/vm/become.cc
index 0f4586f..13b7439 100644
--- a/runtime/vm/become.cc
+++ b/runtime/vm/become.cc
@@ -8,7 +8,6 @@
 #include "platform/utils.h"
 
 #include "vm/dart_api_state.h"
-#include "vm/freelist.h"
 #include "vm/isolate_reload.h"
 #include "vm/object.h"
 #include "vm/raw_object.h"
@@ -18,34 +17,54 @@
 
 namespace dart {
 
-DECLARE_FLAG(bool, trace_reload);
+ForwardingCorpse* ForwardingCorpse::AsForwarder(uword addr, intptr_t size) {
+  ASSERT(size >= kObjectAlignment);
+  ASSERT(Utils::IsAligned(size, kObjectAlignment));
+
+  ForwardingCorpse* result = reinterpret_cast<ForwardingCorpse*>(addr);
+
+  uword tags = 0;
+  tags = RawObject::SizeTag::update(size, tags);
+  tags = RawObject::ClassIdTag::update(kForwardingCorpse, tags);
+
+  result->tags_ = tags;
+  if (size > RawObject::SizeTag::kMaxSizeTag) {
+    *result->SizeAddress() = size;
+  }
+  result->set_target(Object::null());
+  return result;
+}
+
+
+void ForwardingCorpse::InitOnce() {
+  ASSERT(sizeof(ForwardingCorpse) == kObjectAlignment);
+  ASSERT(OFFSET_OF(ForwardingCorpse, tags_) == Object::tags_offset());
+}
+
 
 // Free list elements are used as a marker for forwarding objects. This is
 // safe because we cannot reach free list elements from live objects. Ideally
 // forwarding objects would have their own class id. See TODO below.
 static bool IsForwardingObject(RawObject* object) {
-  return object->IsHeapObject() && object->IsFreeListElement();
+  return object->IsHeapObject() && object->IsForwardingCorpse();
 }
 
 
 static RawObject* GetForwardedObject(RawObject* object) {
   ASSERT(IsForwardingObject(object));
   uword addr = reinterpret_cast<uword>(object) - kHeapObjectTag;
-  FreeListElement* forwarder = reinterpret_cast<FreeListElement*>(addr);
-  RawObject* new_target = reinterpret_cast<RawObject*>(forwarder->next());
-  return new_target;
+  ForwardingCorpse* forwarder = reinterpret_cast<ForwardingCorpse*>(addr);
+  return forwarder->target();
 }
 
 
 static void ForwardObjectTo(RawObject* before_obj, RawObject* after_obj) {
   const intptr_t size_before = before_obj->Size();
 
-  // TODO(rmacnak): We should use different cids for forwarding corpses and
-  // free list elements.
   uword corpse_addr = reinterpret_cast<uword>(before_obj) - kHeapObjectTag;
-  FreeListElement* forwarder = FreeListElement::AsElement(corpse_addr,
-                                                          size_before);
-  forwarder->set_next(reinterpret_cast<FreeListElement*>(after_obj));
+  ForwardingCorpse* forwarder = ForwardingCorpse::AsForwarder(corpse_addr,
+                                                              size_before);
+  forwarder->set_target(after_obj);
   if (!IsForwardingObject(before_obj)) {
     FATAL("become: ForwardObjectTo failure.");
   }
@@ -129,49 +148,14 @@
 };
 
 
-#if defined(DEBUG)
-class NoFreeListTargetsVisitor : public ObjectPointerVisitor {
- public:
-  explicit NoFreeListTargetsVisitor(Isolate* isolate)
-      : ObjectPointerVisitor(isolate) { }
-
-  virtual void VisitPointers(RawObject** first, RawObject** last) {
-    for (RawObject** p = first; p <= last; p++) {
-      RawObject* target = *p;
-      if (target->IsHeapObject()) {
-        ASSERT(!target->IsFreeListElement());
-      }
-    }
-  }
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(NoFreeListTargetsVisitor);
-};
-#endif
-
-
 void Become::ElementsForwardIdentity(const Array& before, const Array& after) {
   Thread* thread = Thread::Current();
   Isolate* isolate = thread->isolate();
   Heap* heap = isolate->heap();
 
-  {
-    // TODO(rmacnak): Investigate why this is necessary.
-    heap->CollectGarbage(Heap::kNew);
-  }
-
   TIMELINE_FUNCTION_GC_DURATION(thread, "Become::ElementsForwardIdentity");
   HeapIterationScope his;
 
-#if defined(DEBUG)
-  {
-    // There should be no pointers to free list elements / forwarding corpses.
-    NoFreeListTargetsVisitor visitor(isolate);
-    isolate->VisitObjectPointers(&visitor, true);
-    heap->VisitObjectPointers(&visitor);
-  }
-#endif
-
   // Setup forwarding pointers.
   ASSERT(before.Length() == after.Length());
   for (intptr_t i = 0; i < before.Length(); i++) {
@@ -190,7 +174,10 @@
     if (before_obj->IsVMHeapObject()) {
       FATAL("become: Cannot forward VM heap objects");
     }
-    if (after_obj->IsFreeListElement()) {
+    if (before_obj->IsForwardingCorpse()) {
+      FATAL("become: Cannot forward to multiple targets");
+    }
+    if (after_obj->IsForwardingCorpse()) {
       // The Smalltalk become does allow this, and for very special cases
       // it is important (shape changes to Class or Mixin), but as these
       // cases do not arise in Dart, better to prohibit it.
@@ -216,22 +203,18 @@
     heap->VisitObjects(&object_visitor);
     pointer_visitor.VisitingObject(NULL);
 
-    TIR_Print("Performed %" Pd " heap and %" Pd " handle replacements\n",
-              pointer_visitor.count(),
-              handle_visitor.count());
+#if !defined(PRODUCT)
+    tds.SetNumArguments(2);
+    tds.FormatArgument(0, "Remapped objects", "%" Pd, before.Length());
+    tds.FormatArgument(1, "Remapped references", "%" Pd,
+                       pointer_visitor.count() + handle_visitor.count());
+#endif
   }
 
 #if defined(DEBUG)
   for (intptr_t i = 0; i < before.Length(); i++) {
     ASSERT(before.At(i) == after.At(i));
   }
-
-  {
-    // There should be no pointers to forwarding corpses.
-    NoFreeListTargetsVisitor visitor(isolate);
-    isolate->VisitObjectPointers(&visitor, true);
-    heap->VisitObjectPointers(&visitor);
-  }
 #endif
 }
 
diff --git a/runtime/vm/become.h b/runtime/vm/become.h
index 9773553..b78524d 100644
--- a/runtime/vm/become.h
+++ b/runtime/vm/become.h
@@ -6,11 +6,71 @@
 #define VM_BECOME_H_
 
 #include "vm/allocation.h"
+#include "vm/raw_object.h"
 
 namespace dart {
 
 class Array;
 
+// Objects that are a source in a become are transformed into forwarding
+// corpses pointing to the corresponding target. Forwarding corpses have the
+// same heap size as the source object to ensure the heap remains walkable.
+// If the heap size is small enough to be encoded in the size field of the
+// header, a forwarding corpse consists only of a header and the target pointer.
+// If the heap size is too big to be encoded in the header's size field, the
+// word after the target pointer contains the size.  This is the same
+// representation as a FreeListElement.
+class ForwardingCorpse {
+ public:
+  RawObject* target() const {
+    return target_;
+  }
+  void set_target(RawObject* target) {
+    target_ = target;
+  }
+
+  intptr_t Size() {
+    intptr_t size = RawObject::SizeTag::decode(tags_);
+    if (size != 0) return size;
+    return *SizeAddress();
+  }
+
+  static ForwardingCorpse* AsForwarder(uword addr, intptr_t size);
+
+  static void InitOnce();
+
+  // Used to allocate class for forwarding corpses in Object::InitOnce.
+  class FakeInstance {
+   public:
+    FakeInstance() { }
+    static cpp_vtable vtable() { return 0; }
+    static intptr_t InstanceSize() { return 0; }
+    static intptr_t NextFieldOffset() { return -kWordSize; }
+    static const ClassId kClassId = kForwardingCorpse;
+    static bool IsInstance() { return true; }
+
+   private:
+    DISALLOW_ALLOCATION();
+    DISALLOW_COPY_AND_ASSIGN(FakeInstance);
+  };
+
+ private:
+  // This layout mirrors the layout of RawObject.
+  uword tags_;
+  RawObject* target_;
+
+  // Returns the address of the embedded size.
+  intptr_t* SizeAddress() const {
+    uword addr = reinterpret_cast<uword>(&target_) + kWordSize;
+    return reinterpret_cast<intptr_t*>(addr);
+  }
+
+  // ForwardingCorpses cannot be allocated. Instead references to them are
+  // created using the AsForwarder factory method.
+  DISALLOW_ALLOCATION();
+  DISALLOW_IMPLICIT_CONSTRUCTORS(ForwardingCorpse);
+};
+
 // TODO(johnmccutchan): Refactor this class so that it is not all static and
 // provides utility methods for building the mapping of before and after.
 class Become : public AllStatic {
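
The ForwardingCorpse comment above describes the layout: a header word, the forwarded target, and, only when the size does not fit in the header's size tag, the size in the following word. A minimal self-contained model of following such a forwarding pointer (simplified types and a made-up marker tag, not the VM's RawObject machinery):

// Simplified model of a become-style forwarding corpse: a dead object is
// overwritten in place with a marker plus a pointer to its replacement.
#include <cassert>
#include <cstdint>

struct FakeObject {
  uint64_t tags;       // Stands in for RawObject's header word.
  FakeObject* target;  // For a corpse, the object it now forwards to.
};

// Hypothetical marker value; the VM instead reserves a dedicated class id
// (kForwardingCorpse) in the header tags.
static const uint64_t kCorpseTag = 0xDEADBEEF;

static FakeObject* Follow(FakeObject* obj) {
  return (obj->tags == kCorpseTag) ? obj->target : obj;
}

int main() {
  FakeObject after = {1, nullptr};
  FakeObject before = {2, nullptr};
  // "become": turn 'before' into a corpse whose identity now lives in 'after'.
  before.tags = kCorpseTag;
  before.target = &after;
  assert(Follow(&before) == &after);
  assert(Follow(&after) == &after);
  return 0;
}
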
diff --git a/runtime/vm/benchmark_test.cc b/runtime/vm/benchmark_test.cc
index 1d56af7..b630698 100644
--- a/runtime/vm/benchmark_test.cc
+++ b/runtime/vm/benchmark_test.cc
@@ -11,6 +11,7 @@
 #include "platform/assert.h"
 #include "platform/globals.h"
 
+#include "vm/clustered_snapshot.h"
 #include "vm/compiler_stats.h"
 #include "vm/dart_api_impl.h"
 #include "vm/stack_frame.h"
diff --git a/runtime/vm/bootstrap_natives.h b/runtime/vm/bootstrap_natives.h
index 3855eaa..4bb5e93 100644
--- a/runtime/vm/bootstrap_natives.h
+++ b/runtime/vm/bootstrap_natives.h
@@ -56,6 +56,7 @@
   V(SendPortImpl_get_id, 1)                                                    \
   V(SendPortImpl_get_hashcode, 1)                                              \
   V(SendPortImpl_sendInternal_, 2)                                             \
+  V(Smi_bitAndFromSmi, 2)                                                      \
   V(Smi_shlFromInt, 2)                                                         \
   V(Smi_shrFromInt, 2)                                                         \
   V(Smi_bitNegate, 1)                                                          \
@@ -356,6 +357,7 @@
   V(ClassID_getID, 1)                                                          \
   V(VMService_SendIsolateServiceMessage, 2)                                    \
   V(VMService_SendRootServiceMessage, 1)                                       \
+  V(VMService_SendObjectRootServiceMessage, 1)                                 \
   V(VMService_OnStart, 0)                                                      \
   V(VMService_OnExit, 0)                                                       \
   V(VMService_OnServerAddressChange, 1)                                        \
diff --git a/runtime/vm/class_finalizer.cc b/runtime/vm/class_finalizer.cc
index 027072b..6554d23 100644
--- a/runtime/vm/class_finalizer.cc
+++ b/runtime/vm/class_finalizer.cc
@@ -12,6 +12,7 @@
 #include "vm/log.h"
 #include "vm/object_store.h"
 #include "vm/symbols.h"
+#include "vm/timeline.h"
 
 namespace dart {
 
@@ -118,6 +119,8 @@
 // b) after the user classes are loaded (dart_api).
 bool ClassFinalizer::ProcessPendingClasses() {
   Thread* thread = Thread::Current();
+  NOT_IN_PRODUCT(TimelineDurationScope tds(thread, Timeline::GetIsolateStream(),
+                                           "ProcessPendingClasses"));
   Isolate* isolate = thread->isolate();
   ASSERT(isolate != NULL);
   HANDLESCOPE(thread);
diff --git a/runtime/vm/class_table.cc b/runtime/vm/class_table.cc
index 8cb9fa1..4549fb8 100644
--- a/runtime/vm/class_table.cc
+++ b/runtime/vm/class_table.cc
@@ -2,8 +2,9 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
-#include "vm/atomic.h"
 #include "vm/class_table.h"
+
+#include "vm/atomic.h"
 #include "vm/flags.h"
 #include "vm/freelist.h"
 #include "vm/growable_array.h"
@@ -35,6 +36,7 @@
       table_[i] = vm_class_table->At(i);
     }
     table_[kFreeListElement] = vm_class_table->At(kFreeListElement);
+    table_[kForwardingCorpse] = vm_class_table->At(kForwardingCorpse);
     table_[kDynamicCid] = vm_class_table->At(kDynamicCid);
     table_[kVoidCid] = vm_class_table->At(kVoidCid);
     class_heap_stats_table_ = reinterpret_cast<ClassHeapStats*>(
@@ -142,10 +144,7 @@
 }
 
 
-void ClassTable::RegisterAt(intptr_t index, const Class& cls) {
-  ASSERT(Thread::Current()->IsMutatorThread());
-  ASSERT(index != kIllegalCid);
-  ASSERT(index >= kNumPredefinedCids);
+void ClassTable::AllocateIndex(intptr_t index) {
   if (index >= capacity_) {
     // Grow the capacity of the class table.
     // TODO(koda): Add ClassTable::Grow to share code.
@@ -170,15 +169,24 @@
     class_heap_stats_table_ = new_stats_table;
     ASSERT(capacity_increment_ >= 1);
   }
+
   ASSERT(table_[index] == 0);
-  cls.set_id(index);
-  table_[index] = cls.raw();
   if (index >= top_) {
     top_ = index + 1;
   }
 }
 
 
+void ClassTable::RegisterAt(intptr_t index, const Class& cls) {
+  ASSERT(Thread::Current()->IsMutatorThread());
+  ASSERT(index != kIllegalCid);
+  ASSERT(index >= kNumPredefinedCids);
+  AllocateIndex(index);
+  cls.set_id(index);
+  table_[index] = cls.raw();
+}
+
+
 #if defined(DEBUG)
 void ClassTable::Unregister(intptr_t index) {
   table_[index] = 0;
@@ -218,9 +226,6 @@
     if (!HasValidClassAt(i)) {
       continue;
     }
-    if (i == kFreeListElement) {
-      continue;
-    }
     cls = At(i);
     if (cls.raw() != reinterpret_cast<RawClass*>(0)) {
       name = cls.Name();
@@ -230,6 +235,7 @@
 }
 
 
+#ifndef PRODUCT
 void ClassTable::PrintToJSONObject(JSONObject* object) {
   if (!FLAG_support_service) {
     return;
@@ -246,6 +252,7 @@
     }
   }
 }
+#endif  // PRODUCT
 
 
 void ClassHeapStats::Initialize() {
@@ -325,6 +332,7 @@
 }
 
 
+#ifndef PRODUCT
 void ClassHeapStats::PrintToJSONObject(const Class& cls,
                                        JSONObject* obj) const {
   if (!FLAG_support_service) {
@@ -361,6 +369,7 @@
   obj->AddProperty("promotedInstances", promoted_count);
   obj->AddProperty("promotedBytes", promoted_size);
 }
+#endif
 
 
 void ClassTable::UpdateAllocatedNew(intptr_t cid, intptr_t size) {
@@ -395,7 +404,10 @@
 
 
 ClassHeapStats* ClassTable::StatsWithUpdatedSize(intptr_t cid) {
-  if (!HasValidClassAt(cid) || (cid == kFreeListElement) || (cid == kSmiCid)) {
+  if (!HasValidClassAt(cid) ||
+      (cid == kFreeListElement) ||
+      (cid == kForwardingCorpse) ||
+      (cid == kSmiCid)) {
     return NULL;
   }
   Class& cls = Class::Handle(At(cid));
@@ -471,7 +483,7 @@
 
 
 intptr_t ClassTable::StateOffsetFor(intptr_t cid) {
-  return ClassOffsetFor(cid)+ ClassHeapStats::state_offset();
+  return ClassOffsetFor(cid) + ClassHeapStats::state_offset();
 }
 
 
@@ -484,6 +496,7 @@
 }
 
 
+#ifndef PRODUCT
 void ClassTable::AllocationProfilePrintJSON(JSONStream* stream) {
   if (!FLAG_support_service) {
     return;
@@ -525,6 +538,7 @@
     }
   }
 }
+#endif
 
 
 void ClassTable::ResetAllocationAccumulators() {
diff --git a/runtime/vm/class_table.h b/runtime/vm/class_table.h
index af774a0..9a12fc3e 100644
--- a/runtime/vm/class_table.h
+++ b/runtime/vm/class_table.h
@@ -116,7 +116,9 @@
   void ResetAccumulator();
   void UpdatePromotedAfterNewGC();
   void UpdateSize(intptr_t instance_size);
+#ifndef PRODUCT
   void PrintToJSONObject(const Class& cls, JSONObject* obj) const;
+#endif
   void Verify();
 
   bool trace_allocation() const {
@@ -179,6 +181,8 @@
 
   void Register(const Class& cls);
 
+  void AllocateIndex(intptr_t index);
+
   void RegisterAt(intptr_t index, const Class& cls);
 
 #if defined(DEBUG)
@@ -190,8 +194,9 @@
   void Validate();
 
   void Print();
-
+#ifndef PRODUCT
   void PrintToJSONObject(JSONObject* object);
+#endif
 
   // Used by the generated code.
   static intptr_t table_offset() {
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
new file mode 100644
index 0000000..e938fef
--- /dev/null
+++ b/runtime/vm/clustered_snapshot.cc
@@ -0,0 +1,4998 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/clustered_snapshot.h"
+
+#include "platform/assert.h"
+#include "vm/bootstrap.h"
+#include "vm/class_finalizer.h"
+#include "vm/dart.h"
+#include "vm/dart_entry.h"
+#include "vm/exceptions.h"
+#include "vm/heap.h"
+#include "vm/lockers.h"
+#include "vm/longjump.h"
+#include "vm/native_entry.h"
+#include "vm/object.h"
+#include "vm/object_store.h"
+#include "vm/stub_code.h"
+#include "vm/symbols.h"
+#include "vm/timeline.h"
+#include "vm/version.h"
+
+namespace dart {
+
+static RawObject* AllocateUninitialized(PageSpace* old_space, intptr_t size) {
+  ASSERT(Utils::IsAligned(size, kObjectAlignment));
+  uword address = old_space->TryAllocateDataBumpLocked(size,
+                                                       PageSpace::kForceGrowth);
+  if (address == 0) {
+    FATAL("Out of memory");
+  }
+  return reinterpret_cast<RawObject*>(address + kHeapObjectTag);
+}
+
+
+void Deserializer::InitializeHeader(RawObject* raw,
+                                    intptr_t class_id,
+                                    intptr_t size,
+                                    bool is_vm_isolate,
+                                    bool is_canonical) {
+  ASSERT(Utils::IsAligned(size, kObjectAlignment));
+  uword tags = 0;
+  tags = RawObject::ClassIdTag::update(class_id, tags);
+  tags = RawObject::SizeTag::update(size, tags);
+  tags = RawObject::VMHeapObjectTag::update(is_vm_isolate, tags);
+  tags = RawObject::CanonicalObjectTag::update(is_canonical, tags);
+  raw->ptr()->tags_ = tags;
+}
+
+
+class ClassSerializationCluster : public SerializationCluster {
+ public:
+  explicit ClassSerializationCluster(intptr_t num_cids) :
+      predefined_(kNumPredefinedCids), objects_(num_cids) { }
+  virtual ~ClassSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawClass* cls = Class::RawCast(object);
+    intptr_t class_id = cls->ptr()->id_;
+
+    if (class_id < kNumPredefinedCids) {
+      // These classes are allocated by Object::Init or Object::InitOnce, so the
+      // deserializer must find them in the class table instead of allocating
+      // them.
+      predefined_.Add(cls);
+    } else {
+      objects_.Add(cls);
+    }
+
+    RawObject** from = cls->from();
+    RawObject** to = cls->to_snapshot(s->kind());
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kClassCid);
+    intptr_t count = predefined_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClass* cls = predefined_[i];
+      intptr_t class_id = cls->ptr()->id_;
+      s->Write<intptr_t>(class_id);
+      s->AssignRef(cls);
+    }
+    count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClass* cls = objects_[i];
+      s->AssignRef(cls);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = predefined_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      WriteClass(s, predefined_[i]);
+    }
+    count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      WriteClass(s, objects_[i]);
+    }
+  }
+
+  void WriteClass(Serializer* s, RawClass* cls) {
+    Snapshot::Kind kind = s->kind();
+    RawObject** from = cls->from();
+    RawObject** to = cls->to_snapshot(kind);
+    for (RawObject** p = from; p <= to; p++) {
+      s->WriteRef(*p);
+    }
+    intptr_t class_id = cls->ptr()->id_;
+    s->WriteCid(class_id);
+    s->Write<int32_t>(cls->ptr()->instance_size_in_words_);
+    s->Write<int32_t>(cls->ptr()->next_field_offset_in_words_);
+    s->Write<int32_t>(cls->ptr()->type_arguments_field_offset_in_words_);
+    s->Write<uint16_t>(cls->ptr()->num_type_arguments_);
+    s->Write<uint16_t>(cls->ptr()->num_own_type_arguments_);
+    s->Write<uint16_t>(cls->ptr()->num_native_fields_);
+    s->WriteTokenPosition(cls->ptr()->token_pos_);
+    s->Write<uint16_t>(cls->ptr()->state_bits_);
+  }
+
+ private:
+  GrowableArray<RawClass*> predefined_;
+  GrowableArray<RawClass*> objects_;
+};
+
+
+class ClassDeserializationCluster : public DeserializationCluster {
+ public:
+  ClassDeserializationCluster() { }
+  virtual ~ClassDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    predefined_start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    ClassTable* table = d->isolate()->class_table();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t class_id = d->Read<intptr_t>();
+      ASSERT(table->HasValidClassAt(class_id));
+      RawClass* cls = table->At(class_id);
+      ASSERT(cls != NULL);
+      d->AssignRef(cls);
+    }
+    predefined_stop_index_ = d->next_index();
+
+    start_index_ = d->next_index();
+    count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Class::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    ClassTable* table = d->isolate()->class_table();
+
+    for (intptr_t id = predefined_start_index_;
+         id < predefined_stop_index_;
+         id++) {
+      RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
+      RawObject** from = cls->from();
+      RawObject** to_snapshot = cls->to_snapshot(kind);
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+
+      intptr_t class_id = d->ReadCid();
+      cls->ptr()->id_ = class_id;
+      cls->ptr()->instance_size_in_words_ = d->Read<int32_t>();
+      cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->num_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
+      cls->ptr()->token_pos_ = d->ReadTokenPosition();
+      cls->ptr()->state_bits_ = d->Read<uint16_t>();
+    }
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawClass* cls = reinterpret_cast<RawClass*>(d->Ref(id));
+      Deserializer::InitializeHeader(cls, kClassCid, Class::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = cls->from();
+      RawObject** to_snapshot = cls->to_snapshot(kind);
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      intptr_t class_id = d->ReadCid();
+
+      ASSERT(class_id >= kNumPredefinedCids);
+      Instance fake;
+      cls->ptr()->handle_vtable_ = fake.vtable();
+
+      cls->ptr()->id_ = class_id;
+      cls->ptr()->instance_size_in_words_ = d->Read<int32_t>();
+      cls->ptr()->next_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->type_arguments_field_offset_in_words_ = d->Read<int32_t>();
+      cls->ptr()->num_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_own_type_arguments_ = d->Read<uint16_t>();
+      cls->ptr()->num_native_fields_ = d->Read<uint16_t>();
+      cls->ptr()->token_pos_ = d->ReadTokenPosition();
+      cls->ptr()->state_bits_ = d->Read<uint16_t>();
+
+      table->AllocateIndex(class_id);
+      table->SetAt(class_id, cls);
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadClass"));
+
+    Class& cls = Class::Handle(zone);
+    for (intptr_t i = predefined_start_index_;
+         i < predefined_stop_index_;
+         i++) {
+      cls ^= refs.At(i);
+      cls.RehashConstants(zone);
+    }
+    for (intptr_t i = start_index_; i < stop_index_; i++) {
+      cls ^= refs.At(i);
+      cls.RehashConstants(zone);
+    }
+  }
+
+ private:
+  intptr_t predefined_start_index_;
+  intptr_t predefined_stop_index_;
+};
+
+
+class UnresolvedClassSerializationCluster : public SerializationCluster {
+ public:
+  UnresolvedClassSerializationCluster() { }
+  virtual ~UnresolvedClassSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawUnresolvedClass* cls = UnresolvedClass::RawCast(object);
+    objects_.Add(cls);
+
+    RawObject** from = cls->from();
+    RawObject** to = cls->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kUnresolvedClassCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnresolvedClass* cls = objects_[i];
+      s->AssignRef(cls);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnresolvedClass* cls = objects_[i];
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(cls->ptr()->token_pos_);
+    }
+  }
+
+ private:
+  GrowableArray<RawUnresolvedClass*> objects_;
+};
+
+
+class UnresolvedClassDeserializationCluster : public DeserializationCluster {
+ public:
+  UnresolvedClassDeserializationCluster() { }
+  virtual ~UnresolvedClassDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         UnresolvedClass::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawUnresolvedClass* cls =
+          reinterpret_cast<RawUnresolvedClass*>(d->Ref(id));
+      Deserializer::InitializeHeader(cls, kUnresolvedClassCid,
+                                     UnresolvedClass::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      cls->ptr()->token_pos_ = d->ReadTokenPosition();
+    }
+  }
+};
+
+
+class TypeArgumentsSerializationCluster : public SerializationCluster {
+ public:
+  TypeArgumentsSerializationCluster() { }
+  virtual ~TypeArgumentsSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypeArguments* type_args = TypeArguments::RawCast(object);
+    objects_.Add(type_args);
+
+    s->Push(type_args->ptr()->instantiations_);
+    intptr_t length = Smi::Value(type_args->ptr()->length_);
+    for (intptr_t i = 0; i < length; i++) {
+      s->Push(type_args->ptr()->types()[i]);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeArgumentsCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeArguments* type_args = objects_[i];
+      intptr_t length = Smi::Value(type_args->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(type_args);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeArguments* type_args = objects_[i];
+      intptr_t length = Smi::Value(type_args->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(type_args->IsCanonical());
+      intptr_t hash = Smi::Value(type_args->ptr()->hash_);
+      s->Write<int32_t>(hash);
+      s->WriteRef(type_args->ptr()->instantiations_);
+      for (intptr_t j = 0; j < length; j++) {
+        s->WriteRef(type_args->ptr()->types()[j]);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawTypeArguments*> objects_;
+};
+
+
+class TypeArgumentsDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeArgumentsDeserializationCluster() { }
+  virtual ~TypeArgumentsDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TypeArguments::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypeArguments* type_args =
+          reinterpret_cast<RawTypeArguments*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(type_args, kTypeArgumentsCid,
+                                     TypeArguments::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      type_args->ptr()->length_ = Smi::New(length);
+      type_args->ptr()->hash_ = Smi::New(d->Read<int32_t>());
+      type_args->ptr()->instantiations_ =
+          reinterpret_cast<RawArray*>(d->ReadRef());
+      for (intptr_t j = 0; j < length; j++) {
+        type_args->ptr()->types()[j] =
+            reinterpret_cast<RawAbstractType*>(d->ReadRef());
+      }
+    }
+  }
+};
+
+
+class PatchClassSerializationCluster : public SerializationCluster {
+ public:
+  PatchClassSerializationCluster() { }
+  virtual ~PatchClassSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawPatchClass* cls = PatchClass::RawCast(object);
+    objects_.Add(cls);
+
+    RawObject** from = cls->from();
+    RawObject** to = cls->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kPatchClassCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawPatchClass* cls = objects_[i];
+      s->AssignRef(cls);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawPatchClass* cls = objects_[i];
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawPatchClass*> objects_;
+};
+
+
+class PatchClassDeserializationCluster : public DeserializationCluster {
+ public:
+  PatchClassDeserializationCluster() { }
+  virtual ~PatchClassDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         PatchClass::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawPatchClass* cls = reinterpret_cast<RawPatchClass*>(d->Ref(id));
+      Deserializer::InitializeHeader(cls, kPatchClassCid,
+                                     PatchClass::InstanceSize(), is_vm_object);
+      RawObject** from = cls->from();
+      RawObject** to = cls->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class FunctionSerializationCluster : public SerializationCluster {
+ public:
+  FunctionSerializationCluster() { }
+  virtual ~FunctionSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawFunction* func = Function::RawCast(object);
+    objects_.Add(func);
+
+    RawObject** from = func->from();
+    RawObject** to = func->to_snapshot();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+    if (s->kind() == Snapshot::kAppNoJIT) {
+      s->Push(func->ptr()->code_);
+    } else if (s->kind() == Snapshot::kAppWithJIT) {
+      s->Push(func->ptr()->unoptimized_code_);
+      s->Push(func->ptr()->ic_data_array_);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kFunctionCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawFunction* func = objects_[i];
+      s->AssignRef(func);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawFunction* func = objects_[i];
+      RawObject** from = func->from();
+      RawObject** to = func->to_snapshot();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      if (kind == Snapshot::kAppNoJIT) {
+        s->WriteRef(func->ptr()->code_);
+      } else if (s->kind() == Snapshot::kAppWithJIT) {
+        s->WriteRef(func->ptr()->unoptimized_code_);
+        s->WriteRef(func->ptr()->ic_data_array_);
+      }
+
+      s->WriteTokenPosition(func->ptr()->token_pos_);
+      s->WriteTokenPosition(func->ptr()->end_token_pos_);
+      s->Write<int16_t>(func->ptr()->num_fixed_parameters_);
+      s->Write<int16_t>(func->ptr()->num_optional_parameters_);
+      s->Write<uint32_t>(func->ptr()->kind_tag_);
+      if (kind == Snapshot::kAppNoJIT) {
+        // Omit fields used to support de/reoptimization.
+      } else {
+        bool is_optimized = Code::IsOptimized(func->ptr()->code_);
+        if (is_optimized) {
+          s->Write<int32_t>(FLAG_optimization_counter_threshold);
+        } else {
+          s->Write<int32_t>(0);
+        }
+        s->Write<int8_t>(func->ptr()->deoptimization_counter_);
+        s->Write<uint16_t>(func->ptr()->optimized_instruction_count_);
+        s->Write<uint16_t>(func->ptr()->optimized_call_site_count_);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawFunction*> objects_;
+};
+
+
+class FunctionDeserializationCluster : public DeserializationCluster {
+ public:
+  FunctionDeserializationCluster() { }
+  virtual ~FunctionDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Function::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawFunction* func = reinterpret_cast<RawFunction*>(d->Ref(id));
+      Deserializer::InitializeHeader(func, kFunctionCid,
+                                     Function::InstanceSize(), is_vm_object);
+      RawObject** from = func->from();
+      RawObject** to_snapshot = func->to_snapshot();
+      RawObject** to = func->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+      if (kind == Snapshot::kAppNoJIT) {
+        func->ptr()->code_ = reinterpret_cast<RawCode*>(d->ReadRef());
+      } else if (kind == Snapshot::kAppWithJIT) {
+        func->ptr()->unoptimized_code_ =
+            reinterpret_cast<RawCode*>(d->ReadRef());
+        func->ptr()->ic_data_array_ = reinterpret_cast<RawArray*>(d->ReadRef());
+      }
+
+#if defined(DEBUG)
+      func->ptr()->entry_point_ = 0;
+#endif
+
+      func->ptr()->token_pos_ = d->ReadTokenPosition();
+      func->ptr()->end_token_pos_ = d->ReadTokenPosition();
+      func->ptr()->num_fixed_parameters_ = d->Read<int16_t>();
+      func->ptr()->num_optional_parameters_ = d->Read<int16_t>();
+      func->ptr()->kind_tag_ = d->Read<uint32_t>();
+      if (kind == Snapshot::kAppNoJIT) {
+        // Omit fields used to support de/reoptimization.
+      } else {
+        func->ptr()->usage_counter_ = d->Read<int32_t>();
+        func->ptr()->deoptimization_counter_ = d->Read<int8_t>();
+        func->ptr()->optimized_instruction_count_ = d->Read<uint16_t>();
+        func->ptr()->optimized_call_site_count_ = d->Read<uint16_t>();
+      }
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadFunction"));
+
+    if (kind == Snapshot::kAppNoJIT) {
+      Function& func = Function::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        func ^= refs.At(i);
+        ASSERT(func.raw()->ptr()->code_->IsCode());
+        uword entry_point = func.raw()->ptr()->code_->ptr()->entry_point_;
+        ASSERT(entry_point != 0);
+        func.raw()->ptr()->entry_point_ = entry_point;
+      }
+    } else if (kind == Snapshot::kAppWithJIT) {
+      Function& func = Function::Handle(zone);
+      Code& code = Code::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        func ^= refs.At(i);
+        code ^= func.unoptimized_code();
+        if (!code.IsNull()) {
+          func.SetInstructions(code);
+          func.set_was_compiled(true);
+        } else {
+          func.ClearCode();
+          func.set_was_compiled(false);
+        }
+      }
+    } else {
+      Function& func = Function::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        func ^= refs.At(i);
+        func.ClearICDataArray();
+        func.ClearCode();
+        func.set_was_compiled(false);
+      }
+    }
+  }
+};
+
+
+class ClosureDataSerializationCluster : public SerializationCluster {
+ public:
+  ClosureDataSerializationCluster() { }
+  virtual ~ClosureDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawClosureData* data = ClosureData::RawCast(object);
+    objects_.Add(data);
+
+    RawObject** from = data->from();
+    RawObject** to = data->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kClosureDataCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosureData* data = objects_[i];
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosureData* data = objects_[i];
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawClosureData*> objects_;
+};
+
+
+class ClosureDataDeserializationCluster : public DeserializationCluster {
+ public:
+  ClosureDataDeserializationCluster() { }
+  virtual ~ClosureDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ClosureData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawClosureData* data = reinterpret_cast<RawClosureData*>(d->Ref(id));
+      Deserializer::InitializeHeader(data, kClosureDataCid,
+                                     ClosureData::InstanceSize(), is_vm_object);
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class RedirectionDataSerializationCluster : public SerializationCluster {
+ public:
+  RedirectionDataSerializationCluster() { }
+  virtual ~RedirectionDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawRedirectionData* data = RedirectionData::RawCast(object);
+    objects_.Add(data);
+
+    RawObject** from = data->from();
+    RawObject** to = data->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kRedirectionDataCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawRedirectionData* data = objects_[i];
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawRedirectionData* data = objects_[i];
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawRedirectionData*> objects_;
+};
+
+
+class RedirectionDataDeserializationCluster : public DeserializationCluster {
+ public:
+  RedirectionDataDeserializationCluster() { }
+  virtual ~RedirectionDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         RedirectionData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawRedirectionData* data =
+          reinterpret_cast<RawRedirectionData*>(d->Ref(id));
+      Deserializer::InitializeHeader(data, kRedirectionDataCid,
+                                     RedirectionData::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = data->from();
+      RawObject** to = data->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
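+// Note that kAppNoJIT snapshots omit a Field's source position and field
+// guard state (token_pos_, guarded_cid_, is_nullable_, guarded_list_length_).
+// After loading, PostLoad() either resets the guards to their permissive
+// defaults (when FLAG_use_field_guards is false) or recomputes the cached
+// in-object length offset.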
+class FieldSerializationCluster : public SerializationCluster {
+ public:
+  FieldSerializationCluster() { }
+  virtual ~FieldSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawField* field = Field::RawCast(object);
+    objects_.Add(field);
+
+    Snapshot::Kind kind = s->kind();
+
+    s->Push(field->ptr()->name_);
+    s->Push(field->ptr()->owner_);
+    s->Push(field->ptr()->type_);
+    // Write out the initial static value or field offset.
+    if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+      if (kind == Snapshot::kAppNoJIT) {
+        // For precompiled static fields, the value was already reset and
+        // initializer_ now contains a Function.
+        s->Push(field->ptr()->value_.static_value_);
+      } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) {
+        // Do not reset const fields.
+        s->Push(field->ptr()->value_.static_value_);
+      } else {
+        // Otherwise, for static fields we write out the initial static value.
+        s->Push(field->ptr()->initializer_.saved_value_);
+      }
+    } else {
+      s->Push(field->ptr()->value_.offset_);
+    }
+    // Write out the initializer function or saved initial value.
+    if (kind == Snapshot::kAppNoJIT) {
+      s->Push(field->ptr()->initializer_.precompiled_);
+    } else {
+      s->Push(field->ptr()->initializer_.saved_value_);
+    }
+    if (kind != Snapshot::kAppNoJIT) {
+      // Write out the guarded list length.
+      s->Push(field->ptr()->guarded_list_length_);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kFieldCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawField* field = objects_[i];
+      s->AssignRef(field);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawField* field = objects_[i];
+
+      s->WriteRef(field->ptr()->name_);
+      s->WriteRef(field->ptr()->owner_);
+      s->WriteRef(field->ptr()->type_);
+      // Write out the initial static value or field offset.
+      if (Field::StaticBit::decode(field->ptr()->kind_bits_)) {
+        if (kind == Snapshot::kAppNoJIT) {
+          // For precompiled static fields, the value was already reset and
+          // initializer_ now contains a Function.
+          s->WriteRef(field->ptr()->value_.static_value_);
+        } else if (Field::ConstBit::decode(field->ptr()->kind_bits_)) {
+          // Do not reset const fields.
+          s->WriteRef(field->ptr()->value_.static_value_);
+        } else {
+          // Otherwise, for static fields we write out the initial static value.
+          s->WriteRef(field->ptr()->initializer_.saved_value_);
+        }
+      } else {
+        s->WriteRef(field->ptr()->value_.offset_);
+      }
+      // Write out the initializer function or saved initial value.
+      if (kind == Snapshot::kAppNoJIT) {
+        s->WriteRef(field->ptr()->initializer_.precompiled_);
+      } else {
+        s->WriteRef(field->ptr()->initializer_.saved_value_);
+      }
+      if (kind != Snapshot::kAppNoJIT) {
+        // Write out the guarded list length.
+        s->WriteRef(field->ptr()->guarded_list_length_);
+      }
+
+      if (kind != Snapshot::kAppNoJIT) {
+        s->WriteTokenPosition(field->ptr()->token_pos_);
+        s->WriteCid(field->ptr()->guarded_cid_);
+        s->WriteCid(field->ptr()->is_nullable_);
+      }
+      s->Write<uint8_t>(field->ptr()->kind_bits_);
+    }
+  }
+
+ private:
+  GrowableArray<RawField*> objects_;
+};
+
+
+class FieldDeserializationCluster : public DeserializationCluster {
+ public:
+  FieldDeserializationCluster() { }
+  virtual ~FieldDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Field::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawField* field = reinterpret_cast<RawField*>(d->Ref(id));
+      Deserializer::InitializeHeader(field, kFieldCid,
+                                     Field::InstanceSize(), is_vm_object);
+      RawObject** from = field->from();
+      RawObject** to_snapshot = field->to_snapshot(kind);
+      RawObject** to = field->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      if (kind != Snapshot::kAppNoJIT) {
+        field->ptr()->token_pos_ = d->ReadTokenPosition();
+        field->ptr()->guarded_cid_ = d->ReadCid();
+        field->ptr()->is_nullable_ = d->ReadCid();
+      }
+      field->ptr()->kind_bits_ = d->Read<uint8_t>();
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadField"));
+
+    Field& field = Field::Handle(zone);
+    if (!FLAG_use_field_guards) {
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        field ^= refs.At(i);
+        field.set_guarded_cid(kDynamicCid);
+        field.set_is_nullable(true);
+        field.set_guarded_list_length(Field::kNoFixedLength);
+        field.set_guarded_list_length_in_object_offset(
+            Field::kUnknownLengthOffset);
+      }
+    } else {
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        field ^= refs.At(i);
+        field.InitializeGuardedListLengthInObjectOffset();
+      }
+    }
+  }
+};
+
+
+class LiteralTokenSerializationCluster : public SerializationCluster {
+ public:
+  LiteralTokenSerializationCluster() { }
+  virtual ~LiteralTokenSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLiteralToken* token = LiteralToken::RawCast(object);
+    objects_.Add(token);
+
+    RawObject** from = token->from();
+    RawObject** to = token->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLiteralTokenCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLiteralToken* token = objects_[i];
+      s->AssignRef(token);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLiteralToken* token = objects_[i];
+      RawObject** from = token->from();
+      RawObject** to = token->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<int32_t>(token->ptr()->kind_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLiteralToken*> objects_;
+};
+
+
+class LiteralTokenDeserializationCluster : public DeserializationCluster {
+ public:
+  LiteralTokenDeserializationCluster() { }
+  virtual ~LiteralTokenDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LiteralToken::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLiteralToken* token = reinterpret_cast<RawLiteralToken*>(d->Ref(id));
+      Deserializer::InitializeHeader(token, kLiteralTokenCid,
+                                     LiteralToken::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = token->from();
+      RawObject** to = token->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      token->ptr()->kind_ = static_cast<Token::Kind>(d->Read<int32_t>());
+    }
+  }
+};
+
+
+class TokenStreamSerializationCluster : public SerializationCluster {
+ public:
+  TokenStreamSerializationCluster() { }
+  virtual ~TokenStreamSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTokenStream* stream = TokenStream::RawCast(object);
+    objects_.Add(stream);
+
+    RawObject** from = stream->from();
+    RawObject** to = stream->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTokenStreamCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTokenStream* stream = objects_[i];
+      s->AssignRef(stream);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTokenStream* stream = objects_[i];
+      RawObject** from = stream->from();
+      RawObject** to = stream->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawTokenStream*> objects_;
+};
+
+
+class TokenStreamDeserializationCluster : public DeserializationCluster {
+ public:
+  TokenStreamDeserializationCluster() { }
+  virtual ~TokenStreamDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TokenStream::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTokenStream* stream = reinterpret_cast<RawTokenStream*>(d->Ref(id));
+      Deserializer::InitializeHeader(stream, kTokenStreamCid,
+                                     TokenStream::InstanceSize(), is_vm_object);
+      RawObject** from = stream->from();
+      RawObject** to = stream->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
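+// Only the pointer fields up to to_snapshot(kind) are serialized for a
+// Script; the remaining slots are nulled on load and load_timestamp_ is
+// reset to zero.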
+class ScriptSerializationCluster : public SerializationCluster {
+ public:
+  ScriptSerializationCluster() { }
+  virtual ~ScriptSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawScript* script = Script::RawCast(object);
+    objects_.Add(script);
+
+    RawObject** from = script->from();
+    RawObject** to = script->to_snapshot(s->kind());
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kScriptCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawScript* script = objects_[i];
+      s->AssignRef(script);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawScript* script = objects_[i];
+      RawObject** from = script->from();
+      RawObject** to = script->to_snapshot(kind);
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+
+      s->Write<int32_t>(script->ptr()->line_offset_);
+      s->Write<int32_t>(script->ptr()->col_offset_);
+      s->Write<int8_t>(script->ptr()->kind_);
+    }
+  }
+
+ private:
+  GrowableArray<RawScript*> objects_;
+};
+
+
+class ScriptDeserializationCluster : public DeserializationCluster {
+ public:
+  ScriptDeserializationCluster() { }
+  virtual ~ScriptDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Script::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawScript* script = reinterpret_cast<RawScript*>(d->Ref(id));
+      Deserializer::InitializeHeader(script, kScriptCid,
+                                     Script::InstanceSize(), is_vm_object);
+      RawObject** from = script->from();
+      RawObject** to_snapshot = script->to_snapshot(kind);
+      RawObject** to = script->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      script->ptr()->line_offset_ = d->Read<int32_t>();
+      script->ptr()->col_offset_ = d->Read<int32_t>();
+      script->ptr()->kind_ = d->Read<int8_t>();
+      script->ptr()->load_timestamp_ = 0;
+    }
+  }
+};
+
+
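+// Libraries serialize only the pointer fields up to to_snapshot(). On load
+// the native entry resolvers are cleared and is_in_fullsnapshot_ is set, and
+// PostLoad() re-creates each library's resolved-names cache.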
+class LibrarySerializationCluster : public SerializationCluster {
+ public:
+  LibrarySerializationCluster() { }
+  virtual ~LibrarySerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLibrary* lib = Library::RawCast(object);
+    objects_.Add(lib);
+
+    RawObject** from = lib->from();
+    RawObject** to = lib->to_snapshot();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLibraryCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibrary* lib = objects_[i];
+      s->AssignRef(lib);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibrary* lib = objects_[i];
+      RawObject** from = lib->from();
+      RawObject** to = lib->to_snapshot();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+
+      s->Write<int32_t>(lib->ptr()->index_);
+      s->Write<uint16_t>(lib->ptr()->num_imports_);
+      s->Write<int8_t>(lib->ptr()->load_state_);
+      s->Write<bool>(lib->ptr()->corelib_imported_);
+      s->Write<bool>(lib->ptr()->is_dart_scheme_);
+      s->Write<bool>(lib->ptr()->debuggable_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLibrary*> objects_;
+};
+
+
+class LibraryDeserializationCluster : public DeserializationCluster {
+ public:
+  LibraryDeserializationCluster() { }
+  virtual ~LibraryDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Library::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLibrary* lib = reinterpret_cast<RawLibrary*>(d->Ref(id));
+      Deserializer::InitializeHeader(lib, kLibraryCid,
+                                     Library::InstanceSize(), is_vm_object);
+      RawObject** from = lib->from();
+      RawObject** to_snapshot = lib->to_snapshot();
+      RawObject** to = lib->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+
+      lib->ptr()->native_entry_resolver_ = NULL;
+      lib->ptr()->native_entry_symbol_resolver_ = NULL;
+      lib->ptr()->index_ = d->Read<int32_t>();
+      lib->ptr()->num_imports_ = d->Read<uint16_t>();
+      lib->ptr()->load_state_ = d->Read<int8_t>();
+      lib->ptr()->corelib_imported_ = d->Read<bool>();
+      lib->ptr()->is_dart_scheme_ = d->Read<bool>();
+      lib->ptr()->debuggable_ = d->Read<bool>();
+      lib->ptr()->is_in_fullsnapshot_ = true;
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    // TODO(rmacnak): This is surprisingly slow, roughly 20% of deserialization
+    // time for the JIT. Maybe make the lookups tolerate a null cache
+    // instead of eagerly allocating one here?
+
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadLibrary"));
+
+    Library& lib = Library::Handle(zone);
+    for (intptr_t i = start_index_; i < stop_index_; i++) {
+      lib ^= refs.At(i);
+      const intptr_t kInitialNameCacheSize = 64;
+      lib.InitResolvedNamesCache(kInitialNameCacheSize);
+    }
+  }
+};
+
+
+class NamespaceSerializationCluster : public SerializationCluster {
+ public:
+  NamespaceSerializationCluster() { }
+  virtual ~NamespaceSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawNamespace* ns = Namespace::RawCast(object);
+    objects_.Add(ns);
+
+    RawObject** from = ns->from();
+    RawObject** to = ns->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kNamespaceCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawNamespace* ns = objects_[i];
+      s->AssignRef(ns);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawNamespace* ns = objects_[i];
+      RawObject** from = ns->from();
+      RawObject** to = ns->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawNamespace*> objects_;
+};
+
+
+class NamespaceDeserializationCluster : public DeserializationCluster {
+ public:
+  NamespaceDeserializationCluster() { }
+  virtual ~NamespaceDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Namespace::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawNamespace* ns = reinterpret_cast<RawNamespace*>(d->Ref(id));
+      Deserializer::InitializeHeader(ns, kNamespaceCid,
+                                     Namespace::InstanceSize(), is_vm_object);
+      RawObject** from = ns->from();
+      RawObject** to = ns->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
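+// Code objects do not serialize their instructions inline. The serializer
+// records an offset into the instructions image (GetTextOffset) and the
+// deserializer rebinds instructions_, active_instructions_ and entry_point_
+// to whatever it finds at that offset (GetInstructionsAt). Compilation
+// metadata (deopt info, var descriptors, comments, etc.) is reset to null
+// on load.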
+class CodeSerializationCluster : public SerializationCluster {
+ public:
+  CodeSerializationCluster() { }
+  virtual ~CodeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawCode* code = Code::RawCast(object);
+    objects_.Add(code);
+
+    s->Push(code->ptr()->object_pool_);
+    s->Push(code->ptr()->owner_);
+    s->Push(code->ptr()->exception_handlers_);
+    s->Push(code->ptr()->pc_descriptors_);
+    s->Push(code->ptr()->stackmaps_);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kCodeCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawCode* code = objects_[i];
+      s->AssignRef(code);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawCode* code = objects_[i];
+
+      intptr_t pointer_offsets_length =
+          Code::PtrOffBits::decode(code->ptr()->state_bits_);
+      if (pointer_offsets_length != 0) {
+        FATAL("Cannot serialize code with embedded pointers");
+      }
+      if (kind == Snapshot::kAppNoJIT) {
+        // No disabled code in precompilation.
+        ASSERT(code->ptr()->instructions_ == code->ptr()->active_instructions_);
+      } else {
+        ASSERT(kind == Snapshot::kAppWithJIT);
+        // We never include optimized code in JIT precompilation.
+        // Deoptimization requires code patching, and we cannot patch code that
+        // is shared between isolates or that lives in embedder-allocated
+        // memory, which we must not mutate.
+        bool is_optimized =
+            Code::OptimizedBit::decode(code->ptr()->state_bits_);
+        if (is_optimized) {
+          FATAL("Cannot include optimized code in a JIT snapshot");
+        }
+      }
+
+      RawInstructions* instr = code->ptr()->instructions_;
+      int32_t text_offset = s->GetTextOffset(instr, code);
+      s->Write<int32_t>(text_offset);
+
+      s->WriteRef(code->ptr()->object_pool_);
+      s->WriteRef(code->ptr()->owner_);
+      s->WriteRef(code->ptr()->exception_handlers_);
+      s->WriteRef(code->ptr()->pc_descriptors_);
+      s->WriteRef(code->ptr()->stackmaps_);
+
+      s->Write<int32_t>(code->ptr()->state_bits_);
+    }
+  }
+
+ private:
+  GrowableArray<RawCode*> objects_;
+};
+
+
+class CodeDeserializationCluster : public DeserializationCluster {
+ public:
+  CodeDeserializationCluster() { }
+  virtual ~CodeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Code::InstanceSize(0)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawCode* code = reinterpret_cast<RawCode*>(d->Ref(id));
+      Deserializer::InitializeHeader(code, kCodeCid,
+                                     Code::InstanceSize(0), is_vm_object);
+
+      int32_t text_offset = d->Read<int32_t>();
+      RawInstructions* instr = reinterpret_cast<RawInstructions*>(
+          d->GetInstructionsAt(text_offset) + kHeapObjectTag);
+      uword entry_point = Instructions::EntryPoint(instr);
+
+      code->ptr()->entry_point_ = entry_point;
+      code->ptr()->active_instructions_ = instr;
+      code->ptr()->instructions_ = instr;
+      code->ptr()->object_pool_ =
+          reinterpret_cast<RawObjectPool*>(d->ReadRef());
+      code->ptr()->owner_ = d->ReadRef();
+      code->ptr()->exception_handlers_ =
+          reinterpret_cast<RawExceptionHandlers*>(d->ReadRef());
+      code->ptr()->pc_descriptors_ =
+          reinterpret_cast<RawPcDescriptors*>(d->ReadRef());
+      code->ptr()->stackmaps_ =
+          reinterpret_cast<RawArray*>(d->ReadRef());
+
+      code->ptr()->deopt_info_array_ = Array::null();
+      code->ptr()->static_calls_target_table_ = Array::null();
+      code->ptr()->var_descriptors_ = LocalVarDescriptors::null();
+      code->ptr()->inlined_metadata_ = Array::null();
+      code->ptr()->code_source_map_ = CodeSourceMap::null();
+      code->ptr()->comments_ = Array::null();
+      code->ptr()->return_address_metadata_ = Object::null();
+
+      code->ptr()->compile_timestamp_ = 0;
+      code->ptr()->state_bits_ = d->Read<int32_t>();
+      code->ptr()->lazy_deopt_pc_offset_ = -1;
+    }
+  }
+};
+
+
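+// Object pool entries are written as (type, payload) pairs using the entry
+// types recorded in the pool's info array: tagged objects as refs and
+// immediates as raw values. Native entries write nothing; the deserializer
+// re-binds them to the lazy link entry (except on DBC, which does not
+// support lazy native call linking).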
+class ObjectPoolSerializationCluster : public SerializationCluster {
+ public:
+  ObjectPoolSerializationCluster() { }
+  virtual ~ObjectPoolSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawObjectPool* pool = ObjectPool::RawCast(object);
+    objects_.Add(pool);
+
+    intptr_t length = pool->ptr()->length_;
+    RawTypedData* info_array = pool->ptr()->info_array_;
+
+    for (intptr_t i = 0; i < length; i++) {
+      ObjectPool::EntryType entry_type =
+          static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[i]);
+      if (entry_type == ObjectPool::kTaggedObject) {
+        s->Push(pool->ptr()->data()[i].raw_obj_);
+      }
+    }
+
+    // TODO(rmacnak): Allocate the object pool and its info array together.
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kObjectPoolCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawObjectPool* pool = objects_[i];
+      intptr_t length = pool->ptr()->length_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(pool);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawObjectPool* pool = objects_[i];
+      RawTypedData* info_array = pool->ptr()->info_array_;
+      intptr_t length = pool->ptr()->length_;
+      s->Write<intptr_t>(length);
+      for (intptr_t j = 0; j < length; j++) {
+        ObjectPool::EntryType entry_type =
+            static_cast<ObjectPool::EntryType>(info_array->ptr()->data()[j]);
+        s->Write<int8_t>(entry_type);
+        RawObjectPool::Entry& entry = pool->ptr()->data()[j];
+        switch (entry_type) {
+          case ObjectPool::kTaggedObject: {
+#if !defined(TARGET_ARCH_DBC)
+            if (entry.raw_obj_ ==
+                StubCode::CallNativeCFunction_entry()->code()) {
+              // Natives can run while precompiling, becoming linked and
+              // switching their stub. Reset to the initial stub used for
+              // lazy-linking.
+              s->WriteRef(StubCode::CallBootstrapCFunction_entry()->code());
+              break;
+            }
+#endif
+            s->WriteRef(entry.raw_obj_);
+            break;
+          }
+          case ObjectPool::kImmediate: {
+            s->Write<intptr_t>(entry.raw_value_);
+            break;
+          }
+          case ObjectPool::kNativeEntry: {
+            // Write nothing. Will initialize with the lazy link entry.
+#if defined(TARGET_ARCH_DBC)
+            UNREACHABLE();  // DBC does not support lazy native call linking.
+#endif
+            break;
+          }
+          default:
+            UNREACHABLE();
+        }
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawObjectPool*> objects_;
+};
+
+
+class ObjectPoolDeserializationCluster : public DeserializationCluster {
+ public:
+  ObjectPoolDeserializationCluster() { }
+  virtual ~ObjectPoolDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ObjectPool::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    PageSpace* old_space = d->heap()->old_space();
+    for (intptr_t id = start_index_; id < stop_index_; id += 1) {
+      intptr_t length = d->Read<intptr_t>();
+      RawTypedData* info_array = reinterpret_cast<RawTypedData*>(
+          AllocateUninitialized(old_space, TypedData::InstanceSize(length)));
+      Deserializer::InitializeHeader(info_array, kTypedDataUint8ArrayCid,
+                                     TypedData::InstanceSize(length),
+                                     is_vm_object);
+      info_array->ptr()->length_ = Smi::New(length);
+      RawObjectPool* pool = reinterpret_cast<RawObjectPool*>(d->Ref(id + 0));
+      Deserializer::InitializeHeader(pool, kObjectPoolCid,
+                                     ObjectPool::InstanceSize(length),
+                                     is_vm_object);
+      pool->ptr()->length_ = length;
+      pool->ptr()->info_array_ = info_array;
+      for (intptr_t j = 0; j < length; j++) {
+        ObjectPool::EntryType entry_type =
+            static_cast<ObjectPool::EntryType>(d->Read<int8_t>());
+        info_array->ptr()->data()[j] = entry_type;
+        RawObjectPool::Entry& entry = pool->ptr()->data()[j];
+        switch (entry_type) {
+          case ObjectPool::kTaggedObject:
+            entry.raw_obj_ = d->ReadRef();
+            break;
+          case ObjectPool::kImmediate:
+            entry.raw_value_ = d->Read<intptr_t>();
+            break;
+          case ObjectPool::kNativeEntry: {
+#if !defined(TARGET_ARCH_DBC)
+            // Read nothing. Initialize with the lazy link entry.
+            uword new_entry = NativeEntry::LinkNativeCallEntry();
+            entry.raw_value_ = static_cast<intptr_t>(new_entry);
+#else
+            UNREACHABLE();  // DBC does not support lazy native call linking.
+#endif
+            break;
+          }
+          default:
+            UNREACHABLE();
+        }
+      }
+    }
+  }
+};
+
+
+// PcDescriptor, Stackmap, OneByteString, TwoByteString
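+// These objects are not written into the cluster itself. They live in the
+// snapshot's read-only data section, so WriteAlloc() only records an offset
+// into that section (GetRODataOffset) and the deserializer maps the offset
+// back to an object with GetObjectAt(); both fill phases are no-ops.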
+class RODataSerializationCluster : public SerializationCluster {
+ public:
+  explicit RODataSerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~RODataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    objects_.Add(object);
+
+    // A string's hash must already be computed when we write it because it
+    // will be loaded into read-only memory.
+    if (cid_ == kOneByteStringCid) {
+      RawOneByteString* str = static_cast<RawOneByteString*>(object);
+      if (str->ptr()->hash_ == Smi::New(0)) {
+        intptr_t hash = String::Hash(str->ptr()->data(),
+                                     Smi::Value(str->ptr()->length_));
+        str->ptr()->hash_ = Smi::New(hash);
+      }
+      ASSERT(str->ptr()->hash_ != Smi::New(0));
+    } else if (cid_ == kTwoByteStringCid) {
+      RawTwoByteString* str = static_cast<RawTwoByteString*>(object);
+      if (str->ptr()->hash_ == Smi::New(0)) {
+        intptr_t hash = String::Hash(str->ptr()->data(),
+                                     Smi::Value(str->ptr()->length_) * 2);
+        str->ptr()->hash_ = Smi::New(hash);
+      }
+      ASSERT(str->ptr()->hash_ != Smi::New(0));
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawObject* object = objects_[i];
+      int32_t rodata_offset = s->GetRODataOffset(object);
+      s->Write<int32_t>(rodata_offset);
+      s->AssignRef(object);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    // No-op.
+  }
+
+ private:
+  const intptr_t cid_;
+  GrowableArray<RawObject*> objects_;
+};
+
+
+class RODataDeserializationCluster : public DeserializationCluster {
+ public:
+  RODataDeserializationCluster() { }
+  virtual ~RODataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      int32_t rodata_offset = d->Read<int32_t>();
+      d->AssignRef(d->GetObjectAt(rodata_offset));
+    }
+  }
+
+  void ReadFill(Deserializer* d) {
+    // No-op.
+  }
+};
+
+
+class LocalVarDescriptorsSerializationCluster : public SerializationCluster {
+ public:
+  LocalVarDescriptorsSerializationCluster() { }
+  virtual ~LocalVarDescriptorsSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) { UNIMPLEMENTED(); }
+  void WriteAlloc(Serializer* s) {}
+  void WriteFill(Serializer* s) {}
+
+ private:
+  GrowableArray<RawLocalVarDescriptors*> objects_;
+};
+
+
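+// ExceptionHandlers hold one pointer field (the handled-types array) plus a
+// variable-length array of plain HandlerInfo records, which are copied to
+// and from the stream as raw bytes.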
+class ExceptionHandlersSerializationCluster : public SerializationCluster {
+ public:
+  ExceptionHandlersSerializationCluster() { }
+  virtual ~ExceptionHandlersSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawExceptionHandlers* handlers = ExceptionHandlers::RawCast(object);
+    objects_.Add(handlers);
+
+    s->Push(handlers->ptr()->handled_types_data_);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kExceptionHandlersCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawExceptionHandlers* handlers = objects_[i];
+      intptr_t length = handlers->ptr()->num_entries_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(handlers);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawExceptionHandlers* handlers = objects_[i];
+      intptr_t length = handlers->ptr()->num_entries_;
+      s->Write<intptr_t>(length);
+      s->WriteRef(handlers->ptr()->handled_types_data_);
+
+      uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data());
+      intptr_t length_in_bytes =
+          length * sizeof(RawExceptionHandlers::HandlerInfo);
+      s->WriteBytes(data, length_in_bytes);
+    }
+  }
+
+ private:
+  GrowableArray<RawExceptionHandlers*> objects_;
+};
+
+
+class ExceptionHandlersDeserializationCluster : public DeserializationCluster {
+ public:
+  ExceptionHandlersDeserializationCluster() { }
+  virtual ~ExceptionHandlersDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+          ExceptionHandlers::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawExceptionHandlers* handlers =
+          reinterpret_cast<RawExceptionHandlers*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(handlers, kExceptionHandlersCid,
+                                     ExceptionHandlers::InstanceSize(length),
+                                     is_vm_object);
+      handlers->ptr()->num_entries_ = length;
+      handlers->ptr()->handled_types_data_ =
+          reinterpret_cast<RawArray*>(d->ReadRef());
+
+      uint8_t* data = reinterpret_cast<uint8_t*>(handlers->ptr()->data());
+      intptr_t length_in_bytes =
+          length * sizeof(RawExceptionHandlers::HandlerInfo);
+      d->ReadBytes(data, length_in_bytes);
+    }
+  }
+};
+
+
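+// Contexts are variable-length: the number of variables is written in both
+// the alloc and fill phases so the deserializer can size each allocation
+// before reading the parent ref and the captured variable slots.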
+class ContextSerializationCluster : public SerializationCluster {
+ public:
+  ContextSerializationCluster() { }
+  virtual ~ContextSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawContext* context = Context::RawCast(object);
+    objects_.Add(context);
+
+    s->Push(context->ptr()->parent_);
+    intptr_t length = context->ptr()->num_variables_;
+    for (intptr_t i = 0; i < length; i++) {
+      s->Push(context->ptr()->data()[i]);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kContextCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawContext* context = objects_[i];
+      intptr_t length = context->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(context);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawContext* context = objects_[i];
+      intptr_t length = context->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->WriteRef(context->ptr()->parent_);
+      for (intptr_t j = 0; j < length; j++) {
+        s->WriteRef(context->ptr()->data()[j]);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawContext*> objects_;
+};
+
+
+class ContextDeserializationCluster : public DeserializationCluster {
+ public:
+  ContextDeserializationCluster() { }
+  virtual ~ContextDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Context::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawContext* context = reinterpret_cast<RawContext*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(context, kContextCid,
+                                     Context::InstanceSize(length),
+                                     is_vm_object);
+      context->ptr()->num_variables_ = length;
+      context->ptr()->parent_ = reinterpret_cast<RawContext*>(d->ReadRef());
+      for (intptr_t j = 0; j < length; j++) {
+        context->ptr()->data()[j] = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class ContextScopeSerializationCluster : public SerializationCluster {
+ public:
+  ContextScopeSerializationCluster() { }
+  virtual ~ContextScopeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawContextScope* scope = ContextScope::RawCast(object);
+    objects_.Add(scope);
+
+    intptr_t length = scope->ptr()->num_variables_;
+    RawObject** from = scope->from();
+    RawObject** to = scope->to(length);
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kContextScopeCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawContextScope* scope = objects_[i];
+      intptr_t length = scope->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->AssignRef(scope);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawContextScope* scope = objects_[i];
+      intptr_t length = scope->ptr()->num_variables_;
+      s->Write<intptr_t>(length);
+      s->Write<bool>(scope->ptr()->is_implicit_);
+      RawObject** from = scope->from();
+      RawObject** to = scope->to(length);
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawContextScope*> objects_;
+};
+
+
+class ContextScopeDeserializationCluster : public DeserializationCluster {
+ public:
+  ContextScopeDeserializationCluster() { }
+  virtual ~ContextScopeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ContextScope::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawContextScope* scope = reinterpret_cast<RawContextScope*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(scope, kContextScopeCid,
+                                     ContextScope::InstanceSize(length),
+                                     is_vm_object);
+      scope->ptr()->num_variables_ = length;
+      scope->ptr()->is_implicit_ = d->Read<bool>();
+      RawObject** from = scope->from();
+      RawObject** to = scope->to(length);
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
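+// Only the pointer fields up to to_snapshot(kind) of an ICData are written;
+// the rest are nulled on load. For kAppNoJIT snapshots, PostLoad() walks the
+// checks and replaces Code targets with their entry points encoded as Smis
+// (Smi::FromAlignedAddress).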
+class ICDataSerializationCluster : public SerializationCluster {
+ public:
+  ICDataSerializationCluster() { }
+  virtual ~ICDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawICData* ic = ICData::RawCast(object);
+    objects_.Add(ic);
+
+    RawObject** from = ic->from();
+    RawObject** to = ic->to_snapshot(s->kind());
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kICDataCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawICData* ic = objects_[i];
+      s->AssignRef(ic);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    Snapshot::Kind kind = s->kind();
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawICData* ic = objects_[i];
+      RawObject** from = ic->from();
+      RawObject** to = ic->to_snapshot(kind);
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<int32_t>(ic->ptr()->deopt_id_);
+      s->Write<uint32_t>(ic->ptr()->state_bits_);
+#if defined(TAG_IC_DATA)
+      s->Write<intptr_t>(ic->ptr()->tag_);
+#endif
+    }
+  }
+
+ private:
+  GrowableArray<RawICData*> objects_;
+};
+
+
+class ICDataDeserializationCluster : public DeserializationCluster {
+ public:
+  ICDataDeserializationCluster() { }
+  virtual ~ICDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, ICData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    Snapshot::Kind kind = d->kind();
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawICData* ic = reinterpret_cast<RawICData*>(d->Ref(id));
+      Deserializer::InitializeHeader(ic, kICDataCid,
+                                     ICData::InstanceSize(), is_vm_object);
+      RawObject** from = ic->from();
+      RawObject** to_snapshot = ic->to_snapshot(kind);
+      RawObject** to = ic->to();
+      for (RawObject** p = from; p <= to_snapshot; p++) {
+        *p = d->ReadRef();
+      }
+      for (RawObject** p = to_snapshot + 1; p <= to; p++) {
+        *p = Object::null();
+      }
+      ic->ptr()->deopt_id_ = d->Read<int32_t>();
+      ic->ptr()->state_bits_ = d->Read<uint32_t>();
+#if defined(TAG_IC_DATA)
+      ic->ptr()->tag_ = d->Read<intptr_t>();
+#endif
+    }
+  }
+
+  void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(Thread::Current(),
+        Timeline::GetIsolateStream(), "PostLoadICData"));
+
+    if (kind == Snapshot::kAppNoJIT) {
+      ICData& ic = ICData::Handle(zone);
+      Object& func_or_code = Object::Handle(zone);
+      Code& code = Code::Handle(zone);
+      Smi& entry_point = Smi::Handle(zone);
+      for (intptr_t i = start_index_; i < stop_index_; i++) {
+        ic ^= refs.At(i);
+        for (intptr_t j = 0; j < ic.NumberOfChecks(); j++) {
+          func_or_code = ic.GetTargetOrCodeAt(j);
+          if (func_or_code.IsCode()) {
+            code ^= func_or_code.raw();
+            entry_point = Smi::FromAlignedAddress(code.EntryPoint());
+            ic.SetEntryPointAt(j, entry_point);
+          }
+        }
+      }
+    }
+  }
+};
+
+
+class MegamorphicCacheSerializationCluster : public SerializationCluster {
+ public:
+  MegamorphicCacheSerializationCluster() { }
+  virtual ~MegamorphicCacheSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawMegamorphicCache* cache = MegamorphicCache::RawCast(object);
+    objects_.Add(cache);
+
+    RawObject** from = cache->from();
+    RawObject** to = cache->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kMegamorphicCacheCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawMegamorphicCache* cache = objects_[i];
+      s->AssignRef(cache);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawMegamorphicCache* cache = objects_[i];
+      RawObject** from = cache->from();
+      RawObject** to = cache->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<int32_t>(cache->ptr()->filled_entry_count_);
+    }
+  }
+
+ private:
+  GrowableArray<RawMegamorphicCache*> objects_;
+};
+
+
+class MegamorphicCacheDeserializationCluster : public DeserializationCluster {
+ public:
+  MegamorphicCacheDeserializationCluster() { }
+  virtual ~MegamorphicCacheDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         MegamorphicCache::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawMegamorphicCache* cache =
+          reinterpret_cast<RawMegamorphicCache*>(d->Ref(id));
+      Deserializer::InitializeHeader(cache, kMegamorphicCacheCid,
+                                     MegamorphicCache::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = cache->from();
+      RawObject** to = cache->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      cache->ptr()->filled_entry_count_ = d->Read<int32_t>();
+    }
+  }
+};
+
+
+class SubtypeTestCacheSerializationCluster : public SerializationCluster {
+ public:
+  SubtypeTestCacheSerializationCluster() { }
+  virtual ~SubtypeTestCacheSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawSubtypeTestCache* cache = SubtypeTestCache::RawCast(object);
+    objects_.Add(cache);
+    s->Push(cache->ptr()->cache_);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kSubtypeTestCacheCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawSubtypeTestCache* cache = objects_[i];
+      s->AssignRef(cache);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawSubtypeTestCache* cache = objects_[i];
+      s->WriteRef(cache->ptr()->cache_);
+    }
+  }
+
+ private:
+  GrowableArray<RawSubtypeTestCache*> objects_;
+};
+
+
+class SubtypeTestCacheDeserializationCluster : public DeserializationCluster {
+ public:
+  SubtypeTestCacheDeserializationCluster() { }
+  virtual ~SubtypeTestCacheDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         SubtypeTestCache::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawSubtypeTestCache* cache =
+          reinterpret_cast<RawSubtypeTestCache*>(d->Ref(id));
+      Deserializer::InitializeHeader(cache, kSubtypeTestCacheCid,
+                                     SubtypeTestCache::InstanceSize(),
+                                     is_vm_object);
+      cache->ptr()->cache_ = reinterpret_cast<RawArray*>(d->ReadRef());
+    }
+  }
+};
+
+
+class LanguageErrorSerializationCluster : public SerializationCluster {
+ public:
+  LanguageErrorSerializationCluster() { }
+  virtual ~LanguageErrorSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLanguageError* error = LanguageError::RawCast(object);
+    objects_.Add(error);
+
+    RawObject** from = error->from();
+    RawObject** to = error->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLanguageErrorCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLanguageError* error = objects_[i];
+      s->AssignRef(error);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLanguageError* error = objects_[i];
+      RawObject** from = error->from();
+      RawObject** to = error->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(error->ptr()->token_pos_);
+      s->Write<bool>(error->ptr()->report_after_token_);
+      s->Write<int8_t>(error->ptr()->kind_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLanguageError*> objects_;
+};
+
+
+class LanguageErrorDeserializationCluster : public DeserializationCluster {
+ public:
+  LanguageErrorDeserializationCluster() { }
+  virtual ~LanguageErrorDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LanguageError::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLanguageError* error = reinterpret_cast<RawLanguageError*>(d->Ref(id));
+      Deserializer::InitializeHeader(error, kLanguageErrorCid,
+                                     LanguageError::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = error->from();
+      RawObject** to = error->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      error->ptr()->token_pos_ = d->ReadTokenPosition();
+      error->ptr()->report_after_token_ = d->Read<bool>();
+      error->ptr()->kind_ = d->Read<int8_t>();
+    }
+  }
+};
+
+
+class UnhandledExceptionSerializationCluster : public SerializationCluster {
+ public:
+  UnhandledExceptionSerializationCluster() { }
+  virtual ~UnhandledExceptionSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawUnhandledException* exception = UnhandledException::RawCast(object);
+    objects_.Add(exception);
+
+    RawObject** from = exception->from();
+    RawObject** to = exception->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kUnhandledExceptionCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnhandledException* exception = objects_[i];
+      s->AssignRef(exception);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawUnhandledException* exception = objects_[i];
+      RawObject** from = exception->from();
+      RawObject** to = exception->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawUnhandledException*> objects_;
+};
+
+
+class UnhandledExceptionDeserializationCluster : public DeserializationCluster {
+ public:
+  UnhandledExceptionDeserializationCluster() { }
+  virtual ~UnhandledExceptionDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         UnhandledException::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawUnhandledException* exception =
+          reinterpret_cast<RawUnhandledException*>(d->Ref(id));
+      Deserializer::InitializeHeader(exception, kUnhandledExceptionCid,
+                                     UnhandledException::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = exception->from();
+      RawObject** to = exception->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
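+// Generic cluster for instances of classes without a dedicated cluster; see
+// Serializer::NewClusterForClass below. The field layout is taken from the
+// class table when the cluster is created, and every pointer field between
+// Instance::NextFieldOffset() and next_field_offset_ is written as a ref.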
+class InstanceSerializationCluster : public SerializationCluster {
+ public:
+  explicit InstanceSerializationCluster(intptr_t cid) : cid_(cid) {
+    RawClass* cls = Isolate::Current()->class_table()->At(cid);
+    next_field_offset_ =
+        cls->ptr()->next_field_offset_in_words_ << kWordSizeLog2;
+    instance_size_in_words_ = cls->ptr()->instance_size_in_words_;
+    ASSERT(next_field_offset_ > 0);
+    ASSERT(instance_size_in_words_ > 0);
+  }
+  virtual ~InstanceSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawInstance* instance = Instance::RawCast(object);
+    objects_.Add(instance);
+
+    intptr_t offset = Instance::NextFieldOffset();
+    while (offset < next_field_offset_) {
+      RawObject* raw_obj = *reinterpret_cast<RawObject**>(
+          reinterpret_cast<uword>(instance->ptr()) + offset);
+      s->Push(raw_obj);
+      offset += kWordSize;
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->Write<intptr_t>(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+
+    s->Write<intptr_t>(next_field_offset_);
+    s->Write<intptr_t>(instance_size_in_words_);
+
+    for (intptr_t i = 0; i < count; i++) {
+      RawInstance* instance = objects_[i];
+      s->AssignRef(instance);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawInstance* instance = objects_[i];
+      s->Write<bool>(instance->IsCanonical());
+      intptr_t offset = Instance::NextFieldOffset();
+      while (offset < next_field_offset_) {
+        RawObject* raw_obj = *reinterpret_cast<RawObject**>(
+            reinterpret_cast<uword>(instance->ptr()) + offset);
+        s->WriteRef(raw_obj);
+        offset += kWordSize;
+      }
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  intptr_t next_field_offset_;
+  intptr_t instance_size_in_words_;
+  GrowableArray<RawInstance*> objects_;
+};
+
+
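+// Counterpart of InstanceSerializationCluster. The rounded allocation size
+// may leave one word past the last serialized field; that word is cleared to
+// null before the final size check in ReadFill.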
+class InstanceDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit InstanceDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~InstanceDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    next_field_offset_ = d->Read<intptr_t>();
+    instance_size_in_words_ = d->Read<intptr_t>();
+    intptr_t instance_size =
+        Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, instance_size));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    intptr_t instance_size =
+        Object::RoundedAllocationSize(instance_size_in_words_ * kWordSize);
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawInstance* instance = reinterpret_cast<RawInstance*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(instance, cid_,
+                                     instance_size,
+                                     is_vm_object, is_canonical);
+      intptr_t offset = Instance::NextFieldOffset();
+      while (offset < next_field_offset_) {
+        RawObject** p = reinterpret_cast<RawObject**>(
+            reinterpret_cast<uword>(instance->ptr()) + offset);
+        *p = d->ReadRef();
+        offset += kWordSize;
+      }
+      if (offset < instance_size) {
+        RawObject** p = reinterpret_cast<RawObject**>(
+            reinterpret_cast<uword>(instance->ptr()) + offset);
+        *p = Object::null();
+        offset += kWordSize;
+      }
+      ASSERT(offset == instance_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  intptr_t next_field_offset_;
+  intptr_t instance_size_in_words_;
+};
+
+
+class LibraryPrefixSerializationCluster : public SerializationCluster {
+ public:
+  LibraryPrefixSerializationCluster() { }
+  virtual ~LibraryPrefixSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLibraryPrefix* prefix = LibraryPrefix::RawCast(object);
+    objects_.Add(prefix);
+
+    RawObject** from = prefix->from();
+    RawObject** to = prefix->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLibraryPrefixCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibraryPrefix* prefix = objects_[i];
+      s->AssignRef(prefix);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLibraryPrefix* prefix = objects_[i];
+      RawObject** from = prefix->from();
+      RawObject** to = prefix->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<uint16_t>(prefix->ptr()->num_imports_);
+      s->Write<bool>(prefix->ptr()->is_deferred_load_);
+      s->Write<bool>(prefix->ptr()->is_loaded_);
+    }
+  }
+
+ private:
+  GrowableArray<RawLibraryPrefix*> objects_;
+};
+
+
+class LibraryPrefixDeserializationCluster : public DeserializationCluster {
+ public:
+  LibraryPrefixDeserializationCluster() { }
+  virtual ~LibraryPrefixDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LibraryPrefix::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLibraryPrefix* prefix =
+          reinterpret_cast<RawLibraryPrefix*>(d->Ref(id));
+      Deserializer::InitializeHeader(prefix, kLibraryPrefixCid,
+                                     LibraryPrefix::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = prefix->from();
+      RawObject** to = prefix->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      prefix->ptr()->num_imports_ = d->Read<uint16_t>();
+      prefix->ptr()->is_deferred_load_ = d->Read<bool>();
+      prefix->ptr()->is_loaded_ = d->Read<bool>();
+    }
+  }
+};
+
+
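+// Types are split into canonical and non-canonical lists so the deserializer
+// can set the canonical bit when it re-creates them. The type's class is
+// pushed explicitly in Trace because type_class_id_ is a Smi rather than a
+// pointer, so the usual from()/to() walk would not reach it.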
+class TypeSerializationCluster : public SerializationCluster {
+ public:
+  TypeSerializationCluster() { }
+  virtual ~TypeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawType* type = Type::RawCast(object);
+    if (type->IsCanonical()) {
+      canonical_objects_.Add(type);
+    } else {
+      objects_.Add(type);
+    }
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+
+    RawSmi* raw_type_class_id = Smi::RawCast(type->ptr()->type_class_id_);
+    RawClass* type_class =
+        s->isolate()->class_table()->At(Smi::Value(raw_type_class_id));
+    s->Push(type_class);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeCid);
+    intptr_t count = canonical_objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = canonical_objects_[i];
+      s->AssignRef(type);
+    }
+    count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = canonical_objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = canonical_objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(type->ptr()->token_pos_);
+      s->Write<int8_t>(type->ptr()->type_state_);
+    }
+    count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawType* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->WriteTokenPosition(type->ptr()->token_pos_);
+      s->Write<int8_t>(type->ptr()->type_state_);
+    }
+  }
+
+ private:
+  GrowableArray<RawType*> canonical_objects_;
+  GrowableArray<RawType*> objects_;
+};
+
+
+class TypeDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeDeserializationCluster() { }
+  virtual ~TypeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    canonical_start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize()));
+    }
+    canonical_stop_index_ = d->next_index();
+
+    start_index_ = d->next_index();
+    count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Type::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = canonical_start_index_;
+         id < canonical_stop_index_;
+         id++) {
+      RawType* type = reinterpret_cast<RawType*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeCid,
+                                     Type::InstanceSize(), is_vm_object, true);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      type->ptr()->token_pos_ = d->ReadTokenPosition();
+      type->ptr()->type_state_ = d->Read<int8_t>();
+    }
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawType* type = reinterpret_cast<RawType*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeCid,
+                                     Type::InstanceSize(), is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      type->ptr()->token_pos_ = d->ReadTokenPosition();
+      type->ptr()->type_state_ = d->Read<int8_t>();
+    }
+  }
+
+ private:
+  intptr_t canonical_start_index_;
+  intptr_t canonical_stop_index_;
+};
+
+
+class TypeRefSerializationCluster : public SerializationCluster {
+ public:
+  TypeRefSerializationCluster() { }
+  virtual ~TypeRefSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypeRef* type = TypeRef::RawCast(object);
+    objects_.Add(type);
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeRefCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeRef* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeRef* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawTypeRef*> objects_;
+};
+
+
+class TypeRefDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeRefDeserializationCluster() { }
+  virtual ~TypeRefDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, TypeRef::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypeRef* type = reinterpret_cast<RawTypeRef*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeRefCid,
+                                     TypeRef::InstanceSize(), is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
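+// Type parameters are expected to be non-canonical here (see the ASSERT in
+// Trace), so unlike Type there is no separate canonical list.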
+class TypeParameterSerializationCluster : public SerializationCluster {
+ public:
+  TypeParameterSerializationCluster() { }
+  virtual ~TypeParameterSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypeParameter* type = TypeParameter::RawCast(object);
+    objects_.Add(type);
+    ASSERT(!type->IsCanonical());
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTypeParameterCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeParameter* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypeParameter* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+      s->Write<intptr_t>(type->ptr()->parameterized_class_id_);
+      s->WriteTokenPosition(type->ptr()->token_pos_);
+      s->Write<int16_t>(type->ptr()->index_);
+      s->Write<int8_t>(type->ptr()->type_state_);
+    }
+  }
+
+ private:
+  GrowableArray<RawTypeParameter*> objects_;
+};
+
+
+class TypeParameterDeserializationCluster : public DeserializationCluster {
+ public:
+  TypeParameterDeserializationCluster() { }
+  virtual ~TypeParameterDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TypeParameter::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypeParameter* type = reinterpret_cast<RawTypeParameter*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kTypeParameterCid,
+                                     TypeParameter::InstanceSize(),
+                                     is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+      type->ptr()->parameterized_class_id_ = d->Read<intptr_t>();
+      type->ptr()->token_pos_ = d->ReadTokenPosition();
+      type->ptr()->index_ = d->Read<int16_t>();
+      type->ptr()->type_state_ = d->Read<int8_t>();
+    }
+  }
+};
+
+
+class BoundedTypeSerializationCluster : public SerializationCluster {
+ public:
+  BoundedTypeSerializationCluster() { }
+  virtual ~BoundedTypeSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawBoundedType* type = BoundedType::RawCast(object);
+    objects_.Add(type);
+
+    RawObject** from = type->from();
+    RawObject** to = type->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kBoundedTypeCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawBoundedType* type = objects_[i];
+      s->AssignRef(type);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawBoundedType* type = objects_[i];
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawBoundedType*> objects_;
+};
+
+
+class BoundedTypeDeserializationCluster : public DeserializationCluster {
+ public:
+  BoundedTypeDeserializationCluster() { }
+  virtual ~BoundedTypeDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         BoundedType::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawBoundedType* type = reinterpret_cast<RawBoundedType*>(d->Ref(id));
+      Deserializer::InitializeHeader(type, kBoundedTypeCid,
+                                     BoundedType::InstanceSize(), is_vm_object);
+      RawObject** from = type->from();
+      RawObject** to = type->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class ClosureSerializationCluster : public SerializationCluster {
+ public:
+  ClosureSerializationCluster() { }
+  virtual ~ClosureSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawClosure* closure = Closure::RawCast(object);
+    objects_.Add(closure);
+
+    RawObject** from = closure->from();
+    RawObject** to = closure->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kClosureCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosure* closure = objects_[i];
+      s->AssignRef(closure);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawClosure* closure = objects_[i];
+      s->Write<bool>(closure->IsCanonical());
+      RawObject** from = closure->from();
+      RawObject** to = closure->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawClosure*> objects_;
+};
+
+
+class ClosureDeserializationCluster : public DeserializationCluster {
+ public:
+  ClosureDeserializationCluster() { }
+  virtual ~ClosureDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Closure::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawClosure* closure = reinterpret_cast<RawClosure*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(closure, kClosureCid,
+                                     Closure::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      RawObject** from = closure->from();
+      RawObject** to = closure->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class MintSerializationCluster : public SerializationCluster {
+ public:
+  MintSerializationCluster() { }
+  virtual ~MintSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawMint* mint = Mint::RawCast(object);
+    objects_.Add(mint);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kMintCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawMint* mint = objects_[i];
+      s->AssignRef(mint);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawMint* mint = objects_[i];
+      s->Write<bool>(mint->IsCanonical());
+      s->Write<int64_t>(mint->ptr()->value_);
+    }
+  }
+
+ private:
+  GrowableArray<RawMint*> objects_;
+};
+
+
+class MintDeserializationCluster : public DeserializationCluster {
+ public:
+  MintDeserializationCluster() { }
+  virtual ~MintDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Mint::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawMint* mint = reinterpret_cast<RawMint*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(mint, kMintCid,
+                                     Mint::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      mint->ptr()->value_ = d->Read<int64_t>();
+    }
+  }
+};
+
+
+class BigintSerializationCluster : public SerializationCluster {
+ public:
+  BigintSerializationCluster() { }
+  virtual ~BigintSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawBigint* bigint = Bigint::RawCast(object);
+    objects_.Add(bigint);
+
+    RawObject** from = bigint->from();
+    RawObject** to = bigint->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kBigintCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawBigint* bigint = objects_[i];
+      s->AssignRef(bigint);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawBigint* bigint = objects_[i];
+      s->Write<bool>(bigint->IsCanonical());
+      RawObject** from = bigint->from();
+      RawObject** to = bigint->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawBigint*> objects_;
+};
+
+
+class BigintDeserializationCluster : public DeserializationCluster {
+ public:
+  BigintDeserializationCluster() { }
+  virtual ~BigintDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Bigint::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawBigint* bigint = reinterpret_cast<RawBigint*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(bigint, kBigintCid,
+                                     Bigint::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      RawObject** from = bigint->from();
+      RawObject** to = bigint->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class DoubleSerializationCluster : public SerializationCluster {
+ public:
+  DoubleSerializationCluster() { }
+  virtual ~DoubleSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawDouble* dbl = Double::RawCast(object);
+    objects_.Add(dbl);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kDoubleCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawDouble* dbl = objects_[i];
+      s->AssignRef(dbl);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawDouble* dbl = objects_[i];
+      s->Write<bool>(dbl->IsCanonical());
+      s->Write<double>(dbl->ptr()->value_);
+    }
+  }
+
+ private:
+  GrowableArray<RawDouble*> objects_;
+};
+
+
+class DoubleDeserializationCluster : public DeserializationCluster {
+ public:
+  DoubleDeserializationCluster() { }
+  virtual ~DoubleDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space, Double::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawDouble* dbl = reinterpret_cast<RawDouble*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(dbl, kDoubleCid,
+                                     Double::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      dbl->ptr()->value_ = d->Read<double>();
+    }
+  }
+};
+
+
+class GrowableObjectArraySerializationCluster : public SerializationCluster {
+ public:
+  GrowableObjectArraySerializationCluster() { }
+  virtual ~GrowableObjectArraySerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawGrowableObjectArray* array = GrowableObjectArray::RawCast(object);
+    objects_.Add(array);
+
+    RawObject** from = array->from();
+    RawObject** to = array->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kGrowableObjectArrayCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawGrowableObjectArray* array = objects_[i];
+      s->AssignRef(array);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawGrowableObjectArray* array = objects_[i];
+      s->Write<bool>(array->IsCanonical());
+      RawObject** from = array->from();
+      RawObject** to = array->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawGrowableObjectArray*> objects_;
+};
+
+
+class GrowableObjectArrayDeserializationCluster
+    : public DeserializationCluster {
+ public:
+  GrowableObjectArrayDeserializationCluster() { }
+  virtual ~GrowableObjectArrayDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         GrowableObjectArray::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawGrowableObjectArray* list =
+          reinterpret_cast<RawGrowableObjectArray*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(list, kGrowableObjectArrayCid,
+                                     GrowableObjectArray::InstanceSize(),
+                                     is_vm_object, is_canonical);
+      RawObject** from = list->from();
+      RawObject** to = list->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
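+// Internal typed data: the element count is written in the alloc section so
+// the reader can size the allocation, and the payload is copied as raw bytes
+// in the fill section.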
+class TypedDataSerializationCluster : public SerializationCluster {
+ public:
+  explicit TypedDataSerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~TypedDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTypedData* data = TypedData::RawCast(object);
+    objects_.Add(data);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->Write<intptr_t>(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypedData* data = objects_[i];
+      intptr_t length = Smi::Value(data->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTypedData* data = objects_[i];
+      intptr_t length = Smi::Value(data->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(data->IsCanonical());
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
+      s->WriteBytes(cdata, length * element_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  GrowableArray<RawTypedData*> objects_;
+};
+
+
+class TypedDataDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit TypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~TypedDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+          TypedData::InstanceSize(length * element_size)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    intptr_t element_size = TypedData::ElementSizeInBytes(cid_);
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTypedData* data = reinterpret_cast<RawTypedData*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      intptr_t length_in_bytes = length * element_size;
+      Deserializer::InitializeHeader(data, cid_,
+                                     TypedData::InstanceSize(length_in_bytes),
+                                     is_vm_object, is_canonical);
+      data->ptr()->length_ = Smi::New(length);
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data());
+      d->ReadBytes(cdata, length_in_bytes);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+};
+
+
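+// External typed data: the payload is copied into the snapshot. On the read
+// side data_ is pointed directly at the snapshot buffer (see
+// CurrentBufferAddress in ReadFill), which implies the buffer must outlive
+// these objects.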
+class ExternalTypedDataSerializationCluster : public SerializationCluster {
+ public:
+  explicit ExternalTypedDataSerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ExternalTypedDataSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawExternalTypedData* data = ExternalTypedData::RawCast(object);
+    objects_.Add(data);
+    ASSERT(!data->IsCanonical());
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->Write<intptr_t>(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawExternalTypedData* data = objects_[i];
+      s->AssignRef(data);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_);
+    for (intptr_t i = 0; i < count; i++) {
+      RawExternalTypedData* data = objects_[i];
+      intptr_t length = Smi::Value(data->ptr()->length_);
+      s->Write<intptr_t>(length);
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(data->ptr()->data_);
+      s->WriteBytes(cdata, length * element_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+  GrowableArray<RawExternalTypedData*> objects_;
+};
+
+
+class ExternalTypedDataDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit ExternalTypedDataDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ExternalTypedDataDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         ExternalTypedData::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    intptr_t element_size = ExternalTypedData::ElementSizeInBytes(cid_);
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawExternalTypedData* data =
+          reinterpret_cast<RawExternalTypedData*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      Deserializer::InitializeHeader(data, cid_,
+                                     ExternalTypedData::InstanceSize(),
+                                     is_vm_object);
+      data->ptr()->length_ = Smi::New(length);
+      data->ptr()->data_ = const_cast<uint8_t*>(d->CurrentBufferAddress());
+      d->Advance(length * element_size);
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+};
+
+
+class StacktraceSerializationCluster : public SerializationCluster {
+ public:
+  StacktraceSerializationCluster() { }
+  virtual ~StacktraceSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawStacktrace* trace = Stacktrace::RawCast(object);
+    objects_.Add(trace);
+
+    RawObject** from = trace->from();
+    RawObject** to = trace->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kStacktraceCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawStacktrace* trace = objects_[i];
+      s->AssignRef(trace);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawStacktrace* trace = objects_[i];
+      RawObject** from = trace->from();
+      RawObject** to = trace->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawStacktrace*> objects_;
+};
+
+
+class StacktraceDeserializationCluster : public DeserializationCluster {
+ public:
+  StacktraceDeserializationCluster() { }
+  virtual ~StacktraceDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Stacktrace::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawStacktrace* trace = reinterpret_cast<RawStacktrace*>(d->Ref(id));
+      Deserializer::InitializeHeader(trace, kStacktraceCid,
+                                     Stacktrace::InstanceSize(), is_vm_object);
+      RawObject** from = trace->from();
+      RawObject** to = trace->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+    }
+  }
+};
+
+
+class RegExpSerializationCluster : public SerializationCluster {
+ public:
+  RegExpSerializationCluster() { }
+  virtual ~RegExpSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawRegExp* regexp = RegExp::RawCast(object);
+    objects_.Add(regexp);
+
+    RawObject** from = regexp->from();
+    RawObject** to = regexp->to();
+    for (RawObject** p = from; p <= to; p++) {
+      s->Push(*p);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kRegExpCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawRegExp* regexp = objects_[i];
+      s->AssignRef(regexp);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawRegExp* regexp = objects_[i];
+      RawObject** from = regexp->from();
+      RawObject** to = regexp->to();
+      for (RawObject** p = from; p <= to; p++) {
+        s->WriteRef(*p);
+      }
+
+      s->Write<intptr_t>(regexp->ptr()->num_registers_);
+      s->Write<int8_t>(regexp->ptr()->type_flags_);
+    }
+  }
+
+ private:
+  GrowableArray<RawRegExp*> objects_;
+};
+
+
+class RegExpDeserializationCluster : public DeserializationCluster {
+ public:
+  RegExpDeserializationCluster() { }
+  virtual ~RegExpDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         RegExp::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawRegExp* regexp = reinterpret_cast<RawRegExp*>(d->Ref(id));
+      Deserializer::InitializeHeader(regexp, kRegExpCid,
+                                     RegExp::InstanceSize(), is_vm_object);
+      RawObject** from = regexp->from();
+      RawObject** to = regexp->to();
+      for (RawObject** p = from; p <= to; p++) {
+        *p = d->ReadRef();
+      }
+
+      regexp->ptr()->num_registers_ = d->Read<intptr_t>();
+      regexp->ptr()->type_flags_ = d->Read<int8_t>();
+    }
+  }
+};
+
+
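+// Only live key/value pairs are serialized: deleted entries are marked by a
+// key equal to the backing data array itself and are skipped. The hash index
+// is not written; the deserializer leaves index_ null and hash_mask_ zero,
+// presumably so the index is rebuilt lazily on the next lookup.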
+class LinkedHashMapSerializationCluster : public SerializationCluster {
+ public:
+  LinkedHashMapSerializationCluster() { }
+  virtual ~LinkedHashMapSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawLinkedHashMap* map = LinkedHashMap::RawCast(object);
+    objects_.Add(map);
+
+    s->Push(map->ptr()->type_arguments_);
+
+    intptr_t used_data = Smi::Value(map->ptr()->used_data_);
+    RawArray* data_array = map->ptr()->data_;
+    RawObject** data_elements = data_array->ptr()->data();
+    for (intptr_t i = 0; i < used_data; i += 2) {
+      RawObject* key = data_elements[i];
+      if (key != data_array) {
+        RawObject* value = data_elements[i + 1];
+        s->Push(key);
+        s->Push(value);
+      }
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kLinkedHashMapCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawLinkedHashMap* map = objects_[i];
+      s->AssignRef(map);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawLinkedHashMap* map = objects_[i];
+      s->Write<bool>(map->IsCanonical());
+
+      s->WriteRef(map->ptr()->type_arguments_);
+
+      const intptr_t used_data = Smi::Value(map->ptr()->used_data_);
+      ASSERT((used_data & 1) == 0);  // Keys + values, so must be even.
+      const intptr_t deleted_keys = Smi::Value(map->ptr()->deleted_keys_);
+
+      // Write out the number of (not deleted) key/value pairs that will follow.
+      s->Write<intptr_t>((used_data >> 1) - deleted_keys);
+
+      RawArray* data_array = map->ptr()->data_;
+      RawObject** data_elements = data_array->ptr()->data();
+      for (intptr_t i = 0; i < used_data; i += 2) {
+        RawObject* key = data_elements[i];
+        if (key != data_array) {
+          RawObject* value = data_elements[i + 1];
+          s->WriteRef(key);
+          s->WriteRef(value);
+        }
+      }
+    }
+  }
+
+ private:
+  GrowableArray<RawLinkedHashMap*> objects_;
+};
+
+
+class LinkedHashMapDeserializationCluster : public DeserializationCluster {
+ public:
+  LinkedHashMapDeserializationCluster() { }
+  virtual ~LinkedHashMapDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         LinkedHashMap::InstanceSize()));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+    PageSpace* old_space = d->heap()->old_space();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawLinkedHashMap* map = reinterpret_cast<RawLinkedHashMap*>(d->Ref(id));
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(map, kLinkedHashMapCid,
+                                     LinkedHashMap::InstanceSize(),
+                                     is_vm_object, is_canonical);
+
+      map->ptr()->type_arguments_ =
+          reinterpret_cast<RawTypeArguments*>(d->ReadRef());
+
+      // TODO(rmacnak): Reserve ref ids and co-allocate in ReadAlloc.
+      intptr_t pairs = d->Read<intptr_t>();
+      intptr_t used_data = pairs << 1;
+      intptr_t data_size = Utils::Maximum(
+          Utils::RoundUpToPowerOfTwo(used_data),
+          static_cast<uintptr_t>(LinkedHashMap::kInitialIndexSize));
+
+      RawArray* data = reinterpret_cast<RawArray*>(
+          AllocateUninitialized(old_space, Array::InstanceSize(data_size)));
+      data->ptr()->type_arguments_ = TypeArguments::null();
+      data->ptr()->length_ = Smi::New(data_size);
+      intptr_t i;
+      for (i = 0; i < used_data; i++) {
+        data->ptr()->data()[i] = d->ReadRef();
+      }
+      for (; i < data_size; i++) {
+        data->ptr()->data()[i] = Object::null();
+      }
+
+      map->ptr()->index_ = TypedData::null();
+      map->ptr()->hash_mask_ = Smi::New(0);
+      map->ptr()->data_ = data;
+      map->ptr()->used_data_ = Smi::New(used_data);
+      map->ptr()->deleted_keys_ = Smi::New(0);
+    }
+  }
+};
+
+
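+// Shared by kArrayCid and kImmutableArrayCid. The length is written in the
+// alloc section so the reader can size the allocation, and again in the fill
+// section together with the canonical bit and the type arguments.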
+class ArraySerializationCluster : public SerializationCluster {
+ public:
+  explicit ArraySerializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ArraySerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawArray* array = Array::RawCast(object);
+    objects_.Add(array);
+
+    s->Push(array->ptr()->type_arguments_);
+    intptr_t length = Smi::Value(array->ptr()->length_);
+    for (intptr_t i = 0; i < length; i++) {
+      s->Push(array->ptr()->data()[i]);
+    }
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(cid_);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawArray* array = objects_[i];
+      intptr_t length = Smi::Value(array->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(array);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawArray* array = objects_[i];
+      intptr_t length = Smi::Value(array->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(array->IsCanonical());
+      s->WriteRef(array->ptr()->type_arguments_);
+      for (intptr_t j = 0; j < length; j++) {
+        s->WriteRef(array->ptr()->data()[j]);
+      }
+    }
+  }
+
+ private:
+  intptr_t cid_;
+  GrowableArray<RawArray*> objects_;
+};
+
+
+class ArrayDeserializationCluster : public DeserializationCluster {
+ public:
+  explicit ArrayDeserializationCluster(intptr_t cid) : cid_(cid) { }
+  virtual ~ArrayDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         Array::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawArray* array = reinterpret_cast<RawArray*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(array, cid_,
+                                     Array::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      array->ptr()->type_arguments_ =
+          reinterpret_cast<RawTypeArguments*>(d->ReadRef());
+      array->ptr()->length_ = Smi::New(length);
+      for (intptr_t j = 0; j < length; j++) {
+        array->ptr()->data()[j] = d->ReadRef();
+      }
+    }
+  }
+
+ private:
+  const intptr_t cid_;
+};
+
+
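+// Latin-1 strings are written as length (in both the alloc and fill
+// sections), canonical bit, hash (as an int32_t), and raw character bytes.
+// When the snapshot includes code, string clusters are replaced by
+// RODataSerializationCluster; see Serializer::NewClusterForClass below.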
+class OneByteStringSerializationCluster : public SerializationCluster {
+ public:
+  OneByteStringSerializationCluster() { }
+  virtual ~OneByteStringSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawOneByteString* str = reinterpret_cast<RawOneByteString*>(object);
+    objects_.Add(str);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kOneByteStringCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawOneByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(str);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawOneByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(str->IsCanonical());
+      intptr_t hash = Smi::Value(str->ptr()->hash_);
+      s->Write<int32_t>(hash);
+      s->WriteBytes(str->ptr()->data(), length);
+    }
+  }
+
+ private:
+  GrowableArray<RawOneByteString*> objects_;
+};
+
+
+class OneByteStringDeserializationCluster : public DeserializationCluster {
+ public:
+  OneByteStringDeserializationCluster() { }
+  virtual ~OneByteStringDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         OneByteString::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawOneByteString* str = reinterpret_cast<RawOneByteString*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(str, kOneByteStringCid,
+                                     OneByteString::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      str->ptr()->length_ = Smi::New(length);
+      // The hash was written as an int32_t by the serialization cluster.
+      str->ptr()->hash_ = Smi::New(d->Read<int32_t>());
+      for (intptr_t j = 0; j < length; j++) {
+        str->ptr()->data()[j] = d->Read<uint8_t>();
+      }
+    }
+  }
+};
+
+
+class TwoByteStringSerializationCluster : public SerializationCluster {
+ public:
+  TwoByteStringSerializationCluster() { }
+  virtual ~TwoByteStringSerializationCluster() { }
+
+  void Trace(Serializer* s, RawObject* object) {
+    RawTwoByteString* str = reinterpret_cast<RawTwoByteString*>(object);
+    objects_.Add(str);
+  }
+
+  void WriteAlloc(Serializer* s) {
+    s->WriteCid(kTwoByteStringCid);
+    intptr_t count = objects_.length();
+    s->Write<intptr_t>(count);
+    for (intptr_t i = 0; i < count; i++) {
+      RawTwoByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->AssignRef(str);
+    }
+  }
+
+  void WriteFill(Serializer* s) {
+    intptr_t count = objects_.length();
+    for (intptr_t i = 0; i < count; i++) {
+      RawTwoByteString* str = objects_[i];
+      intptr_t length = Smi::Value(str->ptr()->length_);
+      s->Write<intptr_t>(length);
+      s->Write<bool>(str->IsCanonical());
+      intptr_t hash = Smi::Value(str->ptr()->hash_);
+      s->Write<int32_t>(hash);
+      s->WriteBytes(reinterpret_cast<uint8_t*>(str->ptr()->data()), length * 2);
+    }
+  }
+
+ private:
+  GrowableArray<RawTwoByteString*> objects_;
+};
+
+
+class TwoByteStringDeserializationCluster : public DeserializationCluster {
+ public:
+  TwoByteStringDeserializationCluster() { }
+  virtual ~TwoByteStringDeserializationCluster() { }
+
+  void ReadAlloc(Deserializer* d) {
+    start_index_ = d->next_index();
+    PageSpace* old_space = d->heap()->old_space();
+    intptr_t count = d->Read<intptr_t>();
+    for (intptr_t i = 0; i < count; i++) {
+      intptr_t length = d->Read<intptr_t>();
+      d->AssignRef(AllocateUninitialized(old_space,
+                                         TwoByteString::InstanceSize(length)));
+    }
+    stop_index_ = d->next_index();
+  }
+
+  void ReadFill(Deserializer* d) {
+    bool is_vm_object = d->isolate() == Dart::vm_isolate();
+
+    for (intptr_t id = start_index_; id < stop_index_; id++) {
+      RawTwoByteString* str =
+          reinterpret_cast<RawTwoByteString*>(d->Ref(id));
+      intptr_t length = d->Read<intptr_t>();
+      bool is_canonical = d->Read<bool>();
+      Deserializer::InitializeHeader(str, kTwoByteStringCid,
+                                     TwoByteString::InstanceSize(length),
+                                     is_vm_object, is_canonical);
+      str->ptr()->length_ = Smi::New(length);
+      str->ptr()->hash_ = Smi::New(d->Read<int32_t>());
+      uint8_t* cdata = reinterpret_cast<uint8_t*>(str->ptr()->data());
+      d->ReadBytes(cdata, length * 2);
+    }
+  }
+};
+
+
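+// The serializer keeps at most one cluster per class id, created lazily the
+// first time an object with that cid is traced. Reference ids are handed out
+// starting from 1 (next_ref_index_).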
+Serializer::Serializer(Thread* thread,
+                       Snapshot::Kind kind,
+                       uint8_t** buffer,
+                       ReAlloc alloc,
+                       intptr_t initial_size,
+                       InstructionsWriter* instructions_writer)
+    : StackResource(thread),
+      heap_(thread->isolate()->heap()),
+      zone_(thread->zone()),
+      kind_(kind),
+      stream_(buffer, alloc, initial_size),
+      instructions_writer_(instructions_writer),
+      clusters_by_cid_(NULL),
+      stack_(),
+      num_cids_(0),
+      num_base_objects_(0),
+      num_written_objects_(0),
+      next_ref_index_(1) {
+  num_cids_ = thread->isolate()->class_table()->NumCids();
+  clusters_by_cid_ = new SerializationCluster*[num_cids_];
+  for (intptr_t i = 0; i < num_cids_; i++) {
+    clusters_by_cid_[i] = NULL;
+  }
+}
+
+
+Serializer::~Serializer() {
+  delete[] clusters_by_cid_;
+}
+
+
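+// Maps a class id to the cluster that serializes its objects. Class ids
+// beyond the predefined range, kInstanceCid, and typed data views fall back
+// to the generic InstanceSerializationCluster; in that case the class itself
+// is pushed so its descriptor ends up in the snapshot as well.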
+SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) {
+  Zone* Z = zone_;
+  if ((cid > kNumPredefinedCids) ||
+      (cid == kInstanceCid) ||
+      RawObject::IsTypedDataViewClassId(cid)) {
+    Push(isolate()->class_table()->At(cid));
+    return new (Z) InstanceSerializationCluster(cid);
+  }
+  if (RawObject::IsExternalTypedDataClassId(cid)) {
+    return new (Z) ExternalTypedDataSerializationCluster(cid);
+  }
+  if (RawObject::IsTypedDataClassId(cid)) {
+    return new (Z) TypedDataSerializationCluster(cid);
+  }
+
+  switch (cid) {
+    case kClassCid: return new (Z) ClassSerializationCluster(num_cids_);
+    case kUnresolvedClassCid:
+      return new (Z) UnresolvedClassSerializationCluster();
+    case kTypeArgumentsCid: return new (Z) TypeArgumentsSerializationCluster();
+    case kPatchClassCid: return new (Z) PatchClassSerializationCluster();
+    case kFunctionCid: return new (Z) FunctionSerializationCluster();
+    case kClosureDataCid: return new (Z) ClosureDataSerializationCluster();
+    case kRedirectionDataCid:
+      return new (Z) RedirectionDataSerializationCluster();
+    case kFieldCid: return new (Z) FieldSerializationCluster();
+    case kLiteralTokenCid: return new (Z) LiteralTokenSerializationCluster();
+    case kTokenStreamCid: return new (Z) TokenStreamSerializationCluster();
+    case kScriptCid: return new (Z) ScriptSerializationCluster();
+    case kLibraryCid: return new (Z) LibrarySerializationCluster();
+    case kNamespaceCid: return new (Z) NamespaceSerializationCluster();
+    case kCodeCid: return new (Z) CodeSerializationCluster();
+    case kObjectPoolCid: return new (Z) ObjectPoolSerializationCluster();
+    case kPcDescriptorsCid:
+      return new (Z) RODataSerializationCluster(kPcDescriptorsCid);
+    case kStackmapCid:
+      return new (Z) RODataSerializationCluster(kStackmapCid);
+    case kExceptionHandlersCid:
+      return new (Z) ExceptionHandlersSerializationCluster();
+    case kContextCid: return new (Z) ContextSerializationCluster();
+    case kContextScopeCid: return new (Z) ContextScopeSerializationCluster();
+    case kICDataCid: return new (Z) ICDataSerializationCluster();
+    case kMegamorphicCacheCid:
+      return new (Z) MegamorphicCacheSerializationCluster();
+    case kSubtypeTestCacheCid:
+      return new (Z) SubtypeTestCacheSerializationCluster();
+    case kLanguageErrorCid:
+      return new (Z) LanguageErrorSerializationCluster();
+    case kUnhandledExceptionCid:
+      return new (Z) UnhandledExceptionSerializationCluster();
+    case kLibraryPrefixCid: return new (Z) LibraryPrefixSerializationCluster();
+    case kTypeCid: return new (Z) TypeSerializationCluster();
+    case kTypeRefCid: return new (Z) TypeRefSerializationCluster();
+    case kTypeParameterCid: return new (Z) TypeParameterSerializationCluster();
+    case kBoundedTypeCid: return new (Z) BoundedTypeSerializationCluster();
+    case kClosureCid: return new (Z) ClosureSerializationCluster();
+    case kMintCid: return new (Z) MintSerializationCluster();
+    case kBigintCid: return new (Z) BigintSerializationCluster();
+    case kDoubleCid: return new (Z) DoubleSerializationCluster();
+    case kGrowableObjectArrayCid:
+      return new (Z) GrowableObjectArraySerializationCluster();
+    case kStacktraceCid: return new (Z) StacktraceSerializationCluster();
+    case kRegExpCid: return new (Z) RegExpSerializationCluster();
+    case kLinkedHashMapCid: return new (Z) LinkedHashMapSerializationCluster();
+    case kArrayCid:
+      return new (Z) ArraySerializationCluster(kArrayCid);
+    case kImmutableArrayCid:
+      return new (Z) ArraySerializationCluster(kImmutableArrayCid);
+    case kOneByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataSerializationCluster(kOneByteStringCid);
+      } else {
+        return new (Z) OneByteStringSerializationCluster();
+      }
+    }
+    case kTwoByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataSerializationCluster(kTwoByteStringCid);
+      } else {
+        return new (Z) TwoByteStringSerializationCluster();
+      }
+    }
+    default: break;
+  }
+
+  FATAL1("No cluster defined for cid %" Pd, cid);
+  return NULL;
+}
+
+
+void Serializer::Trace(RawObject* object) {
+  intptr_t cid;
+  if (!object->IsHeapObject()) {
+    cid = kSmiCid;
+  } else {
+    cid = object->GetClassId();
+  }
+
+  SerializationCluster* cluster = clusters_by_cid_[cid];
+  if (cluster == NULL) {
+    cluster = NewClusterForClass(cid);
+    clusters_by_cid_[cid] = cluster;
+  }
+  ASSERT(cluster != NULL);
+  cluster->Trace(this, object);
+}
+
+
+void Serializer::WriteVersionAndFeatures() {
+  const char* expected_version = Version::SnapshotString();
+  ASSERT(expected_version != NULL);
+  const intptr_t version_len = strlen(expected_version);
+  WriteBytes(reinterpret_cast<const uint8_t*>(expected_version), version_len);
+
+  const char* expected_features = Dart::FeaturesString(kind_);
+  ASSERT(expected_features != NULL);
+  const intptr_t features_len = strlen(expected_features);
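+  // Write the features string including its terminating NUL (hence the +1),
+  // so the reader can locate the end of the string.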
+  WriteBytes(reinterpret_cast<const uint8_t*>(expected_features),
+             features_len + 1);
+  free(const_cast<char*>(expected_features));
+}
+
+
+#if defined(DEBUG)
+static const intptr_t kSectionMarker = 0xABAB;
+#endif
+
+void Serializer::Serialize() {
+  while (stack_.length() > 0) {
+    Trace(stack_.RemoveLast());
+  }
+
+  intptr_t num_clusters = 0;
+  for (intptr_t cid = 1; cid < num_cids_; cid++) {
+    SerializationCluster* cluster = clusters_by_cid_[cid];
+    if (cluster != NULL) {
+      num_clusters++;
+    }
+  }
+
+  intptr_t num_objects = num_base_objects_ + num_written_objects_;
+
+  Write<int32_t>(num_objects);
+  Write<int32_t>(num_clusters);
+
+  for (intptr_t cid = 1; cid < num_cids_; cid++) {
+    SerializationCluster* cluster = clusters_by_cid_[cid];
+    if (cluster != NULL) {
+      cluster->WriteAlloc(this);
+#if defined(DEBUG)
+      Write<intptr_t>(next_ref_index_);
+#endif
+    }
+  }
+
+  // We should have assigned a ref to every object we pushed.
+  ASSERT((next_ref_index_ - 1) == num_objects);
+
+  for (intptr_t cid = 1; cid < num_cids_; cid++) {
+    SerializationCluster* cluster = clusters_by_cid_[cid];
+    if (cluster != NULL) {
+      cluster->WriteFill(this);
+#if defined(DEBUG)
+      Write<intptr_t>(kSectionMarker);
+#endif
+    }
+  }
+}
+
+
+void Serializer::AddVMIsolateBaseObjects() {
+  // These objects are always allocated by Object::InitOnce, so they are not
+  // written into the snapshot.
+
+  AddBaseObject(Object::null());
+  AddBaseObject(Object::sentinel().raw());
+  AddBaseObject(Object::transition_sentinel().raw());
+  AddBaseObject(Object::empty_array().raw());
+  AddBaseObject(Object::zero_array().raw());
+  AddBaseObject(Object::dynamic_type().raw());
+  AddBaseObject(Object::void_type().raw());
+  AddBaseObject(Bool::True().raw());
+  AddBaseObject(Bool::False().raw());
+  AddBaseObject(Object::extractor_parameter_types().raw());
+  AddBaseObject(Object::extractor_parameter_names().raw());
+  AddBaseObject(Object::empty_context_scope().raw());
+  AddBaseObject(Object::empty_descriptors().raw());
+  AddBaseObject(Object::empty_var_descriptors().raw());
+  AddBaseObject(Object::empty_exception_handlers().raw());
+
+  for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
+    AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
+  }
+  for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
+    AddBaseObject(ICData::cached_icdata_arrays_[i]);
+  }
+
+  ClassTable* table = isolate()->class_table();
+  for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
+    // Error has no class object.
+    if (cid != kErrorCid) {
+      ASSERT(table->HasValidClassAt(cid));
+      AddBaseObject(table->At(cid));
+    }
+  }
+  AddBaseObject(table->At(kDynamicCid));
+  AddBaseObject(table->At(kVoidCid));
+}
+
+
+intptr_t Serializer::WriteVMSnapshot(const Array& symbols,
+                                     const Array& scripts) {
+  NoSafepointScope no_safepoint;
+
+  AddVMIsolateBaseObjects();
+
+  // Push roots.
+  Push(symbols.raw());
+  Push(scripts.raw());
+  if (Snapshot::IncludesCode(kind_)) {
+    StubCode::Push(this);
+  }
+
+  Serialize();
+
+  // Write roots.
+  WriteRef(symbols.raw());
+  WriteRef(scripts.raw());
+  if (Snapshot::IncludesCode(kind_)) {
+    StubCode::WriteRef(this);
+  }
+
+#if defined(DEBUG)
+  Write<intptr_t>(kSectionMarker);
+#endif
+
+  // Note we are not clearing the object id table. The full ref table
+  // of the vm isolate snapshot serves as the base objects for the
+  // regular isolate snapshot.
+
+  // Return the number of objects; the -1 accounts for the unused ref 0.
+  return next_ref_index_ - 1;
+}
+
+
+void Serializer::WriteFullSnapshot(intptr_t num_base_objects,
+                                   ObjectStore* object_store) {
+  NoSafepointScope no_safepoint;
+
+  if (num_base_objects == 0) {
+    // Unit tests that are not writing a new vm isolate.
+    const Array& base_objects = Object::vm_isolate_snapshot_object_table();
+    for (intptr_t i = 1; i < base_objects.Length(); i++) {
+      AddBaseObject(base_objects.At(i));
+    }
+  } else {
+    // Base objects carried over from WriteVmIsolateSnapshot.
+    num_base_objects_ += num_base_objects;
+    next_ref_index_ += num_base_objects;
+  }
+
+  // Push roots.
+  RawObject** from = object_store->from();
+  RawObject** to = object_store->to_snapshot(kind_);
+  for (RawObject** p = from; p <= to; p++) {
+    Push(*p);
+  }
+
+  Serialize();
+
+  // Write roots.
+  for (RawObject** p = from; p <= to; p++) {
+    WriteRef(*p);
+  }
+
+#if defined(DEBUG)
+  Write<intptr_t>(kSectionMarker);
+#endif
+
+  heap_->ResetObjectIdTable();
+}
+
+
+Deserializer::Deserializer(Thread* thread,
+                           Snapshot::Kind kind,
+                           const uint8_t* buffer,
+                           intptr_t size,
+                           const uint8_t* instructions_buffer,
+                           const uint8_t* data_buffer)
+    : StackResource(thread),
+      heap_(thread->isolate()->heap()),
+      zone_(thread->zone()),
+      kind_(kind),
+      stream_(buffer, size),
+      instructions_reader_(NULL),
+      refs_(NULL),
+      next_ref_index_(1),
+      clusters_(NULL) {
+  if (Snapshot::IncludesCode(kind)) {
+    ASSERT(instructions_buffer != NULL);
+  }
+  if (instructions_buffer != NULL) {
+    instructions_reader_ =
+        new (zone_) InstructionsReader(instructions_buffer, data_buffer);
+  }
+}
+
+
+Deserializer::~Deserializer() {
+  delete[] clusters_;
+}
+
+
+DeserializationCluster* Deserializer::ReadCluster() {
+  intptr_t cid = ReadCid();
+
+  Zone* Z = zone_;
+  if ((cid > kNumPredefinedCids) ||
+      (cid == kInstanceCid) ||
+      RawObject::IsTypedDataViewClassId(cid)) {
+    return new (Z) InstanceDeserializationCluster(cid);
+  }
+  if (RawObject::IsExternalTypedDataClassId(cid)) {
+    return new (Z) ExternalTypedDataDeserializationCluster(cid);
+  }
+  if (RawObject::IsTypedDataClassId(cid)) {
+    return new (Z) TypedDataDeserializationCluster(cid);
+  }
+
+  switch (cid) {
+    case kClassCid: return new (Z) ClassDeserializationCluster();
+    case kUnresolvedClassCid:
+      return new (Z) UnresolvedClassDeserializationCluster();
+    case kTypeArgumentsCid:
+      return new (Z) TypeArgumentsDeserializationCluster();
+    case kPatchClassCid: return new (Z) PatchClassDeserializationCluster();
+    case kFunctionCid: return new (Z) FunctionDeserializationCluster();
+    case kClosureDataCid: return new (Z) ClosureDataDeserializationCluster();
+    case kRedirectionDataCid:
+      return new (Z) RedirectionDataDeserializationCluster();
+    case kFieldCid: return new (Z) FieldDeserializationCluster();
+    case kLiteralTokenCid: return new (Z) LiteralTokenDeserializationCluster();
+    case kTokenStreamCid: return new (Z) TokenStreamDeserializationCluster();
+    case kScriptCid: return new (Z) ScriptDeserializationCluster();
+    case kLibraryCid: return new (Z) LibraryDeserializationCluster();
+    case kNamespaceCid: return new (Z) NamespaceDeserializationCluster();
+    case kCodeCid: return new (Z) CodeDeserializationCluster();
+    case kObjectPoolCid: return new (Z) ObjectPoolDeserializationCluster();
+    case kPcDescriptorsCid:
+    case kStackmapCid:
+      return new (Z) RODataDeserializationCluster();
+    case kExceptionHandlersCid:
+      return new (Z) ExceptionHandlersDeserializationCluster();
+    case kContextCid: return new (Z) ContextDeserializationCluster();
+    case kContextScopeCid: return new (Z) ContextScopeDeserializationCluster();
+    case kICDataCid: return new (Z) ICDataDeserializationCluster();
+    case kMegamorphicCacheCid:
+      return new (Z) MegamorphicCacheDeserializationCluster();
+    case kSubtypeTestCacheCid:
+      return new (Z) SubtypeTestCacheDeserializationCluster();
+    case kLanguageErrorCid:
+      return new (Z) LanguageErrorDeserializationCluster();
+    case kUnhandledExceptionCid:
+      return new (Z) UnhandledExceptionDeserializationCluster();
+    case kLibraryPrefixCid:
+      return new (Z) LibraryPrefixDeserializationCluster();
+    case kTypeCid: return new (Z) TypeDeserializationCluster();
+    case kTypeRefCid: return new (Z) TypeRefDeserializationCluster();
+    case kTypeParameterCid:
+      return new (Z) TypeParameterDeserializationCluster();
+    case kBoundedTypeCid: return new (Z) BoundedTypeDeserializationCluster();
+    case kClosureCid: return new (Z) ClosureDeserializationCluster();
+    case kMintCid: return new (Z) MintDeserializationCluster();
+    case kBigintCid: return new (Z) BigintDeserializationCluster();
+    case kDoubleCid: return new (Z) DoubleDeserializationCluster();
+    case kGrowableObjectArrayCid:
+      return new (Z) GrowableObjectArrayDeserializationCluster();
+    case kStacktraceCid: return new (Z) StacktraceDeserializationCluster();
+    case kRegExpCid: return new (Z) RegExpDeserializationCluster();
+    case kLinkedHashMapCid:
+      return new (Z) LinkedHashMapDeserializationCluster();
+    case kArrayCid:
+      return new (Z) ArrayDeserializationCluster(kArrayCid);
+    case kImmutableArrayCid:
+      return new (Z) ArrayDeserializationCluster(kImmutableArrayCid);
+    case kOneByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataDeserializationCluster();
+      } else {
+        return new (Z) OneByteStringDeserializationCluster();
+      }
+    }
+    case kTwoByteStringCid: {
+      if (Snapshot::IncludesCode(kind_)) {
+        return new (Z) RODataDeserializationCluster();
+      } else {
+        return new (Z) TwoByteStringDeserializationCluster();
+      }
+    }
+    default: break;
+  }
+  FATAL1("No cluster defined for cid %" Pd, cid);
+  return NULL;
+}
+
+
+RawApiError* Deserializer::VerifyVersionAndFeatures() {
+  // If the version string doesn't match, return an error.
+  // Note: New things are allocated only if we're going to return an error.
+
+  const char* expected_version = Version::SnapshotString();
+  ASSERT(expected_version != NULL);
+  const intptr_t version_len = strlen(expected_version);
+  if (PendingBytes() < version_len) {
+    const intptr_t kMessageBufferSize = 128;
+    char message_buffer[kMessageBufferSize];
+    OS::SNPrint(message_buffer,
+                kMessageBufferSize,
+                "No full snapshot version found, expected '%s'",
+                expected_version);
+    // This can also fail while bringing up the VM isolate, so make sure to
+    // allocate the error message in old space.
+    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
+    return ApiError::New(msg, Heap::kOld);
+  }
+
+  const char* version = reinterpret_cast<const char*>(CurrentBufferAddress());
+  ASSERT(version != NULL);
+  if (strncmp(version, expected_version, version_len)) {
+    const intptr_t kMessageBufferSize = 256;
+    char message_buffer[kMessageBufferSize];
+    char* actual_version = OS::StrNDup(version, version_len);
+    OS::SNPrint(message_buffer,
+                kMessageBufferSize,
+                "Wrong %s snapshot version, expected '%s' found '%s'",
+                (Snapshot::IsFull(kind_)) ? "full" : "script",
+                expected_version,
+                actual_version);
+    free(actual_version);
+    // This can also fail while bringing up the VM isolate, so make sure to
+    // allocate the error message in old space.
+    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
+    return ApiError::New(msg, Heap::kOld);
+  }
+  Advance(version_len);
+
+  const char* expected_features = Dart::FeaturesString(kind_);
+  ASSERT(expected_features != NULL);
+  const intptr_t expected_len = strlen(expected_features);
+
+  const char* features = reinterpret_cast<const char*>(CurrentBufferAddress());
+  ASSERT(features != NULL);
+  intptr_t buffer_len = OS::StrNLen(features, PendingBytes());
+  if ((buffer_len != expected_len) ||
+      strncmp(features, expected_features, expected_len)) {
+    const intptr_t kMessageBufferSize = 256;
+    char message_buffer[kMessageBufferSize];
+    char* actual_features = OS::StrNDup(features, buffer_len < 128 ? buffer_len
+                                                                   : 128);
+    OS::SNPrint(message_buffer,
+                kMessageBufferSize,
+                "Wrong features in snapshot, expected '%s' found '%s'",
+                expected_features,
+                actual_features);
+    free(const_cast<char*>(expected_features));
+    free(actual_features);
+    // This can also fail while bringing up the VM isolate, so make sure to
+    // allocate the error message in old space.
+    const String& msg = String::Handle(String::New(message_buffer, Heap::kOld));
+    return ApiError::New(msg, Heap::kOld);
+  }
+  free(const_cast<char*>(expected_features));
+  Advance(expected_len + 1);
+  return ApiError::null();
+}
+
+
+void Deserializer::Prepare() {
+  num_objects_ = Read<int32_t>();
+  num_clusters_ = Read<int32_t>();
+
+  clusters_ = new DeserializationCluster*[num_clusters_];
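+  // +1 because ref index 0 is never used; valid refs start at 1.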
+  refs_ = Array::New(num_objects_ + 1, Heap::kOld);
+}
+
+
+void Deserializer::Deserialize() {
+  // TODO(rmacnak): Verify num of base objects.
+
+  {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "ReadAlloc"));
+    for (intptr_t i = 0; i < num_clusters_; i++) {
+      clusters_[i] = ReadCluster();
+      clusters_[i]->ReadAlloc(this);
+#if defined(DEBUG)
+      intptr_t serializers_next_ref_index_ = Read<intptr_t>();
+      ASSERT(serializers_next_ref_index_ == next_ref_index_);
+#endif
+    }
+  }
+
+  // We should have completely filled the ref array.
+  ASSERT((next_ref_index_ - 1) == num_objects_);
+
+  {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "ReadFill"));
+    for (intptr_t i = 0; i < num_clusters_; i++) {
+      clusters_[i]->ReadFill(this);
+#if defined(DEBUG)
+      intptr_t section_marker = Read<intptr_t>();
+      ASSERT(section_marker == kSectionMarker);
+#endif
+    }
+  }
+}
+
+class HeapLocker : public StackResource {
+ public:
+  HeapLocker(Thread* thread, PageSpace* page_space)
+      : StackResource(thread), page_space_(page_space) {
+        page_space_->AcquireDataLock();
+  }
+  ~HeapLocker() {
+    page_space_->ReleaseDataLock();
+  }
+
+ private:
+  PageSpace* page_space_;
+};
+
+
+void Deserializer::AddVMIsolateBaseObjects() {
+  // These objects are always allocated by Object::InitOnce, so they are not
+  // written into the snapshot.
+
+  AddBaseObject(Object::null());
+  AddBaseObject(Object::sentinel().raw());
+  AddBaseObject(Object::transition_sentinel().raw());
+  AddBaseObject(Object::empty_array().raw());
+  AddBaseObject(Object::zero_array().raw());
+  AddBaseObject(Object::dynamic_type().raw());
+  AddBaseObject(Object::void_type().raw());
+  AddBaseObject(Bool::True().raw());
+  AddBaseObject(Bool::False().raw());
+  AddBaseObject(Object::extractor_parameter_types().raw());
+  AddBaseObject(Object::extractor_parameter_names().raw());
+  AddBaseObject(Object::empty_context_scope().raw());
+  AddBaseObject(Object::empty_descriptors().raw());
+  AddBaseObject(Object::empty_var_descriptors().raw());
+  AddBaseObject(Object::empty_exception_handlers().raw());
+
+  for (intptr_t i = 0; i < ArgumentsDescriptor::kCachedDescriptorCount; i++) {
+    AddBaseObject(ArgumentsDescriptor::cached_args_descriptors_[i]);
+  }
+  for (intptr_t i = 0; i < ICData::kCachedICDataArrayCount; i++) {
+    AddBaseObject(ICData::cached_icdata_arrays_[i]);
+  }
+
+  ClassTable* table = isolate()->class_table();
+  for (intptr_t cid = kClassCid; cid <= kUnwindErrorCid; cid++) {
+    // Error has no class object.
+    if (cid != kErrorCid) {
+      ASSERT(table->HasValidClassAt(cid));
+      AddBaseObject(table->At(cid));
+    }
+  }
+  AddBaseObject(table->At(kDynamicCid));
+  AddBaseObject(table->At(kVoidCid));
+}
+
+
+void Deserializer::ReadVMSnapshot() {
+  Array& symbol_table = Array::Handle(zone_);
+  Array& refs = Array::Handle(zone_);
+  Prepare();
+
+  {
+    NoSafepointScope no_safepoint;
+    HeapLocker hl(thread(), heap_->old_space());
+
+    AddVMIsolateBaseObjects();
+
+    Deserialize();
+
+    // Read roots.
+    symbol_table ^= ReadRef();
+    isolate()->object_store()->set_symbol_table(symbol_table);
+    ReadRef();  // Script list.
+    if (Snapshot::IncludesCode(kind_)) {
+      StubCode::ReadRef(this);
+    }
+
+#if defined(DEBUG)
+    intptr_t section_marker = Read<intptr_t>();
+    ASSERT(section_marker == kSectionMarker);
+#endif
+
+    refs = refs_;
+    refs_ = NULL;
+  }
+
+  Symbols::InitOnceFromSnapshot(isolate());
+
+  Object::set_vm_isolate_snapshot_object_table(refs);
+
+#if defined(DEBUG)
+  isolate()->ValidateClassTable();
+#endif
+}
+
+void Deserializer::ReadFullSnapshot(ObjectStore* object_store) {
+  Array& refs = Array::Handle();
+  Prepare();
+
+  {
+    NoSafepointScope no_safepoint;
+    HeapLocker hl(thread(), heap_->old_space());
+
+    // N.B.: Skipping index 0 because ref 0 is illegal.
+    const Array& base_objects = Object::vm_isolate_snapshot_object_table();
+    for (intptr_t i = 1; i < base_objects.Length(); i++) {
+      AddBaseObject(base_objects.At(i));
+    }
+
+    Deserialize();
+
+    // Read roots.
+    RawObject** from = object_store->from();
+    RawObject** to = object_store->to_snapshot(kind_);
+    for (RawObject** p = from; p <= to; p++) {
+      *p = ReadRef();
+    }
+
+#if defined(DEBUG)
+    intptr_t section_marker = Read<intptr_t>();
+    ASSERT(section_marker == kSectionMarker);
+#endif
+
+    refs = refs_;
+    refs_ = NULL;
+  }
+
+#if defined(DEBUG)
+  Isolate* isolate = thread()->isolate();
+  isolate->ValidateClassTable();
+  isolate->heap()->Verify();
+#endif
+
+  {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "PostLoad"));
+    for (intptr_t i = 0; i < num_clusters_; i++) {
+      clusters_[i]->PostLoad(refs, kind_, zone_);
+    }
+  }
+
+  // Setup native resolver for bootstrap impl.
+  Bootstrap::SetupNativeResolver();
+}
+
+
+// An object visitor which will iterate over all the script objects in the heap
+// and either count them or collect them into an array. This is used during
+// full snapshot generation of the VM isolate to write out all script
+// objects and their accompanying token streams.
+class ScriptVisitor : public ObjectVisitor {
+ public:
+  explicit ScriptVisitor(Thread* thread) :
+      objHandle_(Object::Handle(thread->zone())),
+      count_(0),
+      scripts_(NULL) {}
+
+  ScriptVisitor(Thread* thread, const Array* scripts) :
+      objHandle_(Object::Handle(thread->zone())),
+      count_(0),
+      scripts_(scripts) {}
+
+  void VisitObject(RawObject* obj) {
+    if (obj->IsScript()) {
+      if (scripts_ != NULL) {
+        objHandle_ = obj;
+        scripts_->SetAt(count_, objHandle_);
+      }
+      count_ += 1;
+    }
+  }
+
+  intptr_t count() const { return count_; }
+
+ private:
+  Object& objHandle_;
+  intptr_t count_;
+  const Array* scripts_;
+};
+
+
+FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind,
+                                       uint8_t** vm_isolate_snapshot_buffer,
+                                       uint8_t** isolate_snapshot_buffer,
+                                       ReAlloc alloc,
+                                       InstructionsWriter* instructions_writer)
+    : thread_(Thread::Current()),
+      kind_(kind),
+      vm_isolate_snapshot_buffer_(vm_isolate_snapshot_buffer),
+      isolate_snapshot_buffer_(isolate_snapshot_buffer),
+      alloc_(alloc),
+      vm_isolate_snapshot_size_(0),
+      isolate_snapshot_size_(0),
+      instructions_writer_(instructions_writer),
+      scripts_(Array::Handle(zone())),
+      saved_symbol_table_(Array::Handle(zone())),
+      new_vm_symbol_table_(Array::Handle(zone())) {
+  ASSERT(isolate_snapshot_buffer_ != NULL);
+  ASSERT(alloc_ != NULL);
+  ASSERT(isolate() != NULL);
+  ASSERT(ClassFinalizer::AllClassesFinalized());
+  ASSERT(isolate() != NULL);
+  ASSERT(heap() != NULL);
+  ObjectStore* object_store = isolate()->object_store();
+  ASSERT(object_store != NULL);
+
+#if defined(DEBUG)
+  // Ensure the class table is valid.
+  isolate()->ValidateClassTable();
+#endif
+  // Can't have any mutation happening while we're serializing.
+  ASSERT(isolate()->background_compiler() == NULL);
+
+  if (vm_isolate_snapshot_buffer != NULL) {
+    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+        Timeline::GetIsolateStream(), "PrepareNewVMIsolate"));
+
+    // Collect all the script objects and their accompanying token stream
+    // objects into an array so that we can write it out as part of the VM
+    // isolate snapshot. We first count the number of script objects, allocate
+    // an array and then fill it up with the script objects.
+    ScriptVisitor scripts_counter(thread());
+    heap()->IterateOldObjects(&scripts_counter);
+    Dart::vm_isolate()->heap()->IterateOldObjects(&scripts_counter);
+    intptr_t count = scripts_counter.count();
+    scripts_ = Array::New(count, Heap::kOld);
+    ScriptVisitor script_visitor(thread(), &scripts_);
+    heap()->IterateOldObjects(&script_visitor);
+    Dart::vm_isolate()->heap()->IterateOldObjects(&script_visitor);
+    ASSERT(script_visitor.count() == count);
+
+    // Tuck away the current symbol table.
+    saved_symbol_table_ = object_store->symbol_table();
+
+    // Create a unified symbol table that will be written as the vm isolate's
+    // symbol table.
+    new_vm_symbol_table_ = Symbols::UnifiedSymbolTable();
+
+    // Create an empty symbol table that will be written as the isolate's symbol
+    // table.
+    Symbols::SetupSymbolTable(isolate());
+  } else {
+    // Reuse the current vm isolate.
+  }
+}
+
+FullSnapshotWriter::~FullSnapshotWriter() {
+  // We may run Dart code afterwards; restore the symbol table if needed.
+  if (!saved_symbol_table_.IsNull()) {
+    isolate()->object_store()->set_symbol_table(saved_symbol_table_);
+    saved_symbol_table_ = Array::null();
+  }
+  new_vm_symbol_table_ = Array::null();
+  scripts_ = Array::null();
+}
+
+
+intptr_t FullSnapshotWriter::WriteVmIsolateSnapshot() {
+  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+      Timeline::GetIsolateStream(), "WriteVmIsolateSnapshot"));
+
+  ASSERT(vm_isolate_snapshot_buffer_ != NULL);
+  Serializer serializer(thread(),
+                        kind_,
+                        vm_isolate_snapshot_buffer_,
+                        alloc_,
+                        kInitialSize,
+                        instructions_writer_);
+
+  serializer.ReserveHeader();
+  serializer.WriteVersionAndFeatures();
+  /*
+   * Now write out the following:
+   * - the symbol table
+   * - all the scripts and token streams for these scripts
+   * - the stub code (precompiled snapshots only)
+   **/
+  intptr_t num_objects = serializer.WriteVMSnapshot(new_vm_symbol_table_,
+                                                    scripts_);
+  serializer.FillHeader(serializer.kind());
+
+  vm_isolate_snapshot_size_ = serializer.bytes_written();
+  return num_objects;
+}
+
+
+void FullSnapshotWriter::WriteIsolateFullSnapshot(
+    intptr_t num_base_objects) {
+  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
+      Timeline::GetIsolateStream(), "WriteIsolateFullSnapshot"));
+
+  Serializer serializer(thread(),
+                        kind_,
+                        isolate_snapshot_buffer_,
+                        alloc_,
+                        kInitialSize,
+                        instructions_writer_);
+  ObjectStore* object_store = isolate()->object_store();
+  ASSERT(object_store != NULL);
+
+  serializer.ReserveHeader();
+  serializer.WriteVersionAndFeatures();
+  serializer.WriteFullSnapshot(num_base_objects, object_store);
+  serializer.FillHeader(serializer.kind());
+
+  isolate_snapshot_size_ = serializer.bytes_written();
+}
+
+
+void FullSnapshotWriter::WriteFullSnapshot() {
+  intptr_t num_base_objects;
+  if (vm_isolate_snapshot_buffer() != NULL) {
+    num_base_objects = WriteVmIsolateSnapshot();
+    ASSERT(num_base_objects != 0);
+  } else {
+    num_base_objects = 0;
+  }
+
+  WriteIsolateFullSnapshot(num_base_objects);
+
+  if (Snapshot::IncludesCode(kind_)) {
+    instructions_writer_->Write();
+
+    OS::Print("VMIsolate(CodeSize): %" Pd "\n", VmIsolateSnapshotSize());
+    OS::Print("Isolate(CodeSize): %" Pd "\n", IsolateSnapshotSize());
+    OS::Print("Instructions(CodeSize): %" Pd "\n",
+              instructions_writer_->binary_size());
+    intptr_t total = VmIsolateSnapshotSize() +
+                     IsolateSnapshotSize() +
+                     instructions_writer_->binary_size();
+    OS::Print("Total(CodeSize): %" Pd "\n", total);
+  }
+}
+
+
+RawApiError* IsolateSnapshotReader::ReadFullSnapshot() {
+  Deserializer deserializer(thread_,
+                            kind_,
+                            buffer_,
+                            size_,
+                            instructions_buffer_,
+                            data_buffer_);
+
+  RawApiError* error = deserializer.VerifyVersionAndFeatures();
+  if (error != ApiError::null()) {
+    return error;
+  }
+
+  deserializer.ReadFullSnapshot(thread_->isolate()->object_store());
+
+  return ApiError::null();
+}
+
+
+RawApiError* VmIsolateSnapshotReader::ReadVmIsolateSnapshot() {
+  Deserializer deserializer(thread_,
+                            kind_,
+                            buffer_,
+                            size_,
+                            instructions_buffer_,
+                            data_buffer_);
+
+  RawApiError* error = deserializer.VerifyVersionAndFeatures();
+  if (error != ApiError::null()) {
+    return error;
+  }
+
+  deserializer.ReadVMSnapshot();
+
+  Dart::set_instructions_snapshot_buffer(instructions_buffer_);
+  Dart::set_data_snapshot_buffer(data_buffer_);
+
+  return ApiError::null();
+}
+
+}  // namespace dart
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
new file mode 100644
index 0000000..b2bdd0c
--- /dev/null
+++ b/runtime/vm/clustered_snapshot.h
@@ -0,0 +1,478 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_CLUSTERED_SNAPSHOT_H_
+#define VM_CLUSTERED_SNAPSHOT_H_
+
+#include "platform/assert.h"
+#include "vm/allocation.h"
+#include "vm/bitfield.h"
+#include "vm/datastream.h"
+#include "vm/exceptions.h"
+#include "vm/globals.h"
+#include "vm/growable_array.h"
+#include "vm/heap.h"
+#include "vm/isolate.h"
+#include "vm/object.h"
+#include "vm/snapshot.h"
+#include "vm/version.h"
+#include "vm/visitor.h"
+
+namespace dart {
+
+// Forward declarations.
+class Serializer;
+class Deserializer;
+class ObjectStore;
+
+// For full snapshots, we use a clustered snapshot format that trades longer
+// serialization time for faster deserialization time and smaller snapshots.
+// Objects are clustered by class to allow writing type information once per
+// class instead of per object, and to allow filling the objects in a tight
+// loop. The snapshot has two major sections: the first describes how to
+// allocate the objects and the second describes how to initialize them.
+// Deserialization starts by allocating a reference array large enough to hold
+// the base objects (objects already available to both the serializer and
+// deserializer) and the objects written in the snapshot. The allocation section
+// is then read for each cluster, filling the reference array. Then the
+// initialization/fill section is read for each cluster, using the indices into
+// the reference array to fill pointers. At this point, every object has been
+// touched exactly once and in order, making this approach very cache friendly.
+// Finally, each cluster is given an opportunity to perform some fix-ups that
+// require the graph to be fully loaded, such as rehashing, though most
+// clusters do not require fixups.
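+//
+// As an illustrative sketch (not part of this change), the load side reduces
+// to roughly:
+//
+//   refs = new array[num_base_objects + num_written_objects + 1];  // ref 0 unused
+//   for each cluster: cluster->ReadAlloc(d);    // allocate objects, set refs
+//   for each cluster: cluster->ReadFill(d);     // initialize objects via refs
+//   for each cluster: cluster->PostLoad(refs, kind, zone);  // optional fixups
+//
+// which mirrors Deserializer::Prepare and Deserializer::Deserialize in
+// clustered_snapshot.cc.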
+
+class SerializationCluster : public ZoneAllocated {
+ public:
+  virtual ~SerializationCluster() { }
+
+  // Add [object] to the cluster and push its outgoing references.
+  virtual void Trace(Serializer* serializer, RawObject* object) = 0;
+
+  // Write the cluster type and information needed to allocate the cluster's
+  // objects. For fixed sized objects, this is just the object count. For
+  // variable sized objects, this is the object count and length of each object.
+  virtual void WriteAlloc(Serializer* serializer) = 0;
+
+  // Write the byte and reference data of the cluster's objects.
+  virtual void WriteFill(Serializer* serializer) = 0;
+};
+
+
+class DeserializationCluster : public ZoneAllocated {
+ public:
+  DeserializationCluster() : start_index_(-1), stop_index_(-1) { }
+  virtual ~DeserializationCluster() { }
+
+  // Allocate memory for all objects in the cluster and write their addresses
+  // into the ref array. Do not touch this memory.
+  virtual void ReadAlloc(Deserializer* deserializer) = 0;
+
+  // Initialize the cluster's objects. Do not touch the memory of other objects.
+  virtual void ReadFill(Deserializer* deserializer) = 0;
+
+  // Complete any action that requires the full graph to be deserialized, such
+  // as rehashing.
+  virtual void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { }
+
+ protected:
+  // The range of the ref array that belongs to this cluster.
+  intptr_t start_index_;
+  intptr_t stop_index_;
+};
+
+
+enum {
+  kRefTagSize = 1,
+  kRefTagShift = 1,
+  kRefTagMask = 1,
+  kSmiRefTag = 0x0,
+  kHeapRefTag = 0x1,
+};
+
+
+class Serializer : public StackResource {
+ public:
+  Serializer(Thread* thread,
+             Snapshot::Kind kind,
+             uint8_t** buffer,
+             ReAlloc alloc,
+             intptr_t initial_size,
+             InstructionsWriter* instructions_writer_);
+  ~Serializer();
+
+  intptr_t WriteVMSnapshot(const Array& symbols, const Array& scripts);
+  void WriteFullSnapshot(intptr_t num_base_objects, ObjectStore* object_store);
+
+  void AddVMIsolateBaseObjects();
+
+  void AddBaseObject(RawObject* base_object) {
+    AssignRef(base_object);
+    num_base_objects_++;
+  }
+
+  void AssignRef(RawObject* object) {
+    ASSERT(next_ref_index_ != 0);
+    heap_->SetObjectId(object, next_ref_index_);
+    ASSERT(heap_->GetObjectId(object) == next_ref_index_);
+    next_ref_index_++;
+  }
+
+  void Push(RawObject* object) {
+    if (!object->IsHeapObject()) {
+      return;
+    }
+
+    if (object->IsCode() && !Snapshot::IncludesCode(kind_)) {
+      return;  // Do not trace, will write null.
+    }
+
+    if (object->IsSendPort()) {
+      // TODO(rmacnak): Do a better job of resetting fields in precompilation
+      // and assert this is unreachable.
+      return;  // Do not trace, will write null.
+    }
+
+    intptr_t id = heap_->GetObjectId(object);
+    if (id == 0) {
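+      // First time this object is reached: mark it with a placeholder id so it
+      // is only pushed once. The real ref index is assigned later when its
+      // cluster calls AssignRef during WriteAlloc.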
+      heap_->SetObjectId(object, 1);
+      ASSERT(heap_->GetObjectId(object) != 0);
+      stack_.Add(object);
+      num_written_objects_++;
+    }
+  }
+
+  void AddUntracedRef() {
+    num_written_objects_++;
+  }
+
+  void Trace(RawObject* object);
+
+  SerializationCluster* NewClusterForClass(intptr_t cid);
+
+  void ReserveHeader() {
+    // Make room for recording snapshot buffer size.
+    stream_.set_current(stream_.buffer() + Snapshot::kHeaderSize);
+  }
+
+  void FillHeader(Snapshot::Kind kind) {
+    int64_t* data = reinterpret_cast<int64_t*>(stream_.buffer());
+    data[Snapshot::kLengthIndex] = stream_.bytes_written();
+    data[Snapshot::kSnapshotFlagIndex] = kind;
+  }
+
+  void WriteVersionAndFeatures();
+
+  void Serialize();
+  intptr_t bytes_written() { return stream_.bytes_written(); }
+
+  // Writes raw data to the stream (basic type).
+  // sizeof(T) must be in {1,2,4,8}.
+  template <typename T>
+  void Write(T value) {
+    WriteStream::Raw<sizeof(T), T>::Write(&stream_, value);
+  }
+
+  void WriteBytes(const uint8_t* addr, intptr_t len) {
+    stream_.WriteBytes(addr, len);
+  }
+
+  void WriteRef(RawObject* object) {
+    if (!object->IsHeapObject()) {
+      ASSERT(static_cast<intptr_t>(kSmiRefTag) ==
+             static_cast<intptr_t>(kSmiTag));
+      Write<intptr_t>(reinterpret_cast<intptr_t>(object));
+      return;
+    }
+
+    intptr_t id = heap_->GetObjectId(object);
+    if (id == 0) {
+      if (object->IsCode() && !Snapshot::IncludesCode(kind_)) {
+        WriteRef(Object::null());
+        return;
+      }
+      if (object->IsSendPort()) {
+        // TODO(rmacnak): Do a better job of resetting fields in precompilation
+        // and assert this is unreachable.
+        WriteRef(Object::null());
+        return;
+      }
+      FATAL("Missing ref");
+    }
+    Write<intptr_t>((id << kRefTagShift) | kHeapRefTag);
+  }
+
+  void WriteTokenPosition(TokenPosition pos) {
+    Write<int32_t>(pos.SnapshotEncode());
+  }
+
+  void WriteCid(intptr_t cid) {
+    COMPILE_ASSERT(RawObject::kClassIdTagSize <= 32);
+    Write<int32_t>(cid);
+  }
+
+  int32_t GetTextOffset(RawInstructions* instr, RawCode* code) {
+    return instructions_writer_->GetOffsetFor(instr, code);
+  }
+
+  int32_t GetRODataOffset(RawObject* object) {
+    return instructions_writer_->GetObjectOffsetFor(object);
+  }
+
+  Snapshot::Kind kind() const { return kind_; }
+
+ private:
+  Heap* heap_;
+  Zone* zone_;
+  Snapshot::Kind kind_;
+  WriteStream stream_;
+  InstructionsWriter* instructions_writer_;
+  SerializationCluster** clusters_by_cid_;
+  GrowableArray<RawObject*> stack_;
+  intptr_t num_cids_;
+  intptr_t num_base_objects_;
+  intptr_t num_written_objects_;
+  intptr_t next_ref_index_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(Serializer);
+};
+
+
+class Deserializer : public StackResource {
+ public:
+  Deserializer(Thread* thread,
+               Snapshot::Kind kind,
+               const uint8_t* buffer,
+               intptr_t size,
+               const uint8_t* instructions_buffer,
+               const uint8_t* data_buffer);
+  ~Deserializer();
+
+  void ReadFullSnapshot(ObjectStore* object_store);
+  void ReadVMSnapshot();
+
+  void AddVMIsolateBaseObjects();
+
+  static void InitializeHeader(RawObject* raw,
+                               intptr_t cid,
+                               intptr_t size,
+                               bool is_vm_isolate,
+                               bool is_canonical = false);
+
+  // Reads raw data (for basic types).
+  // sizeof(T) must be in {1,2,4,8}.
+  template <typename T>
+  T Read() {
+    return ReadStream::Raw<sizeof(T), T>::Read(&stream_);
+  }
+
+  void ReadBytes(uint8_t* addr, intptr_t len) {
+    stream_.ReadBytes(addr, len);
+  }
+
+  const uint8_t* CurrentBufferAddress() const {
+    return stream_.AddressOfCurrentPosition();
+  }
+
+  void Advance(intptr_t value) {
+    stream_.Advance(value);
+  }
+
+  intptr_t PendingBytes() const {
+    return stream_.PendingBytes();
+  }
+
+  void AddBaseObject(RawObject* base_object) {
+    AssignRef(base_object);
+  }
+
+  void AssignRef(RawObject* object) {
+    ASSERT(next_ref_index_ <= num_objects_);
+    refs_->ptr()->data()[next_ref_index_] = object;
+    next_ref_index_++;
+  }
+
+  RawObject* Ref(intptr_t index) const {
+    ASSERT(index > 0);
+    ASSERT(index <= num_objects_);
+    return refs_->ptr()->data()[index];
+  }
+
+  RawObject* ReadRef() {
+    intptr_t index = Read<intptr_t>();
+    if ((index & kRefTagMask) == kSmiRefTag) {
+      ASSERT(static_cast<intptr_t>(kSmiRefTag) ==
+             static_cast<intptr_t>(kSmiTag));
+      return reinterpret_cast<RawSmi*>(index);
+    }
+    return Ref(index >> kRefTagShift);
+  }
+
+  TokenPosition ReadTokenPosition() {
+    return TokenPosition::SnapshotDecode(Read<int32_t>());
+  }
+
+  intptr_t ReadCid() {
+    COMPILE_ASSERT(RawObject::kClassIdTagSize <= 32);
+    return Read<int32_t>();
+  }
+
+  uword GetInstructionsAt(int32_t offset) {
+    return instructions_reader_->GetInstructionsAt(offset);
+  }
+
+  RawObject* GetObjectAt(int32_t offset) {
+    return instructions_reader_->GetObjectAt(offset);
+  }
+
+  RawApiError* VerifyVersionAndFeatures();
+
+  void Prepare();
+  void Deserialize();
+
+  DeserializationCluster* ReadCluster();
+
+  intptr_t next_index() const { return next_ref_index_; }
+  Heap* heap() const { return heap_; }
+  Snapshot::Kind kind() const { return kind_; }
+
+ private:
+  Heap* heap_;
+  Zone* zone_;
+  Snapshot::Kind kind_;
+  ReadStream stream_;
+  InstructionsReader* instructions_reader_;
+  intptr_t num_objects_;
+  intptr_t num_clusters_;
+  RawArray* refs_;
+  intptr_t next_ref_index_;
+  DeserializationCluster** clusters_;
+};
+
+
+class FullSnapshotWriter {
+ public:
+  static const intptr_t kInitialSize = 64 * KB;
+  FullSnapshotWriter(Snapshot::Kind kind,
+                     uint8_t** vm_isolate_snapshot_buffer,
+                     uint8_t** isolate_snapshot_buffer,
+                     ReAlloc alloc,
+                     InstructionsWriter* instructions_writer);
+  ~FullSnapshotWriter();
+
+  uint8_t** vm_isolate_snapshot_buffer() const {
+    return vm_isolate_snapshot_buffer_;
+  }
+
+  uint8_t** isolate_snapshot_buffer() const {
+    return isolate_snapshot_buffer_;
+  }
+
+  Thread* thread() const { return thread_; }
+  Zone* zone() const { return thread_->zone(); }
+  Isolate* isolate() const { return thread_->isolate(); }
+  Heap* heap() const { return isolate()->heap(); }
+
+  // Writes a full snapshot of the Isolate.
+  void WriteFullSnapshot();
+
+  intptr_t VmIsolateSnapshotSize() const {
+    return vm_isolate_snapshot_size_;
+  }
+  intptr_t IsolateSnapshotSize() const {
+    return isolate_snapshot_size_;
+  }
+
+ private:
+  // Writes a snapshot of the VM Isolate.
+  intptr_t WriteVmIsolateSnapshot();
+
+  // Writes a full snapshot of a regular Dart Isolate.
+  void WriteIsolateFullSnapshot(intptr_t num_base_objects);
+
+  Thread* thread_;
+  Snapshot::Kind kind_;
+  uint8_t** vm_isolate_snapshot_buffer_;
+  uint8_t** isolate_snapshot_buffer_;
+  ReAlloc alloc_;
+  intptr_t vm_isolate_snapshot_size_;
+  intptr_t isolate_snapshot_size_;
+  ForwardList* forward_list_;
+  InstructionsWriter* instructions_writer_;
+  Array& scripts_;
+  Array& saved_symbol_table_;
+  Array& new_vm_symbol_table_;
+
+  DISALLOW_COPY_AND_ASSIGN(FullSnapshotWriter);
+};
+
+
+class VmIsolateSnapshotReader {
+ public:
+  VmIsolateSnapshotReader(Snapshot::Kind kind,
+                          const uint8_t* buffer,
+                          intptr_t size,
+                          const uint8_t* instructions_buffer,
+                          const uint8_t* data_buffer,
+                          Thread* thread) :
+      kind_(kind),
+      thread_(thread),
+      buffer_(buffer),
+      size_(size),
+      instructions_buffer_(instructions_buffer),
+      data_buffer_(data_buffer) {
+    thread->isolate()->set_compilation_allowed(kind != Snapshot::kAppNoJIT);
+  }
+
+  ~VmIsolateSnapshotReader() { }
+
+  RawApiError* ReadVmIsolateSnapshot();
+
+ private:
+  Snapshot::Kind kind_;
+  Thread* thread_;
+  const uint8_t* buffer_;
+  intptr_t size_;
+  const uint8_t* instructions_buffer_;
+  const uint8_t* data_buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(VmIsolateSnapshotReader);
+};
+
+
+class IsolateSnapshotReader {
+ public:
+  IsolateSnapshotReader(Snapshot::Kind kind,
+                        const uint8_t* buffer,
+                        intptr_t size,
+                        const uint8_t* instructions_buffer,
+                        const uint8_t* data_buffer,
+                        Thread* thread) :
+      kind_(kind),
+      thread_(thread),
+      buffer_(buffer),
+      size_(size),
+      instructions_buffer_(instructions_buffer),
+      data_buffer_(data_buffer) {
+    thread->isolate()->set_compilation_allowed(kind != Snapshot::kAppNoJIT);
+  }
+
+  ~IsolateSnapshotReader() {}
+
+  RawApiError* ReadFullSnapshot();
+
+ private:
+  Snapshot::Kind kind_;
+  Thread* thread_;
+  const uint8_t* buffer_;
+  intptr_t size_;
+  const uint8_t* instructions_buffer_;
+  const uint8_t* data_buffer_;
+
+  DISALLOW_COPY_AND_ASSIGN(IsolateSnapshotReader);
+};
+
+}  // namespace dart
+
+#endif  // VM_CLUSTERED_SNAPSHOT_H_
diff --git a/runtime/vm/code_generator.cc b/runtime/vm/code_generator.cc
index 5d4e3cb..4b572a7 100644
--- a/runtime/vm/code_generator.cc
+++ b/runtime/vm/code_generator.cc
@@ -63,6 +63,7 @@
 
 DECLARE_FLAG(int, reload_every);
 DECLARE_FLAG(bool, reload_every_optimized);
+DECLARE_FLAG(bool, reload_every_back_off);
 
 #ifdef DEBUG
 DEFINE_FLAG(charp, gc_at_instance_allocation, NULL,
@@ -109,8 +110,7 @@
   if (length.IsSmi()) {
     const intptr_t len = Smi::Cast(length).Value();
     if ((len >= 0) && (len <= Array::kMaxElements)) {
-      Heap::Space space = isolate->heap()->SpaceForAllocation(kArrayCid);
-      const Array& array = Array::Handle(Array::New(len, space));
+      const Array& array = Array::Handle(Array::New(len, Heap::kNew));
       arguments.SetReturn(array);
       TypeArguments& element_type =
           TypeArguments::CheckedHandle(arguments.ArgAt(1));
@@ -159,7 +159,7 @@
     }
   }
 #endif
-  Heap::Space space = isolate->heap()->SpaceForAllocation(cls.id());
+  Heap::Space space = Heap::kNew;
   const Instance& instance = Instance::Handle(Instance::New(cls, space));
 
   arguments.SetReturn(instance);
@@ -1297,7 +1297,10 @@
     DeoptimizeFunctionsOnStack();
   }
   if (do_reload) {
-    NOT_IN_PRODUCT(isolate->OnStackReload();)
+    if (FLAG_reload_every_back_off) {
+      FLAG_reload_every *= 2;
+    }
+    NOT_IN_PRODUCT(isolate->ReloadSources();)
   }
   if (FLAG_support_debugger && do_stacktrace) {
     String& var_name = String::Handle();
diff --git a/runtime/vm/code_patcher_arm64_test.cc b/runtime/vm/code_patcher_arm64_test.cc
index 3b5e61e..b1cdfdd 100644
--- a/runtime/vm/code_patcher_arm64_test.cc
+++ b/runtime/vm/code_patcher_arm64_test.cc
@@ -40,7 +40,8 @@
                                                          target_name,
                                                          args_descriptor,
                                                          15,
-                                                         1));
+                                                         1,
+                                                         false));
 
   // Code accessing pp is generated, but not executed. Uninitialized pp is OK.
   __ set_constant_pool_allowed(true);
diff --git a/runtime/vm/code_patcher_arm_test.cc b/runtime/vm/code_patcher_arm_test.cc
index b1ca06f..3671eb8 100644
--- a/runtime/vm/code_patcher_arm_test.cc
+++ b/runtime/vm/code_patcher_arm_test.cc
@@ -40,7 +40,8 @@
                                                          target_name,
                                                          args_descriptor,
                                                          15,
-                                                         1));
+                                                         1,
+                                                         false));
 
   // Code accessing pp is generated, but not executed. Uninitialized pp is OK.
   __ set_constant_pool_allowed(true);
diff --git a/runtime/vm/code_patcher_ia32_test.cc b/runtime/vm/code_patcher_ia32_test.cc
index c035345..e7a6fab 100644
--- a/runtime/vm/code_patcher_ia32_test.cc
+++ b/runtime/vm/code_patcher_ia32_test.cc
@@ -40,7 +40,8 @@
                                                          target_name,
                                                          args_descriptor,
                                                          15,
-                                                         1));
+                                                         1,
+                                                         false));
 
   __ LoadObject(ECX, ic_data);
   __ Call(*StubCode::OneArgCheckInlineCache_entry());
diff --git a/runtime/vm/code_patcher_mips_test.cc b/runtime/vm/code_patcher_mips_test.cc
index c8d65bf..b6d4109 100644
--- a/runtime/vm/code_patcher_mips_test.cc
+++ b/runtime/vm/code_patcher_mips_test.cc
@@ -40,7 +40,8 @@
                                                          target_name,
                                                          args_descriptor,
                                                          15,
-                                                         1));
+                                                         1,
+                                                         false));
 
   __ LoadObject(S5, ic_data);
   __ BranchLinkPatchable(*StubCode::OneArgCheckInlineCache_entry());
diff --git a/runtime/vm/code_patcher_x64_test.cc b/runtime/vm/code_patcher_x64_test.cc
index 410ba1a..c6e88c2 100644
--- a/runtime/vm/code_patcher_x64_test.cc
+++ b/runtime/vm/code_patcher_x64_test.cc
@@ -40,7 +40,7 @@
                                                          target_name,
                                                          args_descriptor,
                                                          15,
-                                                         1));
+                                                         1, false));
 
   // Code accessing pp is generated, but not executed. Uninitialized pp is OK.
   __ set_constant_pool_allowed(true);
diff --git a/runtime/vm/compiler.cc b/runtime/vm/compiler.cc
index a15dca3..cc3b691 100644
--- a/runtime/vm/compiler.cc
+++ b/runtime/vm/compiler.cc
@@ -1316,12 +1316,15 @@
     }
 
     if (FLAG_disassemble && FlowGraphPrinter::ShouldPrint(function)) {
+      SafepointOperationScope safepoint_scope(thread);
       Disassembler::DisassembleCode(function, optimized);
     } else if (FLAG_disassemble_optimized &&
                optimized &&
                FlowGraphPrinter::ShouldPrint(function)) {
+      SafepointOperationScope safepoint_scope(thread);
       Disassembler::DisassembleCode(function, true);
     }
+
     DEBUG_ONLY(CheckInliningIntervals(function));
     return Error::null();
   } else {
diff --git a/runtime/vm/compiler_stats.cc b/runtime/vm/compiler_stats.cc
index d15df53..e8d39b3 100644
--- a/runtime/vm/compiler_stats.cc
+++ b/runtime/vm/compiler_stats.cc
@@ -26,8 +26,8 @@
   }
 
   void VisitObject(RawObject* raw_obj) {
-    if (raw_obj->IsFreeListElement()) {
-      return;
+    if (raw_obj->IsPseudoObject()) {
+      return;  // Cannot be wrapped in handles.
     }
     obj_ = raw_obj;
     if (obj_.GetClassId() == TokenStream::kClassId) {
diff --git a/runtime/vm/constant_propagator.cc b/runtime/vm/constant_propagator.cc
index 4409f3f..6b97f7d 100644
--- a/runtime/vm/constant_propagator.cc
+++ b/runtime/vm/constant_propagator.cc
@@ -1514,8 +1514,9 @@
 
           changed = true;
 
-          if (FLAG_trace_constant_propagation) {
-            OS::Print("Eliminated branch in B%" Pd " common target B%" Pd "\n",
+          if (FLAG_trace_constant_propagation &&
+              FlowGraphPrinter::ShouldPrint(graph_->function())) {
+            THR_Print("Eliminated branch in B%" Pd " common target B%" Pd "\n",
                       block->block_id(), join->block_id());
           }
         }
@@ -1533,7 +1534,8 @@
 
 
 void ConstantPropagator::Transform() {
-  if (FLAG_trace_constant_propagation) {
+  if (FLAG_trace_constant_propagation &&
+      FlowGraphPrinter::ShouldPrint(graph_->function())) {
     FlowGraphPrinter::PrintGraph("Before CP", graph_);
   }
 
@@ -1546,8 +1548,9 @@
        b.Advance()) {
     BlockEntryInstr* block = b.Current();
     if (!reachable_->Contains(block->preorder_number())) {
-      if (FLAG_trace_constant_propagation) {
-        OS::Print("Unreachable B%" Pd "\n", block->block_id());
+      if (FLAG_trace_constant_propagation &&
+          FlowGraphPrinter::ShouldPrint(graph_->function())) {
+        THR_Print("Unreachable B%" Pd "\n", block->block_id());
       }
       // Remove all uses in unreachable blocks.
       block->ClearAllInstructions();
@@ -1618,8 +1621,9 @@
           !defn->IsStoreIndexed() &&
           !defn->IsStoreInstanceField() &&
           !defn->IsStoreStaticField()) {
-        if (FLAG_trace_constant_propagation) {
-          OS::Print("Constant v%" Pd " = %s\n",
+        if (FLAG_trace_constant_propagation &&
+            FlowGraphPrinter::ShouldPrint(graph_->function())) {
+          THR_Print("Constant v%" Pd " = %s\n",
                     defn->ssa_temp_index(),
                     defn->constant_value().ToCString());
         }
@@ -1681,7 +1685,8 @@
   GrowableArray<BitVector*> dominance_frontier;
   graph_->ComputeDominators(&dominance_frontier);
 
-  if (FLAG_trace_constant_propagation) {
+  if (FLAG_trace_constant_propagation &&
+      FlowGraphPrinter::ShouldPrint(graph_->function())) {
     FlowGraphPrinter::PrintGraph("After CP", graph_);
   }
 }
diff --git a/runtime/vm/constants_arm64.h b/runtime/vm/constants_arm64.h
index 870bb34..e855b0f 100644
--- a/runtime/vm/constants_arm64.h
+++ b/runtime/vm/constants_arm64.h
@@ -996,7 +996,15 @@
         return R31IsSP;
       }
     }
-    // TODO(zra): Handle for logical immediate operations.
+    if (IsLogicalImmOp()) {
+      const int op = Bits(29, 2);
+      const bool set_flags = op == 3;
+      if (set_flags) {
+        return R31IsZR;
+      } else {
+        return R31IsSP;
+      }
+    }
     return R31IsZR;
   }
 
diff --git a/runtime/vm/constants_dbc.h b/runtime/vm/constants_dbc.h
index 657ffb0..5d901b8 100644
--- a/runtime/vm/constants_dbc.h
+++ b/runtime/vm/constants_dbc.h
@@ -83,6 +83,11 @@
 //
 //    Unreachable instruction.
 //
+//  - Nop D
+//
+//    This instruction does nothing. It may refer to an object in the constant
+//    pool that may be decoded by other instructions.
+//
 //  - Compile
 //
 //    Compile current function and start executing newly produced code
@@ -152,6 +157,16 @@
 //
 //    Invoke native function SP[-1] with argc_tag SP[0].
 //
+//  - OneByteStringFromCharCode rA, rX
+//
+//    Load the one-character symbol with the char code given by the Smi
+//    in FP[rX] into FP[rA].
+//
+//  - StringToCharCode rA, rX
+//
+//    Load and smi-encode the single char code of the string in FP[rX] into
+//    FP[rA]. If the string's length is not 1, load smi -1 instead.
+//
 //  - AddTOS; SubTOS; MulTOS; BitOrTOS; BitAndTOS; EqualTOS; LessThanTOS;
 //    GreaterThanTOS;
 //
@@ -160,7 +175,29 @@
 //    then pops operands and pushes result on the stack and skips the next
 //    instruction (which implements a slow path fallback).
 //
-//  - StoreStaticTOS D
+//  - Add, Sub, Mul, Div, Mod, Shl, Shr rA, rB, rC
+//
+//    Arithmetic operations on Smis. FP[rA] <- FP[rB] op FP[rC].
+//    If these instructions can trigger a deoptimization, the following
+//    instruction should be Deopt. If no deoptimization should be triggered,
+//    the immediately following instruction is skipped. These instructions
+//    expect their operands to be Smis, but don't check that they are.
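+//
+//    For example (illustrative only), a Smi addition that may deoptimize could
+//    be emitted as:
+//
+//        Add rA, rB, rC
+//        Deopt           ;; reached only when the addition must deoptimize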
+//
+//  - Neg rA, rD
+//
+//    FP[rA] <- -FP[rD]. Assumes FP[rD] is a Smi. If there is no overflow the
+//    immediately following instruction is skipped.
+//
+//  - BitOr, BitAnd, BitXor rA, rB, rC
+//
+//    FP[rA] <- FP[rB] op FP[rC]. These instructions expect their operands to be
+//    Smis, but don't check that they are.
+//
+//  - BitNot rA, rD
+//
+//    FP[rA] <- ~FP[rD]. As above, assumes FP[rD] is a Smi.
+//
+//  - StoreStaticTOS D
 //
 //    Stores TOS into the static field PP[D].
 //
@@ -186,6 +223,11 @@
 //        IfNeStrictTOS
 //        Jump T         ;; jump if not equal
 //
+//  - If<Cond>Null rA
+//
+//    Cond is Eq or Ne. Skips the next instruction unless the given condition
+//    holds.
+//
 //  - CreateArrayTOS
 //
 //    Allocate array of length SP[0] with type arguments SP[-1].
@@ -314,6 +356,12 @@
 //
 //    Instantiate type arguments PP[D] with instantiator SP[0].
 //
+//  - InstanceOf A
+//
+//    Test if instance SP[-3] with type arguments SP[-2] is (A = 0) or is not
+//    (A = 1) a subtype of SP[-1] using SubtypeTestCache SP[0], with result
+//    placed at top of stack.
+//
 //  - AssertAssignable D
 //
 //    Assert that SP[-3] is assignable to variable named SP[0] of type
@@ -323,6 +371,33 @@
 //
 //    Assert that TOS is a boolean (A = 1) or that TOS is not null (A = 0).
 //
+//  - TestSmi rA, rD
+//
+//    If FP[rA] & FP[rD] != 0, then skip the next instruction. FP[rA] and FP[rD]
+//    must be Smis.
+//
+//  - CheckSmi rA
+//
+//    If FP[rA] is a Smi, then skip the next instruction.
+//
+//  - CheckClassId rA, D
+//
+//    If the object at FP[rA]'s class id matches the class id D, then skip the
+//    following instruction.
+//
+//  - CheckDenseSwitch rA, D
+//
+//    Skips the next 3 instructions if the object at FP[rA] is a valid class for
+//    a dense switch with low cid encoded in the following Nop instruction, and
+//    the cid mask encoded in the Nop instruction after that, or if D == 1 and
+//    FP[rA] is a Smi. Skips 2 instructions otherwise.
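+//
+//    For example (illustrative only; Lmiss is a hypothetical jump target):
+//
+//        CheckDenseSwitch rA, 0
+//        Nop D           ;; low cid of the switch
+//        Nop D           ;; cid mask
+//        Jump Lmiss      ;; reached only when FP[rA]'s cid is not covered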
+//
+//  - CheckCids rA, rB, rC
+//
+//    Skips rC + 1 instructions if the object at FP[rA] is a Smi and
+//    rB == 1, or if FP[rA]'s cid is found in the array of cids encoded by the
+//    following rC Nop instructions. Otherwise skips only rC instructions.
+//
 //  - CheckStack
 //
 //    Compare SP against isolate stack limit and call StackOverflow handler if
@@ -359,7 +434,7 @@
 //    e.g. in bytecode sequences like
 //
 //    InstanceCall ... <- lazy deopt inside first call
-//    InstanceCall ... <- patches seconds call with Deopt
+//    InstanceCall ... <- patches second call with Deopt
 //
 // BYTECODE LIST FORMAT
 //
@@ -385,6 +460,7 @@
 //
 #define BYTECODES_LIST(V)                              \
   V(Trap,                            0, ___, ___, ___) \
+  V(Nop,                             D, lit, ___, ___) \
   V(Compile,                         0, ___, ___, ___) \
   V(HotCheck,                      A_D, num, num, ___) \
   V(Intrinsic,                       A, num, ___, ___) \
@@ -404,12 +480,14 @@
   V(StoreLocal,                      X, xeg, ___, ___) \
   V(PopLocal,                        X, xeg, ___, ___) \
   V(StaticCall,                    A_D, num, num, ___) \
-  V(InstanceCall1,                  A_D, num, num, ___) \
+  V(InstanceCall1,                 A_D, num, num, ___) \
   V(InstanceCall2,                 A_D, num, num, ___) \
   V(InstanceCall1Opt,              A_D, num, num, ___) \
   V(InstanceCall2Opt,              A_D, num, num, ___) \
   V(NativeCall,                      0, ___, ___, ___) \
   V(NativeBootstrapCall,             0, ___, ___, ___) \
+  V(OneByteStringFromCharCode,     A_X, reg, xeg, ___) \
+  V(StringToCharCode,              A_X, reg, xeg, ___) \
   V(AddTOS,                          0, ___, ___, ___) \
   V(SubTOS,                          0, ___, ___, ___) \
   V(MulTOS,                          0, ___, ___, ___) \
@@ -418,6 +496,18 @@
   V(EqualTOS,                        0, ___, ___, ___) \
   V(LessThanTOS,                     0, ___, ___, ___) \
   V(GreaterThanTOS,                  0, ___, ___, ___) \
+  V(Add,                         A_B_C, reg, reg, reg) \
+  V(Sub,                         A_B_C, reg, reg, reg) \
+  V(Mul,                         A_B_C, reg, reg, reg) \
+  V(Div,                         A_B_C, reg, reg, reg) \
+  V(Mod,                         A_B_C, reg, reg, reg) \
+  V(Shl,                         A_B_C, reg, reg, reg) \
+  V(Shr,                         A_B_C, reg, reg, reg) \
+  V(Neg,                           A_D, reg, reg, ___) \
+  V(BitOr,                       A_B_C, reg, reg, reg) \
+  V(BitAnd,                      A_B_C, reg, reg, reg) \
+  V(BitXor,                      A_B_C, reg, reg, reg) \
+  V(BitNot,                        A_D, reg, reg, ___) \
   V(StoreStaticTOS,                  D, lit, ___, ___) \
   V(PushStatic,                      D, lit, ___, ___) \
   V(InitStaticTOS,                   0, ___, ___, ___) \
@@ -429,6 +519,8 @@
   V(IfEqStrict,                    A_D, reg, reg, ___) \
   V(IfNeStrictNum,                 A_D, reg, reg, ___) \
   V(IfEqStrictNum,                 A_D, reg, reg, ___) \
+  V(IfEqNull,                        A, reg, ___, ___) \
+  V(IfNeNull,                        A, reg, ___, ___) \
   V(CreateArrayTOS,                  0, ___, ___, ___) \
   V(Allocate,                        D, lit, ___, ___) \
   V(AllocateT,                       0, ___, ___, ___) \
@@ -451,8 +543,14 @@
   V(MoveSpecial,                   A_D, reg, num, ___) \
   V(InstantiateType,                 D, lit, ___, ___) \
   V(InstantiateTypeArgumentsTOS,   A_D, num, lit, ___) \
+  V(InstanceOf,                      A, num, ___, ___) \
   V(AssertAssignable,                D, num, lit, ___) \
   V(AssertBoolean,                   A, num, ___, ___) \
+  V(TestSmi,                       A_D, reg, reg, ___) \
+  V(CheckSmi,                        A, reg, ___, ___) \
+  V(CheckClassId,                  A_D, reg, num, ___) \
+  V(CheckDenseSwitch,              A_D, reg, num, ___) \
+  V(CheckCids,                   A_B_C, reg, num, ___) \
   V(CheckStack,                      0, ___, ___, ___) \
   V(DebugStep,                       0, ___, ___, ___) \
   V(DebugBreak,                      A, num, ___, ___) \
@@ -570,7 +668,11 @@
 const FpuRegister FpuTMP = kFakeFpuRegister;
 const intptr_t kNumberOfFpuRegisters = 1;
 
-enum Condition { EQ, NE };
+// After a comparison, the condition NEXT_IS_TRUE means that the following
+// instruction is executed if the comparison is true and skipped over otherwise.
+// The condition NEXT_IS_FALSE means that the following instruction is executed
+// if the comparison is false and skipped over otherwise.
+enum Condition { NEXT_IS_TRUE, NEXT_IS_FALSE };
 
 }  // namespace dart
 
diff --git a/runtime/vm/cpuinfo_fuchsia.cc b/runtime/vm/cpuinfo_fuchsia.cc
new file mode 100644
index 0000000..04fe196
--- /dev/null
+++ b/runtime/vm/cpuinfo_fuchsia.cc
@@ -0,0 +1,48 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/cpuinfo.h"
+
+#include "platform/assert.h"
+
+// TODO(zra): Use "vm/cpuid.h"
+
+namespace dart {
+
+CpuInfoMethod CpuInfo::method_ = kCpuInfoDefault;
+const char* CpuInfo::fields_[kCpuInfoMax] = {0};
+
+void CpuInfo::InitOnce() {
+  UNIMPLEMENTED();
+}
+
+
+void CpuInfo::Cleanup() {
+  UNIMPLEMENTED();
+}
+
+
+bool CpuInfo::FieldContains(CpuInfoIndices idx, const char* search_string) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+const char* CpuInfo::ExtractField(CpuInfoIndices idx) {
+  UNIMPLEMENTED();
+  return "<undefined>";
+}
+
+
+bool CpuInfo::HasField(const char* field) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/dart.cc b/runtime/vm/dart.cc
index f4821e4..62c7226 100644
--- a/runtime/vm/dart.cc
+++ b/runtime/vm/dart.cc
@@ -4,6 +4,8 @@
 
 #include "vm/dart.h"
 
+#include "vm/become.h"
+#include "vm/clustered_snapshot.h"
 #include "vm/code_observers.h"
 #include "vm/cpu.h"
 #include "vm/dart_api_state.h"
@@ -146,6 +148,7 @@
   Isolate::InitOnce();
   PortMap::InitOnce();
   FreeListElement::InitOnce();
+  ForwardingCorpse::InitOnce();
   Api::InitOnce();
   NOT_IN_PRODUCT(CodeObservers::InitOnce());
   if (FLAG_profiler) {
@@ -308,6 +311,24 @@
 }
 
 
+// This waits until only the VM isolate and the service isolate remain in the
+// list, i.e. list length == 2.
+void Dart::WaitForApplicationIsolateShutdown() {
+  ASSERT(!Isolate::creation_enabled_);
+  MonitorLocker ml(Isolate::isolates_list_monitor_);
+  while ((Isolate::isolates_list_head_ != NULL) &&
+         (Isolate::isolates_list_head_->next_ != NULL) &&
+         (Isolate::isolates_list_head_->next_->next_ != NULL)) {
+    ml.Wait();
+  }
+  ASSERT(
+      ((Isolate::isolates_list_head_ == Dart::vm_isolate()) &&
+       ServiceIsolate::IsServiceIsolate(Isolate::isolates_list_head_->next_)) ||
+      ((Isolate::isolates_list_head_->next_ == Dart::vm_isolate()) &&
+       ServiceIsolate::IsServiceIsolate(Isolate::isolates_list_head_)));
+}
+
+
 // This waits until only the VM isolate remains in the list.
 void Dart::WaitForIsolateShutdown() {
   ASSERT(!Isolate::creation_enabled_);
@@ -369,6 +390,16 @@
     }
     Isolate::KillAllIsolates(Isolate::kInternalKillMsg);
 
+    // Wait for all isolates except the service and the VM isolate to shut down.
+    // Only do that if there is a service isolate running.
+    if (ServiceIsolate::IsRunning()) {
+      if (FLAG_trace_shutdown) {
+        OS::PrintErr("[+%" Pd64 "ms] SHUTDOWN: Shutting down app isolates\n",
+                     timestamp());
+      }
+      WaitForApplicationIsolateShutdown();
+    }
+
     // Shutdown the service isolate.
     if (FLAG_trace_shutdown) {
       OS::PrintErr("[+%" Pd64 "ms] SHUTDOWN: Shutting down service isolate\n",
@@ -376,7 +407,7 @@
     }
     ServiceIsolate::Shutdown();
 
-    // Wait for all application isolates and the service isolate to shutdown
+    // Wait for the remaining isolate (the service isolate) to shut down
     // before shutting down the thread pool.
     if (FLAG_trace_shutdown) {
       OS::PrintErr("[+%" Pd64 "ms] SHUTDOWN: Waiting for isolate shutdown\n",
@@ -643,6 +674,12 @@
 #elif defined(TARGET_ARCH_DBC64)
     buffer.AddString(" dbc64");
 #endif
+  } else if (Snapshot::IsFull(kind)) {
+#if defined(ARCH_IS_32BIT)
+    buffer.AddString(" 32");
+#else
+    buffer.AddString(" 64");
+#endif
   }
 
   return buffer.Steal();
@@ -653,7 +690,6 @@
   Isolate* isolate = Isolate::Current();
   void* callback_data = isolate->init_callback_data();
   Dart_IsolateShutdownCallback callback = Isolate::ShutdownCallback();
-  ServiceIsolate::SendIsolateShutdownMessage();
   if (callback != NULL) {
     (callback)(callback_data);
   }
diff --git a/runtime/vm/dart.h b/runtime/vm/dart.h
index ea40e43..10ffea3 100644
--- a/runtime/vm/dart.h
+++ b/runtime/vm/dart.h
@@ -117,6 +117,7 @@
 
  private:
   static void WaitForIsolateShutdown();
+  static void WaitForApplicationIsolateShutdown();
 
   static Isolate* vm_isolate_;
   static int64_t start_time_;
diff --git a/runtime/vm/dart_api_impl.cc b/runtime/vm/dart_api_impl.cc
index 95ae648..c9942da 100644
--- a/runtime/vm/dart_api_impl.cc
+++ b/runtime/vm/dart_api_impl.cc
@@ -9,6 +9,7 @@
 #include "platform/assert.h"
 #include "lib/stacktrace.h"
 #include "vm/class_finalizer.h"
+#include "vm/clustered_snapshot.h"
 #include "vm/compiler.h"
 #include "vm/dart.h"
 #include "vm/dart_api_impl.h"
@@ -42,6 +43,7 @@
 #include "vm/timeline.h"
 #include "vm/timer.h"
 #include "vm/unicode.h"
+#include "vm/uri.h"
 #include "vm/verifier.h"
 #include "vm/version.h"
 
@@ -480,6 +482,9 @@
   CHECK_API_SCOPE(T);
   HANDLESCOPE(T);
   CHECK_CALLBACK_STATE(T);
+  // Ensure we transition safepoint state to VM if we are not already in
+  // that state.
+  TransitionToVM transition(T);
 
   va_list args;
   va_start(args, format);
@@ -847,23 +852,20 @@
 
 DART_EXPORT Dart_Handle Dart_PropagateError(Dart_Handle handle) {
   Thread* thread = Thread::Current();
-  {
-    const Object& obj = Object::Handle(thread->zone(),
-        Api::UnwrapHandle(handle));
-    if (!obj.IsError()) {
-      return Api::NewError(
-          "%s expects argument 'handle' to be an error handle.  "
-          "Did you forget to check Dart_IsError first?",
-          CURRENT_FUNC);
-    }
+  TransitionNativeToVM transition(thread);
+  const Object& obj = Object::Handle(thread->zone(),
+                                     Api::UnwrapHandle(handle));
+  if (!obj.IsError()) {
+    return Api::NewError(
+        "%s expects argument 'handle' to be an error handle.  "
+        "Did you forget to check Dart_IsError first?",
+        CURRENT_FUNC);
   }
   if (thread->top_exit_frame_info() == 0) {
     // There are no dart frames on the stack so it would be illegal to
     // propagate an error here.
     return Api::NewError("No Dart frames on stack, cannot propagate error.");
   }
-
-  TransitionNativeToVM transition(thread);
   // Unwind all the API scopes till the exit frame before propagating.
   const Error* error;
   {
@@ -975,6 +977,7 @@
   Thread* thread = Thread::Current();
   Isolate* isolate = thread->isolate();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   ApiState* state = isolate->api_state();
   ASSERT(state != NULL);
   PersistentHandle* ref = PersistentHandle::Cast(object);
@@ -987,6 +990,7 @@
   Thread* thread = Thread::Current();
   Isolate* isolate = thread->isolate();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   ApiState* state = isolate->api_state();
   ASSERT(state != NULL);
   FinalizablePersistentHandle* weak_ref =
@@ -1061,6 +1065,7 @@
 DART_EXPORT void Dart_DeletePersistentHandle(Dart_PersistentHandle object) {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   ApiState* state = isolate->api_state();
   ASSERT(state != NULL);
   PersistentHandle* ref = PersistentHandle::Cast(object);
@@ -1076,6 +1081,7 @@
     Dart_WeakPersistentHandle object) {
   Isolate* isolate = reinterpret_cast<Isolate*>(current_isolate);
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   ASSERT(isolate == Isolate::Current());
   ApiState* state = isolate->api_state();
   ASSERT(state != NULL);
@@ -1091,8 +1097,10 @@
 DART_EXPORT Dart_Handle Dart_SetGcCallbacks(
     Dart_GcPrologueCallback prologue_callback,
     Dart_GcEpilogueCallback epilogue_callback) {
-  Isolate* isolate = Isolate::Current();
+  Thread* thread = Thread::Current();
+  Isolate* isolate = thread->isolate();
   CHECK_ISOLATE(isolate);
+  DARTSCOPE(thread);
   if (prologue_callback != NULL) {
     if (isolate->gc_prologue_callback() != NULL) {
       return Api::NewError(
@@ -1290,13 +1298,14 @@
     StackZone zone(T);
     HandleScope handle_scope(T);
     Dart::RunShutdownCallback();
+    // The Thread structure is disassociated from the isolate; we do the
+    // safepoint transition explicitly here instead of using the TransitionXXX
+    // scope objects, as the original transition happened outside this scope in
+    // Dart_EnterIsolate/Dart_CreateIsolate.
+    T->ExitSafepoint();
+    T->set_execution_state(Thread::kThreadInVM);
+    ServiceIsolate::SendIsolateShutdownMessage();
   }
-  // The Thread structure is disassociated from the isolate, we do the
-  // safepoint transition explicity here instead of using the TransitionXXX
-  // scope objects as the original transition happened outside this scope in
-  // Dart_EnterIsolate/Dart_CreateIsolate.
-  T->ExitSafepoint();
-  T->set_execution_state(Thread::kThreadInVM);
   Dart::ShutdownIsolate();
 }
 
@@ -1309,6 +1318,7 @@
 DART_EXPORT void* Dart_CurrentIsolateData() {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->init_callback_data();
 }
 
@@ -1318,6 +1328,7 @@
     FATAL1("%s expects argument 'isolate' to be non-null.",  CURRENT_FUNC);
   }
   // TODO(16615): Validate isolate parameter.
+  NoSafepointScope no_safepoint_scope;
   Isolate* iso = reinterpret_cast<Isolate*>(isolate);
   return iso->init_callback_data();
 }
@@ -1371,6 +1382,7 @@
 DART_EXPORT bool Dart_ShouldPauseOnStart() {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->should_pause_on_start();
 }
 
@@ -1378,6 +1390,7 @@
 DART_EXPORT void Dart_SetShouldPauseOnStart(bool should_pause) {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   if (isolate->is_runnable()) {
     FATAL1("%s expects the current isolate to not be runnable yet.",
            CURRENT_FUNC);
@@ -1389,6 +1402,7 @@
 DART_EXPORT bool Dart_IsPausedOnStart() {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->is_paused_on_start();
 }
 
@@ -1396,6 +1410,7 @@
 DART_EXPORT void Dart_SetPausedOnStart(bool paused) {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   if (isolate->message_handler()->is_paused_on_start() != paused) {
     isolate->message_handler()->PausedOnStart(paused);
   }
@@ -1405,6 +1420,7 @@
 DART_EXPORT bool Dart_ShouldPauseOnExit() {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->should_pause_on_exit();
 }
 
@@ -1412,6 +1428,7 @@
 DART_EXPORT void Dart_SetShouldPauseOnExit(bool should_pause) {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->set_should_pause_on_exit(should_pause);
 }
 
@@ -1419,6 +1436,7 @@
 DART_EXPORT bool Dart_IsPausedOnExit() {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->is_paused_on_exit();
 }
 
@@ -1426,6 +1444,7 @@
 DART_EXPORT void Dart_SetPausedOnExit(bool paused) {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   if (isolate->message_handler()->is_paused_on_exit() != paused) {
     isolate->message_handler()->PausedOnExit(paused);
   }
@@ -1557,6 +1576,7 @@
     FATAL1("%s expects argument 'isolate' to be non-null.",  CURRENT_FUNC);
   }
   // TODO(16615): Validate isolate parameter.
+  TransitionNativeToVM transition(Thread::Current());
   Isolate* iso = reinterpret_cast<Isolate*>(isolate);
   iso->SendInternalLibMessage(Isolate::kInterruptMsg, iso->pause_capability());
 }
@@ -1584,6 +1604,7 @@
     Dart_MessageNotifyCallback message_notify_callback) {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   isolate->set_message_notify_callback(message_notify_callback);
 }
 
@@ -1591,6 +1612,7 @@
 DART_EXPORT Dart_MessageNotifyCallback Dart_GetMessageNotifyCallback() {
   Isolate* isolate = Isolate::Current();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_notify_callback();
 }
 
@@ -1700,6 +1722,7 @@
 DART_EXPORT bool Dart_HasServiceMessages() {
   Isolate* isolate = Isolate::Current();
   ASSERT(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->HasOOBMessages();
 }
 
@@ -1707,6 +1730,7 @@
 DART_EXPORT bool Dart_HasLivePorts() {
   Isolate* isolate = Isolate::Current();
   ASSERT(isolate);
+  NoSafepointScope no_safepoint_scope;
   return isolate->message_handler()->HasLivePorts();
 }
 
@@ -1784,6 +1808,7 @@
   Thread* thread = Thread::Current();
   Isolate* isolate = thread->isolate();
   CHECK_ISOLATE(isolate);
+  NoSafepointScope no_safepoint_scope;
   ApiLocalScope* new_scope = thread->api_reusable_scope();
   if (new_scope == NULL) {
     new_scope = new ApiLocalScope(thread->api_top_scope(),
@@ -1802,6 +1827,7 @@
 DART_EXPORT void Dart_ExitScope() {
   Thread* T = Thread::Current();
   CHECK_API_SCOPE(T);
+  NoSafepointScope no_safepoint_scope;
   ApiLocalScope* scope = T->api_top_scope();
   ApiLocalScope* reusable_scope = T->api_reusable_scope();
   T->set_api_top_scope(scope->previous());  // Reset top scope to previous.
@@ -4577,20 +4603,16 @@
   if (Api::IsError(exception)) {
     ::Dart_PropagateError(exception);
   }
-
-  {
-    const Instance& excp = Api::UnwrapInstanceHandle(zone, exception);
-    if (excp.IsNull()) {
-      RETURN_TYPE_ERROR(zone, exception, Instance);
-    }
+  TransitionNativeToVM transition(thread);
+  const Instance& excp = Api::UnwrapInstanceHandle(zone, exception);
+  if (excp.IsNull()) {
+    RETURN_TYPE_ERROR(zone, exception, Instance);
   }
   if (thread->top_exit_frame_info() == 0) {
     // There are no dart frames on the stack so it would be illegal to
     // throw an exception here.
     return Api::NewError("No Dart frames on stack, cannot throw exception");
   }
-
-  TransitionNativeToVM transition(thread);
   // Unwind all the API scopes till the exit frame before throwing an
   // exception.
   const Instance* saved_exception;
@@ -4613,6 +4635,7 @@
   Isolate* isolate = thread->isolate();
   CHECK_ISOLATE(isolate);
   CHECK_CALLBACK_STATE(thread);
+  TransitionNativeToVM transition(thread);
   {
     const Instance& excp = Api::UnwrapInstanceHandle(zone, exception);
     if (excp.IsNull()) {
@@ -4628,8 +4651,6 @@
     // throw an exception here.
     return Api::NewError("No Dart frames on stack, cannot throw exception");
   }
-
-  TransitionNativeToVM transition(thread);
   // Unwind all the API scopes till the exit frame before throwing an
   // exception.
   const Instance* saved_exception;
@@ -5124,6 +5145,7 @@
   } else {
     // Slow path for Mints and Bigints.
     ASSERT_CALLBACK_STATE(arguments->thread());
+    TransitionNativeToVM transition(arguments->thread());
     Api::SetIntegerReturnValue(arguments, retval);
   }
 }
@@ -5133,6 +5155,7 @@
                                            double retval) {
   NativeArguments* arguments = reinterpret_cast<NativeArguments*>(args);
   ASSERT_CALLBACK_STATE(arguments->thread());
+  TransitionNativeToVM transition(arguments->thread());
   Api::SetDoubleReturnValue(arguments, retval);
 }
 
@@ -5148,6 +5171,30 @@
 }
 
 
+DART_EXPORT Dart_Handle Dart_DefaultCanonicalizeUrl(Dart_Handle base_url,
+                                                    Dart_Handle url) {
+  API_TIMELINE_DURATION;
+  DARTSCOPE(Thread::Current());
+  CHECK_CALLBACK_STATE(T);
+
+  const String& base_uri = Api::UnwrapStringHandle(Z, base_url);
+  if (base_uri.IsNull()) {
+    RETURN_TYPE_ERROR(Z, base_url, String);
+  }
+  const String& uri = Api::UnwrapStringHandle(Z, url);
+  if (uri.IsNull()) {
+    RETURN_TYPE_ERROR(Z, url, String);
+  }
+
+  const char* resolved_uri;
+  if (!ResolveUri(uri.ToCString(), base_uri.ToCString(), &resolved_uri)) {
+    return Api::NewError("%s: Unable to canonicalize uri '%s'.",
+                         CURRENT_FUNC, uri.ToCString());
+  }
+  return Api::NewHandle(T, String::New(resolved_uri));
+}
+
+
 // NOTE: Need to pass 'result' as a parameter here in order to avoid
 // warning: variable 'result' might be clobbered by 'longjmp' or 'vfork'
 // which shows up because of the use of setjmp.
@@ -5288,13 +5335,14 @@
 
 DART_EXPORT Dart_Handle Dart_SetRootLibrary(Dart_Handle library) {
   DARTSCOPE(Thread::Current());
-  const Library& lib = Api::UnwrapLibraryHandle(Z, library);
-  if (lib.IsNull()) {
-    RETURN_TYPE_ERROR(Z, library, Library);
+  const Object& obj = Object::Handle(Z, Api::UnwrapHandle(library));
+  if (obj.IsNull() || obj.IsLibrary()) {
+    Library& lib = Library::Handle(Z);
+    lib ^= obj.raw();
+    T->isolate()->object_store()->set_root_library(lib);
+    return library;
   }
-  Isolate* isolate = Isolate::Current();
-  isolate->object_store()->set_root_library(lib);
-  return library;
+  RETURN_TYPE_ERROR(Z, library, Library);
 }
 
 
diff --git a/runtime/vm/dart_api_impl_test.cc b/runtime/vm/dart_api_impl_test.cc
index fcc4e79..53d7755 100644
--- a/runtime/vm/dart_api_impl_test.cc
+++ b/runtime/vm/dart_api_impl_test.cc
@@ -1206,6 +1206,28 @@
 }
 
 
+// Helper class to ensure new gen GC is triggered without any side effects.
+// The normal call to CollectGarbage(Heap::kNew) could potentially trigger
+// an old gen collection if there is a promotion failure and this could
+// perturb the test.
+class GCTestHelper : public AllStatic {
+ public:
+  static void CollectNewSpace(Heap::ApiCallbacks api_callbacks) {
+    bool invoke_api_callbacks = (api_callbacks == Heap::kInvokeApiCallbacks);
+    Isolate::Current()->heap()->new_space()->Scavenge(invoke_api_callbacks);
+  }
+
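+  // Blocks until every queued BackgroundFinalizer task has completed, so the
+  // tests below can check finalizer side effects deterministically after a
+  // collection.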
+  static void WaitForFinalizationTasks() {
+    Thread* thread = Thread::Current();
+    Heap* heap = thread->isolate()->heap();
+    MonitorLocker ml(heap->finalization_tasks_lock());
+    while (heap->finalization_tasks() > 0) {
+      ml.WaitWithSafepointCheck(thread);
+    }
+  }
+};
+
+
 static void ExternalStringCallbackFinalizer(void* peer) {
   *static_cast<int*>(peer) *= 2;
 }
@@ -1242,9 +1264,11 @@
     EXPECT_EQ(40, peer8);
     EXPECT_EQ(41, peer16);
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT_EQ(40, peer8);
     EXPECT_EQ(41, peer16);
     Isolate::Current()->heap()->CollectGarbage(Heap::kNew);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT_EQ(80, peer8);
     EXPECT_EQ(82, peer16);
   }
@@ -2381,13 +2405,65 @@
     TransitionNativeToVM transition(thread);
     EXPECT(peer == 0);
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(peer == 0);
     Isolate::Current()->heap()->CollectGarbage(Heap::kNew);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(peer == 42);
   }
 }
 
 
+static Monitor* slow_finalizers_monitor = NULL;
+static intptr_t slow_finalizers_waiting = 0;
+
+
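+// A deliberately slow finalizer: each invocation blocks on the monitor until
+// ten invocations are waiting before any of them is allowed to finish.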
+static void SlowFinalizer(void* isolate_callback_data,
+                          Dart_WeakPersistentHandle handle,
+                          void* peer) {
+  {
+    MonitorLocker ml(slow_finalizers_monitor);
+    slow_finalizers_waiting++;
+    while (slow_finalizers_waiting < 10) {
+      ml.Wait();
+    }
+    ml.NotifyAll();
+  }
+
+  intptr_t* count = reinterpret_cast<intptr_t*>(peer);
+  AtomicOperations::IncrementBy(count, 1);
+}
+
+
+TEST_CASE(SlowFinalizer) {
+  slow_finalizers_monitor = new Monitor();
+
+  intptr_t count = 0;
+  for (intptr_t i = 0; i < 10; i++) {
+    Dart_EnterScope();
+    Dart_Handle str1 = Dart_NewStringFromCString("Live fast");
+    Dart_NewWeakPersistentHandle(str1, &count, 0, SlowFinalizer);
+    Dart_Handle str2 = Dart_NewStringFromCString("Die young");
+    Dart_NewWeakPersistentHandle(str2, &count, 0, SlowFinalizer);
+    Dart_ExitScope();
+
+    {
+      TransitionNativeToVM transition(thread);
+      Isolate::Current()->heap()->CollectAllGarbage();
+    }
+  }
+
+  {
+    TransitionNativeToVM transition(thread);
+    GCTestHelper::WaitForFinalizationTasks();
+  }
+
+  EXPECT_EQ(20, count);
+
+  delete slow_finalizers_monitor;
+}
+
+
 static void CheckFloat32x4Data(Dart_Handle obj) {
   void* raw_data = NULL;
   intptr_t len;
@@ -2439,6 +2515,7 @@
   {
     TransitionNativeToVM transition(thread);
     Isolate::Current()->heap()->CollectGarbage(Heap::kNew);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(peer == 42);
   }
 }
@@ -2601,19 +2678,6 @@
 }
 
 
-// Helper class to ensure new gen GC is triggered without any side effects.
-// The normal call to CollectGarbage(Heap::kNew) could potentially trigger
-// an old gen collection if there is a promotion failure and this could
-// perturb the test.
-class GCTestHelper : public AllStatic {
- public:
-  static void CollectNewSpace(Heap::ApiCallbacks api_callbacks) {
-    bool invoke_api_callbacks = (api_callbacks == Heap::kInvokeApiCallbacks);
-    Isolate::Current()->heap()->new_space()->Scavenge(invoke_api_callbacks);
-  }
-};
-
-
 static Dart_Handle AsHandle(Dart_PersistentHandle weak) {
   return Dart_HandleFromPersistent(weak);
 }
@@ -2728,6 +2792,7 @@
     TransitionNativeToVM transition(thread);
     // Garbage collect new space again.
     GCTestHelper::CollectNewSpace(Heap::kIgnoreApiCallbacks);
+    GCTestHelper::WaitForFinalizationTasks();
   }
 
   {
@@ -2743,6 +2808,7 @@
     TransitionNativeToVM transition(thread);
     // Garbage collect old space again.
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
+    GCTestHelper::WaitForFinalizationTasks();
   }
 
   {
@@ -2787,6 +2853,7 @@
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
     EXPECT(peer == 0);
     GCTestHelper::CollectNewSpace(Heap::kIgnoreApiCallbacks);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(peer == 42);
   }
 }
@@ -2813,6 +2880,7 @@
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
     EXPECT(peer == 0);
     GCTestHelper::CollectNewSpace(Heap::kIgnoreApiCallbacks);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(peer == 0);
   }
 }
@@ -2873,6 +2941,7 @@
     // Collect weakly referenced string, and promote strongly referenced string.
     GCTestHelper::CollectNewSpace(Heap::kIgnoreApiCallbacks);
     GCTestHelper::CollectNewSpace(Heap::kIgnoreApiCallbacks);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(heap->ExternalInWords(Heap::kNew) == 0);
     EXPECT(heap->ExternalInWords(Heap::kOld) == kWeak2ExternalSize / kWordSize);
   }
@@ -2883,6 +2952,7 @@
   {
     TransitionNativeToVM transition(thread);
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(heap->ExternalInWords(Heap::kOld) == 0);
   }
 }
@@ -2928,6 +2998,7 @@
   {
     TransitionNativeToVM transition(thread);
     Isolate::Current()->heap()->CollectGarbage(Heap::kOld);
+    GCTestHelper::WaitForFinalizationTasks();
     EXPECT(heap->ExternalInWords(Heap::kOld) == 0);
   }
 }
@@ -2964,8 +3035,6 @@
   // Expect small garbage to be collected.
   EXPECT_EQ(kHugeExternalSize,
             isolate->heap()->ExternalInWords(Heap::kOld) * kWordSize);
-  Dart_DeleteWeakPersistentHandle(reinterpret_cast<Dart_Isolate>(isolate),
-                                  weak);
   Dart_ExitScope();
 }
 
@@ -5914,6 +5983,11 @@
   lib_uri = Dart_LibraryUrl(root_lib);
   EXPECT_VALID(Dart_StringToCString(lib_uri, &uri_cstr));
   EXPECT_STREQ("dart:core", uri_cstr);  // Root library did change.
+
+  result = Dart_SetRootLibrary(Dart_Null());
+  EXPECT_VALID(result);
+  root_lib = Dart_RootLibrary();
+  EXPECT(Dart_IsNull(root_lib));  // Root library did change.
 }
 
 
@@ -8706,6 +8780,7 @@
   {
     TransitionNativeToVM transition(thread);
     Isolate::Current()->heap()->CollectAllGarbage();
+    GCTestHelper::WaitForFinalizationTasks();
   }
   EXPECT_EQ(80, peer8);
   EXPECT_EQ(82, peer16);
diff --git a/runtime/vm/dart_api_state.cc b/runtime/vm/dart_api_state.cc
new file mode 100644
index 0000000..be30a2d
--- /dev/null
+++ b/runtime/vm/dart_api_state.cc
@@ -0,0 +1,56 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/dart_api_state.h"
+
+#include "platform/assert.h"
+#include "platform/utils.h"
+#include "vm/heap.h"
+#include "vm/isolate.h"
+#include "vm/lockers.h"
+#include "vm/thread.h"
+#include "vm/timeline.h"
+
+namespace dart {
+
+BackgroundFinalizer::BackgroundFinalizer(Isolate* isolate,
+                                         FinalizationQueue* queue) :
+    isolate_(isolate),
+    queue_(queue) {
+  ASSERT(FLAG_background_finalization);
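+  // Record that another finalization task is pending so that waiters on the
+  // heap's finalization task count can block until it has run.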
+  MonitorLocker ml(isolate->heap()->finalization_tasks_lock());
+  isolate->heap()->set_finalization_tasks(
+      isolate->heap()->finalization_tasks() + 1);
+  ml.Notify();
+}
+
+
+void BackgroundFinalizer::Run() {
+  bool result = Thread::EnterIsolateAsHelper(isolate_,
+                                             Thread::kFinalizerTask);
+  ASSERT(result);
+
+  {
+    Thread* thread = Thread::Current();
+    TIMELINE_FUNCTION_GC_DURATION(thread, "BackgroundFinalization");
+    TransitionVMToNative transition(thread);
+    for (intptr_t i = 0; i < queue_->length(); i++) {
+      FinalizablePersistentHandle* handle = (*queue_)[i];
+      FinalizablePersistentHandle::Finalize(isolate_, handle);
+    }
+    delete queue_;
+  }
+
+  // Exit the isolate cleanly *before* notifying it, to avoid a shutdown race.
+  Thread::ExitIsolateAsHelper();
+
+  {
+    Heap* heap = isolate_->heap();
+    MonitorLocker ml(heap->finalization_tasks_lock());
+    heap->set_finalization_tasks(heap->finalization_tasks() - 1);
+    ml.Notify();
+  }
+}
+
+}  // namespace dart
diff --git a/runtime/vm/dart_api_state.h b/runtime/vm/dart_api_state.h
index b510377..f687fb2 100644
--- a/runtime/vm/dart_api_state.h
+++ b/runtime/vm/dart_api_state.h
@@ -15,8 +15,9 @@
 #include "vm/handles.h"
 #include "vm/object.h"
 #include "vm/os.h"
-#include "vm/raw_object.h"
 #include "vm/os_thread.h"
+#include "vm/raw_object.h"
+#include "vm/thread_pool.h"
 #include "vm/visitor.h"
 #include "vm/weak_table.h"
 
@@ -24,6 +25,25 @@
 
 namespace dart {
 
+class FinalizablePersistentHandle;
+typedef MallocGrowableArray<FinalizablePersistentHandle*> FinalizationQueue;
+
+
+class BackgroundFinalizer : public ThreadPool::Task {
+ public:
+  BackgroundFinalizer(Isolate* isolate, FinalizationQueue* queue);
+  virtual ~BackgroundFinalizer() { }
+
+  void Run();
+
+ private:
+  Isolate* isolate_;
+  FinalizationQueue* queue_;
+
+  DISALLOW_IMPLICIT_CONSTRUCTORS(BackgroundFinalizer);
+};
+
+
 // Implementation of Zone support for very fast allocation of small chunks
 // of memory. The chunks cannot be deallocated individually, but instead
 // zones support deallocating all chunks in one fast operation when the
@@ -214,12 +234,12 @@
   }
 
   intptr_t external_size() const {
-    return ExternalSizeBits::decode(external_data_);
+    return ExternalSizeInWordsBits::decode(external_data_) * kWordSize;
   }
 
   void SetExternalSize(intptr_t size, Isolate* isolate) {
     ASSERT(size >= 0);
-    set_external_size(Utils::RoundUp(size, kObjectAlignment));
+    set_external_size(size);
     if (SpaceForExternal() == Heap::kNew) {
       SetExternalNewSpaceBit();
     }
@@ -227,9 +247,18 @@
   }
 
   // Called when the referent becomes unreachable.
-  void UpdateUnreachable(Isolate* isolate) {
+  void UpdateUnreachable(Isolate* isolate, FinalizationQueue* queue) {
+    if (is_queued_for_finalization()) {
+      return;
+    }
     EnsureFreeExternal(isolate);
-    Finalize(isolate, this);
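+    // Without a queue the finalizer runs immediately; with a queue the handle
+    // is cleared and handed off (via the queue) to a background finalization
+    // task instead.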
+    if (queue == NULL) {
+      Finalize(isolate, this);
+    } else {
+      MarkForFinalization();
+      queue->Add(this);
+      set_is_queued_for_finalization(true);
+    }
   }
 
   // Called when the referent has moved, potentially between generations.
@@ -252,20 +281,22 @@
  private:
   enum {
     kExternalNewSpaceBit = 0,
-    kExternalSizeBits = 1,
-    kExternalSizeBitsSize = (kBitsPerWord - 1),
+    kQueuedForFinalizationBit = 1,
+    kExternalSizeBits = 2,
+    kExternalSizeBitsSize = (kBitsPerWord - 2),
   };
 
   // This part of external_data_ is the number of externally allocated bytes.
-  // TODO(koda): Measure size in words instead.
-  class ExternalSizeBits : public BitField<uword,
-                                           intptr_t,
-                                           kExternalSizeBits,
-                                           kExternalSizeBitsSize> {};
+  class ExternalSizeInWordsBits : public BitField<uword,
+                                                  intptr_t,
+                                                  kExternalSizeBits,
+                                                  kExternalSizeBitsSize> {};
   // This bit of external_data_ is true if the referent was created in new
   // space and UpdateRelocated has not yet detected any promotion.
   class ExternalNewSpaceBit :
       public BitField<uword, bool, kExternalNewSpaceBit, 1> {};
+  class QueuedForFinalizationBit :
+      public BitField<uword, bool, kQueuedForFinalizationBit, 1> {};
 
   friend class FinalizablePersistentHandles;
 
@@ -299,6 +330,11 @@
     callback_ = NULL;
   }
 
+  void MarkForFinalization() {
+    raw_ = Object::null();
+    ASSERT(callback_ != NULL);
+  }
+
   void set_raw(RawObject* raw) { raw_ = raw; }
   void set_raw(const LocalHandle& ref) { raw_ = ref.raw(); }
   void set_raw(const Object& object) { raw_ = object.raw(); }
@@ -310,8 +346,17 @@
   }
 
   void set_external_size(intptr_t size) {
-    ASSERT(ExternalSizeBits::is_valid(size));
-    external_data_ = ExternalSizeBits::update(size, external_data_);
+    intptr_t size_in_words = Utils::RoundUp(size, kObjectAlignment) / kWordSize;
+    ASSERT(ExternalSizeInWordsBits::is_valid(size_in_words));
+    external_data_ = ExternalSizeInWordsBits::update(size_in_words,
+                                                     external_data_);
+  }
+
+  bool is_queued_for_finalization() const {
+    return QueuedForFinalizationBit::decode(external_data_);
+  }
+  void set_is_queued_for_finalization(bool value) {
+    external_data_ = QueuedForFinalizationBit::update(value, external_data_);
   }
 
   bool IsSetNewSpaceBit() const {
@@ -333,10 +378,13 @@
            Heap::kNew : Heap::kOld;
   }
 
+  friend class BackgroundFinalizer;
+
   RawObject* raw_;
   void* peer_;
   uword external_data_;
   Dart_WeakPersistentHandleFinalizer callback_;
+
   DISALLOW_ALLOCATION();  // Allocated through AllocateHandle methods.
   DISALLOW_COPY_AND_ASSIGN(FinalizablePersistentHandle);
 };
@@ -501,9 +549,11 @@
       : Handles<kFinalizablePersistentHandleSizeInWords,
                 kFinalizablePersistentHandlesPerChunk,
                 kOffsetOfRawPtrInFinalizablePersistentHandle>(),
-        free_list_(NULL) { }
+      free_list_(NULL), mutex_(new Mutex()) { }
   ~FinalizablePersistentHandles() {
     free_list_ = NULL;
+    delete mutex_;
+    mutex_ = NULL;
   }
 
   // Accessors.
@@ -530,25 +580,31 @@
   // by calling FreeHandle.
   FinalizablePersistentHandle* AllocateHandle() {
     FinalizablePersistentHandle* handle;
-    if (free_list_ != NULL) {
-      handle = free_list_;
-      free_list_ = handle->Next();
-      handle->set_raw(Object::null());
-    } else {
-      handle = reinterpret_cast<FinalizablePersistentHandle*>(
-          AllocateScopedHandle());
-      handle->Clear();
+    {
+      MutexLocker ml(mutex_);
+      if (free_list_ != NULL) {
+        handle = free_list_;
+        free_list_ = handle->Next();
+        handle->set_raw(Object::null());
+        return handle;
+      }
     }
+
+    handle = reinterpret_cast<FinalizablePersistentHandle*>(
+          AllocateScopedHandle());
+    handle->Clear();
     return handle;
   }
 
   void FreeHandle(FinalizablePersistentHandle* handle) {
+    MutexLocker ml(mutex_);
     handle->FreeHandle(free_list());
     set_free_list(handle);
   }
 
   // Validate if passed in handle is a Persistent Handle.
   bool IsValidHandle(Dart_WeakPersistentHandle object) const {
+    MutexLocker ml(mutex_);
     return IsValidScopedHandle(reinterpret_cast<uword>(object));
   }
 
@@ -559,6 +615,7 @@
 
  private:
   FinalizablePersistentHandle* free_list_;
+  Mutex* mutex_;
   DISALLOW_COPY_AND_ASSIGN(FinalizablePersistentHandles);
 };
 
@@ -761,6 +818,7 @@
   ref->set_raw(object);
   ref->set_peer(peer);
   ref->set_callback(callback);
+  ref->set_is_queued_for_finalization(false);
   // This may trigger GC, so it must be called last.
   ref->SetExternalSize(external_size, isolate);
   return ref;
diff --git a/runtime/vm/dart_entry.h b/runtime/vm/dart_entry.h
index 8c530ea..e52961b 100644
--- a/runtime/vm/dart_entry.h
+++ b/runtime/vm/dart_entry.h
@@ -106,6 +106,8 @@
 
   friend class SnapshotReader;
   friend class SnapshotWriter;
+  friend class Serializer;
+  friend class Deserializer;
   friend class Simulator;
   DISALLOW_COPY_AND_ASSIGN(ArgumentsDescriptor);
 };
diff --git a/runtime/vm/deferred_objects.h b/runtime/vm/deferred_objects.h
index 35556f8..de2262c 100644
--- a/runtime/vm/deferred_objects.h
+++ b/runtime/vm/deferred_objects.h
@@ -231,20 +231,28 @@
   // a graph which can contain cycles.
   void Create();
 
+  RawObject* GetArg(intptr_t index) const {
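+    // On DBC the stack grows upwards (unlike on the other architectures), so
+    // the saved arguments are laid out in the opposite direction and are read
+    // back with a negated index.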
+#if !defined(TARGET_ARCH_DBC)
+    return args_[index];
+#else
+    return args_[-index];
+#endif
+  }
+
   RawObject* GetClass() const {
-    return args_[kClassIndex];
+    return GetArg(kClassIndex);
   }
 
   RawObject* GetLength() const {
-    return args_[kLengthIndex];
+    return GetArg(kLengthIndex);
   }
 
   RawObject* GetFieldOffset(intptr_t index) const {
-    return args_[kFieldsStartIndex + kFieldEntrySize * index + kOffsetIndex];
+    return GetArg(kFieldsStartIndex + kFieldEntrySize * index + kOffsetIndex);
   }
 
   RawObject* GetValue(intptr_t index) const {
-    return args_[kFieldsStartIndex + kFieldEntrySize * index + kValueIndex];
+    return GetArg(kFieldsStartIndex + kFieldEntrySize * index + kValueIndex);
   }
 
   // Amount of fields that have to be initialized.
diff --git a/runtime/vm/deopt_instructions.cc b/runtime/vm/deopt_instructions.cc
index 1839f7f..f31a897 100644
--- a/runtime/vm/deopt_instructions.cc
+++ b/runtime/vm/deopt_instructions.cc
@@ -67,13 +67,17 @@
   // return-address. This section is copied as well, so that its contained
   // values can be updated before returning to the deoptimized function.
   // Note: on DBC stack grows upwards unlike on all other architectures.
+#if defined(TARGET_ARCH_DBC)
+  ASSERT(frame->sp() >= frame->fp());
+  const intptr_t frame_size = (frame->sp() - frame->fp()) / kWordSize;
+#else
+  ASSERT(frame->fp() >= frame->sp());
+  const intptr_t frame_size = (frame->fp() - frame->sp()) / kWordSize;
+#endif
+
   source_frame_size_ =
       + kDartFrameFixedSize  // For saved values below sp.
-#if !defined(TARGET_ARCH_DBC)
-      + ((frame->fp() - frame->sp()) / kWordSize)  // For frame size incl. sp.
-#else
-      + ((frame->sp() - frame->fp()) / kWordSize)  // For frame size incl. sp.
-#endif  // !defined(TARGET_ARCH_DBC)
+      + frame_size  // For frame size incl. sp.
       + 1  // For fp.
       + kParamEndSlotFromFp  // For saved values above fp.
       + num_args_;  // For arguments.
@@ -688,7 +692,6 @@
       return;
     }
 
-#if !defined(TARGET_ARCH_DBC)
     // We don't always have the Code object for the frame's corresponding
     // unoptimized code as it may have been collected. Use a stub as the pc
     // marker until we can recreate that Code object during deferred
@@ -696,7 +699,6 @@
     // a pc marker.
     *reinterpret_cast<RawObject**>(dest_addr) =
         StubCode::FrameAwaitingMaterialization_entry()->code();
-#endif
     deopt_context->DeferPcMarkerMaterialization(object_table_index_, dest_addr);
   }
 
diff --git a/runtime/vm/deopt_instructions.h b/runtime/vm/deopt_instructions.h
index 5dfab38..79a2c50 100644
--- a/runtime/vm/deopt_instructions.h
+++ b/runtime/vm/deopt_instructions.h
@@ -115,7 +115,7 @@
 #else
     // First argument is the lowest slot because stack is growing upwards.
     return reinterpret_cast<intptr_t*>(
-      frame->fp() - (kDartFrameFixedSize + num_args_) * kWordSize);
+        frame->fp() - (kDartFrameFixedSize + num_args_) * kWordSize);
 #endif  // !defined(TARGET_ARCH_DBC)
   }
 
diff --git a/runtime/vm/disassembler.cc b/runtime/vm/disassembler.cc
index 9f3d4a3..eef90c3 100644
--- a/runtime/vm/disassembler.cc
+++ b/runtime/vm/disassembler.cc
@@ -184,12 +184,11 @@
 }
 
 
-void Disassembler::DisassembleCode(const Function& function, bool optimized) {
-  const char* function_fullname = function.ToFullyQualifiedCString();
+void Disassembler::DisassembleCodeHelper(
+    const char* function_fullname, const Code& code, bool optimized) {
   THR_Print("Code for %sfunction '%s' {\n",
             optimized ? "optimized " : "",
             function_fullname);
-  const Code& code = Code::Handle(function.CurrentCode());
   code.Disassemble();
   THR_Print("}\n");
 
@@ -328,6 +327,22 @@
   }
 }
 
+
+void Disassembler::DisassembleCode(const Function& function, bool optimized) {
+  const char* function_fullname = function.ToFullyQualifiedCString();
+  const Code& code = Code::Handle(function.CurrentCode());
+  DisassembleCodeHelper(function_fullname, code, optimized);
+}
+
+
+void Disassembler::DisassembleCodeUnoptimized(
+    const Function& function, bool optimized) {
+  const char* function_fullname = function.ToFullyQualifiedCString();
+  const Code& code = Code::Handle(function.unoptimized_code());
+  DisassembleCodeHelper(function_fullname, code, optimized);
+}
+
+
 #endif  // !PRODUCT
 
 }  // namespace dart
diff --git a/runtime/vm/disassembler.h b/runtime/vm/disassembler.h
index c7a1eec..fc3649a 100644
--- a/runtime/vm/disassembler.h
+++ b/runtime/vm/disassembler.h
@@ -137,8 +137,13 @@
   static bool CanFindOldObject(uword addr);
 
   static void DisassembleCode(const Function& function, bool optimized);
+  static void DisassembleCodeUnoptimized(
+      const Function& function, bool optimized);
 
  private:
+  static void DisassembleCodeHelper(
+      const char* function_fullname, const Code& code, bool optimized);
+
   static const int kHexadecimalBufferSize = 32;
   static const int kUserReadableBufferSize = 256;
 };
diff --git a/runtime/vm/flag_list.h b/runtime/vm/flag_list.h
index 7b5f135..c22e43c 100644
--- a/runtime/vm/flag_list.h
+++ b/runtime/vm/flag_list.h
@@ -37,10 +37,12 @@
 #define FLAG_LIST(P, R, D, C)                                                  \
 P(always_megamorphic_calls, bool, false,                                       \
   "Instance call always as megamorphic.")                                      \
-P(background_compilation, bool, false,                                         \
+P(background_compilation, bool, USING_MULTICORE,                               \
   "Run optimizing compilation in background")                                  \
 R(background_compilation_stop_alot, false, bool, false,                        \
   "Stress test system: stop background compiler often.")                       \
+P(background_finalization, bool, USING_MULTICORE,                              \
+  "Run weak handle finalizers in background")                                  \
 R(break_at_isolate_spawn, false, bool, false,                                  \
   "Insert a one-time breakpoint at the entrypoint for all spawned isolates")   \
 C(collect_code, false, true, bool, true,                                       \
@@ -130,12 +132,6 @@
   "Precompilation compiler mode")                                              \
 C(precompiled_runtime, true, false, bool, false,                               \
   "Precompiled runtime mode")                                                  \
-R(pretenure_all, false, bool, false,                                           \
-  "Global pretenuring (for testing).")                                         \
-P(pretenure_interval, int, 10,                                                 \
-  "Back off pretenuring after this many cycles.")                              \
-P(pretenure_threshold, int, 98,                                                \
-  "Trigger pretenuring when this many percent are promoted.")                  \
 R(print_ssa_liveness, false, bool, false,                                      \
   "Print liveness for ssa variables.")                                         \
 R(print_ssa_liveranges, false, bool, false,                                    \
@@ -156,6 +152,8 @@
   "Support the disassembler.")                                                 \
 R(support_il_printer, false, bool, true,                                       \
   "Support the IL printer.")                                                   \
+R(support_reload, false, bool, true,                                           \
+  "Support isolate reload.")                                                   \
 R(support_service, false, bool, true,                                          \
   "Support the service protocol.")                                             \
 R(support_timeline, false, bool, true,                                         \
diff --git a/runtime/vm/flow_graph.cc b/runtime/vm/flow_graph.cc
index 0c8dbc0..4ab5652 100644
--- a/runtime/vm/flow_graph.cc
+++ b/runtime/vm/flow_graph.cc
@@ -120,7 +120,7 @@
 
 
 ConstantInstr* FlowGraph::GetConstant(const Object& object) {
-  ConstantInstr* constant = constant_instr_pool_.Lookup(object);
+  ConstantInstr* constant = constant_instr_pool_.LookupValue(object);
   if (constant == NULL) {
     // Otherwise, allocate and add it to the pool.
     constant = new(zone()) ConstantInstr(
diff --git a/runtime/vm/flow_graph_allocator.cc b/runtime/vm/flow_graph_allocator.cc
index d4fd498..009e500 100644
--- a/runtime/vm/flow_graph_allocator.cc
+++ b/runtime/vm/flow_graph_allocator.cc
@@ -156,8 +156,9 @@
       // TODO(vegorov) remove this once we have ported all necessary
       // instructions to DBC.
       if (!current->HasLocs()) {
-        graph_entry_->parsed_function().Bailout("SSALivenessAnalysis",
-                                                current->ToCString());
+        const char* msg = "SSALivenessAnalysis::ComputeInitialSets";
+        NOT_IN_PRODUCT(msg = current->ToCString());
+        graph_entry_->parsed_function().Bailout("SSALivenessAnalysis", msg);
       }
 #endif
 
@@ -440,6 +441,10 @@
                                        intptr_t to) {
   if (loc.IsRegister()) {
     BlockRegisterLocation(loc, from, to, blocked_cpu_registers_, cpu_regs_);
+#if defined(TARGET_ARCH_DBC)
+    last_used_register_ = Utils::Maximum(last_used_register_,
+                                         loc.register_code());
+#endif
   } else if (loc.IsFpuRegister()) {
     BlockRegisterLocation(loc, from, to, blocked_fpu_registers_, fpu_regs_);
   } else {
@@ -2741,10 +2746,6 @@
       registers_[reg]->Add(range);
     }
   }
-
-#if defined(TARGET_ARCH_DBC)
-  last_used_register_ = -1;
-#endif
 }
 
 
@@ -3009,6 +3010,10 @@
 
   DiscoverLoops();
 
+#if defined(TARGET_ARCH_DBC)
+  last_used_register_ = -1;
+#endif
+
   BuildLiveRanges();
 
   if (FLAG_print_ssa_liveranges) {
@@ -3037,6 +3042,7 @@
   AllocateUnallocatedRanges();
 #if defined(TARGET_ARCH_DBC)
   const intptr_t last_used_cpu_register = last_used_register_;
+  last_used_register_ = -1;
 #endif
 
   cpu_spill_slot_count_ = spill_slots_.length();
diff --git a/runtime/vm/flow_graph_builder.cc b/runtime/vm/flow_graph_builder.cc
index bd07667..aeea819 100644
--- a/runtime/vm/flow_graph_builder.cc
+++ b/runtime/vm/flow_graph_builder.cc
@@ -2411,6 +2411,7 @@
   // compiler.  If it was not, set it here.
   if (function.context_scope() == ContextScope::null()) {
     ASSERT(!is_implicit);
+    ASSERT(node->scope() != NULL);
     const ContextScope& context_scope = ContextScope::ZoneHandle(
         Z, node->scope()->PreserveOuterScope(owner()->context_level()));
     ASSERT(!function.HasCode());
@@ -2611,40 +2612,27 @@
 }
 
 
-static intptr_t GetResultCidOfNativeFactory(const Function& function) {
+static bool IsNativeListFactory(const Function& function) {
   switch (function.recognized_kind()) {
     case MethodRecognizer::kTypedData_Int8Array_factory:
-      return kTypedDataInt8ArrayCid;
     case MethodRecognizer::kTypedData_Uint8Array_factory:
-      return kTypedDataUint8ArrayCid;
     case MethodRecognizer::kTypedData_Uint8ClampedArray_factory:
-      return kTypedDataUint8ClampedArrayCid;
     case MethodRecognizer::kTypedData_Int16Array_factory:
-      return kTypedDataInt16ArrayCid;
     case MethodRecognizer::kTypedData_Uint16Array_factory:
-      return kTypedDataUint16ArrayCid;
     case MethodRecognizer::kTypedData_Int32Array_factory:
-      return kTypedDataInt32ArrayCid;
     case MethodRecognizer::kTypedData_Uint32Array_factory:
-      return kTypedDataUint32ArrayCid;
     case MethodRecognizer::kTypedData_Int64Array_factory:
-      return kTypedDataInt64ArrayCid;
     case MethodRecognizer::kTypedData_Uint64Array_factory:
-      return kTypedDataUint64ArrayCid;
     case MethodRecognizer::kTypedData_Float32Array_factory:
-      return kTypedDataFloat32ArrayCid;
     case MethodRecognizer::kTypedData_Float64Array_factory:
-      return kTypedDataFloat64ArrayCid;
     case MethodRecognizer::kTypedData_Float32x4Array_factory:
-      return kTypedDataFloat32x4ArrayCid;
     case MethodRecognizer::kTypedData_Int32x4Array_factory:
-      return kTypedDataInt32x4ArrayCid;
     case MethodRecognizer::kTypedData_Float64x2Array_factory:
-      return kTypedDataFloat64x2ArrayCid;
+      return true;
     default:
       break;
   }
-  return kDynamicCid;
+  return false;
 }
 
 
@@ -2660,12 +2648,11 @@
                              node->arguments()->names(),
                              arguments,
                              owner()->ic_data_array());
-  if (node->function().is_native()) {
-    const intptr_t result_cid = GetResultCidOfNativeFactory(node->function());
-    if (result_cid != kDynamicCid) {
-      call->set_result_cid(result_cid);
-      call->set_is_native_list_factory(true);
-    }
+  if (node->function().is_native() && IsNativeListFactory(node->function())) {
+    call->set_is_native_list_factory(true);
+  }
+  if (node->function().recognized_kind() != MethodRecognizer::kUnknown) {
+    call->set_result_cid(MethodRecognizer::ResultCid(node->function()));
   }
   ReturnDefinition(call);
 }
@@ -2819,6 +2806,9 @@
       // (0) type-arguments, (1) length.
       ASSERT(!LoadFieldInstr::IsFixedLengthArrayCid(result_cid) ||
              arguments->length() == 2);
+    } else if (node->constructor().recognized_kind() !=
+               MethodRecognizer::kUnknown) {
+      call->set_result_cid(MethodRecognizer::ResultCid(node->constructor()));
     }
     ReturnDefinition(call);
     return;
@@ -3670,7 +3660,7 @@
                                      kEmitStoreBarrier,
                                      token_pos);
   // Maybe initializing unboxed store.
-  store->set_is_potential_unboxed_initialization(true);
+  store->set_is_initialization(node->is_initializer());
   ReturnDefinition(store);
 }
 
diff --git a/runtime/vm/flow_graph_compiler.cc b/runtime/vm/flow_graph_compiler.cc
index b403bbf..13159ba 100644
--- a/runtime/vm/flow_graph_compiler.cc
+++ b/runtime/vm/flow_graph_compiler.cc
@@ -987,6 +987,25 @@
 }
 
 
+#if defined(TARGET_ARCH_DBC)
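+// On DBC deoptimization points are emitted eagerly into the instruction
+// stream rather than through out-of-line deoptimization stubs.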
+void FlowGraphCompiler::EmitDeopt(intptr_t deopt_id,
+                                  ICData::DeoptReasonId reason,
+                                  uint32_t flags) {
+  ASSERT(is_optimizing());
+  ASSERT(!intrinsic_mode());
+  CompilerDeoptInfo* info =
+      new(zone()) CompilerDeoptInfo(deopt_id,
+                                    reason,
+                                    flags,
+                                    pending_deoptimization_env_);
+
+  deopt_infos_.Add(info);
+  assembler()->Deopt(0, /*is_eager =*/ 1);
+  info->set_pc_offset(assembler()->CodeSize());
+}
+#endif  // defined(TARGET_ARCH_DBC)
+
+
 void FlowGraphCompiler::FinalizeExceptionHandlers(const Code& code) {
   ASSERT(exception_handlers_list_ != NULL);
   const ExceptionHandlers& handlers = ExceptionHandlers::Handle(
@@ -1717,17 +1736,19 @@
     ASSERT(res->deopt_id() == deopt_id);
     ASSERT(res->target_name() == target_name.raw());
     ASSERT(res->NumArgsTested() == num_args_tested);
+    ASSERT(!res->is_static_call());
     return res;
   }
   const ICData& ic_data = ICData::ZoneHandle(zone(), ICData::New(
       parsed_function().function(), target_name,
-      arguments_descriptor, deopt_id, num_args_tested));
+      arguments_descriptor, deopt_id, num_args_tested, false));
 #if defined(TAG_IC_DATA)
   ic_data.set_tag(Instruction::kInstanceCall);
 #endif
   if (deopt_id_to_ic_data_ != NULL) {
     (*deopt_id_to_ic_data_)[deopt_id] = &ic_data;
   }
+  ASSERT(!ic_data.is_static_call());
   return &ic_data;
 }
 
@@ -1743,11 +1764,12 @@
     ASSERT(res->deopt_id() == deopt_id);
     ASSERT(res->target_name() == target.name());
     ASSERT(res->NumArgsTested() == num_args_tested);
+    ASSERT(res->is_static_call());
     return res;
   }
   const ICData& ic_data = ICData::ZoneHandle(zone(), ICData::New(
       parsed_function().function(), String::Handle(zone(), target.name()),
-      arguments_descriptor, deopt_id, num_args_tested));
+      arguments_descriptor, deopt_id, num_args_tested, true));
   ic_data.AddTarget(target);
 #if defined(TAG_IC_DATA)
   ic_data.set_tag(Instruction::kStaticCall);
diff --git a/runtime/vm/flow_graph_compiler.h b/runtime/vm/flow_graph_compiler.h
index a94eaa6..0a30dc9 100644
--- a/runtime/vm/flow_graph_compiler.h
+++ b/runtime/vm/flow_graph_compiler.h
@@ -528,6 +528,15 @@
                       ICData::DeoptReasonId reason,
                       uint32_t flags = 0);
 
+#if defined(TARGET_ARCH_DBC)
+  void EmitDeopt(intptr_t deopt_id,
+                 ICData::DeoptReasonId reason,
+                 uint32_t flags = 0);
+
+  // If the cid does not fit in 16 bits, then this will cause a bailout.
+  uint16_t ToEmbeddableCid(intptr_t cid, Instruction* instruction);
+#endif  // defined(TARGET_ARCH_DBC)
+
   void AddDeoptIndexAtCall(intptr_t deopt_id, TokenPosition token_pos);
 
   void AddSlowPathCode(SlowPathCode* slow_path);
diff --git a/runtime/vm/flow_graph_compiler_arm.cc b/runtime/vm/flow_graph_compiler_arm.cc
index 269e668..4162943 100644
--- a/runtime/vm/flow_graph_compiler_arm.cc
+++ b/runtime/vm/flow_graph_compiler_arm.cc
@@ -20,7 +20,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -1252,8 +1251,7 @@
 #endif  // DEBUG
   __ LoadFieldFromOffset(kWord, R1, R0, Array::element_offset(edge_id));
   __ add(R1, R1, Operand(Smi::RawValue(1)));
-  __ StoreIntoObjectNoBarrierOffset(
-      R0, Array::element_offset(edge_id), R1, Assembler::kOnlySmi);
+  __ StoreIntoObjectNoBarrierOffset(R0, Array::element_offset(edge_id), R1);
 #if defined(DEBUG)
   assembler_->set_use_far_branches(old_use_far_branches);
 #endif  // DEBUG
diff --git a/runtime/vm/flow_graph_compiler_dbc.cc b/runtime/vm/flow_graph_compiler_dbc.cc
index 86352cc..8fb0b4c 100644
--- a/runtime/vm/flow_graph_compiler_dbc.cc
+++ b/runtime/vm/flow_graph_compiler_dbc.cc
@@ -20,7 +20,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -209,7 +208,7 @@
 
 void CompilerDeoptInfoWithStub::GenerateCode(FlowGraphCompiler* compiler,
                                              intptr_t stub_ix) {
-  UNIMPLEMENTED();
+  UNREACHABLE();
 }
 
 
@@ -233,14 +232,22 @@
   __ PushConstant(dst_type);
   __ PushConstant(dst_name);
   __ AssertAssignable(__ AddConstant(test_cache));
+  if (is_optimizing()) {
+    // The register allocator does not think that our first input (also used
+    // as the output) needs to be kept alive across the call, because that is
+    // how code is written on other platforms (where registers are always
+    // spilled across the call): inputs are consumed by the operation and the
+    // output is produced afterwards, so neither is alive at the safepoint.
+    // We have to mark the slot alive manually to ensure that the GC
+    // visits it.
+    locs->SetStackBit(locs->out(0).reg());
+  }
   RecordSafepoint(locs);
   AddCurrentDescriptor(RawPcDescriptors::kOther, deopt_id, token_pos);
   if (is_optimizing()) {
     // Assert assignable keeps the instance on the stack as the result,
     // all other arguments are popped.
-    // In optimized code we need to drop it because optimized code
-    // expects the result in the register and it is already there
-    // because locs()->in(0).reg() == locs()->out(0).reg().
+    ASSERT(locs->out(0).reg() == locs->in(0).reg());
     __ Drop1();
   }
 }
@@ -390,6 +397,16 @@
 }
 
 
+uint16_t FlowGraphCompiler::ToEmbeddableCid(intptr_t cid,
+                                            Instruction* instruction) {
+  if (!Utils::IsUint(16, cid)) {
+    instruction->Unsupported(this);
+    UNREACHABLE();
+  }
+  return static_cast<uint16_t>(cid);
+}
+
+
 #undef __
 #define __ compiler_->assembler()->
 
@@ -409,6 +426,7 @@
     __ LoadConstant(destination.reg(), source.constant());
   } else {
     compiler_->Bailout("Unsupported move");
+    UNREACHABLE();
   }
 
   move->Eliminate();
diff --git a/runtime/vm/flow_graph_compiler_ia32.cc b/runtime/vm/flow_graph_compiler_ia32.cc
index 64d92a9..4a036e2 100644
--- a/runtime/vm/flow_graph_compiler_ia32.cc
+++ b/runtime/vm/flow_graph_compiler_ia32.cc
@@ -22,7 +22,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
diff --git a/runtime/vm/flow_graph_compiler_x64.cc b/runtime/vm/flow_graph_compiler_x64.cc
index b76da0b..5cc5c93 100644
--- a/runtime/vm/flow_graph_compiler_x64.cc
+++ b/runtime/vm/flow_graph_compiler_x64.cc
@@ -19,7 +19,6 @@
 #include "vm/stack_frame.h"
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
diff --git a/runtime/vm/flow_graph_inliner.cc b/runtime/vm/flow_graph_inliner.cc
index e92bc3f..f93b1d1 100644
--- a/runtime/vm/flow_graph_inliner.cc
+++ b/runtime/vm/flow_graph_inliner.cc
@@ -863,7 +863,7 @@
           }
         }
 
-        if (FLAG_support_il_printer && FLAG_trace_inlining &&
+        if (FLAG_support_il_printer && trace_inlining() &&
             (FLAG_print_flow_graph || FLAG_print_flow_graph_optimized)) {
           THR_Print("Callee graph for inlining %s\n",
                     function.ToFullyQualifiedCString());
@@ -1001,7 +1001,6 @@
  private:
   friend class PolymorphicInliner;
 
-
   static bool Contains(const GrowableArray<intptr_t>& a, intptr_t deopt_id) {
     for (intptr_t i = 0; i < a.length(); i++) {
       if (a[i] == deopt_id) return true;
@@ -1470,7 +1469,8 @@
 
 bool PolymorphicInliner::TryInliningPoly(intptr_t receiver_cid,
                                         const Function& target) {
-  if (TryInlineRecognizedMethod(receiver_cid, target)) {
+  if (owner_->inliner_->use_speculative_inlining() &&
+      TryInlineRecognizedMethod(receiver_cid, target)) {
     owner_->inlined_ = true;
     return true;
   }
@@ -1773,7 +1773,8 @@
                     String::Handle(old_checks.target_name()),
                     Array::Handle(old_checks.arguments_descriptor()),
                     old_checks.deopt_id(),
-                    1));  // Number of args tested.
+                    1,        // Number of args tested.
+                    false));  // is_static_call
     for (intptr_t i = 0; i < non_inlined_variants_.length(); ++i) {
       new_checks.AddReceiverCheck(non_inlined_variants_[i].cid,
                                   *non_inlined_variants_[i].target,
@@ -2459,6 +2460,33 @@
 }
 
 
+static bool InlineGrowableArraySetter(FlowGraph* flow_graph,
+                                      intptr_t offset,
+                                      StoreBarrierType store_barrier_type,
+                                      Instruction* call,
+                                      TargetEntryInstr** entry,
+                                      Definition** last) {
+  Definition* array = call->ArgumentAt(0);
+  Definition* value = call->ArgumentAt(1);
+
+  *entry = new(Z) TargetEntryInstr(flow_graph->allocate_block_id(),
+                                   call->GetBlock()->try_index());
+  (*entry)->InheritDeoptTarget(Z, call);
+
+  // This is an internal method, no need to check argument types.
+  StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr(
+      offset,
+      new(Z) Value(array),
+      new(Z) Value(value),
+      store_barrier_type,
+      call->token_pos());
+  flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
+  *last = store;
+
+  return true;
+}
+
+
 static intptr_t PrepareInlineByteArrayBaseOp(
     FlowGraph* flow_graph,
     Instruction* call,
@@ -2651,7 +2679,8 @@
                                 i_call->function_name(),
                                 Object::empty_array(),  // Dummy args. descr.
                                 Thread::kNoDeoptId,
-                                1);
+                                1,
+                                false);
       value_check.AddReceiverCheck(kSmiCid, target);
       break;
     }
@@ -2663,7 +2692,8 @@
                                   i_call->function_name(),
                                   Object::empty_array(),  // Dummy args. descr.
                                   Thread::kNoDeoptId,
-                                  1);
+                                  1,
+                                  false);
         value_check.AddReceiverCheck(kSmiCid, target);
       }
       break;
@@ -2674,7 +2704,8 @@
                                 i_call->function_name(),
                                 Object::empty_array(),  // Dummy args. descr.
                                 Thread::kNoDeoptId,
-                                1);
+                                1,
+                                false);
       value_check.AddReceiverCheck(kDoubleCid, target);
       break;
     }
@@ -2684,7 +2715,8 @@
                                 i_call->function_name(),
                                 Object::empty_array(),  // Dummy args. descr.
                                 Thread::kNoDeoptId,
-                                1);
+                                1,
+                                false);
       value_check.AddReceiverCheck(kInt32x4Cid, target);
       break;
     }
@@ -2694,7 +2726,8 @@
                                 i_call->function_name(),
                                 Object::empty_array(),  // Dummy args. descr.
                                 Thread::kNoDeoptId,
-                                1);
+                                1,
+                                false);
       value_check.AddReceiverCheck(kFloat32x4Cid, target);
       break;
     }
@@ -2870,6 +2903,10 @@
     intptr_t cid,
     TargetEntryInstr** entry,
     Definition** last) {
+  ASSERT((cid == kOneByteStringCid) ||
+         (cid == kTwoByteStringCid) ||
+         (cid == kExternalOneByteStringCid) ||
+         (cid == kExternalTwoByteStringCid));
   Definition* str = call->ArgumentAt(0);
   Definition* index = call->ArgumentAt(1);
 
@@ -2883,6 +2920,62 @@
 }
 
 
+bool FlowGraphInliner::TryReplaceInstanceCallWithInline(
+    FlowGraph* flow_graph,
+    ForwardInstructionIterator* iterator,
+    InstanceCallInstr* call) {
+  Function& target = Function::Handle(Z);
+  GrowableArray<intptr_t> class_ids;
+  call->ic_data()->GetCheckAt(0, &class_ids, &target);
+  const intptr_t receiver_cid = class_ids[0];
+
+  TargetEntryInstr* entry;
+  Definition* last;
+  if (!FlowGraphInliner::TryInlineRecognizedMethod(flow_graph,
+                                                   receiver_cid,
+                                                   target,
+                                                   call,
+                                                   call->ArgumentAt(0),
+                                                   call->token_pos(),
+                                                   *call->ic_data(),
+                                                   &entry, &last)) {
+    return false;
+  }
+
+  // Insert receiver class check if needed.
+  if (MethodRecognizer::PolymorphicTarget(target) ||
+      flow_graph->InstanceCallNeedsClassCheck(call, target.kind())) {
+    Instruction* check = GetCheckClass(
+        flow_graph,
+        call->ArgumentAt(0),
+        ICData::ZoneHandle(Z, call->ic_data()->AsUnaryClassChecks()),
+        call->deopt_id(),
+        call->token_pos());
+    flow_graph->InsertBefore(call, check, call->env(), FlowGraph::kEffect);
+  }
+
+  // Remove the original push arguments.
+  for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
+    PushArgumentInstr* push = call->PushArgumentAt(i);
+    push->ReplaceUsesWith(push->value()->definition());
+    push->RemoveFromGraph();
+  }
+  // Replace all uses of this definition with the result.
+  call->ReplaceUsesWith(last);
+  // Finally, splice the inlined instruction sequence into the graph in place
+  // of the call.
+  call->previous()->LinkTo(entry->next());
+  entry->UnuseAllInputs();  // Entry block is not in the graph.
+  last->LinkTo(call);
+  // Remove through the iterator.
+  ASSERT(iterator->Current() == call);
+  iterator->RemoveCurrentFromGraph();
+  call->set_previous(NULL);
+  call->set_next(NULL);
+  return true;
+}
+
+
 bool FlowGraphInliner::TryInlineRecognizedMethod(FlowGraph* flow_graph,
                                                  intptr_t receiver_cid,
                                                  const Function& target,
@@ -2892,11 +2985,6 @@
                                                  const ICData& ic_data,
                                                  TargetEntryInstr** entry,
                                                  Definition** last) {
-  if (FLAG_precompiled_mode) {
-    // The graphs generated below include deopts.
-    return false;
-  }
-
   ICData& value_check = ICData::ZoneHandle(Z);
   MethodRecognizer::Kind kind = MethodRecognizer::RecognizeKind(target);
   switch (kind) {
@@ -3127,6 +3215,18 @@
       return InlineDoubleOp(flow_graph, Token::kMUL, call, entry, last);
     case MethodRecognizer::kDoubleDiv:
       return InlineDoubleOp(flow_graph, Token::kDIV, call, entry, last);
+    case MethodRecognizer::kGrowableArraySetData:
+      ASSERT(receiver_cid == kGrowableObjectArrayCid);
+      ASSERT(ic_data.NumberOfChecks() == 1);
+      return InlineGrowableArraySetter(
+          flow_graph, GrowableObjectArray::data_offset(), kEmitStoreBarrier,
+          call, entry, last);
+    case MethodRecognizer::kGrowableArraySetLength:
+      ASSERT(receiver_cid == kGrowableObjectArrayCid);
+      ASSERT(ic_data.NumberOfChecks() == 1);
+      return InlineGrowableArraySetter(
+          flow_graph, GrowableObjectArray::length_offset(), kNoStoreBarrier,
+          call, entry, last);
     default:
       return false;
   }
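// Editor's note (illustrative sketch, not part of the patch): both recognized
// GrowableArray setters above reduce to a single raw-offset store; only the
// barrier differs. The data setter stores a heap reference and keeps the write
// barrier, while the length setter uses kNoStoreBarrier, presumably because the
// stored length is always a Smi. Shape of the IL built for `length=`:
//
//   entry:  TargetEntryInstr (fresh block id, inherits the call's deopt target)
//   store:  StoreInstanceField(GrowableObjectArray::length_offset(),
//                              receiver, new_length, kNoStoreBarrier)
//   last = store; TryReplaceInstanceCallWithInline splices entry..last in
//   place of the call.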
diff --git a/runtime/vm/flow_graph_inliner.h b/runtime/vm/flow_graph_inliner.h
index ade98b5..04bca93 100644
--- a/runtime/vm/flow_graph_inliner.h
+++ b/runtime/vm/flow_graph_inliner.h
@@ -13,7 +13,9 @@
 class Definition;
 class Field;
 class FlowGraph;
+class ForwardInstructionIterator;
 class Function;
+class InstanceCallInstr;
 class Instruction;
 class TargetEntryInstr;
 
@@ -42,6 +44,11 @@
 
   bool trace_inlining() const { return trace_inlining_; }
 
+  static bool TryReplaceInstanceCallWithInline(
+      FlowGraph* flow_graph,
+      ForwardInstructionIterator* iterator,
+      InstanceCallInstr* call);
+
   static bool TryInlineRecognizedMethod(FlowGraph* flow_graph,
                                         intptr_t receiver_cid,
                                         const Function& target,
@@ -52,6 +59,8 @@
                                         TargetEntryInstr** entry,
                                         Definition** last);
 
+  bool use_speculative_inlining() const { return use_speculative_inlining_; }
+
  private:
   friend class CallSiteInliner;
 
diff --git a/runtime/vm/flow_graph_range_analysis.cc b/runtime/vm/flow_graph_range_analysis.cc
index 18eaa36..70a9dc5 100644
--- a/runtime/vm/flow_graph_range_analysis.cc
+++ b/runtime/vm/flow_graph_range_analysis.cc
@@ -910,7 +910,7 @@
     // Attempt to find equivalent instruction that was already scheduled.
     // If the instruction is still in the graph (it could have been
     // un-scheduled by a rollback action) and it dominates the sink - use it.
-    Instruction* emitted = map_.Lookup(instruction);
+    Instruction* emitted = map_.LookupValue(instruction);
     if (emitted != NULL &&
         !emitted->WasEliminated() &&
         sink->IsDominatedBy(emitted)) {
@@ -1569,7 +1569,8 @@
           target->PredecessorAt(0)->last_instruction()->AsBranch();
       if (target == branch->true_successor()) {
         // True unreachable.
-        if (FLAG_trace_constant_propagation) {
+        if (FLAG_trace_constant_propagation &&
+            FlowGraphPrinter::ShouldPrint(flow_graph_->function())) {
           THR_Print("Range analysis: True unreachable (B%" Pd ")\n",
                     branch->true_successor()->block_id());
         }
@@ -1577,7 +1578,8 @@
       } else {
         ASSERT(target == branch->false_successor());
         // False unreachable.
-        if (FLAG_trace_constant_propagation) {
+        if (FLAG_trace_constant_propagation &&
+            FlowGraphPrinter::ShouldPrint(flow_graph_->function())) {
           THR_Print("Range analysis: False unreachable (B%" Pd ")\n",
                     branch->false_successor()->block_id());
         }
diff --git a/runtime/vm/flow_graph_type_propagator.cc b/runtime/vm/flow_graph_type_propagator.cc
index d0ffe77..2ee8ed1 100644
--- a/runtime/vm/flow_graph_type_propagator.cc
+++ b/runtime/vm/flow_graph_type_propagator.cc
@@ -888,6 +888,15 @@
 }
 
 
+CompileType PolymorphicInstanceCallInstr::ComputeType() const {
+  if (!HasSingleRecognizedTarget()) return CompileType::Dynamic();
+  const Function& target = Function::Handle(ic_data().GetTargetAt(0));
+  return (target.recognized_kind() != MethodRecognizer::kUnknown)
+      ? CompileType::FromCid(MethodRecognizer::ResultCid(target))
+      : CompileType::Dynamic();
+}
+
+
 CompileType StaticCallInstr::ComputeType() const {
   if (result_cid_ != kDynamicCid) {
     return CompileType::FromCid(result_cid_);
diff --git a/runtime/vm/freelist.cc b/runtime/vm/freelist.cc
index b8df851..ceae578 100644
--- a/runtime/vm/freelist.cc
+++ b/runtime/vm/freelist.cc
@@ -4,13 +4,12 @@
 
 #include "vm/freelist.h"
 
-#include <map>
-
 #include "vm/bit_set.h"
+#include "vm/hash_map.h"
 #include "vm/lockers.h"
 #include "vm/object.h"
-#include "vm/raw_object.h"
 #include "vm/os_thread.h"
+#include "vm/raw_object.h"
 
 namespace dart {
 
@@ -26,8 +25,7 @@
   uword tags = 0;
   tags = RawObject::SizeTag::update(size, tags);
   tags = RawObject::ClassIdTag::update(kFreeListElement, tags);
-  // All words in a freelist element header must look like smis; see
-  // TryAllocateSmiInitializedLocked.
+  // All words in a freelist element header should look like Smis.
   ASSERT(!reinterpret_cast<RawObject*>(tags)->IsHeapObject());
 
   result->tags_ = tags;
@@ -286,26 +284,53 @@
 }
 
 
+class IntptrPair {
+ public:
+  IntptrPair() : first_(-1), second_(-1) {}
+  IntptrPair(intptr_t first, intptr_t second)
+      : first_(first), second_(second) {}
+
+  intptr_t first() const { return first_; }
+  intptr_t second() const { return second_; }
+  void set_second(intptr_t s) { second_ = s; }
+
+  bool operator==(const IntptrPair& other) {
+    return (first_ == other.first_) && (second_ == other.second_);
+  }
+
+  bool operator!=(const IntptrPair& other) {
+    return (first_ != other.first_) || (second_ != other.second_);
+  }
+
+ private:
+  intptr_t first_;
+  intptr_t second_;
+};
+
+
 void FreeList::PrintLarge() const {
   int large_sizes = 0;
   int large_objects = 0;
   intptr_t large_bytes = 0;
-  std::map<intptr_t, intptr_t> sorted;
-  std::map<intptr_t, intptr_t>::iterator it;
+  MallocDirectChainedHashMap<NumbersKeyValueTrait<IntptrPair> > map;
   FreeListElement* node;
   for (node = free_lists_[kNumLists]; node != NULL; node = node->next()) {
-    it = sorted.find(node->Size());
-    if (it != sorted.end()) {
-      it->second += 1;
-    } else {
+    IntptrPair* pair = map.Lookup(node->Size());
+    if (pair == NULL) {
       large_sizes += 1;
-      sorted.insert(std::make_pair(node->Size(), 1));
+      map.Insert(IntptrPair(node->Size(), 1));
+    } else {
+      pair->set_second(pair->second() + 1);
     }
     large_objects += 1;
   }
-  for (it = sorted.begin(); it != sorted.end(); ++it) {
-    intptr_t size = it->first;
-    intptr_t list_length = it->second;
+
+  MallocDirectChainedHashMap<NumbersKeyValueTrait<IntptrPair> >::Iterator it =
+      map.GetIterator();
+  IntptrPair* pair;
+  while ((pair = it.Next()) != NULL) {
+    intptr_t size = pair->first();
+    intptr_t list_length = pair->second();
     intptr_t list_bytes = list_length * size;
     large_bytes += list_bytes;
     OS::Print("large %3" Pd " [%8" Pd " bytes] : "
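// Editor's note (illustrative sketch, not part of the patch): the std::map in
// PrintLarge() is replaced by a malloc-backed hash map keyed on element size.
// A minimal version of the counting loop, with `head` standing in for
// free_lists_[kNumLists]:
//
//   MallocDirectChainedHashMap<NumbersKeyValueTrait<IntptrPair> > counts;
//   for (FreeListElement* node = head; node != NULL; node = node->next()) {
//     IntptrPair* pair = counts.Lookup(node->Size());
//     if (pair == NULL) {
//       counts.Insert(IntptrPair(node->Size(), 1));  // first element of this size
//     } else {
//       pair->set_second(pair->second() + 1);        // bump the per-size count
//     }
//   }
//
// Unlike std::map, iteration over the hash map is in unspecified order, so the
// per-size lines printed above are no longer sorted by size.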
diff --git a/runtime/vm/freelist.h b/runtime/vm/freelist.h
index 2c47512..975e50c 100644
--- a/runtime/vm/freelist.h
+++ b/runtime/vm/freelist.h
@@ -18,8 +18,7 @@
 // pointer to chain elements of the list together. For objects larger than the
 // object size encodable in tags field, the size of the element is embedded in
 // the element at the address following the next_ field. All words written by
-// the freelist are guaranteed to look like smis, as required by
-// TryAllocateSmiInitializedLocked.
+// the freelist are guaranteed to look like Smis.
 // A FreeListElement never has its header mark bit set.
 class FreeListElement {
  public:
diff --git a/runtime/vm/gc_marker.cc b/runtime/vm/gc_marker.cc
index 2ac946a..ae0e205 100644
--- a/runtime/vm/gc_marker.cc
+++ b/runtime/vm/gc_marker.cc
@@ -434,19 +434,21 @@
 
 class MarkingWeakVisitor : public HandleVisitor {
  public:
-  MarkingWeakVisitor() : HandleVisitor(Thread::Current()) {
-  }
+  MarkingWeakVisitor(Thread* thread, FinalizationQueue* queue) :
+      HandleVisitor(thread), queue_(queue) { }
 
   void VisitHandle(uword addr) {
     FinalizablePersistentHandle* handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
     RawObject* raw_obj = handle->raw();
     if (IsUnreachable(raw_obj)) {
-      handle->UpdateUnreachable(thread()->isolate());
+      handle->UpdateUnreachable(thread()->isolate(), queue_);
     }
   }
 
  private:
+  FinalizationQueue* queue_;
+
   DISALLOW_COPY_AND_ASSIGN(MarkingWeakVisitor);
 };
 
@@ -706,8 +708,19 @@
       mark.DrainMarkingStack();
       {
         TIMELINE_FUNCTION_GC_DURATION(thread, "WeakHandleProcessing");
-        MarkingWeakVisitor mark_weak;
-        IterateWeakRoots(isolate, &mark_weak);
+        if (FLAG_background_finalization) {
+          FinalizationQueue* queue = new FinalizationQueue();
+          MarkingWeakVisitor mark_weak(thread, queue);
+          IterateWeakRoots(isolate, &mark_weak);
+          if (queue->length() > 0) {
+            Dart::thread_pool()->Run(new BackgroundFinalizer(isolate, queue));
+          } else {
+            delete queue;
+          }
+        } else {
+          MarkingWeakVisitor mark_weak(thread, NULL);
+          IterateWeakRoots(isolate, &mark_weak);
+        }
       }
       // All marking done; detach code, etc.
       FinalizeResultsFrom(&mark);
@@ -743,8 +756,19 @@
       // Phase 2: Weak processing on main thread.
       {
         TIMELINE_FUNCTION_GC_DURATION(thread, "WeakHandleProcessing");
-        MarkingWeakVisitor mark_weak;
-        IterateWeakRoots(isolate, &mark_weak);
+        if (FLAG_background_finalization) {
+          FinalizationQueue* queue = new FinalizationQueue();
+          MarkingWeakVisitor mark_weak(thread, queue);
+          IterateWeakRoots(isolate, &mark_weak);
+          if (queue->length() > 0) {
+            Dart::thread_pool()->Run(new BackgroundFinalizer(isolate, queue));
+          } else {
+            delete queue;
+          }
+        } else {
+          MarkingWeakVisitor mark_weak(thread, NULL);
+          IterateWeakRoots(isolate, &mark_weak);
+        }
       }
       barrier.Sync();
 
diff --git a/runtime/vm/hash_map.h b/runtime/vm/hash_map.h
index f2caa9e..372ff7f 100644
--- a/runtime/vm/hash_map.h
+++ b/runtime/vm/hash_map.h
@@ -5,28 +5,39 @@
 #ifndef VM_HASH_MAP_H_
 #define VM_HASH_MAP_H_
 
+#include "vm/growable_array.h"  // For Malloc, EmptyBase
 #include "vm/zone.h"
 
 namespace dart {
 
-template <typename KeyValueTrait>
-class DirectChainedHashMap: public ValueObject {
+template<typename KeyValueTrait, typename B, typename Allocator = Zone>
+class BaseDirectChainedHashMap : public B {
  public:
-  DirectChainedHashMap() : array_size_(0),
-                           lists_size_(0),
-                           count_(0),
-                           array_(NULL),
-                           lists_(NULL),
-                           free_list_head_(kNil) {
+  explicit BaseDirectChainedHashMap(Allocator* allocator)
+      : array_size_(0),
+        lists_size_(0),
+        count_(0),
+        array_(NULL),
+        lists_(NULL),
+        free_list_head_(kNil),
+        allocator_(allocator) {
     ResizeLists(kInitialSize);
     Resize(kInitialSize);
   }
 
-  DirectChainedHashMap(const DirectChainedHashMap& other);
+  BaseDirectChainedHashMap(const BaseDirectChainedHashMap& other);
+
+  ~BaseDirectChainedHashMap() {
+    allocator_->template Free<HashMapListElement>(array_, array_size_);
+    allocator_->template Free<HashMapListElement>(lists_, lists_size_);
+  }
 
   void Insert(typename KeyValueTrait::Pair kv);
 
-  typename KeyValueTrait::Value Lookup(typename KeyValueTrait::Key key) const;
+  typename KeyValueTrait::Value LookupValue(
+      typename KeyValueTrait::Key key) const;
+
+  typename KeyValueTrait::Pair* Lookup(typename KeyValueTrait::Key key) const;
 
   bool IsEmpty() const { return count_ == 0; }
 
@@ -43,6 +54,29 @@
     }
   }
 
+  class Iterator {
+   public:
+    typename KeyValueTrait::Pair* Next();
+
+    void Reset() {
+      array_index_ = 0;
+      list_index_ = kNil;
+    }
+
+   private:
+    explicit Iterator(const BaseDirectChainedHashMap& map)
+        : map_(map), array_index_(0), list_index_(kNil) {}
+
+    const BaseDirectChainedHashMap& map_;
+    intptr_t array_index_;
+    intptr_t list_index_;
+
+    template<typename T, typename Bs, typename A>
+    friend class BaseDirectChainedHashMap;
+  };
+
+  Iterator GetIterator() const { return Iterator(*this); }
+
  protected:
   // A linked list of T values.  Stored in arrays.
   struct HashMapListElement {
@@ -72,12 +106,31 @@
   // with a given hash.  Colliding elements are stored in linked lists.
   HashMapListElement* lists_;  // The linked lists containing hash collisions.
   intptr_t free_list_head_;  // Unused elements in lists_ are on the free list.
+  Allocator* allocator_;
 };
 
 
-template <typename KeyValueTrait>
-typename KeyValueTrait::Value
-    DirectChainedHashMap<KeyValueTrait>::
+template<typename KeyValueTrait, typename B, typename Allocator>
+BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::
+    BaseDirectChainedHashMap(const BaseDirectChainedHashMap& other)
+  : B(),
+    array_size_(other.array_size_),
+    lists_size_(other.lists_size_),
+    count_(other.count_),
+    array_(other.allocator_->template Alloc<HashMapListElement>(
+        other.array_size_)),
+    lists_(other.allocator_->template Alloc<HashMapListElement>(
+        other.lists_size_)),
+    free_list_head_(other.free_list_head_),
+    allocator_(other.allocator_) {
+  memmove(array_, other.array_, array_size_ * sizeof(HashMapListElement));
+  memmove(lists_, other.lists_, lists_size_ * sizeof(HashMapListElement));
+}
+
+
+template<typename KeyValueTrait, typename B, typename Allocator>
+typename KeyValueTrait::Pair*
+    BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::
         Lookup(typename KeyValueTrait::Key key) const {
   const typename KeyValueTrait::Value kNoValue =
       KeyValueTrait::ValueOf(typename KeyValueTrait::Pair());
@@ -86,40 +139,69 @@
   uword pos = Bound(hash);
   if (KeyValueTrait::ValueOf(array_[pos].kv) != kNoValue) {
     if (KeyValueTrait::IsKeyEqual(array_[pos].kv, key)) {
-      return KeyValueTrait::ValueOf(array_[pos].kv);
+      return &array_[pos].kv;
     }
 
     intptr_t next = array_[pos].next;
     while (next != kNil) {
       if (KeyValueTrait::IsKeyEqual(lists_[next].kv, key)) {
-        return KeyValueTrait::ValueOf(lists_[next].kv);
+        return &lists_[next].kv;
       }
       next = lists_[next].next;
     }
   }
-  return kNoValue;
+  return NULL;
 }
 
 
-template <typename KeyValueTrait>
-DirectChainedHashMap<KeyValueTrait>::
-    DirectChainedHashMap(const DirectChainedHashMap& other)
-  : ValueObject(),
-    array_size_(other.array_size_),
-    lists_size_(other.lists_size_),
-    count_(other.count_),
-    array_(Thread::Current()->zone()->
-           Alloc<HashMapListElement>(other.array_size_)),
-    lists_(Thread::Current()->zone()->
-           Alloc<HashMapListElement>(other.lists_size_)),
-    free_list_head_(other.free_list_head_) {
-  memmove(array_, other.array_, array_size_ * sizeof(HashMapListElement));
-  memmove(lists_, other.lists_, lists_size_ * sizeof(HashMapListElement));
+template<typename KeyValueTrait, typename B, typename Allocator>
+typename KeyValueTrait::Value
+    BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::
+        LookupValue(typename KeyValueTrait::Key key) const {
+  const typename KeyValueTrait::Value kNoValue =
+      KeyValueTrait::ValueOf(typename KeyValueTrait::Pair());
+  typename KeyValueTrait::Pair* pair = Lookup(key);
+  return (pair == NULL) ? kNoValue : KeyValueTrait::ValueOf(*pair);
 }
 
 
-template <typename KeyValueTrait>
-void DirectChainedHashMap<KeyValueTrait>::Resize(intptr_t new_size) {
+template<typename KeyValueTrait, typename B, typename Allocator>
+typename KeyValueTrait::Pair*
+    BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::Iterator::Next() {
+  const typename KeyValueTrait::Pair kNoPair = typename KeyValueTrait::Pair();
+
+  if (array_index_ < map_.array_size_) {
+    // If we're not in the middle of a list, find the next array slot.
+    if (list_index_ == kNil) {
+      while ((array_index_ < map_.array_size_) &&
+             (map_.array_[array_index_].kv == kNoPair)) {
+        array_index_++;
+      }
+      if (array_index_ < map_.array_size_) {
+        // When we're done with the list, we'll continue with the next array
+        // slot.
+        const intptr_t old_array_index = array_index_;
+        array_index_++;
+        list_index_ = map_.array_[old_array_index].next;
+        return &map_.array_[old_array_index].kv;
+      } else {
+        return NULL;
+      }
+    }
+
+    // Otherwise, return the current lists_ entry, advancing list_index_.
+    intptr_t current = list_index_;
+    list_index_ = map_.lists_[current].next;
+    return &map_.lists_[current].kv;
+  }
+
+  return NULL;
+}
+
+
+template<typename KeyValueTrait, typename B, typename Allocator>
+void BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::Resize(
+    intptr_t new_size) {
   const typename KeyValueTrait::Value kNoValue =
       KeyValueTrait::ValueOf(typename KeyValueTrait::Pair());
 
@@ -133,7 +215,7 @@
   }
 
   HashMapListElement* new_array =
-      Thread::Current()->zone()->Alloc<HashMapListElement>(new_size);
+      allocator_->template Alloc<HashMapListElement>(new_size);
   InitArray(new_array, new_size);
 
   HashMapListElement* old_array = array_;
@@ -163,16 +245,17 @@
   }
   USE(old_count);
   ASSERT(count_ == old_count);
+  allocator_->template Free<HashMapListElement>(old_array, old_size);
 }
 
 
-template <typename T>
-void DirectChainedHashMap<T>::ResizeLists(intptr_t new_size) {
+template<typename KeyValueTrait, typename B, typename Allocator>
+void BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::ResizeLists(
+    intptr_t new_size) {
   ASSERT(new_size > lists_size_);
 
   HashMapListElement* new_lists =
-      Thread::Current()->zone()->
-      Alloc<HashMapListElement>(new_size);
+      allocator_->template Alloc<HashMapListElement>(new_size);
   InitArray(new_lists, new_size);
 
   HashMapListElement* old_lists = lists_;
@@ -188,11 +271,12 @@
     lists_[i].next = free_list_head_;
     free_list_head_ = i;
   }
+  allocator_->template Free<HashMapListElement>(old_lists, old_size);
 }
 
 
-template <typename KeyValueTrait>
-void DirectChainedHashMap<KeyValueTrait>::
+template<typename KeyValueTrait, typename B, typename Allocator>
+void BaseDirectChainedHashMap<KeyValueTrait, B, Allocator>::
     Insert(typename KeyValueTrait::Pair kv) {
   const typename KeyValueTrait::Value kNoValue =
       KeyValueTrait::ValueOf(typename KeyValueTrait::Pair());
@@ -223,6 +307,24 @@
 }
 
 
+template<typename KeyValueTrait>
+class DirectChainedHashMap
+    : public BaseDirectChainedHashMap<KeyValueTrait, ValueObject> {
+ public:
+  DirectChainedHashMap() : BaseDirectChainedHashMap<KeyValueTrait, ValueObject>(
+          ASSERT_NOTNULL(Thread::Current()->zone())) {}
+};
+
+
+template<typename KeyValueTrait>
+class MallocDirectChainedHashMap
+    : public BaseDirectChainedHashMap<KeyValueTrait, EmptyBase, Malloc> {
+ public:
+  MallocDirectChainedHashMap()
+      : BaseDirectChainedHashMap<KeyValueTrait, EmptyBase, Malloc>(NULL) {}
+};
+
+
 template<typename T>
 class PointerKeyValueTrait {
  public:
@@ -247,6 +349,20 @@
   }
 };
 
+
+template<typename T>
+class NumbersKeyValueTrait {
+ public:
+  typedef T Value;
+  typedef intptr_t Key;
+  typedef T Pair;
+
+  static intptr_t KeyOf(Pair kv) { return kv.first(); }
+  static T ValueOf(Pair kv) { return kv; }
+  static inline intptr_t Hashcode(Key key) { return key; }
+  static inline bool IsKeyEqual(Pair kv, Key key) { return kv.first() == key; }
+};
+
 }  // namespace dart
 
 #endif  // VM_HASH_MAP_H_
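// Editor's note (illustrative sketch, not part of the patch): Lookup() now
// returns a pointer to the stored Pair (NULL when absent) so callers can
// mutate entries in place, while LookupValue() preserves the old value-or-
// kNoValue behaviour. Using the TestValue helper from hash_map_test.cc:
//
//   DirectChainedHashMap<PointerKeyValueTrait<TestValue> > map;  // zone-allocated
//   TestValue v(0);
//   map.Insert(&v);
//   TestValue** slot = map.Lookup(&v);       // pointer into the table, NULL if absent
//   TestValue* value = map.LookupValue(&v);  // &v here; NULL (kNoValue) if absent
//
// MallocDirectChainedHashMap has the same interface but is malloc-backed, so it
// can outlive the current zone (as used by FreeList::PrintLarge).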
diff --git a/runtime/vm/hash_map_test.cc b/runtime/vm/hash_map_test.cc
index 522ff7c..7830b85 100644
--- a/runtime/vm/hash_map_test.cc
+++ b/runtime/vm/hash_map_test.cc
@@ -25,15 +25,94 @@
   TestValue v2(1);
   TestValue v3(0);
   map.Insert(&v1);
-  EXPECT(map.Lookup(&v1) == &v1);
+  EXPECT(map.LookupValue(&v1) == &v1);
   map.Insert(&v2);
-  EXPECT(map.Lookup(&v1) == &v1);
-  EXPECT(map.Lookup(&v2) == &v2);
-  EXPECT(map.Lookup(&v3) == &v1);
+  EXPECT(map.LookupValue(&v1) == &v1);
+  EXPECT(map.LookupValue(&v2) == &v2);
+  EXPECT(map.LookupValue(&v3) == &v1);
   DirectChainedHashMap<PointerKeyValueTrait<TestValue> > map2(map);
-  EXPECT(map2.Lookup(&v1) == &v1);
-  EXPECT(map2.Lookup(&v2) == &v2);
-  EXPECT(map2.Lookup(&v3) == &v1);
+  EXPECT(map2.LookupValue(&v1) == &v1);
+  EXPECT(map2.LookupValue(&v2) == &v2);
+  EXPECT(map2.LookupValue(&v3) == &v1);
+}
+
+
+TEST_CASE(MallocDirectChainedHashMap) {
+  MallocDirectChainedHashMap<PointerKeyValueTrait<TestValue> > map;
+  EXPECT(map.IsEmpty());
+  TestValue v1(0);
+  TestValue v2(1);
+  TestValue v3(0);
+  map.Insert(&v1);
+  EXPECT(map.LookupValue(&v1) == &v1);
+  map.Insert(&v2);
+  EXPECT(map.LookupValue(&v1) == &v1);
+  EXPECT(map.LookupValue(&v2) == &v2);
+  EXPECT(map.LookupValue(&v3) == &v1);
+  MallocDirectChainedHashMap<PointerKeyValueTrait<TestValue> > map2(map);
+  EXPECT(map2.LookupValue(&v1) == &v1);
+  EXPECT(map2.LookupValue(&v2) == &v2);
+  EXPECT(map2.LookupValue(&v3) == &v1);
+}
+
+
+class IntptrPair {
+ public:
+  IntptrPair() : first_(-1), second_(-1) {}
+  IntptrPair(intptr_t first, intptr_t second)
+      : first_(first), second_(second) {}
+
+  intptr_t first() const { return first_; }
+  intptr_t second() const { return second_; }
+
+  bool operator==(const IntptrPair& other) {
+    return (first_ == other.first_) && (second_ == other.second_);
+  }
+
+  bool operator!=(const IntptrPair& other) {
+    return (first_ != other.first_) || (second_ != other.second_);
+  }
+
+ private:
+  intptr_t first_;
+  intptr_t second_;
+};
+
+
+TEST_CASE(DirectChainedHashMapIterator) {
+  IntptrPair p1(1, 1);
+  IntptrPair p2(2, 2);
+  IntptrPair p3(3, 3);
+  IntptrPair p4(4, 4);
+  IntptrPair p5(5, 5);
+  DirectChainedHashMap<NumbersKeyValueTrait<IntptrPair> > map;
+  EXPECT(map.IsEmpty());
+  DirectChainedHashMap<NumbersKeyValueTrait<IntptrPair> >::Iterator it =
+      map.GetIterator();
+  EXPECT(it.Next() == NULL);
+  it.Reset();
+
+  map.Insert(p1);
+  EXPECT(*it.Next() == p1);
+  it.Reset();
+
+  map.Insert(p2);
+  map.Insert(p3);
+  map.Insert(p4);
+  map.Insert(p5);
+  intptr_t count = 0;
+  intptr_t sum = 0;
+  while (true) {
+    IntptrPair* p = it.Next();
+    if (p == NULL) {
+      break;
+    }
+    count++;
+    sum += p->second();
+  }
+
+  EXPECT(count == 5);
+  EXPECT(sum == 15);
 }
 
 }  // namespace dart
diff --git a/runtime/vm/hash_table.h b/runtime/vm/hash_table.h
index 8408f23..f1079b8 100644
--- a/runtime/vm/hash_table.h
+++ b/runtime/vm/hash_table.h
@@ -5,11 +5,6 @@
 #ifndef VM_HASH_TABLE_H_
 #define VM_HASH_TABLE_H_
 
-// Temporarily used when sorting the indices in EnumIndexHashTable.
-// TODO(koda): Remove these dependencies before using in production.
-#include <map>
-#include <vector>
-
 #include "platform/assert.h"
 #include "vm/object.h"
 
@@ -22,20 +17,16 @@
 //  - HashTable
 // The next layer provides ordering and iteration functionality:
 //  - UnorderedHashTable
-//  - EnumIndexHashTable
 //  - LinkedListHashTable (TODO(koda): Implement.)
-// The utility class HashTables handles growth and conversion (e.g., converting
-// a compact EnumIndexHashTable to an iteration-efficient LinkedListHashTable).
+// The utility class HashTables handles growth and conversion.
 // The next layer fixes the payload size and provides a natural interface:
 //  - HashMap
 //  - HashSet
 // Combining either of these with an iteration strategy, we get the templates
 // intended for use outside this file:
 //  - UnorderedHashMap
-//  - EnumIndexHashMap
 //  - LinkedListHashMap
 //  - UnorderedHashSet
-//  - EnumIndexHashSet
 //  - LinkedListHashSet
 // Each of these can be finally specialized with KeyTraits to support any set of
 // lookup key types (e.g., look up a char* in a set of String objects), and
@@ -435,74 +426,6 @@
 };
 
 
-// Table with insertion order, using one payload component for the enumeration
-// index, and one metadata element for the next enumeration index.
-template<typename KeyTraits, intptr_t kUserPayloadSize>
-class EnumIndexHashTable
-    : public HashTable<KeyTraits, kUserPayloadSize + 1, 1> {
- public:
-  typedef HashTable<KeyTraits, kUserPayloadSize + 1, 1> BaseTable;
-  static const intptr_t kPayloadSize = kUserPayloadSize;
-  static const intptr_t kNextEnumIndex = BaseTable::kMetaDataIndex;
-  EnumIndexHashTable(Object* key, Smi* value, Array* data)
-      : BaseTable(key, value, data) {}
-  EnumIndexHashTable(Zone* zone, RawArray* data)
-      : BaseTable(zone, data) {}
-  explicit EnumIndexHashTable(RawArray* data)
-      : BaseTable(Thread::Current()->zone(), data) {}
-  // Note: Does not check for concurrent modification.
-  class Iterator {
-   public:
-    explicit Iterator(const EnumIndexHashTable* table) : index_(-1) {
-      // TODO(koda): Use GrowableArray after adding stateful comparator support.
-      std::map<intptr_t, intptr_t> enum_to_entry;
-      for (intptr_t i = 0; i < table->NumEntries(); ++i) {
-        if (table->IsOccupied(i)) {
-          intptr_t enum_index =
-              table->GetSmiValueAt(table->PayloadIndex(i, kPayloadSize));
-          enum_to_entry[enum_index] = i;
-        }
-      }
-      for (std::map<intptr_t, intptr_t>::iterator it = enum_to_entry.begin();
-           it != enum_to_entry.end();
-           ++it) {
-        entries_.push_back(it->second);
-      }
-    }
-    bool MoveNext() {
-      if (index_ < (static_cast<intptr_t>(entries_.size() - 1))) {
-        index_++;
-        return true;
-      }
-      return false;
-    }
-    intptr_t Current() {
-      return entries_[index_];
-    }
-
-   private:
-    intptr_t index_;
-    std::vector<intptr_t> entries_;
-  };
-
-  void Initialize() const {
-    BaseTable::Initialize();
-    BaseTable::SetSmiValueAt(kNextEnumIndex, 0);
-  }
-
-  void InsertKey(intptr_t entry, const Object& key) const {
-    BaseTable::InsertKey(entry, key);
-    BaseTable::SmiHandle() =
-        Smi::New(BaseTable::GetSmiValueAt(kNextEnumIndex));
-    BaseTable::UpdatePayload(entry, kPayloadSize, BaseTable::SmiHandle());
-    // TODO(koda): Handle possible Smi overflow from repeated insert/delete.
-    BaseTable::AdjustSmiValueAt(kNextEnumIndex, 1);
-  }
-
-  // No extra book-keeping needed for DeleteEntry.
-};
-
-
 class HashTables : public AllStatic {
  public:
   // Allocates and initializes a table.
@@ -687,18 +610,6 @@
 };
 
 
-template<typename KeyTraits>
-class EnumIndexHashMap : public HashMap<EnumIndexHashTable<KeyTraits, 1> > {
- public:
-  typedef HashMap<EnumIndexHashTable<KeyTraits, 1> > BaseMap;
-  explicit EnumIndexHashMap(RawArray* data)
-      : BaseMap(Thread::Current()->zone(), data) {}
-  EnumIndexHashMap(Zone* zone, RawArray* data) : BaseMap(zone, data) {}
-  EnumIndexHashMap(Object* key, Smi* value, Array* data)
-      : BaseMap(key, value, data) {}
-};
-
-
 template<typename BaseIterTable>
 class HashSet : public BaseIterTable {
  public:
@@ -791,18 +702,6 @@
       : BaseSet(key, value, data) {}
 };
 
-
-template<typename KeyTraits>
-class EnumIndexHashSet : public HashSet<EnumIndexHashTable<KeyTraits, 0> > {
- public:
-  typedef HashSet<EnumIndexHashTable<KeyTraits, 0> > BaseSet;
-  explicit EnumIndexHashSet(RawArray* data)
-      : BaseSet(Thread::Current()->zone(), data) {}
-  EnumIndexHashSet(Zone* zone, RawArray* data) : BaseSet(zone, data) {}
-  EnumIndexHashSet(Object* key, Smi* value, Array* data)
-      : BaseSet(key, value, data) {}
-};
-
 }  // namespace dart
 
 #endif  // VM_HASH_TABLE_H_
diff --git a/runtime/vm/hash_table_test.cc b/runtime/vm/hash_table_test.cc
index 158af48..2d597b1 100644
--- a/runtime/vm/hash_table_test.cc
+++ b/runtime/vm/hash_table_test.cc
@@ -122,34 +122,6 @@
 }
 
 
-TEST_CASE(EnumIndexHashMap) {
-  typedef EnumIndexHashMap<TestTraits> Table;
-  Table table(HashTables::New<Table>(5));
-  table.UpdateOrInsert(String::Handle(String::New("a")),
-                       String::Handle(String::New("A")));
-  EXPECT(table.ContainsKey("a"));
-  table.UpdateValue("a", String::Handle(String::New("AAA")));
-  String& a_value = String::Handle();
-  a_value ^= table.GetOrNull("a");
-  EXPECT(a_value.Equals("AAA"));
-  Object& null_value = Object::Handle(table.GetOrNull("0"));
-  EXPECT(null_value.IsNull());
-
-  // Test on-demand allocation of a new key object using NewKey in traits.
-  String& b_value = String::Handle();
-  b_value ^=
-      table.InsertNewOrGetValue("b", String::Handle(String::New("BBB")));
-  EXPECT(b_value.Equals("BBB"));
-  {
-    // When the key is already present, there should be no allocation.
-    NoSafepointScope no_safepoint;
-    b_value ^= table.InsertNewOrGetValue("b", a_value);
-    EXPECT(b_value.Equals("BBB"));
-  }
-  table.Release();
-}
-
-
 std::string ToStdString(const String& str) {
   EXPECT(str.IsOneByteString());
   std::string result;
@@ -290,7 +262,6 @@
        initial_capacity < 32;
        ++initial_capacity) {
     TestSet<UnorderedHashSet<TestTraits> >(initial_capacity, false);
-    TestSet<EnumIndexHashSet<TestTraits> >(initial_capacity, true);
   }
 }
 
@@ -300,7 +271,6 @@
        initial_capacity < 32;
        ++initial_capacity) {
     TestMap<UnorderedHashMap<TestTraits> >(initial_capacity, false);
-    TestMap<EnumIndexHashMap<TestTraits> >(initial_capacity, true);
   }
 }
 
diff --git a/runtime/vm/heap.cc b/runtime/vm/heap.cc
index 2c92b17..21a4b46 100644
--- a/runtime/vm/heap.cc
+++ b/runtime/vm/heap.cc
@@ -35,10 +35,11 @@
       old_space_(this, max_old_gen_words, max_external_words),
       barrier_(new Monitor()),
       barrier_done_(new Monitor()),
+      finalization_tasks_lock_(new Monitor()),
+      finalization_tasks_(0),
       read_only_(false),
       gc_new_space_in_progress_(false),
-      gc_old_space_in_progress_(false),
-      pretenure_policy_(0) {
+      gc_old_space_in_progress_(false) {
   for (int sel = 0;
        sel < kNumWeakSelectors;
        sel++) {
@@ -52,6 +53,7 @@
 Heap::~Heap() {
   delete barrier_;
   delete barrier_done_;
+  delete finalization_tasks_lock_;
 
   for (int sel = 0;
        sel < kNumWeakSelectors;
@@ -145,14 +147,6 @@
 }
 
 
-uword Heap::AllocatePretenured(intptr_t size) {
-  ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0);
-  uword addr = old_space_.TryAllocateDataBump(size, PageSpace::kControlGrowth);
-  if (addr != 0) return addr;
-  return AllocateOld(size, HeapPage::kData);
-}
-
-
 void Heap::AllocateExternal(intptr_t size, Space space) {
   ASSERT(Thread::Current()->no_safepoint_scope_depth() == 0);
   if (space == kNew) {
@@ -378,7 +372,6 @@
     UpdateClassHeapStatsBeforeGC(kNew);
     new_space_.Scavenge(invoke_api_callbacks);
     isolate()->class_table()->UpdatePromoted();
-    UpdatePretenurePolicy();
     RecordAfterGC(kNew);
     PrintStats();
     NOT_IN_PRODUCT(PrintStatsToTimeline(&tds));
@@ -461,41 +454,6 @@
 #endif
 
 
-bool Heap::ShouldPretenure(intptr_t class_id) const {
-  if (class_id == kOneByteStringCid) {
-    return pretenure_policy_ > 0;
-  } else {
-    return false;
-  }
-}
-
-
-void Heap::UpdatePretenurePolicy() {
-  if (FLAG_disable_alloc_stubs_after_gc) {
-    ClassTable* table = isolate_->class_table();
-    Zone* zone = Thread::Current()->zone();
-    for (intptr_t cid = 1; cid < table->NumCids(); ++cid) {
-      if (((cid >= kNumPredefinedCids) || (cid == kArrayCid)) &&
-          table->IsValidIndex(cid) &&
-          table->HasValidClassAt(cid)) {
-        const Class& cls = Class::Handle(zone, table->At(cid));
-        cls.DisableAllocationStub();
-      }
-    }
-  }
-  ClassHeapStats* stats =
-      isolate_->class_table()->StatsWithUpdatedSize(kOneByteStringCid);
-  int allocated = stats->pre_gc.new_count;
-  int promo_percent = (allocated == 0) ? 0 :
-      (100 * stats->promoted_count) / allocated;
-  if (promo_percent >= FLAG_pretenure_threshold) {
-    pretenure_policy_ += FLAG_pretenure_interval;
-  } else {
-    pretenure_policy_ = Utils::Maximum(0, pretenure_policy_ - 1);
-  }
-}
-
-
 void Heap::UpdateGlobalMaxUsed() {
   ASSERT(isolate_ != NULL);
   // We are accessing the used in words count for both new and old space
@@ -528,16 +486,11 @@
 }
 
 
-Heap::Space Heap::SpaceForAllocation(intptr_t cid) {
-  return FLAG_pretenure_all ? kPretenured : kNew;
-}
-
-
 intptr_t Heap::TopOffset(Heap::Space space) {
   if (space == kNew) {
     return OFFSET_OF(Heap, new_space_) + Scavenger::top_offset();
   } else {
-    ASSERT(space == kPretenured);
+    ASSERT(space == kOld);
     return OFFSET_OF(Heap, old_space_) + PageSpace::top_offset();
   }
 }
@@ -547,7 +500,7 @@
   if (space == kNew) {
     return OFFSET_OF(Heap, new_space_) + Scavenger::end_offset();
   } else {
-    ASSERT(space == kPretenured);
+    ASSERT(space == kOld);
     return OFFSET_OF(Heap, old_space_) + PageSpace::end_offset();
   }
 }
@@ -731,6 +684,7 @@
 }
 
 
+#ifndef PRODUCT
 void Heap::PrintToJSONObject(Space space, JSONObject* object) const {
   if (space == kNew) {
     new_space_.PrintToJSONObject(object);
@@ -738,6 +692,7 @@
     old_space_.PrintToJSONObject(object);
   }
 }
+#endif  // PRODUCT
 
 
 void Heap::RecordBeforeGC(Space space, GCReason reason) {
diff --git a/runtime/vm/heap.h b/runtime/vm/heap.h
index 6131bf3..d90adc4 100644
--- a/runtime/vm/heap.h
+++ b/runtime/vm/heap.h
@@ -31,8 +31,6 @@
     kNew,
     kOld,
     kCode,
-    // TODO(koda): Harmonize all old-space allocation and get rid of this.
-    kPretenured,
   };
 
   enum WeakSelector {
@@ -81,8 +79,6 @@
         return AllocateOld(size, HeapPage::kData);
       case kCode:
         return AllocateOld(size, HeapPage::kExecutable);
-      case kPretenured:
-        return AllocatePretenured(size);
       default:
         UNREACHABLE();
     }
@@ -148,7 +144,6 @@
   // Accessors for inlined allocation in generated code.
   static intptr_t TopOffset(Space space);
   static intptr_t EndOffset(Space space);
-  static Space SpaceForAllocation(intptr_t class_id);
 
   // Initialize the heap and register it with the isolate.
   static void Init(Isolate* isolate,
@@ -244,19 +239,23 @@
     return size <= kNewAllocatableSize;
   }
 
+#ifndef PRODUCT
   void PrintToJSONObject(Space space, JSONObject* object) const;
 
   // The heap map contains the sizes and class ids for the objects in each page.
   void PrintHeapMapToJSONStream(Isolate* isolate, JSONStream* stream) {
-    return old_space_.PrintHeapMapToJSONStream(isolate, stream);
+    old_space_.PrintHeapMapToJSONStream(isolate, stream);
   }
+#endif  // PRODUCT
 
   Isolate* isolate() const { return isolate_; }
 
   Monitor* barrier() const { return barrier_; }
   Monitor* barrier_done() const { return barrier_done_; }
 
-  bool ShouldPretenure(intptr_t class_id) const;
+  Monitor* finalization_tasks_lock() const { return finalization_tasks_lock_; }
+  intptr_t finalization_tasks() const { return finalization_tasks_; }
+  void set_finalization_tasks(intptr_t count) { finalization_tasks_ = count; }
 
   void SetupExternalPage(void* pointer, uword size, bool is_executable) {
     old_space_.SetupExternalPage(pointer, size, is_executable);
@@ -302,7 +301,6 @@
 
   uword AllocateNew(intptr_t size);
   uword AllocateOld(intptr_t size, HeapPage::PageType type);
-  uword AllocatePretenured(intptr_t size);
 
   // Visit all pointers. Caller must ensure concurrent sweeper is not running,
   // and the visitor must not allocate.
@@ -327,7 +325,6 @@
   void RecordAfterGC(Space space);
   void PrintStats();
   void UpdateClassHeapStatsBeforeGC(Heap::Space space);
-  void UpdatePretenurePolicy();
   void PrintStatsToTimeline(TimelineEventScope* event);
 
   // Updates gc in progress flags.
@@ -353,6 +350,9 @@
   Monitor* barrier_;
   Monitor* barrier_done_;
 
+  Monitor* finalization_tasks_lock_;
+  intptr_t finalization_tasks_;
+
   // GC stats collection.
   GCStats stats_;
 
@@ -364,8 +364,6 @@
   bool gc_new_space_in_progress_;
   bool gc_old_space_in_progress_;
 
-  int pretenure_policy_;
-
   friend class Become;  // VisitObjectPointers
   friend class ServiceEvent;
   friend class PageSpace;  // VerifyGC
diff --git a/runtime/vm/il_printer.cc b/runtime/vm/il_printer.cc
index 204725c..5104545 100644
--- a/runtime/vm/il_printer.cc
+++ b/runtime/vm/il_printer.cc
@@ -1275,8 +1275,15 @@
   return Thread::Current()->zone()->MakeCopyOfString(buffer);
 }
 
+
 #else  // PRODUCT
 
+
+const char* Instruction::ToCString() const {
+  return DebugName();
+}
+
+
 void FlowGraphPrinter::PrintOneInstruction(Instruction* instr,
                                            bool print_locations) {
   UNREACHABLE();
diff --git a/runtime/vm/instructions_dbc.cc b/runtime/vm/instructions_dbc.cc
index 3656722..4dc73be 100644
--- a/runtime/vm/instructions_dbc.cc
+++ b/runtime/vm/instructions_dbc.cc
@@ -15,13 +15,54 @@
 
 namespace dart {
 
+static bool HasLoadFromPool(Instr instr) {
+  switch (Bytecode::DecodeOpcode(instr)) {
+    case Bytecode::kLoadConstant:
+    case Bytecode::kPushConstant:
+    case Bytecode::kStaticCall:
+    case Bytecode::kInstanceCall1:
+    case Bytecode::kInstanceCall2:
+    case Bytecode::kInstanceCall1Opt:
+    case Bytecode::kInstanceCall2Opt:
+    case Bytecode::kStoreStaticTOS:
+    case Bytecode::kPushStatic:
+    case Bytecode::kAllocate:
+    case Bytecode::kInstantiateType:
+    case Bytecode::kInstantiateTypeArgumentsTOS:
+    case Bytecode::kAssertAssignable:
+      return true;
+    default:
+      return false;
+  }
+}
+
+
+static bool GetLoadedObjectAt(
+    uword pc, const ObjectPool& object_pool, Object* obj) {
+  Instr instr = Bytecode::At(pc);
+  if (HasLoadFromPool(instr)) {
+    uint16_t index = Bytecode::DecodeD(instr);
+    if (object_pool.InfoAt(index) == ObjectPool::kTaggedObject) {
+      *obj = object_pool.ObjectAt(index);
+      return true;
+    }
+  }
+  return false;
+}
+
+
 CallPattern::CallPattern(uword pc, const Code& code)
     : object_pool_(ObjectPool::Handle(code.GetObjectPool())),
       end_(pc),
       ic_data_load_end_(0),
       target_code_pool_index_(-1),
       ic_data_(ICData::Handle()) {
-  UNIMPLEMENTED();
+  ASSERT(code.ContainsInstructionAt(end_));
+  const uword call_pc = end_ - sizeof(Instr);
+  Instr call_instr = Bytecode::At(call_pc);
+  ASSERT(Bytecode::IsCallOpcode(call_instr));
+  ic_data_load_end_ = call_pc;
+  target_code_pool_index_ = Bytecode::DecodeD(call_instr);
 }
 
 
@@ -30,6 +71,7 @@
   return 0;
 }
 
+
 int CallPattern::DeoptCallPatternLengthInBytes() {
   UNIMPLEMENTED();
   return 0;
@@ -109,48 +151,21 @@
 }
 
 
-static bool HasLoadFromPool(Instr instr) {
-  switch (Bytecode::DecodeOpcode(instr)) {
-    case Bytecode::kLoadConstant:
-    case Bytecode::kPushConstant:
-    case Bytecode::kStaticCall:
-    case Bytecode::kInstanceCall1:
-    case Bytecode::kInstanceCall2:
-    case Bytecode::kInstanceCall1Opt:
-    case Bytecode::kInstanceCall2Opt:
-    case Bytecode::kStoreStaticTOS:
-    case Bytecode::kPushStatic:
-    case Bytecode::kAllocate:
-    case Bytecode::kInstantiateType:
-    case Bytecode::kInstantiateTypeArgumentsTOS:
-    case Bytecode::kAssertAssignable:
-      return true;
-    default:
-      return false;
-  }
-}
-
-
 bool DecodeLoadObjectFromPoolOrThread(uword pc,
                                       const Code& code,
                                       Object* obj) {
   ASSERT(code.ContainsInstructionAt(pc));
-  Instr instr = Bytecode::At(pc);
-  if (HasLoadFromPool(instr)) {
-    uint16_t index = Bytecode::DecodeD(instr);
-    const ObjectPool& pool = ObjectPool::Handle(code.object_pool());
-    if (pool.InfoAt(index) == ObjectPool::kTaggedObject) {
-      *obj = pool.ObjectAt(index);
-      return true;
-    }
-  }
-  return false;
+  const ObjectPool& pool = ObjectPool::Handle(code.object_pool());
+  return GetLoadedObjectAt(pc, pool, obj);
 }
 
 
 RawICData* CallPattern::IcData() {
-  UNIMPLEMENTED();
-  return ICData::null();
+  if (ic_data_.IsNull()) {
+    bool found = GetLoadedObjectAt(ic_data_load_end_, object_pool_, &ic_data_);
+    ASSERT(found);
+  }
+  return ic_data_.raw();
 }
 
 
diff --git a/runtime/vm/intermediate_language.cc b/runtime/vm/intermediate_language.cc
index 4704219..689d535 100644
--- a/runtime/vm/intermediate_language.cc
+++ b/runtime/vm/intermediate_language.cc
@@ -126,6 +126,12 @@
 }
 
 
+void Instruction::Unsupported(FlowGraphCompiler* compiler) {
+  compiler->Bailout(ToCString());
+  UNREACHABLE();
+}
+
+
 bool Value::Equals(Value* other) const {
   return definition() == other->definition();
 }
@@ -1762,6 +1768,48 @@
 }
 
 
+Definition* CheckedSmiOpInstr::Canonicalize(FlowGraph* flow_graph) {
+  if ((left()->Type()->ToCid() == kSmiCid) &&
+      (right()->Type()->ToCid() == kSmiCid)) {
+    Definition* replacement = NULL;
+    // Operations that cannot deoptimize are specialized here: these include
+    // bitwise operators and comparisons. Other arithmetic operations can
+    // overflow or divide by zero and cannot be specialized unless we have
+    // extra range information.
+    switch (op_kind()) {
+      case Token::kBIT_AND:
+      case Token::kBIT_OR:
+      case Token::kBIT_XOR:
+        replacement =
+            new BinarySmiOpInstr(op_kind(),
+                                 new Value(left()->definition()),
+                                 new Value(right()->definition()),
+                                 Thread::kNoDeoptId);
+      default:
+        break;
+    }
+    if (Token::IsRelationalOperator(op_kind())) {
+      replacement = new RelationalOpInstr(token_pos(), op_kind(),
+                                          new Value(left()->definition()),
+                                          new Value(right()->definition()),
+                                          kSmiCid,
+                                          Thread::kNoDeoptId);
+    } else if (Token::IsEqualityOperator(op_kind())) {
+      replacement = new EqualityCompareInstr(token_pos(), op_kind(),
+                                             new Value(left()->definition()),
+                                             new Value(right()->definition()),
+                                             kSmiCid,
+                                             Thread::kNoDeoptId);
+    }
+    if (replacement != NULL) {
+      flow_graph->InsertBefore(this, replacement, env(), FlowGraph::kValue);
+      return replacement;
+    }
+  }
+  return this;
+}
+
+
 Definition* BinaryIntegerOpInstr::Canonicalize(FlowGraph* flow_graph) {
   // If both operands are constants evaluate this expression. Might
   // occur due to load forwarding after constant propagation pass
@@ -3038,6 +3086,46 @@
     default:          return NULL;
   }
 }
+#else
+static void TryFastPathSmiOp(
+    FlowGraphCompiler* compiler, ICData* call_ic_data, const String& name) {
+  if (!FLAG_two_args_smi_icd) {
+    return;
+  }
+  if (name.raw() == Symbols::Plus().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ AddTOS();
+    }
+  } else if (name.raw() == Symbols::Minus().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ SubTOS();
+    }
+  } else if (name.raw() == Symbols::EqualOperator().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ EqualTOS();
+    }
+  } else if (name.raw() == Symbols::LAngleBracket().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ LessThanTOS();
+    }
+  } else if (name.raw() == Symbols::RAngleBracket().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ GreaterThanTOS();
+    }
+  } else if (name.raw() == Symbols::BitAnd().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ BitAndTOS();
+    }
+  } else if (name.raw() == Symbols::BitOr().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ BitOrTOS();
+    }
+  } else if (name.raw() == Symbols::Star().raw()) {
+    if (call_ic_data->AddSmiSmiCheckForFastSmiStubs()) {
+      __ MulTOS();
+    }
+  }
+}
 #endif
 
 
@@ -3110,28 +3198,14 @@
     }
   }
 #else
-  call_ic_data = &ICData::ZoneHandle(call_ic_data->Original());
+  ICData* ic_data = &ICData::ZoneHandle(call_ic_data->Original());
 
   // Emit smi fast path instruction. If fast-path succeeds it skips the next
   // instruction otherwise it falls through.
-  if (function_name().raw() == Symbols::Plus().raw()) {
-    __ AddTOS();
-  } else if (function_name().raw() == Symbols::EqualOperator().raw()) {
-    __ EqualTOS();
-  } else if (function_name().raw() == Symbols::LAngleBracket().raw()) {
-    __ LessThanTOS();
-  } else if (function_name().raw() == Symbols::RAngleBracket().raw()) {
-    __ GreaterThanTOS();
-  } else if (function_name().raw() == Symbols::BitAnd().raw()) {
-    __ BitAndTOS();
-  } else if (function_name().raw() == Symbols::BitOr().raw()) {
-    __ BitOrTOS();
-  } else if (function_name().raw() == Symbols::Star().raw()) {
-    __ MulTOS();
-  }
+  TryFastPathSmiOp(compiler, ic_data, function_name());
 
   const intptr_t call_ic_data_kidx = __ AddConstant(*call_ic_data);
-  switch (call_ic_data->NumArgsTested()) {
+  switch (ic_data->NumArgsTested()) {
     case 1:
       if (compiler->is_optimizing()) {
         __ InstanceCall1Opt(ArgumentCount(), call_ic_data_kidx);
@@ -3261,9 +3335,10 @@
 
   __ PushConstant(function());
   __ StaticCall(ArgumentCount(), argdesc_kidx);
-  compiler->AddCurrentDescriptor(RawPcDescriptors::kUnoptStaticCall,
-                                 deopt_id(),
-                                 token_pos());
+  RawPcDescriptors::Kind kind = (compiler->is_optimizing())
+                              ? RawPcDescriptors::kOther
+                              : RawPcDescriptors::kUnoptStaticCall;
+  compiler->AddCurrentDescriptor(kind, deopt_id(), token_pos());
 
   compiler->RecordAfterCall(this);
 
@@ -3284,9 +3359,6 @@
   // DBC does not use LocationSummaries in the same way as other architectures.
 #if !defined(TARGET_ARCH_DBC)
   ASSERT(locs()->in(0).reg() == locs()->out(0).reg());
-#else
-  ASSERT(!compiler->is_optimizing() ||
-         (locs()->in(0).reg() == locs()->out(0).reg()));
 #endif  // !defined(TARGET_ARCH_DBC)
 }
 
@@ -3298,7 +3370,11 @@
 
 
 void DeoptimizeInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
+#if !defined(TARGET_ARCH_DBC)
   __ Jump(compiler->AddDeoptStub(deopt_id(), deopt_reason_));
+#else
+  compiler->EmitDeopt(deopt_id(), deopt_reason_);
+#endif
 }
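// Editor's note (illustrative sketch, not part of the patch): what the new
// CheckedSmiOpInstr::Canonicalize rewrites when both inputs are known Smis.
// Bitwise operators and comparisons cannot deoptimize, so they become
// unchecked instructions; operations that can overflow (e.g. +, *) are left
// as checked ops.
//
//   checked(a & b)   -->  BinarySmiOpInstr(kBIT_AND, a, b, Thread::kNoDeoptId)
//   checked(a < b)   -->  RelationalOpInstr(kLT, a, b, kSmiCid, kNoDeoptId)
//   checked(a == b)  -->  EqualityCompareInstr(kEQ, a, b, kSmiCid, kNoDeoptId)
//   checked(a + b)   -->  unchanged (result may overflow the Smi range)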
 
 
diff --git a/runtime/vm/intermediate_language.h b/runtime/vm/intermediate_language.h
index be04e5e..7307344 100644
--- a/runtime/vm/intermediate_language.h
+++ b/runtime/vm/intermediate_language.h
@@ -891,6 +891,8 @@
 
   void ClearEnv() { env_ = NULL; }
 
+  void Unsupported(FlowGraphCompiler* compiler);
+
  protected:
   // GetDeoptId and/or CopyDeoptIdFrom.
   friend class CallSiteInliner;
@@ -2912,6 +2914,8 @@
     return instance_call_->token_pos();
   }
 
+  virtual CompileType ComputeType() const;
+
   virtual intptr_t ArgumentCount() const {
     return instance_call()->ArgumentCount();
   }
@@ -3265,6 +3269,25 @@
     ASSERT(argument_names.IsZoneHandle() ||  argument_names.InVMHeap());
   }
 
+  StaticCallInstr(TokenPosition token_pos,
+                  const Function& function,
+                  const Array& argument_names,
+                  ZoneGrowableArray<PushArgumentInstr*>* arguments,
+                  intptr_t deopt_id)
+      : TemplateDefinition(deopt_id),
+        ic_data_(NULL),
+        token_pos_(token_pos),
+        function_(function),
+        argument_names_(argument_names),
+        arguments_(arguments),
+        result_cid_(kDynamicCid),
+        is_known_list_constructor_(false),
+        is_native_list_factory_(false),
+        identity_(AliasIdentity::Unknown()) {
+    ASSERT(function.IsZoneHandle());
+    ASSERT(argument_names.IsZoneHandle() ||  argument_names.InVMHeap());
+  }
+
   // ICData for static calls carries call count.
   const ICData* ic_data() const { return ic_data_; }
   bool HasICData() const {
@@ -3560,8 +3583,7 @@
         offset_in_bytes_(field.Offset()),
         emit_store_barrier_(emit_store_barrier),
         token_pos_(token_pos),
-        is_potential_unboxed_initialization_(false),
-        is_object_reference_initialization_(false) {
+        is_initialization_(false) {
     SetInputAt(kInstancePos, instance);
     SetInputAt(kValuePos, value);
     CheckField(field);
@@ -3576,20 +3598,14 @@
         offset_in_bytes_(offset_in_bytes),
         emit_store_barrier_(emit_store_barrier),
         token_pos_(token_pos),
-        is_potential_unboxed_initialization_(false),
-        is_object_reference_initialization_(false) {
+        is_initialization_(false) {
     SetInputAt(kInstancePos, instance);
     SetInputAt(kValuePos, value);
   }
 
   DECLARE_INSTRUCTION(StoreInstanceField)
 
-  void set_is_potential_unboxed_initialization(bool value) {
-    is_potential_unboxed_initialization_ = value;
-  }
-  void set_is_object_reference_initialization(bool value) {
-    is_object_reference_initialization_ = value;
-  }
+  void set_is_initialization(bool value) { is_initialization_ = value; }
 
   enum {
     kInstancePos = 0,
@@ -3598,12 +3614,8 @@
 
   Value* instance() const { return inputs_[kInstancePos]; }
   Value* value() const { return inputs_[kValuePos]; }
-  bool is_potential_unboxed_initialization() const {
-    return is_potential_unboxed_initialization_;
-  }
-  bool is_object_reference_initialization() const {
-    return is_object_reference_initialization_;
-  }
+  bool is_initialization() const { return is_initialization_; }
+
   virtual TokenPosition token_pos() const { return token_pos_; }
 
   const Field& field() const { return field_; }
@@ -3648,11 +3660,8 @@
   intptr_t offset_in_bytes_;
   const StoreBarrierType emit_store_barrier_;
   const TokenPosition token_pos_;
-  // This may be the first store to an unboxed field.
-  bool is_potential_unboxed_initialization_;
-  // True if this store initializes an object reference field of an object that
-  // was allocated uninitialized; see AllocateUninitializedContext.
-  bool is_object_reference_initialization_;
+  // Marks initializing stores, e.g. stores in a constructor.
+  bool is_initialization_;
 
   DISALLOW_COPY_AND_ASSIGN(StoreInstanceFieldInstr);
 };
@@ -6889,6 +6898,8 @@
 
   virtual EffectSet Effects() const { return EffectSet::All(); }
 
+  virtual Definition* Canonicalize(FlowGraph* flow_graph);
+
   PRINT_OPERANDS_TO_SUPPORT
 
   DECLARE_INSTRUCTION(CheckedSmiOp)
diff --git a/runtime/vm/intermediate_language_arm.cc b/runtime/vm/intermediate_language_arm.cc
index a0fa8cc..1750418 100644
--- a/runtime/vm/intermediate_language_arm.cc
+++ b/runtime/vm/intermediate_language_arm.cc
@@ -2011,7 +2011,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -2072,7 +2072,7 @@
     const Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -2214,21 +2214,14 @@
                              CanValueBeSmi());
   } else {
     if (locs()->in(1).IsConstant()) {
-      __ StoreIntoObjectNoBarrierOffset(
-          instance_reg,
-          offset_in_bytes_,
-          locs()->in(1).constant(),
-          is_object_reference_initialization_ ?
-              Assembler::kEmptyOrSmiOrNull :
-              Assembler::kHeapObjectOrSmi);
+      __ StoreIntoObjectNoBarrierOffset(instance_reg,
+                                        offset_in_bytes_,
+                                        locs()->in(1).constant());
     } else {
       const Register value_reg = locs()->in(1).reg();
       __ StoreIntoObjectNoBarrierOffset(instance_reg,
                                         offset_in_bytes_,
-                                        value_reg,
-                                        is_object_reference_initialization_ ?
-                                            Assembler::kEmptyOrSmiOrNull :
-                                            Assembler::kHeapObjectOrSmi);
+                                        value_reg);
     }
   }
   __ Bind(&skip_store);
@@ -2345,12 +2338,12 @@
   // R3: new object end address.
 
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::type_arguments_offset()),
                               kElemTypeReg);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::length_offset()),
                               kLengthReg);
 
diff --git a/runtime/vm/intermediate_language_arm64.cc b/runtime/vm/intermediate_language_arm64.cc
index 9d167dc..642bc9e 100644
--- a/runtime/vm/intermediate_language_arm64.cc
+++ b/runtime/vm/intermediate_language_arm64.cc
@@ -1761,7 +1761,7 @@
           ((IsPotentialUnboxedStore()) ? 2 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1798,7 +1798,7 @@
     const Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -2649,7 +2649,9 @@
   compiler->AddSlowPathCode(slow_path);
 
   __ ldr(TMP, Address(THR, Thread::stack_limit_offset()));
-  __ CompareRegisters(SP, TMP);
+  // Compare against CSP, not SP, because CSP is closer to the stack limit.
+  // See Assembler::EnterFrame.
+  __ CompareRegisters(CSP, TMP);
   __ b(slow_path->entry_label(), LS);
   if (compiler->CanOSRFunction() && in_loop()) {
     const Register temp = locs()->temp(0).reg();
diff --git a/runtime/vm/intermediate_language_dbc.cc b/runtime/vm/intermediate_language_dbc.cc
index 16a2baf..8c954b6 100644
--- a/runtime/vm/intermediate_language_dbc.cc
+++ b/runtime/vm/intermediate_language_dbc.cc
@@ -29,15 +29,12 @@
 DECLARE_FLAG(int, optimization_counter_threshold);
 
 // List of instructions that are still unimplemented by DBC backend.
-#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M) \
-  M(Stop)                                                                      \
+#define FOR_EACH_UNIMPLEMENTED_INSTRUCTION(M)                                  \
   M(IndirectGoto)                                                              \
   M(LoadCodeUnits)                                                             \
-  M(InstanceOf)                                                                \
   M(LoadUntagged)                                                              \
   M(AllocateUninitializedContext)                                              \
   M(BinaryInt32Op)                                                             \
-  M(UnarySmiOp)                                                                \
   M(UnaryDoubleOp)                                                             \
   M(SmiToDouble)                                                               \
   M(Int32ToDouble)                                                             \
@@ -48,7 +45,6 @@
   M(DoubleToFloat)                                                             \
   M(FloatToDouble)                                                             \
   M(UnboxedConstant)                                                           \
-  M(CheckEitherNonSmi)                                                         \
   M(BinaryDoubleOp)                                                            \
   M(MathUnary)                                                                 \
   M(MathMinMax)                                                                \
@@ -59,8 +55,6 @@
   M(BinaryMintOp)                                                              \
   M(ShiftMintOp)                                                               \
   M(UnaryMintOp)                                                               \
-  M(StringToCharCode)                                                          \
-  M(OneByteStringFromCharCode)                                                 \
   M(InvokeMathCFunction)                                                       \
   M(MergedMath)                                                                \
   M(GuardFieldClass)                                                           \
@@ -108,11 +102,6 @@
   M(UnboxInteger32)                                                            \
   M(CheckedSmiOp)                                                              \
   M(CheckArrayBound)                                                           \
-  M(CheckSmi)                                                                  \
-  M(CheckClassId)                                                              \
-  M(CheckClass)                                                                \
-  M(BinarySmiOp)                                                               \
-  M(TestSmi)                                                                   \
   M(RelationalOp)                                                              \
   M(EqualityCompare)                                                           \
   M(LoadIndexed)
@@ -168,7 +157,7 @@
   Condition Name##Instr::EmitComparisonCode(FlowGraphCompiler*,                \
                                             BranchLabels) {                    \
     UNIMPLEMENTED();                                                           \
-    return EQ;                                                                 \
+    return NEXT_IS_TRUE;                                                       \
   }
 
 #define DEFINE_UNIMPLEMENTED(Name)                                             \
@@ -180,12 +169,39 @@
 #undef DEFINE_UNIMPLEMENTED
 
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestCids)
-DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(TestSmi)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(RelationalOp)
 DEFINE_UNIMPLEMENTED_EMIT_BRANCH_CODE(EqualityCompare)
 
 
-DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2, Location::SameAsFirstInput());
+EMIT_NATIVE_CODE(InstanceOf, 2, Location::SameAsFirstInput(),
+                 LocationSummary::kCall) {
+  SubtypeTestCache& test_cache = SubtypeTestCache::Handle();
+  if (!type().IsVoidType() && type().IsInstantiated()) {
+    test_cache = SubtypeTestCache::New();
+  }
+
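+  // In optimized code the inputs are in registers and must be pushed
+  // explicitly; unoptimized code already has them on the expression stack.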
+  if (compiler->is_optimizing()) {
+    __ Push(locs()->in(0).reg());  // Value.
+    __ Push(locs()->in(1).reg());  // Instantiator type arguments.
+  }
+
+  __ PushConstant(type());
+  __ PushConstant(test_cache);
+  __ InstanceOf(negate_result() ? 1 : 0);
+  compiler->RecordSafepoint(locs());
+  compiler->AddCurrentDescriptor(RawPcDescriptors::kOther,
+                                 deopt_id(),
+                                 token_pos());
+
+  if (compiler->is_optimizing()) {
+    __ PopLocal(locs()->out(0).reg());
+  }
+}
+
+
+DEFINE_MAKE_LOCATION_SUMMARY(AssertAssignable, 2,
+                             Location::SameAsFirstInput(),
+                             LocationSummary::kCall);
 
 
 EMIT_NATIVE_CODE(AssertBoolean,
@@ -212,7 +228,13 @@
 
 
 void PolymorphicInstanceCallInstr::EmitNativeCode(FlowGraphCompiler* compiler) {
-  compiler->Bailout(ToCString());
+  Unsupported(compiler);
+  UNREACHABLE();
+}
+
+
+EMIT_NATIVE_CODE(Stop, 0) {
+  __ Stop(message());
 }
 
 
@@ -355,14 +377,14 @@
 static void EmitBranchOnCondition(FlowGraphCompiler* compiler,
                                   Condition true_condition,
                                   BranchLabels labels) {
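+  // A NEXT_IS_TRUE condition means the preceding comparison executes the
+  // next instruction (the jump to the true label) when it holds and skips it
+  // otherwise; NEXT_IS_FALSE is the mirror case.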
-  if (labels.fall_through == labels.false_label) {
-    // If the next block is the false successor, fall through to it.
+  if (true_condition == NEXT_IS_TRUE) {
     __ Jump(labels.true_label);
+    if (labels.fall_through != labels.false_label) {
+      __ Jump(labels.false_label);
+    }
   } else {
-    // If the next block is not the false successor, branch to it.
+    ASSERT(true_condition == NEXT_IS_FALSE);
     __ Jump(labels.false_label);
-
-    // Fall through or jump to the true successor.
     if (labels.fall_through != labels.true_label) {
       __ Jump(labels.true_label);
     }
@@ -375,34 +397,33 @@
   ASSERT((kind() == Token::kNE_STRICT) ||
          (kind() == Token::kEQ_STRICT));
 
+  Token::Kind comparison;
+  Condition condition;
+  if (labels.fall_through == labels.false_label) {
+    condition = NEXT_IS_TRUE;
+    comparison = kind();
+  } else {
+    // Flip the comparison to save a jump.
+    condition = NEXT_IS_FALSE;
+    comparison = (kind() == Token::kEQ_STRICT) ? Token::kNE_STRICT
+                                               : Token::kEQ_STRICT;
+  }
+
   if (!compiler->is_optimizing()) {
     const Bytecode::Opcode eq_op = needs_number_check() ?
         Bytecode::kIfEqStrictNumTOS : Bytecode::kIfEqStrictTOS;
     const Bytecode::Opcode ne_op = needs_number_check() ?
         Bytecode::kIfNeStrictNumTOS : Bytecode::kIfNeStrictTOS;
-
-    if (kind() == Token::kEQ_STRICT) {
-      __ Emit((labels.fall_through == labels.false_label) ? eq_op : ne_op);
-    } else {
-      __ Emit((labels.fall_through == labels.false_label) ? ne_op : eq_op);
-    }
+    __ Emit(comparison == Token::kEQ_STRICT ? eq_op : ne_op);
   } else {
     const Bytecode::Opcode eq_op = needs_number_check() ?
         Bytecode::kIfEqStrictNum : Bytecode::kIfEqStrict;
     const Bytecode::Opcode ne_op = needs_number_check() ?
         Bytecode::kIfNeStrictNum : Bytecode::kIfNeStrict;
-
-    if (kind() == Token::kEQ_STRICT) {
-      __ Emit(Bytecode::Encode(
-          (labels.fall_through == labels.false_label) ? eq_op : ne_op,
-          locs()->in(0).reg(),
-          locs()->in(1).reg()));
-    } else {
-      __ Emit(Bytecode::Encode(
-          (labels.fall_through == labels.false_label) ? ne_op : eq_op,
-          locs()->in(0).reg(),
-          locs()->in(1).reg()));
-    }
+    __ Emit(Bytecode::Encode(
+        (comparison == Token::kEQ_STRICT) ? eq_op : ne_op,
+        locs()->in(0).reg(),
+        locs()->in(1).reg()));
   }
 
   if (needs_number_check() && token_pos().IsReal()) {
@@ -411,7 +432,8 @@
                                    Thread::kNoDeoptId,
                                    token_pos());
   }
-  return EQ;
+
+  return condition;
 }
 
 
@@ -476,6 +498,13 @@
 
 
 EMIT_NATIVE_CODE(Goto, 0) {
+  if (!compiler->is_optimizing()) {
+    // Add a deoptimization descriptor for deoptimizing instructions that
+    // may be inserted before this instruction.
+    compiler->AddCurrentDescriptor(RawPcDescriptors::kDeopt,
+                                   GetDeoptId(),
+                                   TokenPosition::kNoSource);
+  }
   if (HasParallelMove()) {
     compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
   }
@@ -487,6 +516,34 @@
 }
 
 
+Condition TestSmiInstr::EmitComparisonCode(FlowGraphCompiler* compiler,
+                                           BranchLabels labels) {
+  ASSERT((kind() == Token::kEQ) ||
+         (kind() == Token::kNE));
+  Register left = locs()->in(0).reg();
+  Register right = locs()->in(1).reg();
+  __ TestSmi(left, right);
+  return (kind() == Token::kEQ) ? NEXT_IS_TRUE : NEXT_IS_FALSE;
+}
+
+
+void TestSmiInstr::EmitBranchCode(FlowGraphCompiler* compiler,
+                                  BranchInstr* branch) {
+  BranchLabels labels = compiler->CreateBranchLabels(branch);
+  Condition true_condition = EmitComparisonCode(compiler, labels);
+  EmitBranchOnCondition(compiler, true_condition, labels);
+}
+
+
+EMIT_NATIVE_CODE(TestSmi,
+                 2,
+                 Location::RequiresRegister(),
+                 LocationSummary::kNoCall) {
+  // Never emitted outside of the BranchInstr.
+  UNREACHABLE();
+}
+
+
 EMIT_NATIVE_CODE(CreateArray,
                  2, Location::RequiresRegister(),
                  LocationSummary::kCall) {
@@ -505,9 +562,9 @@
 EMIT_NATIVE_CODE(StoreIndexed, 3) {
   if (compiler->is_optimizing()) {
     if (class_id() != kArrayCid) {
-      compiler->Bailout(ToCString());
+      Unsupported(compiler);
+      UNREACHABLE();
     }
-
     __ StoreIndexed(locs()->in(kArrayPos).reg(),
                     locs()->in(kIndexPos).reg(),
                     locs()->in(kValuePos).reg());
@@ -565,6 +622,27 @@
 }
 
 
+EMIT_NATIVE_CODE(OneByteStringFromCharCode,
+                 1, Location::RequiresRegister(),
+                 LocationSummary::kNoCall) {
+  ASSERT(compiler->is_optimizing());
+  const Register char_code = locs()->in(0).reg();  // Char code is a smi.
+  const Register result = locs()->out(0).reg();
+  __ OneByteStringFromCharCode(result, char_code);
+}
+
+
+EMIT_NATIVE_CODE(StringToCharCode,
+                 1, Location::RequiresRegister(),
+                 LocationSummary::kNoCall) {
+  ASSERT(cid_ == kOneByteStringCid);
+  const Register str = locs()->in(0).reg();
+  const Register result = locs()->out(0).reg();  // Result char code is a smi.
+  __ StringToCharCode(result, str);
+}
+
+
 EMIT_NATIVE_CODE(AllocateObject,
                  0, Location::RequiresRegister(),
                  LocationSummary::kCall) {
@@ -842,8 +920,12 @@
 Representation StoreIndexedInstr::RequiredInputRepresentation(
     intptr_t idx) const {
   // Array can be a Dart object or a pointer to external data.
-  if (idx == 0)  return kNoRepresentation;  // Flexible input representation.
-  if (idx == 1) return kTagged;  // Index is a smi.
+  if (idx == 0) {
+    return kNoRepresentation;  // Flexible input representation.
+  }
+  if (idx == 1) {
+    return kTagged;  // Index is a smi.
+  }
   ASSERT(idx == 2);
   switch (class_id_) {
     case kArrayCid:
@@ -895,6 +977,159 @@
 }
 
 
+EMIT_NATIVE_CODE(CheckSmi, 1) {
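+  // Deoptimize if the value is not a Smi.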
+  __ CheckSmi(locs()->in(0).reg());
+  compiler->EmitDeopt(deopt_id(),
+                      ICData::kDeoptCheckSmi,
+                      licm_hoisted_ ? ICData::kHoisted : 0);
+}
+
+
+EMIT_NATIVE_CODE(CheckEitherNonSmi, 2) {
+  intptr_t left_cid = left()->Type()->ToCid();
+  intptr_t right_cid = right()->Type()->ToCid();
+  const Register left = locs()->in(0).reg();
+  const Register right = locs()->in(1).reg();
+  if (this->left()->definition() == this->right()->definition()) {
+    __ CheckSmi(left);
+  } else if (left_cid == kSmiCid) {
+    __ CheckSmi(right);
+  } else if (right_cid == kSmiCid) {
+    __ CheckSmi(left);
+  } else {
+    __ CheckSmi(left);
+    compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinaryDoubleOp,
+                        licm_hoisted_ ? ICData::kHoisted : 0);
+    __ CheckSmi(right);
+  }
+  compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinaryDoubleOp,
+                      licm_hoisted_ ? ICData::kHoisted : 0);
+}
+
+
+EMIT_NATIVE_CODE(CheckClassId, 1) {
+  __ CheckClassId(locs()->in(0).reg(),
+                  compiler->ToEmbeddableCid(cid_, this));
+  compiler->EmitDeopt(deopt_id(), ICData::kDeoptCheckClass);
+}
+
+
+EMIT_NATIVE_CODE(CheckClass, 1) {
+  const Register value = locs()->in(0).reg();
+  if (IsNullCheck()) {
+    ASSERT(DeoptIfNull() || DeoptIfNotNull());
+    if (DeoptIfNull()) {
+      __ IfEqNull(value);
+    } else {
+      __ IfNeNull(value);
+    }
+  } else {
+    ASSERT((unary_checks().GetReceiverClassIdAt(0) != kSmiCid) ||
+           (unary_checks().NumberOfChecks() > 1));
+    const intptr_t may_be_smi =
+        (unary_checks().GetReceiverClassIdAt(0) == kSmiCid) ? 1 : 0;
+    if (IsDenseSwitch()) {
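+      // The low cid and the cid mask are passed via the immediates of the
+      // two Nop instructions that follow.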
+      ASSERT(cids_[0] < cids_[cids_.length() - 1]);
+      const intptr_t low_cid = cids_[0];
+      const intptr_t cid_mask = ComputeCidMask();
+      __ CheckDenseSwitch(value, may_be_smi);
+      __ Nop(compiler->ToEmbeddableCid(low_cid, this));
+      __ Nop(__ AddConstant(Smi::Handle(Smi::New(cid_mask))));
+    } else {
+      GrowableArray<CidTarget> sorted_ic_data;
+      FlowGraphCompiler::SortICDataByCount(unary_checks(),
+                                           &sorted_ic_data,
+                                           /* drop_smi = */ true);
+      const intptr_t sorted_length = sorted_ic_data.length();
+      if (!Utils::IsUint(8, sorted_length)) {
+        Unsupported(compiler);
+        UNREACHABLE();
+      }
+      __ CheckCids(value, may_be_smi, sorted_length);
+      for (intptr_t i = 0; i < sorted_length; i++) {
+        __ Nop(compiler->ToEmbeddableCid(sorted_ic_data[i].cid, this));
+      }
+    }
+  }
+  compiler->EmitDeopt(deopt_id(),
+                      ICData::kDeoptCheckClass,
+                      licm_hoisted_ ? ICData::kHoisted : 0);
+}
+
+
+EMIT_NATIVE_CODE(BinarySmiOp, 2, Location::RequiresRegister()) {
+  const Register left = locs()->in(0).reg();
+  const Register right = locs()->in(1).reg();
+  const Register out = locs()->out(0).reg();
+  const bool can_deopt = CanDeoptimize();
+  bool needs_nop = false;
+  switch (op_kind()) {
+    case Token::kADD:
+      __ Add(out, left, right);
+      needs_nop = true;
+      break;
+    case Token::kSUB:
+      __ Sub(out, left, right);
+      needs_nop = true;
+      break;
+    case Token::kMUL:
+      __ Mul(out, left, right);
+      needs_nop = true;
+      break;
+    case Token::kTRUNCDIV:
+      ASSERT(can_deopt);
+      __ Div(out, left, right);
+      break;
+    case Token::kBIT_AND:
+      ASSERT(!can_deopt);
+      __ BitAnd(out, left, right);
+      break;
+    case Token::kBIT_OR:
+      ASSERT(!can_deopt);
+      __ BitOr(out, left, right);
+      break;
+    case Token::kBIT_XOR:
+      ASSERT(!can_deopt);
+      __ BitXor(out, left, right);
+      break;
+    case Token::kMOD:
+      __ Mod(out, left, right);
+      needs_nop = true;
+      break;
+    case Token::kSHR:
+      __ Shr(out, left, right);
+      needs_nop = true;
+      break;
+    case Token::kSHL:
+      __ Shl(out, left, right);
+      needs_nop = true;
+      break;
+    default:
+      UNREACHABLE();
+  }
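+  // Operations that can deoptimize are followed by a deopt; the others that
+  // set needs_nop get a Nop placeholder in the same slot.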
+  if (can_deopt) {
+    compiler->EmitDeopt(deopt_id(), ICData::kDeoptBinarySmiOp);
+  } else if (needs_nop) {
+    __ Nop(0);
+  }
+}
+
+
+EMIT_NATIVE_CODE(UnarySmiOp, 1, Location::RequiresRegister()) {
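+  // Negating the most negative Smi overflows, so kNEGATE needs a deopt;
+  // kBIT_NOT cannot overflow.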
+  switch (op_kind()) {
+    case Token::kNEGATE: {
+      __ Neg(locs()->out(0).reg(), locs()->in(0).reg());
+      compiler->EmitDeopt(deopt_id(), ICData::kDeoptUnaryOp);
+      break;
+    }
+    case Token::kBIT_NOT:
+      __ BitNot(locs()->out(0).reg(), locs()->in(0).reg());
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
+
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_DBC
diff --git a/runtime/vm/intermediate_language_ia32.cc b/runtime/vm/intermediate_language_ia32.cc
index aa3c355..34555dc 100644
--- a/runtime/vm/intermediate_language_ia32.cc
+++ b/runtime/vm/intermediate_language_ia32.cc
@@ -1725,7 +1725,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1787,7 +1787,7 @@
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -1935,18 +1935,12 @@
       __ StoreIntoObjectNoBarrier(
           instance_reg,
           FieldAddress(instance_reg, offset_in_bytes_),
-          locs()->in(1).constant(),
-          is_object_reference_initialization_ ?
-              Assembler::kEmptyOrSmiOrNull :
-              Assembler::kHeapObjectOrSmi);
+          locs()->in(1).constant());
     } else {
       Register value_reg = locs()->in(1).reg();
       __ StoreIntoObjectNoBarrier(instance_reg,
                                   FieldAddress(instance_reg, offset_in_bytes_),
-                                  value_reg,
-                                  is_object_reference_initialization_ ?
-                                      Assembler::kEmptyOrSmiOrNull :
-                                      Assembler::kHeapObjectOrSmi);
+                                  value_reg);
     }
   }
   __ Bind(&skip_store);
@@ -2066,12 +2060,12 @@
                       EDI);  // temp
 
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::type_arguments_offset()),
                               kElemTypeReg);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::length_offset()),
                               kLengthReg);
 
@@ -2089,13 +2083,13 @@
       intptr_t current_offset = 0;
       __ movl(EBX, raw_null);
       while (current_offset < array_size) {
-        __ InitializeFieldNoBarrier(EAX, Address(EDI, current_offset), EBX);
+        __ StoreIntoObjectNoBarrier(EAX, Address(EDI, current_offset), EBX);
         current_offset += kWordSize;
       }
     } else {
       Label init_loop;
       __ Bind(&init_loop);
-      __ InitializeFieldNoBarrier(EAX, Address(EDI, 0), Object::null_object());
+      __ StoreIntoObjectNoBarrier(EAX, Address(EDI, 0), Object::null_object());
       __ addl(EDI, Immediate(kWordSize));
       __ cmpl(EDI, EBX);
       __ j(BELOW, &init_loop, Assembler::kNearJump);
diff --git a/runtime/vm/intermediate_language_mips.cc b/runtime/vm/intermediate_language_mips.cc
index 6b9100b..342e267 100644
--- a/runtime/vm/intermediate_language_mips.cc
+++ b/runtime/vm/intermediate_language_mips.cc
@@ -1922,7 +1922,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1978,7 +1978,7 @@
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
diff --git a/runtime/vm/intermediate_language_x64.cc b/runtime/vm/intermediate_language_x64.cc
index 2b71a16..da4b4c2 100644
--- a/runtime/vm/intermediate_language_x64.cc
+++ b/runtime/vm/intermediate_language_x64.cc
@@ -1753,7 +1753,7 @@
           ((IsPotentialUnboxedStore()) ? 3 : 0);
   LocationSummary* summary = new(zone) LocationSummary(
       zone, kNumInputs, kNumTemps,
-          ((IsUnboxedStore() && opt && is_potential_unboxed_initialization_) ||
+          ((IsUnboxedStore() && opt && is_initialization()) ||
            IsPotentialUnboxedStore())
           ? LocationSummary::kCallOnSlowPath
           : LocationSummary::kNoCall);
@@ -1813,7 +1813,7 @@
     Register temp2 = locs()->temp(1).reg();
     const intptr_t cid = field().UnboxedFieldCid();
 
-    if (is_potential_unboxed_initialization_) {
+    if (is_initialization()) {
       const Class* cls = NULL;
       switch (cid) {
         case kDoubleCid:
@@ -1958,18 +1958,12 @@
     if (locs()->in(1).IsConstant()) {
       __ StoreIntoObjectNoBarrier(instance_reg,
                                   FieldAddress(instance_reg, offset_in_bytes_),
-                                  locs()->in(1).constant(),
-                                  is_object_reference_initialization_ ?
-                                      Assembler::kEmptyOrSmiOrNull :
-                                      Assembler::kHeapObjectOrSmi);
+                                  locs()->in(1).constant());
     } else {
       Register value_reg = locs()->in(1).reg();
       __ StoreIntoObjectNoBarrier(instance_reg,
           FieldAddress(instance_reg, offset_in_bytes_),
-          value_reg,
-          is_object_reference_initialization_ ?
-              Assembler::kEmptyOrSmiOrNull :
-              Assembler::kHeapObjectOrSmi);
+          value_reg);
     }
   }
   __ Bind(&skip_store);
@@ -2086,12 +2080,12 @@
 
   // RAX: new object start as a tagged pointer.
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::type_arguments_offset()),
                               kElemTypeReg);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::length_offset()),
                               kLengthReg);
 
@@ -2107,13 +2101,13 @@
     if (array_size < (kInlineArraySize * kWordSize)) {
       intptr_t current_offset = 0;
       while (current_offset < array_size) {
-        __ InitializeFieldNoBarrier(RAX, Address(RDI, current_offset), R12);
+        __ StoreIntoObjectNoBarrier(RAX, Address(RDI, current_offset), R12);
         current_offset += kWordSize;
       }
     } else {
       Label init_loop;
       __ Bind(&init_loop);
-      __ InitializeFieldNoBarrier(RAX, Address(RDI, 0), R12);
+      __ StoreIntoObjectNoBarrier(RAX, Address(RDI, 0), R12);
       __ addq(RDI, Immediate(kWordSize));
       __ cmpq(RDI, RCX);
       __ j(BELOW, &init_loop, Assembler::kNearJump);
diff --git a/runtime/vm/intrinsifier.cc b/runtime/vm/intrinsifier.cc
index bdd7c5c..9e049d0 100644
--- a/runtime/vm/intrinsifier.cc
+++ b/runtime/vm/intrinsifier.cc
@@ -68,7 +68,7 @@
   String& str = String::Handle(zone);
   Error& error = Error::Handle(zone);
 
-#define SETUP_FUNCTION(class_name, function_name, destination, fp)             \
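+// The intrinsic list entries carry an additional 'type' field; the extra
+// macro argument is unused here.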
+#define SETUP_FUNCTION(class_name, function_name, destination, type, fp)       \
   if (strcmp(#class_name, "::") == 0) {                                        \
     str = String::New(#function_name);                                         \
     func = lib.LookupFunctionAllowPrivate(str);                                \
@@ -179,7 +179,7 @@
   FlowGraph* graph = new FlowGraph(parsed_function, graph_entry, block_id);
   const Function& function = parsed_function.function();
   switch (function.recognized_kind()) {
-#define EMIT_CASE(class_name, function_name, enum_name, fp)                    \
+#define EMIT_CASE(class_name, function_name, enum_name, type, fp)              \
     case MethodRecognizer::k##enum_name:                                       \
       if (!Build_##enum_name(graph)) return false;                             \
       break;
@@ -227,7 +227,7 @@
     return;
   }
 
-#define EMIT_CASE(class_name, function_name, enum_name, fp)                    \
+#define EMIT_CASE(class_name, function_name, enum_name, type, fp)              \
     case MethodRecognizer::k##enum_name:                                       \
       compiler->assembler()->Comment("Intrinsic");                             \
       enum_name(compiler->assembler());                                        \
@@ -607,7 +607,8 @@
       String::Handle(flow_graph->function().name()),
       Object::empty_array(),  // Dummy args. descr.
       Thread::kNoDeoptId,
-      1));
+      1,
+      false));
   value_check.AddReceiverCheck(kDoubleCid, flow_graph->function());
   builder.AddInstruction(
       new CheckClassInstr(new Value(value),
@@ -740,7 +741,8 @@
       String::Handle(flow_graph->function().name()),
       Object::empty_array(),  // Dummy args. descr.
       Thread::kNoDeoptId,
-      1));
+      1,
+      false));
   value_check.AddReceiverCheck(kFloat32x4Cid, flow_graph->function());
   // Check argument. Receiver (left) is known to be a Float32x4.
   builder.AddInstruction(
@@ -982,7 +984,8 @@
       String::Handle(flow_graph->function().name()),
       Object::empty_array(),  // Dummy args. descr.
       Thread::kNoDeoptId,
-      1));
+      1,
+      false));
   value_check.AddReceiverCheck(kArrayCid, flow_graph->function());
   builder.AddInstruction(
       new CheckClassInstr(new Value(data),
diff --git a/runtime/vm/intrinsifier.h b/runtime/vm/intrinsifier.h
index 54eec6e..100dee4 100644
--- a/runtime/vm/intrinsifier.h
+++ b/runtime/vm/intrinsifier.h
@@ -38,7 +38,7 @@
  private:
   static bool CanIntrinsify(const Function& function);
 
-#define DECLARE_FUNCTION(test_class_name, test_function_name, enum_name, fp)   \
+#define DECLARE_FUNCTION(class_name, function_name, enum_name, type, fp) \
   static void enum_name(Assembler* assembler);
 
   ALL_INTRINSICS_LIST(DECLARE_FUNCTION)
@@ -50,7 +50,7 @@
 #undef DECLARE_FUNCTION
 
 #if !defined(TARGET_ARCH_DBC)
-#define DECLARE_FUNCTION(test_class_name, test_function_name, enum_name, fp)   \
+#define DECLARE_FUNCTION(class_name, function_name, enum_name, type, fp) \
   static bool Build_##enum_name(FlowGraph* flow_graph);
 
   GRAPH_INTRINSICS_LIST(DECLARE_FUNCTION)
diff --git a/runtime/vm/intrinsifier_arm.cc b/runtime/vm/intrinsifier_arm.cc
index e17abcb..2daafed 100644
--- a/runtime/vm/intrinsifier_arm.cc
+++ b/runtime/vm/intrinsifier_arm.cc
@@ -103,7 +103,7 @@
   // Store backing array object in growable array object.
   __ ldr(R1, Address(SP, kArrayOffset));  // Data argument.
   // R0 is new, no barrier needed.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       R0,
       FieldAddress(R0, GrowableObjectArray::data_offset()),
       R1);
@@ -111,14 +111,14 @@
   // R0: new growable array object start as a tagged pointer.
   // Store the type argument field in the growable array object.
   __ ldr(R1, Address(SP, kTypeArgumentsOffset));  // Type argument.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       R0,
       FieldAddress(R0, GrowableObjectArray::type_arguments_offset()),
       R1);
 
   // Set the length field in the growable array object to 0.
   __ LoadImmediate(R1, 0);
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       R0,
       FieldAddress(R0, GrowableObjectArray::length_offset()),
       R1);
@@ -166,7 +166,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift)           \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 0 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, R2, &fall_through);                             \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, R2, &fall_through));             \
   __ ldr(R2, Address(SP, kArrayLengthStackOffset));  /* Array length. */       \
   /* Check that length is a positive Smi. */                                   \
   /* R2: requested array length argument. */                                   \
@@ -183,7 +183,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ AddImmediate(R2, fixed_size);                                             \
   __ bic(R2, R2, Operand(kObjectAlignment - 1));                               \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ ldr(R3, Address(THR, Thread::heap_offset()));                             \
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));                             \
                                                                                \
@@ -202,7 +202,7 @@
                                                                                \
   /* Successfully allocated the object(s), now update top to point to */       \
   /* next object start and initialize the object. */                           \
-  __ LoadAllocationStatsAddress(R4, cid);                                      \
+  NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R4, cid));                      \
   __ str(R1, Address(R3, Heap::TopOffset(space)));                             \
   __ AddImmediate(R0, kHeapObjectTag);                                         \
   /* Initialize the tags. */                                                   \
@@ -227,7 +227,7 @@
   /* R2: allocation size. */                                                   \
   /* R4: allocation stats address. */                                          \
   __ ldr(R3, Address(SP, kArrayLengthStackOffset));  /* Array length. */       \
-  __ InitializeFieldNoBarrier(R0,                                              \
+  __ StoreIntoObjectNoBarrier(R0,                                              \
                               FieldAddress(R0, type_name::length_offset()),    \
                               R3);                                             \
   /* Initialize all array elements to 0. */                                    \
@@ -249,7 +249,7 @@
   __ b(&init_loop, CC);                                                        \
   __ str(R8, Address(R3, -2 * kWordSize), HI);                                 \
                                                                                \
-  __ IncrementAllocationStatsWithSize(R4, R2, space);                          \
+  NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R4, R2, space));          \
   __ Ret();                                                                    \
   __ Bind(&fall_through);                                                      \
 
@@ -778,6 +778,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
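+  // The generic integer implementation handles Smi operands as well.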
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1824,7 +1829,7 @@
                                      Label* failure) {
   const Register length_reg = R2;
   Label fail;
-  __ MaybeTraceAllocation(kOneByteStringCid, R0, failure);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, R0, failure));
   __ mov(R8, Operand(length_reg));  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
@@ -1833,7 +1838,7 @@
   __ bic(length_reg, length_reg, Operand(kObjectAlignment - 1));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ ldr(R3, Address(THR, Thread::heap_offset()));
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));
 
@@ -1852,7 +1857,7 @@
 
   // Successfully allocated the object(s), now update top to point to
   // next object start and initialize the object.
-  __ LoadAllocationStatsAddress(R4, cid);
+  NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R4, cid));
   __ str(R1, Address(R3, Heap::TopOffset(space)));
   __ AddImmediate(R0, kHeapObjectTag);
 
@@ -1876,16 +1881,16 @@
   }
 
   // Set the length field using the saved length (R8).
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, String::length_offset()),
                               R8);
   // Clear hash.
   __ LoadImmediate(TMP, 0);
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, String::hash_offset()),
                               TMP);
 
-  __ IncrementAllocationStatsWithSize(R4, R2, space);
+  NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R4, R2, space));
   __ b(ok);
 
   __ Bind(&fail);
diff --git a/runtime/vm/intrinsifier_arm64.cc b/runtime/vm/intrinsifier_arm64.cc
index fbaa1677..f9cade2 100644
--- a/runtime/vm/intrinsifier_arm64.cc
+++ b/runtime/vm/intrinsifier_arm64.cc
@@ -183,7 +183,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift)           \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 0 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, R2, &fall_through);                             \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, R2, &fall_through));             \
   __ ldr(R2, Address(SP, kArrayLengthStackOffset));  /* Array length. */       \
   /* Check that length is a positive Smi. */                                   \
   /* R2: requested array length argument. */                                   \
@@ -200,7 +200,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ AddImmediate(R2, R2, fixed_size);                                         \
   __ andi(R2, R2, Immediate(~(kObjectAlignment - 1)));                         \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ ldr(R3, Address(THR, Thread::heap_offset()));                             \
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));                             \
                                                                                \
@@ -221,7 +221,7 @@
   /* next object start and initialize the object. */                           \
   __ str(R1, Address(R3, Heap::TopOffset(space)));                             \
   __ AddImmediate(R0, R0, kHeapObjectTag);                                     \
-  __ UpdateAllocationStatsWithSize(cid, R2, space);                            \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space));            \
   /* Initialize the tags. */                                                   \
   /* R0: new object start as a tagged pointer. */                              \
   /* R1: new object end address. */                                            \
@@ -682,6 +682,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1907,7 +1912,7 @@
                                      Label* failure) {
   const Register length_reg = R2;
   Label fail;
-  __ MaybeTraceAllocation(kOneByteStringCid, R0, failure);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, R0, failure));
   __ mov(R6, length_reg);  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
@@ -1916,7 +1921,7 @@
   __ andi(length_reg, length_reg, Immediate(~(kObjectAlignment - 1)));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ ldr(R3, Address(THR, Thread::heap_offset()));
   __ ldr(R0, Address(R3, Heap::TopOffset(space)));
 
@@ -1937,7 +1942,7 @@
   // next object start and initialize the object.
   __ str(R1, Address(R3, Heap::TopOffset(space)));
   __ AddImmediate(R0, R0, kHeapObjectTag);
-  __ UpdateAllocationStatsWithSize(cid, R2, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space));
 
   // Initialize the tags.
   // R0: new object start as a tagged pointer.
diff --git a/runtime/vm/intrinsifier_dbc.cc b/runtime/vm/intrinsifier_dbc.cc
index 21a3ae0..3d0b5d1 100644
--- a/runtime/vm/intrinsifier_dbc.cc
+++ b/runtime/vm/intrinsifier_dbc.cc
@@ -23,7 +23,7 @@
 
 intptr_t Intrinsifier::ParameterSlotFromSp() { return -1; }
 
-#define DEFINE_FUNCTION(test_class_name, test_function_name, enum_name, fp)    \
+#define DEFINE_FUNCTION(class_name, test_function_name, enum_name, type, fp)   \
   void Intrinsifier::enum_name(Assembler* assembler) {                         \
     if (Simulator::IsSupportedIntrinsic(Simulator::k##enum_name##Intrinsic)) { \
       assembler->Intrinsic(Simulator::k##enum_name##Intrinsic);                \
diff --git a/runtime/vm/intrinsifier_ia32.cc b/runtime/vm/intrinsifier_ia32.cc
index 130b5d6..b9ea3a1 100644
--- a/runtime/vm/intrinsifier_ia32.cc
+++ b/runtime/vm/intrinsifier_ia32.cc
@@ -136,17 +136,12 @@
   // Try allocating in new space.
   const Class& cls = Class::Handle(
       Isolate::Current()->object_store()->growable_object_array_class());
-#if defined(DEBUG)
-  static const bool kJumpLength = Assembler::kFarJump;
-#else
-  static const bool kJumpLength = Assembler::kNearJump;
-#endif  // DEBUG
-  __ TryAllocate(cls, &fall_through, kJumpLength, EAX, EBX);
+  __ TryAllocate(cls, &fall_through, Assembler::kNearJump, EAX, EBX);
 
   // Store backing array object in growable array object.
   __ movl(EBX, Address(ESP, kArrayOffset));  // data argument.
   // EAX is new, no barrier needed.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       EAX,
       FieldAddress(EAX, GrowableObjectArray::data_offset()),
       EBX);
@@ -154,7 +149,7 @@
   // EAX: new growable array object start as a tagged pointer.
   // Store the type argument field in the growable array object.
   __ movl(EBX, Address(ESP, kTypeArgumentsOffset));  // type argument.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       EAX,
       FieldAddress(EAX, GrowableObjectArray::type_arguments_offset()),
       EBX);
@@ -200,7 +195,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor)          \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 1 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, EDI, &fall_through, false);                     \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, EDI, &fall_through, false));     \
   __ movl(EDI, Address(ESP, kArrayLengthStackOffset));  /* Array length. */    \
   /* Check that length is a positive Smi. */                                   \
   /* EDI: requested array length argument. */                                  \
@@ -223,7 +218,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ leal(EDI, Address(EDI, scale_factor, fixed_size));                        \
   __ andl(EDI, Immediate(-kObjectAlignment));                                  \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ movl(ECX, Address(THR, Thread::heap_offset()));                           \
   __ movl(EAX, Address(ECX, Heap::TopOffset(space)));                          \
   __ movl(EBX, EAX);                                                           \
@@ -244,7 +239,7 @@
   /* next object start and initialize the object. */                           \
   __ movl(Address(ECX, Heap::TopOffset(space)), EBX);                          \
   __ addl(EAX, Immediate(kHeapObjectTag));                                     \
-  __ UpdateAllocationStatsWithSize(cid, EDI, ECX, space);                      \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EDI, ECX, space));      \
                                                                                \
   /* Initialize the tags. */                                                   \
   /* EAX: new object start as a tagged pointer. */                             \
@@ -269,7 +264,7 @@
   /* EAX: new object start as a tagged pointer. */                             \
   /* EBX: new object end address. */                                           \
   __ movl(EDI, Address(ESP, kArrayLengthStackOffset));  /* Array length. */    \
-  __ InitializeFieldNoBarrier(EAX,                                             \
+  __ StoreIntoObjectNoBarrier(EAX,                                             \
                               FieldAddress(EAX, type_name::length_offset()),   \
                               EDI);                                            \
   /* Initialize all array elements to 0. */                                    \
@@ -822,6 +817,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1859,7 +1859,8 @@
                                      Label* ok,
                                      Label* failure,
                                      Register length_reg) {
-  __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false);
+  NOT_IN_PRODUCT(
+    __ MaybeTraceAllocation(kOneByteStringCid, EAX, failure, false));
   if (length_reg != EDI) {
     __ movl(EDI, length_reg);
   }
@@ -1871,7 +1872,7 @@
   __ andl(EDI, Immediate(-kObjectAlignment));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movl(ECX, Address(THR, Thread::heap_offset()));
   __ movl(EAX, Address(ECX, Heap::TopOffset(space)));
   __ movl(EBX, EAX);
@@ -1893,7 +1894,7 @@
   __ movl(Address(ECX, Heap::TopOffset(space)), EBX);
   __ addl(EAX, Immediate(kHeapObjectTag));
 
-  __ UpdateAllocationStatsWithSize(cid, EDI, ECX, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EDI, ECX, space));
 
   // Initialize the tags.
   // EAX: new object start as a tagged pointer.
@@ -1917,7 +1918,7 @@
 
   // Set the length field.
   __ popl(EDI);
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, String::length_offset()),
                               EDI);
   // Clear hash.
diff --git a/runtime/vm/intrinsifier_mips.cc b/runtime/vm/intrinsifier_mips.cc
index d8da470..748f06d 100644
--- a/runtime/vm/intrinsifier_mips.cc
+++ b/runtime/vm/intrinsifier_mips.cc
@@ -161,7 +161,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_shift)           \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 0 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, T2, &fall_through);                             \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, T2, &fall_through));             \
   __ lw(T2, Address(SP, kArrayLengthStackOffset));  /* Array length. */        \
   /* Check that length is a positive Smi. */                                   \
   /* T2: requested array length argument. */                                   \
@@ -177,7 +177,7 @@
   __ AddImmediate(T2, fixed_size);                                             \
   __ LoadImmediate(TMP, -kObjectAlignment);                                    \
   __ and_(T2, T2, TMP);                                                        \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ lw(T3, Address(THR, Thread::heap_offset()));                              \
   __ lw(V0, Address(T3, Heap::TopOffset(space)));                              \
                                                                                \
@@ -198,7 +198,7 @@
   /* next object start and initialize the object. */                           \
   __ sw(T1, Address(T3, Heap::TopOffset(space)));                              \
   __ AddImmediate(V0, kHeapObjectTag);                                         \
-  __ UpdateAllocationStatsWithSize(cid, T2, T4, space);                        \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T4, space));        \
   /* Initialize the tags. */                                                   \
   /* V0: new object start as a tagged pointer. */                              \
   /* T1: new object end address. */                                            \
@@ -776,6 +776,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1941,7 +1946,7 @@
                                      Label* ok,
                                      Label* failure) {
   const Register length_reg = T2;
-  __ MaybeTraceAllocation(kOneByteStringCid, V0, failure);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, V0, failure));
   __ mov(T6, length_reg);  // Save the length register.
   // TODO(koda): Protect against negative length and overflow here.
   __ SmiUntag(length_reg);
@@ -1951,7 +1956,7 @@
   __ and_(length_reg, length_reg, TMP);
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ lw(T3, Address(THR, Thread::heap_offset()));
   __ lw(V0, Address(T3, Heap::TopOffset(space)));
 
@@ -1972,7 +1977,7 @@
   __ sw(T1, Address(T3, Heap::TopOffset(space)));
   __ AddImmediate(V0, kHeapObjectTag);
 
-  __ UpdateAllocationStatsWithSize(cid, T2, T3, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T3, space));
 
   // Initialize the tags.
   // V0: new object start as a tagged pointer.
diff --git a/runtime/vm/intrinsifier_x64.cc b/runtime/vm/intrinsifier_x64.cc
index 5ac5d39..cdb11df 100644
--- a/runtime/vm/intrinsifier_x64.cc
+++ b/runtime/vm/intrinsifier_x64.cc
@@ -100,7 +100,7 @@
   // Store backing array object in growable array object.
   __ movq(RCX, Address(RSP, kArrayOffset));  // data argument.
   // RAX is new, no barrier needed.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       RAX,
       FieldAddress(RAX, GrowableObjectArray::data_offset()),
       RCX);
@@ -108,7 +108,7 @@
   // RAX: new growable array object start as a tagged pointer.
   // Store the type argument field in the growable array object.
   __ movq(RCX, Address(RSP, kTypeArgumentsOffset));  // type argument.
-  __ InitializeFieldNoBarrier(
+  __ StoreIntoObjectNoBarrier(
       RAX,
       FieldAddress(RAX, GrowableObjectArray::type_arguments_offset()),
       RCX);
@@ -153,7 +153,7 @@
 #define TYPED_ARRAY_ALLOCATION(type_name, cid, max_len, scale_factor)          \
   Label fall_through;                                                          \
   const intptr_t kArrayLengthStackOffset = 1 * kWordSize;                      \
-  __ MaybeTraceAllocation(cid, &fall_through, false);                          \
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, &fall_through, false));          \
   __ movq(RDI, Address(RSP, kArrayLengthStackOffset));  /* Array length. */    \
   /* Check that length is a positive Smi. */                                   \
   /* RDI: requested array length argument. */                                  \
@@ -176,7 +176,7 @@
   const intptr_t fixed_size = sizeof(Raw##type_name) + kObjectAlignment - 1;   \
   __ leaq(RDI, Address(RDI, scale_factor, fixed_size));                        \
   __ andq(RDI, Immediate(-kObjectAlignment));                                  \
-  Heap::Space space = Heap::SpaceForAllocation(cid);                           \
+  Heap::Space space = Heap::kNew;                                              \
   __ movq(R13, Address(THR, Thread::heap_offset()));                           \
   __ movq(RAX, Address(R13, Heap::TopOffset(space)));                          \
   __ movq(RCX, RAX);                                                           \
@@ -197,7 +197,7 @@
   /* next object start and initialize the object. */                           \
   __ movq(Address(R13, Heap::TopOffset(space)), RCX);                          \
   __ addq(RAX, Immediate(kHeapObjectTag));                                     \
-  __ UpdateAllocationStatsWithSize(cid, RDI, space);                           \
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space));           \
   /* Initialize the tags. */                                                   \
   /* RAX: new object start as a tagged pointer. */                             \
   /* RCX: new object end address. */                                           \
@@ -222,7 +222,7 @@
   /* RAX: new object start as a tagged pointer. */                             \
   /* RCX: new object end address. */                                           \
   __ movq(RDI, Address(RSP, kArrayLengthStackOffset));  /* Array length. */    \
-  __ InitializeFieldNoBarrier(RAX,                                             \
+  __ StoreIntoObjectNoBarrier(RAX,                                             \
                               FieldAddress(RAX, type_name::length_offset()),   \
                               RDI);                                            \
   /* Initialize all array elements to 0. */                                    \
@@ -741,6 +741,11 @@
 }
 
 
+void Intrinsifier::Smi_bitAndFromSmi(Assembler* assembler) {
+  Integer_bitAndFromInteger(assembler);
+}
+
+
 void Intrinsifier::Bigint_lsh(Assembler* assembler) {
   // static void _lsh(Uint32List x_digits, int x_used, int n,
   //                  Uint32List r_digits)
@@ -1826,7 +1831,7 @@
                                      Label* ok,
                                      Label* failure,
                                      Register length_reg) {
-  __ MaybeTraceAllocation(kOneByteStringCid, failure, false);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kOneByteStringCid, failure, false));
   if (length_reg != RDI) {
     __ movq(RDI, length_reg);
   }
@@ -1838,7 +1843,7 @@
   __ andq(RDI, Immediate(-kObjectAlignment));
 
   const intptr_t cid = kOneByteStringCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movq(R13, Address(THR, Thread::heap_offset()));
   __ movq(RAX, Address(R13, Heap::TopOffset(space)));
 
@@ -1859,7 +1864,7 @@
   // next object start and initialize the object.
   __ movq(Address(R13, Heap::TopOffset(space)), RCX);
   __ addq(RAX, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, RDI, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space));
 
   // Initialize the tags.
   // RAX: new object start as a tagged pointer.
@@ -1882,7 +1887,7 @@
 
   // Set the length field.
   __ popq(RDI);
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, String::length_offset()),
                               RDI);
   // Clear hash.
diff --git a/runtime/vm/isolate.cc b/runtime/vm/isolate.cc
index 8ed0873..eee6d04 100644
--- a/runtime/vm/isolate.cc
+++ b/runtime/vm/isolate.cc
@@ -55,6 +55,7 @@
 DECLARE_FLAG(bool, trace_service);
 DECLARE_FLAG(bool, trace_reload);
 DECLARE_FLAG(bool, warn_on_pause_with_no_debugger);
+DECLARE_FLAG(bool, check_reloaded);
 
 NOT_IN_PRODUCT(
 static void CheckedModeHandler(bool value) {
@@ -797,6 +798,7 @@
       symbols_mutex_(new Mutex()),
       type_canonicalization_mutex_(new Mutex()),
       constant_canonicalization_mutex_(new Mutex()),
+      megamorphic_lookup_mutex_(new Mutex()),
       message_handler_(NULL),
       spawn_state_(NULL),
       is_runnable_(false),
@@ -813,6 +815,7 @@
       tag_table_(GrowableObjectArray::null()),
       deoptimized_code_array_(GrowableObjectArray::null()),
       sticky_error_(Error::null()),
+      sticky_reload_error_(Error::null()),
       background_compiler_(NULL),
       background_compiler_disabled_depth_(0),
       pending_service_extension_calls_(GrowableObjectArray::null()),
@@ -862,6 +865,8 @@
   type_canonicalization_mutex_ = NULL;
   delete constant_canonicalization_mutex_;
   constant_canonicalization_mutex_ = NULL;
+  delete megamorphic_lookup_mutex_;
+  megamorphic_lookup_mutex_ = NULL;
   delete message_handler_;
   message_handler_ = NULL;  // Fail fast if we send messages to a dead isolate.
   ASSERT(deopt_context_ == NULL);  // No deopt in progress when isolate deleted.
@@ -1066,11 +1071,6 @@
 }
 
 
-void Isolate::OnStackReload() {
-  UNREACHABLE();
-}
-
-
 void Isolate::ReloadSources(bool test_mode) {
   ASSERT(!IsReloading());
   has_attempted_reload_ = true;
@@ -1090,6 +1090,10 @@
         // context on the isolate so that it can be used by unit tests.
         return;
       }
+      if (reload_context_->has_error()) {
+        // Remember the reload error.
+        sticky_reload_error_ = reload_context_->error();
+      }
       if (!reload_context_->has_error()) {
         reload_context_->ReportSuccess();
       }
@@ -1552,7 +1556,8 @@
   void VisitHandle(uword addr) {
     FinalizablePersistentHandle* handle =
         reinterpret_cast<FinalizablePersistentHandle*>(addr);
-    handle->UpdateUnreachable(thread()->isolate());
+    FinalizationQueue* queue = NULL;  // Finalize in the foreground.
+    handle->UpdateUnreachable(thread()->isolate(), queue);
   }
 
  private:
@@ -1675,10 +1680,29 @@
   if (heap_ != NULL) {
     // Wait for any concurrent GC tasks to finish before shutting down.
     // TODO(koda): Support faster sweeper shutdown (e.g., after current page).
-    PageSpace* old_space = heap_->old_space();
-    MonitorLocker ml(old_space->tasks_lock());
-    while (old_space->tasks() > 0) {
-      ml.Wait();
+    {
+      PageSpace* old_space = heap_->old_space();
+      MonitorLocker ml(old_space->tasks_lock());
+      while (old_space->tasks() > 0) {
+        ml.Wait();
+      }
+    }
+
+    // Wait for background finalization to finish before shutting down.
+    {
+      MonitorLocker ml(heap_->finalization_tasks_lock());
+      while (heap_->finalization_tasks() > 0) {
+        ml.Wait();
+      }
+    }
+  }
+
+  if (FLAG_check_reloaded &&
+      (this != Dart::vm_isolate()) &&
+      !ServiceIsolate::IsServiceIsolateDescendant(this)) {
+    if (!HasAttemptedReload()) {
+      FATAL("Isolate did not reload before exiting and "
+            "--check-reloaded is enabled.\n");
     }
   }
 
@@ -1752,6 +1776,9 @@
   visitor->VisitPointer(
         reinterpret_cast<RawObject**>(&sticky_error_));
 
+  visitor->VisitPointer(
+        reinterpret_cast<RawObject**>(&sticky_reload_error_));
+
   // Visit the pending service extension calls.
   visitor->VisitPointer(
       reinterpret_cast<RawObject**>(&pending_service_extension_calls_));
@@ -1776,6 +1803,9 @@
     if (reload_context() != NULL) {
       reload_context()->VisitObjectPointers(visitor);
     }
+    if (ServiceIsolate::IsServiceIsolate(this)) {
+      ServiceIsolate::VisitObjectPointers(visitor);
+    }
   )
 
   // Visit objects that are being used for deoptimization.
@@ -1817,6 +1847,7 @@
 }
 
 
+#ifndef PRODUCT
 static const char* ExceptionPauseInfoToServiceEnum(Dart_ExceptionPauseInfo pi) {
   switch (pi) {
     case kPauseOnAllExceptions:
@@ -1953,6 +1984,7 @@
     }
   }
 }
+#endif
 
 
 void Isolate::set_tag_table(const GrowableObjectArray& value) {
@@ -2002,6 +2034,11 @@
 }
 
 
+void Isolate::clear_sticky_reload_error() {
+  sticky_reload_error_ = Error::null();
+}
+
+
 void Isolate::set_pending_service_extension_calls(
       const GrowableObjectArray& value) {
   pending_service_extension_calls_ = value.raw();
@@ -2626,7 +2663,9 @@
   const String& lib_url = String::Handle(lib.url());
   library_url_ = NewConstChar(lib_url.ToCString());
 
-  const String& func_name = String::Handle(func.name());
+  String& func_name = String::Handle();
+  func_name ^= func.name();
+  func_name ^= String::ScrubName(func_name);
   function_name_ = NewConstChar(func_name.ToCString());
   if (!cls.IsTopLevel()) {
     const String& class_name = String::Handle(cls.Name());
diff --git a/runtime/vm/isolate.h b/runtime/vm/isolate.h
index 5c219cc..7f1df6d 100644
--- a/runtime/vm/isolate.h
+++ b/runtime/vm/isolate.h
@@ -255,7 +255,6 @@
   void DoneLoading();
   void DoneFinalizing();
 
-  void OnStackReload();
   void ReloadSources(bool test_mode = false);
 
   bool MakeRunnable();
@@ -283,6 +282,9 @@
   Mutex* constant_canonicalization_mutex() const {
     return constant_canonicalization_mutex_;
   }
+  Mutex* megamorphic_lookup_mutex() const {
+    return megamorphic_lookup_mutex_;
+  }
 
   Debugger* debugger() const {
     if (!FLAG_support_debugger) {
@@ -456,7 +458,9 @@
     return defer_finalization_count_ == 0;
   }
 
+#ifndef PRODUCT
   void PrintJSON(JSONStream* stream, bool ref = true);
+#endif
 
   // Mutator thread is used to aggregate compiler stats.
   CompilerStats* aggregate_compiler_stats() {
@@ -532,6 +536,9 @@
   RawError* sticky_error() const { return sticky_error_; }
   void clear_sticky_error();
 
+  RawError* sticky_reload_error() const { return sticky_reload_error_; }
+  void clear_sticky_reload_error();
+
   bool compilation_allowed() const { return compilation_allowed_; }
   void set_compilation_allowed(bool allowed) {
     compilation_allowed_ = allowed;
@@ -715,6 +722,7 @@
   Mutex* symbols_mutex_;  // Protects concurrent access to the symbol table.
   Mutex* type_canonicalization_mutex_;  // Protects type canonicalization.
   Mutex* constant_canonicalization_mutex_;  // Protects const canonicalization.
+  Mutex* megamorphic_lookup_mutex_;  // Protects megamorphic table lookup.
   MessageHandler* message_handler_;
   IsolateSpawnState* spawn_state_;
   bool is_runnable_;
@@ -751,6 +759,8 @@
 
   RawError* sticky_error_;
 
+  RawError* sticky_reload_error_;
+
   // Background compilation.
   BackgroundCompiler* background_compiler_;
   intptr_t background_compiler_disabled_depth_;
diff --git a/runtime/vm/isolate_reload.cc b/runtime/vm/isolate_reload.cc
index 0291517..15aaf29 100644
--- a/runtime/vm/isolate_reload.cc
+++ b/runtime/vm/isolate_reload.cc
@@ -27,7 +27,10 @@
 DEFINE_FLAG(bool, identity_reload, false, "Enable checks for identity reload.");
 DEFINE_FLAG(int, reload_every, 0, "Reload every N stack overflow checks.");
 DEFINE_FLAG(bool, reload_every_optimized, true, "Only from optimized code.");
-
+DEFINE_FLAG(bool, reload_every_back_off, false,
+            "Double the --reload-every value after each reload.");
+DEFINE_FLAG(bool, check_reloaded, false,
+            "Assert that an isolate has reloaded at least once.")
 #ifndef PRODUCT
 
 #define I (isolate())
@@ -215,7 +218,7 @@
   if (FLAG_trace_reload) {
     THR_Print("ISO-RELOAD: Error: %s\n", error.ToErrorCString());
   }
-  ServiceEvent service_event(Isolate::Current(), ServiceEvent::kIsolateReload);
+  ServiceEvent service_event(I, ServiceEvent::kIsolateReload);
   service_event.set_reload_error(&error);
   Service::HandleEvent(&service_event);
 }
@@ -227,12 +230,13 @@
 
 
 void IsolateReloadContext::ReportSuccess() {
-  ServiceEvent service_event(Isolate::Current(), ServiceEvent::kIsolateReload);
+  ServiceEvent service_event(I, ServiceEvent::kIsolateReload);
   Service::HandleEvent(&service_event);
 }
 
 
 void IsolateReloadContext::StartReload() {
+  TIMELINE_SCOPE(Reload);
   Thread* thread = Thread::Current();
 
   // Grab root library before calling CheckpointBeforeReload.
@@ -257,9 +261,6 @@
   DeoptimizeDependentCode();
   Checkpoint();
 
-  // Block class finalization attempts when calling into the library
-  // tag handler.
-  I->BlockClassFinalization();
   Object& result = Object::Handle(thread->zone());
   {
     TransitionVMToNative transition(thread);
@@ -267,11 +268,10 @@
 
     Dart_Handle retval =
         (I->library_tag_handler())(Dart_kScriptTag,
-                                Api::NewHandle(thread, Library::null()),
-                                Api::NewHandle(thread, root_lib_url.raw()));
+                                   Api::NewHandle(thread, Library::null()),
+                                   Api::NewHandle(thread, root_lib_url.raw()));
     result = Api::UnwrapHandle(retval);
   }
-  I->UnblockClassFinalization();
   if (result.IsError()) {
     ReportError(Error::Cast(result));
   }
@@ -281,7 +281,7 @@
 void IsolateReloadContext::RegisterClass(const Class& new_cls) {
   const Class& old_cls = Class::Handle(OldClassOrNull(new_cls));
   if (old_cls.IsNull()) {
-    Isolate::Current()->class_table()->Register(new_cls);
+    I->class_table()->Register(new_cls);
 
     if (FLAG_identity_reload) {
       TIR_Print("Could not find replacement class for %s\n",
@@ -350,6 +350,7 @@
 
 
 void IsolateReloadContext::DeoptimizeDependentCode() {
+  TIMELINE_SCOPE(DeoptimizeDependentCode);
   ClassTable* class_table = I->class_table();
 
   const intptr_t bottom = Dart::vm_isolate()->class_table()->NumCids();
@@ -408,7 +409,7 @@
         class_table->HasValidClassAt(i)) {
       // Copy the class into the saved class table and add it to the set.
       local_saved_class_table[i] = class_table->At(i);
-      if (i != kFreeListElement) {
+      if (i != kFreeListElement && i != kForwardingCorpse) {
         cls = class_table->At(i);
         bool already_present = old_classes_set.Insert(cls);
         ASSERT(!already_present);
@@ -599,7 +600,7 @@
     }
 
     // Reset the registered libraries to the filtered array.
-    Library::RegisterLibraries(Thread::Current(), saved_libs);
+    Library::RegisterLibraries(thread, saved_libs);
   }
 
   Library& saved_root_lib = Library::Handle(Z, saved_root_library());
@@ -622,39 +623,36 @@
 
 #ifdef DEBUG
 void IsolateReloadContext::VerifyMaps() {
+  TIMELINE_SCOPE(VerifyMaps);
   Class& cls = Class::Handle();
   Class& new_cls = Class::Handle();
   Class& cls2 = Class::Handle();
-  Class& new_cls2 = Class::Handle();
 
   // Verify that two old classes aren't both mapped to the same new
-  // class.  This could happen is the IsSameClass function is broken.
+  // class. This could happen if the IsSameClass function is broken.
   UnorderedHashMap<ClassMapTraits> class_map(class_map_storage_);
+  UnorderedHashMap<ClassMapTraits> reverse_class_map(
+      HashTables::New<UnorderedHashMap<ClassMapTraits> >(
+         class_map.NumOccupied()));
   {
     UnorderedHashMap<ClassMapTraits>::Iterator it(&class_map);
     while (it.MoveNext()) {
       const intptr_t entry = it.Current();
       new_cls = Class::RawCast(class_map.GetKey(entry));
       cls = Class::RawCast(class_map.GetPayload(entry, 0));
-      if (new_cls.raw() != cls.raw()) {
-        UnorderedHashMap<ClassMapTraits>::Iterator it2(&class_map);
-        while (it2.MoveNext()) {
-          new_cls2 = Class::RawCast(class_map.GetKey(entry));
-          if (new_cls.raw() == new_cls2.raw()) {
-            cls2 = Class::RawCast(class_map.GetPayload(entry, 0));
-            if (cls.raw() != cls2.raw()) {
-              OS::PrintErr(
-                  "Classes '%s' and '%s' are distinct classes but both map to "
-                  "class '%s'\n",
-                  cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
-              UNREACHABLE();
-            }
-          }
-        }
+      cls2 ^= reverse_class_map.GetOrNull(new_cls);
+      if (!cls2.IsNull()) {
+        OS::PrintErr("Classes '%s' and '%s' are distinct classes but both map "
+                     " to class '%s'\n",
+                     cls.ToCString(), cls2.ToCString(), new_cls.ToCString());
+        UNREACHABLE();
       }
+      bool update = reverse_class_map.UpdateOrInsert(cls, new_cls);
+      ASSERT(!update);
     }
   }
   class_map.Release();
+  reverse_class_map.Release();
 }
 #endif
 
@@ -885,46 +883,6 @@
 }
 
 
-static void ResetICs(const Function& function, const Code& code) {
-  // TODO(johnmccutchan): Relying on the function's ICData Map can miss ICDatas.
-  // Use the code's object pool instead.
-  if (function.ic_data_array() == Array::null()) {
-    // TODO(johnmccutchan): Even in this case, we need to scan the code's object
-    // pool instead.
-    return;  // Already reset in an earlier round.
-  }
-
-  Thread* thread = Thread::Current();
-  Zone* zone = thread->zone();
-
-  ZoneGrowableArray<const ICData*>* ic_data_array =
-      new(zone) ZoneGrowableArray<const ICData*>();
-  function.RestoreICDataMap(ic_data_array, false /* clone ic-data */);
-  const intptr_t ic_data_array_length = ic_data_array->length();
-  if (ic_data_array_length == 0) {
-    return;
-  }
-  const PcDescriptors& descriptors =
-      PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, RawPcDescriptors::kIcCall |
-                                            RawPcDescriptors::kUnoptStaticCall);
-  while (iter.MoveNext()) {
-    const intptr_t index = iter.DeoptId();
-    if (index >= ic_data_array_length) {
-      // TODO(johnmccutchan): Investigate how this can happen.
-      continue;
-    }
-    const ICData* ic_data = (*ic_data_array)[index];
-    if (ic_data == NULL) {
-      // TODO(johnmccutchan): Investigate how this can happen.
-      continue;
-    }
-    bool is_static_call = iter.Kind() == RawPcDescriptors::kUnoptStaticCall;
-    ic_data->Reset(is_static_call);
-  }
-}
-
-
 void IsolateReloadContext::ResetUnoptimizedICsOnStack() {
   Code& code = Code::Handle();
   Function& function = Function::Handle();
@@ -939,10 +897,9 @@
       function = code.function();
       code = function.unoptimized_code();
       ASSERT(!code.IsNull());
-      ResetICs(function, code);
+      code.ResetICDatas();
     } else {
-      function = code.function();
-      ResetICs(function, code);
+      code.ResetICDatas();
     }
     frame = iterator.NextFrame();
   }
@@ -971,8 +928,8 @@
   }
 
   virtual void VisitObject(RawObject* obj) {
-    // Free-list elements cannot even be wrapped in handles.
-    if (obj->IsFreeListElement()) {
+    if (obj->IsPseudoObject()) {
+      // Cannot even be wrapped in handles.
       return;
     }
     handle_ = obj;
@@ -995,7 +952,7 @@
         if (clear_code) {
           ClearAllCode(func);
         } else {
-          PreserveUnoptimizedCode(func);
+          PreserveUnoptimizedCode();
         }
       }
 
@@ -1015,11 +972,11 @@
     func.set_was_compiled(false);
   }
 
-  void PreserveUnoptimizedCode(const Function& func) {
+  void PreserveUnoptimizedCode() {
     ASSERT(!code_.IsNull());
     // We are preserving the unoptimized code, fill all ICData arrays with
     // the sentinel values so that we have no stale type feedback.
-    func.FillICDataWithSentinels(code_);
+    code_.ResetICDatas();
   }
 
   bool IsFromDirtyLibrary(const Function& func) {
@@ -1079,13 +1036,23 @@
 }
 
 
+RawString* IsolateReloadContext::FindLibraryPrivateKey(
+    const Library& replacement_or_new) {
+  const Library& old = Library::Handle(OldLibraryOrNull(replacement_or_new));
+  if (old.IsNull()) {
+    return String::null();
+  }
+  return old.private_key();
+}
+
+
 RawLibrary* IsolateReloadContext::OldLibraryOrNull(
     const Library& replacement_or_new) {
   UnorderedHashSet<LibraryMapTraits>
       old_libraries_set(old_libraries_set_storage_);
   Library& lib = Library::Handle();
   lib ^= old_libraries_set.GetOrNull(replacement_or_new);
-  old_libraries_set_storage_ = old_libraries_set.Release().raw();
+  old_libraries_set.Release();
   return lib.raw();
 }
 
@@ -1103,6 +1070,11 @@
     }
     old ^= OldLibraryOrNull(replacement_or_new);
     if (old.IsNull()) {
+      if (FLAG_identity_reload) {
+        TIR_Print("Could not find original library for %s\n",
+                  replacement_or_new.ToCString());
+        UNREACHABLE();
+      }
       // New library.
       AddLibraryMapping(replacement_or_new, replacement_or_new);
     } else {
diff --git a/runtime/vm/isolate_reload.h b/runtime/vm/isolate_reload.h
index d439414..de6ff00 100644
--- a/runtime/vm/isolate_reload.h
+++ b/runtime/vm/isolate_reload.h
@@ -68,6 +68,10 @@
 
   void RegisterClass(const Class& new_cls);
 
+  // Finds the library private key for |replacement_or_new| or returns null
+  // if |replacement_or_new| is new.
+  RawString* FindLibraryPrivateKey(const Library& replacement_or_new);
+
   int64_t start_time_micros() const { return start_time_micros_; }
 
  private:
diff --git a/runtime/vm/isolate_reload_test.cc b/runtime/vm/isolate_reload_test.cc
index b6d1382..aa8f188 100644
--- a/runtime/vm/isolate_reload_test.cc
+++ b/runtime/vm/isolate_reload_test.cc
@@ -228,6 +228,38 @@
 }
 
 
+TEST_CASE(IsolateReload_ClassFieldAdded2) {
+  const char* kScript =
+      "class Foo {\n"
+      "  var x;\n"
+      "  var y;\n"
+      "}\n"
+      "main() {\n"
+      "  new Foo();\n"
+      "  return 44;\n"
+      "}\n";
+
+  Dart_Handle lib = TestCase::LoadTestScript(kScript, NULL);
+  EXPECT_VALID(lib);
+
+  EXPECT_EQ(44, SimpleInvoke(lib, "main"));
+
+  const char* kReloadScript =
+      "class Foo {\n"
+      "  var x;\n"
+      "  var y;\n"
+      "  var z;\n"
+      "}\n"
+      "main() {\n"
+      "  new Foo();\n"
+      "  return 44;\n"
+      "}\n";
+
+  lib = TestCase::ReloadTestScript(kReloadScript);
+  EXPECT_ERROR(lib, "Number of instance fields changed");
+}
+
+
 TEST_CASE(IsolateReload_ClassFieldRemoved) {
   const char* kScript =
       "class Foo {\n"
@@ -648,7 +680,7 @@
 
 TEST_CASE(IsolateReload_LiveStack) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "helper() => 7;\n"
       "alpha() { var x = helper(); reloadTest(); return x + helper(); }\n"
       "foo() => alpha();\n"
@@ -661,7 +693,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "helper() => 100;\n"
       "alpha() => 5 + helper();\n"
       "foo() => alpha();\n"
@@ -694,41 +726,41 @@
 
   EXPECT_ERROR(SimpleInvokeError(lib, "main"), "importedFunc");
 
-  // Fail to find 'importable_test_lib' in the isolate.
-  result = Dart_LookupLibrary(NewString("importable_test_lib"));
+  // Fail to find 'test:importable_lib' in the isolate.
+  result = Dart_LookupLibrary(NewString("test:importable_lib"));
   EXPECT(Dart_IsError(result));
 
   const char* kReloadScript =
-      "import 'importable_test_lib';\n"
+      "import 'test:importable_lib';\n"
       "main() {\n"
       "  return importedFunc();\n"
       "}\n";
 
-  // Reload and add 'importable_test_lib' to isolate.
+  // Reload and add 'test:importable_lib' to isolate.
   lib = TestCase::ReloadTestScript(kReloadScript);
   EXPECT_VALID(lib);
 
   EXPECT_STREQ("a", SimpleInvokeStr(lib, "main"));
 
-  // Find 'importable_test_lib' in the isolate.
-  result = Dart_LookupLibrary(NewString("importable_test_lib"));
+  // Find 'test:importable_lib' in the isolate.
+  result = Dart_LookupLibrary(NewString("test:importable_lib"));
   EXPECT(Dart_IsLibrary(result));
 
   // Reload and remove 'dart:math' from isolate.
   lib = TestCase::ReloadTestScript(kScript);
   EXPECT_VALID(lib);
 
-  // Fail to find 'importable_test_lib' in the isolate.
-  result = Dart_LookupLibrary(NewString("importable_test_lib"));
+  // Fail to find 'test:importable_lib' in the isolate.
+  result = Dart_LookupLibrary(NewString("test:importable_lib"));
   EXPECT(Dart_IsError(result));
 }
 
 
 TEST_CASE(IsolateReload_LibraryHide) {
-  // Import 'importable_test_lib' with importedFunc hidden. Will result in an
+  // Import 'test:importable_lib' with importedFunc hidden. Will result in an
   // error.
   const char* kScript =
-      "import 'importable_test_lib' hide importedFunc;\n"
+      "import 'test:importable_lib' hide importedFunc;\n"
       "main() {\n"
       "  return importedFunc();\n"
       "}\n";
@@ -740,9 +772,9 @@
 
   EXPECT_ERROR(SimpleInvokeError(lib, "main"), "importedFunc");
 
-  // Import 'importable_test_lib'.
+  // Import 'test:importable_lib'.
   const char* kReloadScript =
-      "import 'importable_test_lib';\n"
+      "import 'test:importable_lib';\n"
       "main() {\n"
       "  return importedFunc();\n"
       "}\n";
@@ -755,10 +787,10 @@
 
 
 TEST_CASE(IsolateReload_LibraryShow) {
-  // Import 'importable_test_lib' with importedIntFunc visible. Will result in
+  // Import 'test:importable_lib' with importedIntFunc visible. Will result in
   // an error when 'main' is invoked.
   const char* kScript =
-      "import 'importable_test_lib' show importedIntFunc;\n"
+      "import 'test:importable_lib' show importedIntFunc;\n"
       "main() {\n"
       "  return importedFunc();\n"
       "}\n"
@@ -775,10 +807,10 @@
   // Results in an error.
   EXPECT_ERROR(SimpleInvokeError(lib, "main"), "importedFunc");
 
-  // Import 'importable_test_lib' with importedFunc visible. Will result in
+  // Import 'test:importable_lib' with importedFunc visible. Will result in
   // an error when 'mainInt' is invoked.
   const char* kReloadScript =
-      "import 'importable_test_lib' show importedFunc;\n"
+      "import 'test:importable_lib' show importedFunc;\n"
       "main() {\n"
       "  return importedFunc();\n"
       "}\n"
@@ -800,8 +832,8 @@
 // that is compatible with the fast path smi stubs.
 TEST_CASE(IsolateReload_SmiFastPathStubs) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
-      "import 'importable_test_lib' show importedIntFunc;\n"
+      "import 'test:isolate_reload_helper';\n"
+      "import 'test:importable_lib' show importedIntFunc;\n"
       "main() {\n"
       "  var x = importedIntFunc();\n"
       "  var y = importedIntFunc();\n"
@@ -824,7 +856,7 @@
 // mixins when we reload.
 TEST_CASE(IsolateReload_ImportedMixinFunction) {
   const char* kScript =
-      "import 'importable_test_lib' show ImportedMixin;\n"
+      "import 'test:importable_lib' show ImportedMixin;\n"
       "class A extends Object with ImportedMixin {\n"
       "}"
       "var func = new A().mixinFunc;\n"
@@ -838,7 +870,7 @@
   EXPECT_STREQ("mixin", SimpleInvokeStr(lib, "main"));
 
   const char* kReloadScript =
-      "import 'importable_test_lib' show ImportedMixin;\n"
+      "import 'test:importable_lib' show ImportedMixin;\n"
       "class A extends Object with ImportedMixin {\n"
       "}"
       "var func;\n"
@@ -877,7 +909,7 @@
 
 TEST_CASE(IsolateReload_PendingUnqualifiedCall_StaticToInstance) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  static foo() => 'static';\n"
       "  test() {\n"
@@ -893,7 +925,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  foo() => 'instance';\n"
       "  test() {\n"
@@ -918,7 +950,7 @@
 
 TEST_CASE(IsolateReload_PendingUnqualifiedCall_InstanceToStatic) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  foo() => 'instance';\n"
       "  test() {\n"
@@ -934,7 +966,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  static foo() => 'static';\n"
       "  test() {\n"
@@ -959,7 +991,7 @@
 
 TEST_CASE(IsolateReload_PendingConstructorCall_AbstractToConcrete) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "abstract class Foo {}\n"
       "class C {\n"
       "  test() {\n"
@@ -980,7 +1012,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class Foo {}\n"
       "class C {\n"
       "  test() {\n"
@@ -1010,7 +1042,7 @@
 
 TEST_CASE(IsolateReload_PendingConstructorCall_ConcreteToAbstract) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class Foo {}\n"
       "class C {\n"
       "  test() {\n"
@@ -1031,7 +1063,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "abstract class Foo {}\n"
       "class C {\n"
       "  test() {\n"
@@ -1061,7 +1093,7 @@
 
 TEST_CASE(IsolateReload_PendingStaticCall_DefinedToNSM) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  static foo() => 'static'\n"
       "  test() {\n"
@@ -1081,7 +1113,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  test() {\n"
       "    reloadTest();\n"
@@ -1109,7 +1141,7 @@
 
 TEST_CASE(IsolateReload_PendingStaticCall_NSMToDefined) {
   const char* kScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  test() {\n"
       "    reloadTest();\n"
@@ -1128,7 +1160,7 @@
   EXPECT_VALID(lib);
 
   const char* kReloadScript =
-      "import 'isolate_reload_test_helper';\n"
+      "import 'test:isolate_reload_helper';\n"
       "class C {\n"
       "  static foo() => 'static'\n"
       "  test() {\n"
diff --git a/runtime/vm/jit_optimizer.cc b/runtime/vm/jit_optimizer.cc
index e9085b6..e40cf12 100644
--- a/runtime/vm/jit_optimizer.cc
+++ b/runtime/vm/jit_optimizer.cc
@@ -217,7 +217,8 @@
         String::Handle(Z, ic_data.target_name()),
         Object::empty_array(),  // Dummy argument descriptor.
         ic_data.deopt_id(),
-        ic_data.NumArgsTested()));
+        ic_data.NumArgsTested(),
+        false));
     new_ic_data.SetDeoptReasons(ic_data.DeoptReasons());
     new_ic_data.AddReceiverCheck(cid, function);
     return new_ic_data;
@@ -740,7 +741,8 @@
   if (ic_data.NumberOfChecks() != 1) {
     return false;
   }
-  return TryReplaceInstanceCallWithInline(call);
+  return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+      flow_graph_, current_iterator(), call);
 }
 
 
@@ -1634,50 +1636,6 @@
 }
 
 
-bool JitOptimizer::TryReplaceInstanceCallWithInline(
-    InstanceCallInstr* call) {
-  Function& target = Function::Handle(Z);
-  GrowableArray<intptr_t> class_ids;
-  call->ic_data()->GetCheckAt(0, &class_ids, &target);
-  const intptr_t receiver_cid = class_ids[0];
-
-  TargetEntryInstr* entry;
-  Definition* last;
-  if (!FlowGraphInliner::TryInlineRecognizedMethod(flow_graph_,
-                                                   receiver_cid,
-                                                   target,
-                                                   call,
-                                                   call->ArgumentAt(0),
-                                                   call->token_pos(),
-                                                   *call->ic_data(),
-                                                   &entry, &last)) {
-    return false;
-  }
-
-  // Insert receiver class check.
-  AddReceiverCheck(call);
-  // Remove the original push arguments.
-  for (intptr_t i = 0; i < call->ArgumentCount(); ++i) {
-    PushArgumentInstr* push = call->PushArgumentAt(i);
-    push->ReplaceUsesWith(push->value()->definition());
-    push->RemoveFromGraph();
-  }
-  // Replace all uses of this definition with the result.
-  call->ReplaceUsesWith(last);
-  // Finally insert the sequence other definition in place of this one in the
-  // graph.
-  call->previous()->LinkTo(entry->next());
-  entry->UnuseAllInputs();  // Entry block is not in the graph.
-  last->LinkTo(call);
-  // Remove through the iterator.
-  ASSERT(current_iterator()->Current() == call);
-  current_iterator()->RemoveCurrentFromGraph();
-  call->set_previous(NULL);
-  call->set_next(NULL);
-  return true;
-}
-
-
 void JitOptimizer::ReplaceWithMathCFunction(
     InstanceCallInstr* call,
     MethodRecognizer::Kind recognized_kind) {
@@ -1733,49 +1691,15 @@
   MethodRecognizer::Kind recognized_kind =
       MethodRecognizer::RecognizeKind(target);
 
-  if ((recognized_kind == MethodRecognizer::kGrowableArraySetData) &&
-      (ic_data.NumberOfChecks() == 1) &&
-      (class_ids[0] == kGrowableObjectArrayCid)) {
-    // This is an internal method, no need to check argument types.
-    Definition* array = call->ArgumentAt(0);
-    Definition* value = call->ArgumentAt(1);
-    StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr(
-        GrowableObjectArray::data_offset(),
-        new(Z) Value(array),
-        new(Z) Value(value),
-        kEmitStoreBarrier,
-        call->token_pos());
-    ReplaceCall(call, store);
-    return true;
-  }
-
-  if ((recognized_kind == MethodRecognizer::kGrowableArraySetLength) &&
-      (ic_data.NumberOfChecks() == 1) &&
-      (class_ids[0] == kGrowableObjectArrayCid)) {
-    // This is an internal method, no need to check argument types nor
-    // range.
-    Definition* array = call->ArgumentAt(0);
-    Definition* value = call->ArgumentAt(1);
-    StoreInstanceFieldInstr* store = new(Z) StoreInstanceFieldInstr(
-        GrowableObjectArray::length_offset(),
-        new(Z) Value(array),
-        new(Z) Value(value),
-        kNoStoreBarrier,
-        call->token_pos());
-    ReplaceCall(call, store);
-    return true;
-  }
-
   if ((recognized_kind == MethodRecognizer::kOneByteStringCodeUnitAt) ||
       (recognized_kind == MethodRecognizer::kTwoByteStringCodeUnitAt) ||
       (recognized_kind == MethodRecognizer::kExternalOneByteStringCodeUnitAt) ||
-      (recognized_kind == MethodRecognizer::kExternalTwoByteStringCodeUnitAt)) {
+      (recognized_kind == MethodRecognizer::kExternalTwoByteStringCodeUnitAt) ||
+      (recognized_kind == MethodRecognizer::kGrowableArraySetData) ||
+      (recognized_kind == MethodRecognizer::kGrowableArraySetLength)) {
       ASSERT(ic_data.NumberOfChecks() == 1);
-      ASSERT((class_ids[0] == kOneByteStringCid) ||
-             (class_ids[0] == kTwoByteStringCid) ||
-             (class_ids[0] == kExternalOneByteStringCid) ||
-             (class_ids[0] == kExternalTwoByteStringCid));
-    return TryReplaceInstanceCallWithInline(call);
+    return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+        flow_graph_, current_iterator(), call);
   }
 
   if ((recognized_kind == MethodRecognizer::kStringBaseCharAt) &&
@@ -1784,7 +1708,8 @@
              (class_ids[0] == kTwoByteStringCid) ||
              (class_ids[0] == kExternalOneByteStringCid) ||
              (class_ids[0] == kExternalTwoByteStringCid));
-    return TryReplaceInstanceCallWithInline(call);
+    return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+        flow_graph_, current_iterator(), call);
   }
 
   if ((class_ids[0] == kOneByteStringCid) && (ic_data.NumberOfChecks() == 1)) {
@@ -1872,7 +1797,8 @@
       case MethodRecognizer::kDoubleSub:
       case MethodRecognizer::kDoubleMul:
       case MethodRecognizer::kDoubleDiv:
-        return TryReplaceInstanceCallWithInline(call);
+        return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+            flow_graph_, current_iterator(), call);
       default:
         // Unsupported method.
         return false;
@@ -1881,7 +1807,8 @@
 
   if (IsSupportedByteArrayViewCid(class_ids[0]) &&
       (ic_data.NumberOfChecks() == 1)) {
-    return TryReplaceInstanceCallWithInline(call);
+    return FlowGraphInliner::TryReplaceInstanceCallWithInline(
+        flow_graph_, current_iterator(), call);
   }
 
   if ((class_ids[0] == kFloat32x4Cid) && (ic_data.NumberOfChecks() == 1)) {
@@ -1896,6 +1823,18 @@
     return TryInlineFloat64x2Method(call, recognized_kind);
   }
 
+  if (recognized_kind == MethodRecognizer::kSmi_bitAndFromSmi) {
+    AddReceiverCheck(call);
+    BinarySmiOpInstr* op =
+        new(Z) BinarySmiOpInstr(
+            Token::kBIT_AND,
+            new(Z) Value(call->ArgumentAt(0)),
+            new(Z) Value(call->ArgumentAt(1)),
+            call->deopt_id());
+    ReplaceCall(call, op);
+    return true;
+  }
+
   return false;
 }
 
@@ -2040,14 +1979,6 @@
     case MethodRecognizer::kFloat32x4NotEqual: {
       Definition* left = call->ArgumentAt(0);
       Definition* right = call->ArgumentAt(1);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
-      // Replace call.
       Float32x4ComparisonInstr* cmp =
           new(Z) Float32x4ComparisonInstr(recognized_kind,
                                           new(Z) Value(left),
@@ -2060,13 +1991,6 @@
     case MethodRecognizer::kFloat32x4Max: {
       Definition* left = call->ArgumentAt(0);
       Definition* right = call->ArgumentAt(1);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float32x4MinMaxInstr* minmax =
           new(Z) Float32x4MinMaxInstr(
               recognized_kind,
@@ -2079,13 +2003,6 @@
     case MethodRecognizer::kFloat32x4Scale: {
       Definition* left = call->ArgumentAt(0);
       Definition* right = call->ArgumentAt(1);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       // Left and right values are swapped when handed to the instruction,
       // this is done so that the double value is loaded into the output
       // register and can be destroyed.
@@ -2101,12 +2018,6 @@
     case MethodRecognizer::kFloat32x4ReciprocalSqrt:
     case MethodRecognizer::kFloat32x4Reciprocal: {
       Definition* left = call->ArgumentAt(0);
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float32x4SqrtInstr* sqrt =
           new(Z) Float32x4SqrtInstr(recognized_kind,
                                     new(Z) Value(left),
@@ -2120,13 +2031,6 @@
     case MethodRecognizer::kFloat32x4WithW: {
       Definition* left = call->ArgumentAt(0);
       Definition* right = call->ArgumentAt(1);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float32x4WithInstr* with = new(Z) Float32x4WithInstr(recognized_kind,
                                                            new(Z) Value(left),
                                                            new(Z) Value(right),
@@ -2137,13 +2041,6 @@
     case MethodRecognizer::kFloat32x4Absolute:
     case MethodRecognizer::kFloat32x4Negate: {
       Definition* left = call->ArgumentAt(0);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float32x4ZeroArgInstr* zeroArg =
           new(Z) Float32x4ZeroArgInstr(
               recognized_kind, new(Z) Value(left), call->deopt_id());
@@ -2154,13 +2051,6 @@
       Definition* left = call->ArgumentAt(0);
       Definition* lower = call->ArgumentAt(1);
       Definition* upper = call->ArgumentAt(2);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float32x4ClampInstr* clamp = new(Z) Float32x4ClampInstr(
           new(Z) Value(left),
           new(Z) Value(lower),
@@ -2197,13 +2087,6 @@
     case MethodRecognizer::kFloat64x2Sqrt:
     case MethodRecognizer::kFloat64x2GetSignMask: {
       Definition* left = call->ArgumentAt(0);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float64x2ZeroArgInstr* zeroArg =
           new(Z) Float64x2ZeroArgInstr(
               recognized_kind, new(Z) Value(left), call->deopt_id());
@@ -2217,13 +2100,6 @@
     case MethodRecognizer::kFloat64x2Max: {
       Definition* left = call->ArgumentAt(0);
       Definition* right = call->ArgumentAt(1);
-      // Type check left.
-      AddCheckClass(left,
-                    ICData::ZoneHandle(
-                        Z, call->ic_data()->AsUnaryClassChecksForArgNr(0)),
-                    call->deopt_id(),
-                    call->env(),
-                    call);
       Float64x2OneArgInstr* zeroArg =
           new(Z) Float64x2OneArgInstr(recognized_kind,
                                       new(Z) Value(left),
@@ -2911,7 +2787,6 @@
 void JitOptimizer::VisitStoreInstanceField(
     StoreInstanceFieldInstr* instr) {
   if (instr->IsUnboxedStore()) {
-    ASSERT(instr->is_potential_unboxed_initialization_);
     // Determine if this field should be unboxed based on the usage of getter
     // and setter functions: The heuristic requires that the setter has a
     // usage count of at least 1/kGetterSetterRatio of the getter usage count.
@@ -2980,7 +2855,7 @@
                                      instr->token_pos());
   // Storing into uninitialized memory; remember to prevent dead store
   // elimination and ensure proper GC barrier.
-  store->set_is_object_reference_initialization(true);
+  store->set_is_initialization(true);
   flow_graph_->InsertAfter(replacement, store, NULL, FlowGraph::kEffect);
   Definition* cursor = store;
   for (intptr_t i = 0; i < instr->num_context_variables(); ++i) {
@@ -2992,7 +2867,7 @@
                                        instr->token_pos());
     // Storing into uninitialized memory; remember to prevent dead store
     // elimination and ensure proper GC barrier.
-    store->set_is_object_reference_initialization(true);
+    store->set_is_initialization(true);
     flow_graph_->InsertAfter(cursor, store, NULL, FlowGraph::kEffect);
     cursor = store;
   }
diff --git a/runtime/vm/json_stream.cc b/runtime/vm/json_stream.cc
index 003f1b3..f94e32a 100644
--- a/runtime/vm/json_stream.cc
+++ b/runtime/vm/json_stream.cc
@@ -46,6 +46,8 @@
       id_zone_(&default_id_zone_),
       reply_port_(ILLEGAL_PORT),
       seq_(NULL),
+      parameter_keys_(NULL),
+      parameter_values_(NULL),
       method_(""),
       param_keys_(NULL),
       param_values_(NULL),
@@ -70,13 +72,18 @@
                        const Instance& seq,
                        const String& method,
                        const Array& param_keys,
-                       const Array& param_values) {
+                       const Array& param_values,
+                       bool parameters_are_dart_objects) {
   set_reply_port(reply_port);
   seq_ = &Instance::ZoneHandle(seq.raw());
   method_ = method.ToCString();
 
-  String& string_iterator = String::Handle();
-  if (param_keys.Length() > 0) {
+  if (parameters_are_dart_objects) {
+    parameter_keys_ = &Array::ZoneHandle(param_keys.raw());
+    parameter_values_ = &Array::ZoneHandle(param_values.raw());
+    ASSERT(parameter_keys_->Length() == parameter_values_->Length());
+  } else if (param_keys.Length() > 0) {
+    String& string_iterator = String::Handle();
     ASSERT(param_keys.Length() == param_values.Length());
     const char** param_keys_native =
         zone->Alloc<const char*>(param_keys.Length());
@@ -92,6 +99,7 @@
     }
     SetParams(param_keys_native, param_values_native, param_keys.Length());
   }
+
   if (FLAG_trace_service) {
     Isolate* isolate = Isolate::Current();
     ASSERT(isolate != NULL);
@@ -130,6 +138,16 @@
       return "Isolate must be runnable";
     case kIsolateMustBePaused:
       return "Isolate must be paused";
+    case kIsolateIsReloading:
+      return "Isolate is reloading";
+    case kFileSystemAlreadyExists:
+      return "File system already exists";
+    case kFileSystemDoesNotExist:
+      return "File system does not exist";
+    case kFileDoesNotExist:
+      return "File does not exist";
+    case kIsolateReloadFailed:
+      return "Isolate reload failed";
     default:
       return "Extension error";
   }
@@ -667,6 +685,42 @@
 }
 
 
+intptr_t JSONStream::NumObjectParameters() const {
+  if (parameter_keys_ == NULL) {
+    return 0;
+  }
+  ASSERT(parameter_keys_ != NULL);
+  ASSERT(parameter_values_ != NULL);
+  return parameter_keys_->Length();
+}
+
+
+RawObject* JSONStream::GetObjectParameterKey(intptr_t i) const {
+  ASSERT((i >= 0) && (i < NumObjectParameters()));
+  return parameter_keys_->At(i);
+}
+
+
+RawObject* JSONStream::GetObjectParameterValue(intptr_t i) const {
+  ASSERT((i >= 0) && (i < NumObjectParameters()));
+  return parameter_values_->At(i);
+}
+
+
+RawObject* JSONStream::LookupObjectParam(const char* c_key) const {
+  const String& key = String::Handle(String::New(c_key));
+  Object& test = Object::Handle();
+  const intptr_t num_object_parameters = NumObjectParameters();
+  for (intptr_t i = 0; i < num_object_parameters; i++) {
+    test = GetObjectParameterKey(i);
+    if (test.IsString() && String::Cast(test).Equals(key)) {
+      return GetObjectParameterValue(i);
+    }
+  }
+  return Object::null();
+}
+
+
 void JSONStream::SetParams(const char** param_keys,
                            const char** param_values,
                            intptr_t num_params) {
@@ -768,8 +822,25 @@
   }
   intptr_t limit = offset + count;
   for (intptr_t i = offset; i < limit; i++) {
-    intptr_t code_unit = s.CharAt(i);
-    buffer_.EscapeAndAddCodeUnit(code_unit);
+    uint16_t code_unit = s.CharAt(i);
+    if (Utf16::IsTrailSurrogate(code_unit)) {
+      buffer_.EscapeAndAddUTF16CodeUnit(code_unit);
+    } else if (Utf16::IsLeadSurrogate(code_unit)) {
+      if (i + 1 == limit) {
+        buffer_.EscapeAndAddUTF16CodeUnit(code_unit);
+      } else {
+        uint16_t next_code_unit = s.CharAt(i+1);
+        if (Utf16::IsTrailSurrogate(next_code_unit)) {
+          uint32_t decoded = Utf16::Decode(code_unit, next_code_unit);
+          buffer_.EscapeAndAddCodeUnit(decoded);
+          i++;
+        } else {
+          buffer_.EscapeAndAddUTF16CodeUnit(code_unit);
+        }
+      }
+    } else {
+      buffer_.EscapeAndAddCodeUnit(code_unit);
+    }
   }
   // Return value indicates whether the string is truncated.
   return (offset > 0) || (limit < length);
diff --git a/runtime/vm/json_stream.h b/runtime/vm/json_stream.h
index 3c77d4b..eec69b0 100644
--- a/runtime/vm/json_stream.h
+++ b/runtime/vm/json_stream.h
@@ -53,7 +53,13 @@
   kStreamNotSubscribed       = 104,
   kIsolateMustBeRunnable     = 105,
   kIsolateMustBePaused       = 106,
-  kIsolateIsReloading        = 107,
+
+  // Experimental (used in private rpcs).
+  kIsolateIsReloading        = 1000,
+  kFileSystemAlreadyExists   = 1001,
+  kFileSystemDoesNotExist    = 1002,
+  kFileDoesNotExist          = 1003,
+  kIsolateReloadFailed       = 1004,
 };
 
 // Expected that user_data is a JSONStream*.
@@ -73,7 +79,8 @@
              const Instance& seq,
              const String& method,
              const Array& param_keys,
-             const Array& param_values);
+             const Array& param_values,
+             bool parameters_are_dart_objects = false);
   void SetupError();
 
   void PrintError(intptr_t code, const char* details_format, ...);
@@ -99,6 +106,11 @@
 
   Dart_Port reply_port() const { return reply_port_; }
 
+  intptr_t NumObjectParameters() const;
+  RawObject* GetObjectParameterKey(intptr_t i) const;
+  RawObject* GetObjectParameterValue(intptr_t i) const;
+  RawObject* LookupObjectParam(const char* key) const;
+
   intptr_t num_params() const { return num_params_; }
   const char* GetParamKey(intptr_t i) const {
     return param_keys_[i];
@@ -227,6 +239,8 @@
   ServiceIdZone* id_zone_;
   Dart_Port reply_port_;
   Instance* seq_;
+  Array* parameter_keys_;
+  Array* parameter_values_;
   const char* method_;
   const char** param_keys_;
   const char** param_values_;
diff --git a/runtime/vm/json_test.cc b/runtime/vm/json_test.cc
index e8c4f5d..a0b27c2 100644
--- a/runtime/vm/json_test.cc
+++ b/runtime/vm/json_test.cc
@@ -219,8 +219,9 @@
       "var unicode = '\\u00CE\\u00F1\\u0163\\u00E9r\\u00F1\\u00E5\\u0163"
       "\\u00EE\\u00F6\\u00F1\\u00E5\\u013C\\u00EE\\u017E\\u00E5\\u0163"
       "\\u00EE\\u1EDD\\u00F1';\n"
-      "var surrogates = '\\u{1D11E}\\u{1D11E}\\u{1D11E}\\u{1D11E}"
-      "\\u{1D11E}';\n"
+      "var surrogates = '\\u{1D11E}\\u{1D11E}\\u{1D11E}"
+      "\\u{1D11E}\\u{1D11E}';\n"
+      "var wrongEncoding = '\\u{1D11E}' + surrogates[0] + '\\u{1D11E}';"
       "var nullInMiddle = 'This has\\u0000 four words.';";
 
   Dart_Handle lib = TestCase::LoadTestScript(kScriptChars, NULL);
@@ -265,9 +266,7 @@
       JSONObject jsobj(&js);
       EXPECT(!jsobj.AddPropertyStr("unicode", obj));
     }
-    EXPECT_STREQ("{\"unicode\":\"\\u00CE\\u00F1\\u0163\\u00E9r\\u00F1\\u00E5"
-                 "\\u0163\\u00EE\\u00F6\\u00F1\\u00E5\\u013C\\u00EE\\u017E"
-                 "\\u00E5\\u0163\\u00EE\\u1EDD\\u00F1\"}", js.ToCString());
+    EXPECT_STREQ("{\"unicode\":\"Îñţérñåţîöñåļîžåţîờñ\"}", js.ToCString());
   }
 
   {
@@ -280,11 +279,24 @@
       JSONObject jsobj(&js);
       EXPECT(!jsobj.AddPropertyStr("surrogates", obj));
     }
-    EXPECT_STREQ("{\"surrogates\":\"\\uD834\\uDD1E\\uD834\\uDD1E\\uD834\\uDD1E"
-                 "\\uD834\\uDD1E\\uD834\\uDD1E\"}", js.ToCString());
+    EXPECT_STREQ("{\"surrogates\":\"𝄞𝄞𝄞𝄞𝄞\"}", js.ToCString());
   }
 
   {
+    result = Dart_GetField(lib, NewString("wrongEncoding"));
+    EXPECT_VALID(result);
+    obj ^= Api::UnwrapHandle(result);
+
+    JSONStream js;
+    {
+      JSONObject jsobj(&js);
+      EXPECT(!jsobj.AddPropertyStr("wrongEncoding", obj));
+    }
+    EXPECT_STREQ("{\"wrongEncoding\":\"𝄞\\uD834𝄞\"}", js.ToCString());
+  }
+
+
+  {
     result = Dart_GetField(lib, NewString("nullInMiddle"));
     EXPECT_VALID(result);
     obj ^= Api::UnwrapHandle(result);
diff --git a/runtime/vm/lockers.cc b/runtime/vm/lockers.cc
index 2efe245..b8ac4b6 100644
--- a/runtime/vm/lockers.cc
+++ b/runtime/vm/lockers.cc
@@ -12,11 +12,11 @@
 static void updateThreadState(Thread* thread) {
   // First try a fast update of the thread state to indicate it is not at a
   // safepoint anymore.
-  uword old_state = Thread::SetAtSafepoint(true, 0);
+  uint32_t old_state = Thread::SetAtSafepoint(true, 0);
   uword addr =
       reinterpret_cast<uword>(thread) + Thread::safepoint_state_offset();
-  if (AtomicOperations::CompareAndSwapWord(
-          reinterpret_cast<uword*>(addr), old_state, 0) != old_state) {
+  if (AtomicOperations::CompareAndSwapUint32(
+          reinterpret_cast<uint32_t*>(addr), old_state, 0) != old_state) {
     // Fast update failed which means we could potentially be in the middle
     // of a safepoint operation and need to block for it.
     SafepointHandler* handler = thread->isolate()->safepoint_handler();
@@ -34,11 +34,11 @@
   Monitor::WaitResult result = monitor_->Wait(millis);
   // First try a fast update of the thread state to indicate it is not at a
   // safepoint anymore.
-  uword old_state = Thread::SetAtSafepoint(true, 0);
+  uint32_t old_state = Thread::SetAtSafepoint(true, 0);
   uword addr =
       reinterpret_cast<uword>(thread) + Thread::safepoint_state_offset();
-  if (AtomicOperations::CompareAndSwapWord(
-          reinterpret_cast<uword*>(addr), old_state, 0) != old_state) {
+  if (AtomicOperations::CompareAndSwapUint32(
+          reinterpret_cast<uint32_t*>(addr), old_state, 0) != old_state) {
     // Fast update failed which means we could potentially be in the middle
     // of a safepoint operation and need to block for it.
     monitor_->Exit();
@@ -98,11 +98,11 @@
     Monitor::WaitResult result = monitor_->Wait(millis);
     // First try a fast update of the thread state to indicate it is not at a
     // safepoint anymore.
-    uword old_state = Thread::SetAtSafepoint(true, 0);
+    uint32_t old_state = Thread::SetAtSafepoint(true, 0);
     uword addr =
         reinterpret_cast<uword>(thread) + Thread::safepoint_state_offset();
-    if (AtomicOperations::CompareAndSwapWord(
-            reinterpret_cast<uword*>(addr), old_state, 0) != old_state) {
+    if (AtomicOperations::CompareAndSwapUint32(
+            reinterpret_cast<uint32_t*>(addr), old_state, 0) != old_state) {
       // Fast update failed which means we could potentially be in the middle
       // of a safepoint operation and need to block for it.
       monitor_->Exit();
diff --git a/runtime/vm/megamorphic_cache_table.cc b/runtime/vm/megamorphic_cache_table.cc
index 4705b21..e3d34dd 100644
--- a/runtime/vm/megamorphic_cache_table.cc
+++ b/runtime/vm/megamorphic_cache_table.cc
@@ -16,7 +16,7 @@
                                                    const String& name,
                                                    const Array& descriptor) {
   // Multiple compilation threads could access this lookup.
-  SafepointMutexLocker ml(isolate->mutex());
+  SafepointMutexLocker ml(isolate->megamorphic_lookup_mutex());
   ASSERT(name.IsSymbol());
   // TODO(rmacnak): ASSERT(descriptor.IsCanonical());
 
diff --git a/runtime/vm/message_handler.cc b/runtime/vm/message_handler.cc
index 8784651..5e5f685 100644
--- a/runtime/vm/message_handler.cc
+++ b/runtime/vm/message_handler.cc
@@ -457,6 +457,11 @@
 }
 
 
+void MessageHandler::DebugDump() {
+  PortMap::DebugDumpForMessageHandler(this);
+}
+
+
 void MessageHandler::PausedOnStartLocked(MonitorLocker* ml, bool paused) {
   if (paused) {
     ASSERT(!is_paused_on_start_);
diff --git a/runtime/vm/message_handler.h b/runtime/vm/message_handler.h
index 1d039c5..c52431d 100644
--- a/runtime/vm/message_handler.h
+++ b/runtime/vm/message_handler.h
@@ -81,6 +81,8 @@
     return live_ports_;
   }
 
+  void DebugDump();
+
   bool paused() const { return paused_ > 0; }
 
   void increment_paused() { paused_++; }
diff --git a/runtime/vm/method_recognizer.cc b/runtime/vm/method_recognizer.cc
index 45a6f59..03014ac 100644
--- a/runtime/vm/method_recognizer.cc
+++ b/runtime/vm/method_recognizer.cc
@@ -25,7 +25,19 @@
 }
 
 
-#define KIND_TO_STRING(class_name, function_name, enum_name, fp)               \
+intptr_t MethodRecognizer::ResultCid(const Function& function) {
+  switch (function.recognized_kind()) {
+#define DEFINE_CASE(cname, fname, ename, result_type, fingerprint) \
+    case k##ename: return k##result_type##Cid;
+    RECOGNIZED_LIST(DEFINE_CASE)
+#undef DEFINE_CASE
+    default:
+      return kDynamicCid;
+  }
+}
+
+
+#define KIND_TO_STRING(class_name, function_name, enum_name, type, fp) \
   #enum_name,
 static const char* recognized_list_method_name[] = {
   "Unknown",
@@ -51,7 +63,7 @@
   libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
   Function& func = Function::Handle();
 
-#define SET_RECOGNIZED_KIND(class_name, function_name, enum_name, fp)          \
+#define SET_RECOGNIZED_KIND(class_name, function_name, enum_name, type, fp)    \
   func = Library::GetFunction(libs, #class_name, #function_name);              \
   if (func.IsNull()) {                                                         \
     OS::PrintErr("Missing %s::%s\n", #class_name, #function_name);             \
@@ -74,7 +86,7 @@
 #define SET_IS_ALWAYS_INLINE(class_name, function_name, dest, fp)              \
   SET_FUNCTION_BIT(class_name, function_name, dest, fp, set_always_inline, true)
 
-#define SET_IS_NEVER_INLINE(class_name, function_name, dest, fp)              \
+#define SET_IS_NEVER_INLINE(class_name, function_name, dest, fp)               \
   SET_FUNCTION_BIT(class_name, function_name, dest, fp, set_is_inlinable, false)
 
 #define SET_IS_POLYMORPHIC_TARGET(class_name, function_name, dest, fp)         \
diff --git a/runtime/vm/method_recognizer.h b/runtime/vm/method_recognizer.h
index 6d7e1e8..083b477 100644
--- a/runtime/vm/method_recognizer.h
+++ b/runtime/vm/method_recognizer.h
@@ -9,293 +9,320 @@
 
 namespace dart {
 
-// (class-name, function-name, recognized enum, fingerprint).
+// (class-name, function-name, recognized enum, result type, fingerprint).
 // When adding a new function add a 0 as fingerprint, build and run to get the
 // correct fingerprint from the mismatch error.
 #define OTHER_RECOGNIZED_LIST(V)                                               \
-  V(::, identical, ObjectIdentical, 317103244)                                 \
-  V(ClassID, getID, ClassIDgetID, 1385157717)                                  \
-  V(Object, Object., ObjectConstructor, 1746278398)                            \
-  V(_List, ., ObjectArrayAllocate, 1661438741)                                 \
-  V(_TypedList, _getInt8, ByteArrayBaseGetInt8, 1508321565)                    \
-  V(_TypedList, _getUint8, ByteArrayBaseGetUint8, 953411007)                   \
-  V(_TypedList, _getInt16, ByteArrayBaseGetInt16, 433971756)                   \
-  V(_TypedList, _getUint16, ByteArrayBaseGetUint16, 1329446488)                \
-  V(_TypedList, _getInt32, ByteArrayBaseGetInt32, 137212209)                   \
-  V(_TypedList, _getUint32, ByteArrayBaseGetUint32, 499907480)                 \
-  V(_TypedList, _getInt64, ByteArrayBaseGetInt64, 1639388276)                  \
-  V(_TypedList, _getFloat32, ByteArrayBaseGetFloat32, 1672834581)              \
-  V(_TypedList, _getFloat64, ByteArrayBaseGetFloat64, 966634744)               \
-  V(_TypedList, _getFloat32x4, ByteArrayBaseGetFloat32x4, 1197581758)          \
-  V(_TypedList, _getInt32x4, ByteArrayBaseGetInt32x4, 810805548)               \
-  V(_TypedList, _setInt8, ByteArrayBaseSetInt8, 1317196265)                    \
-  V(_TypedList, _setUint8, ByteArrayBaseSetUint8, 1328908284)                  \
-  V(_TypedList, _setInt16, ByteArrayBaseSetInt16, 1827614958)                  \
-  V(_TypedList, _setUint16, ByteArrayBaseSetUint16, 1694054572)                \
-  V(_TypedList, _setInt32, ByteArrayBaseSetInt32, 915652649)                   \
-  V(_TypedList, _setUint32, ByteArrayBaseSetUint32, 1958474336)                \
-  V(_TypedList, _setInt64, ByteArrayBaseSetInt64, 1970687707)                  \
-  V(_TypedList, _setFloat32, ByteArrayBaseSetFloat32, 1853026980)              \
-  V(_TypedList, _setFloat64, ByteArrayBaseSetFloat64, 1197862362)              \
-  V(_TypedList, _setFloat32x4, ByteArrayBaseSetFloat32x4, 2093630771)          \
-  V(_TypedList, _setInt32x4, ByteArrayBaseSetInt32x4, 1982971324)              \
-  V(_StringBase, _interpolate, StringBaseInterpolate, 1872292681)              \
-  V(_IntegerImplementation, toDouble, IntegerToDouble, 792762465)              \
-  V(_Double, _add, DoubleAdd, 2213216)                                         \
-  V(_Double, _sub, DoubleSub, 1100692582)                                      \
-  V(_Double, _mul, DoubleMul, 436784097)                                       \
-  V(_Double, _div, DoubleDiv, 953317135)                                       \
-  V(::, min, MathMin, 1115051548)                                              \
-  V(::, max, MathMax, 1410473322)                                              \
-  V(::, _doublePow, MathDoublePow, 1770960781)                                 \
-  V(Float32x4, Float32x4., Float32x4Constructor, 93751705)                     \
-  V(Float32x4, Float32x4.zero, Float32x4Zero, 1193954374)                      \
-  V(Float32x4, Float32x4.splat, Float32x4Splat, 12296613)                      \
-  V(Float32x4, Float32x4.fromInt32x4Bits, Float32x4FromInt32x4Bits, 1188039061)\
-  V(Float32x4, Float32x4.fromFloat64x2, Float32x4FromFloat64x2, 1750763218)    \
-  V(Float32x4, shuffle, Float32x4Shuffle, 2015957023)                          \
-  V(Float32x4, shuffleMix, Float32x4ShuffleMix, 1099087979)                    \
-  V(Float32x4, get:signMask, Float32x4GetSignMask, 487049875)                  \
-  V(Float32x4, _cmpequal, Float32x4Equal, 127403211)                           \
-  V(Float32x4, _cmpgt, Float32x4GreaterThan, 2118391173)                       \
-  V(Float32x4, _cmpgte, Float32x4GreaterThanOrEqual, 557807661)                \
-  V(Float32x4, _cmplt, Float32x4LessThan, 1061691185)                          \
-  V(Float32x4, _cmplte, Float32x4LessThanOrEqual, 102608993)                   \
-  V(Float32x4, _cmpnequal, Float32x4NotEqual, 1873649982)                      \
-  V(Float32x4, _min, Float32x4Min, 1158016632)                                 \
-  V(Float32x4, _max, Float32x4Max, 118915526)                                  \
-  V(Float32x4, _scale, Float32x4Scale, 415757469)                              \
-  V(Float32x4, _sqrt, Float32x4Sqrt, 1934518992)                               \
-  V(Float32x4, _reciprocalSqrt, Float32x4ReciprocalSqrt, 1586141174)           \
-  V(Float32x4, _reciprocal, Float32x4Reciprocal, 1651466502)                   \
-  V(Float32x4, _negate, Float32x4Negate, 2142478676)                           \
-  V(Float32x4, _abs, Float32x4Absolute, 337704007)                             \
-  V(Float32x4, _clamp, Float32x4Clamp, 1107305005)                             \
-  V(Float32x4, withX, Float32x4WithX, 1311992575)                              \
-  V(Float32x4, withY, Float32x4WithY, 175290640)                               \
-  V(Float32x4, withZ, Float32x4WithZ, 837367384)                               \
-  V(Float32x4, withW, Float32x4WithW, 1625145605)                              \
-  V(Float64x2, Float64x2., Float64x2Constructor, 423355933)                    \
-  V(Float64x2, Float64x2.zero, Float64x2Zero, 2066666975)                      \
-  V(Float64x2, Float64x2.splat, Float64x2Splat, 716962994)                     \
-  V(Float64x2, Float64x2.fromFloat32x4, Float64x2FromFloat32x4, 792974246)     \
-  V(Float64x2, get:x, Float64x2GetX, 1488958362)                               \
-  V(Float64x2, get:y, Float64x2GetY, 1022688506)                               \
-  V(Float64x2, _negate, Float64x2Negate, 1693416311)                           \
-  V(Float64x2, abs, Float64x2Abs, 52403783)                                    \
-  V(Float64x2, sqrt, Float64x2Sqrt, 2012680669)                                \
-  V(Float64x2, get:signMask, Float64x2GetSignMask, 668856717)                  \
-  V(Float64x2, scale, Float64x2Scale, 646122081)                               \
-  V(Float64x2, withX, Float64x2WithX, 489409269)                               \
-  V(Float64x2, withY, Float64x2WithY, 943642284)                               \
-  V(Float64x2, min, Float64x2Min, 685235702)                                   \
-  V(Float64x2, max, Float64x2Max, 198659675)                                   \
-  V(Int32x4, Int32x4., Int32x4Constructor, 649173415)                          \
-  V(Int32x4, Int32x4.bool, Int32x4BoolConstructor, 458597857)                  \
-  V(Int32x4, Int32x4.fromFloat32x4Bits, Int32x4FromFloat32x4Bits, 2122470988)  \
-  V(Int32x4, get:flagX, Int32x4GetFlagX, 1446544324)                           \
-  V(Int32x4, get:flagY, Int32x4GetFlagY, 1148149370)                           \
-  V(Int32x4, get:flagZ, Int32x4GetFlagZ, 550901369)                            \
-  V(Int32x4, get:flagW, Int32x4GetFlagW, 1346664620)                           \
-  V(Int32x4, get:signMask, Int32x4GetSignMask, 740215269)                      \
-  V(Int32x4, shuffle, Int32x4Shuffle, 549194518)                               \
-  V(Int32x4, shuffleMix, Int32x4ShuffleMix, 1550866145)                        \
-  V(Int32x4, select, Int32x4Select, 1368318775)                                \
-  V(Int32x4, withFlagX, Int32x4WithFlagX, 250974159)                           \
-  V(Int32x4, withFlagY, Int32x4WithFlagY, 1686481348)                          \
-  V(Int32x4, withFlagZ, Int32x4WithFlagZ, 645582330)                           \
-  V(Int32x4, withFlagW, Int32x4WithFlagW, 878364277)                           \
-  V(Float32List, [], Float32ArrayGetIndexed, 1451643535)                       \
-  V(Float32List, []=, Float32ArraySetIndexed, 453873887)                       \
-  V(Int8List, [], Int8ArrayGetIndexed, 110819507)                              \
-  V(Int8List, []=, Int8ArraySetIndexed, 865684695)                             \
-  V(Uint8ClampedList, [], Uint8ClampedArrayGetIndexed, 41288685)               \
-  V(Uint8ClampedList, []=, Uint8ClampedArraySetIndexed, 687206488)             \
+  V(::, identical, ObjectIdentical, Bool, 0x12e69c8c)                          \
+  V(ClassID, getID, ClassIDgetID, Smi, 0x528fd455)                             \
+  V(Object, Object., ObjectConstructor, Dynamic, 0x681617fe)                   \
+  V(_List, ., ObjectArrayAllocate, Array, 0x63078b15)                          \
+  V(_TypedList, _getInt8, ByteArrayBaseGetInt8, Smi, 0x59e7291d)               \
+  V(_TypedList, _getUint8, ByteArrayBaseGetUint8, Smi, 0x38d3e5bf)             \
+  V(_TypedList, _getInt16, ByteArrayBaseGetInt16, Smi, 0x19dde22c)             \
+  V(_TypedList, _getUint16, ByteArrayBaseGetUint16, Smi, 0x4f3dbe58)           \
+  V(_TypedList, _getInt32, ByteArrayBaseGetInt32, Dynamic, 0x082db131)         \
+  V(_TypedList, _getUint32, ByteArrayBaseGetUint32, Dynamic, 0x1dcbfb98)       \
+  V(_TypedList, _getInt64, ByteArrayBaseGetInt64, Dynamic, 0x61b71474)         \
+  V(_TypedList, _getFloat32, ByteArrayBaseGetFloat32, Double, 0x63b56e15)      \
+  V(_TypedList, _getFloat64, ByteArrayBaseGetFloat64, Double, 0x399dacf8)      \
+  V(_TypedList, _getFloat32x4, ByteArrayBaseGetFloat32x4, Float32x4,           \
+    0x4761a5be)                                                                \
+  V(_TypedList, _getInt32x4, ByteArrayBaseGetInt32x4, Int32x4, 0x3053e92c)     \
+  V(_TypedList, _setInt8, ByteArrayBaseSetInt8, Dynamic, 0x4e82d1e9)           \
+  V(_TypedList, _setUint8, ByteArrayBaseSetUint8, Dynamic, 0x4f3587fc)         \
+  V(_TypedList, _setInt16, ByteArrayBaseSetInt16, Dynamic, 0x6cef30ee)         \
+  V(_TypedList, _setUint16, ByteArrayBaseSetUint16, Dynamic, 0x64f938ac)       \
+  V(_TypedList, _setInt32, ByteArrayBaseSetInt32, Dynamic, 0x3693c029)         \
+  V(_TypedList, _setUint32, ByteArrayBaseSetUint32, Dynamic, 0x74bbf260)       \
+  V(_TypedList, _setInt64, ByteArrayBaseSetInt64, Dynamic, 0x75764edb)         \
+  V(_TypedList, _setFloat32, ByteArrayBaseSetFloat32, Dynamic, 0x6e72f2a4)     \
+  V(_TypedList, _setFloat64, ByteArrayBaseSetFloat64, Dynamic, 0x4765edda)     \
+  V(_TypedList, _setFloat32x4, ByteArrayBaseSetFloat32x4, Dynamic, 0x7cca4533) \
+  V(_TypedList, _setInt32x4, ByteArrayBaseSetInt32x4, Dynamic, 0x7631bdbc)     \
+  V(_StringBase, _interpolate, StringBaseInterpolate, Dynamic, 0x6f98eb49)     \
+  V(_IntegerImplementation, toDouble, IntegerToDouble, Double, 0x2f409861)     \
+  V(_Double, _add, DoubleAdd, Double, 0x0021c560)                              \
+  V(_Double, _sub, DoubleSub, Double, 0x419b3c66)                              \
+  V(_Double, _mul, DoubleMul, Double, 0x1a08cbe1)                              \
+  V(_Double, _div, DoubleDiv, Double, 0x38d2770f)                              \
+  V(::, min, MathMin, Dynamic, 0x4276561c)                                     \
+  V(::, max, MathMax, Dynamic, 0x54121d6a)                                     \
+  V(::, _doublePow, MathDoublePow, Double, 0x698eb78d)                         \
+  V(Float32x4, Float32x4., Float32x4Constructor, Float32x4, 0x05968999)        \
+  V(Float32x4, Float32x4.zero, Float32x4Zero, Float32x4, 0x472a4c46)           \
+  V(Float32x4, Float32x4.splat, Float32x4Splat, Float32x4, 0x00bba1a5)         \
+  V(Float32x4, Float32x4.fromInt32x4Bits, Float32x4FromInt32x4Bits, Float32x4, \
+    0x46d00995)                                                                \
+  V(Float32x4, Float32x4.fromFloat64x2, Float32x4FromFloat64x2, Float32x4,     \
+    0x685a86d2)                                                                \
+  V(Float32x4, shuffle, Float32x4Shuffle, Float32x4, 0x7829101f)               \
+  V(Float32x4, shuffleMix, Float32x4ShuffleMix, Float32x4, 0x4182c06b)         \
+  V(Float32x4, get:signMask, Float32x4GetSignMask, Dynamic, 0x1d07ca93)        \
+  V(Float32x4, _cmpequal, Float32x4Equal, Int32x4, 0x079804cb)                 \
+  V(Float32x4, _cmpgt, Float32x4GreaterThan, Int32x4, 0x7e441585)              \
+  V(Float32x4, _cmpgte, Float32x4GreaterThanOrEqual, Int32x4, 0x213f782d)      \
+  V(Float32x4, _cmplt, Float32x4LessThan, Int32x4, 0x3f481f31)                 \
+  V(Float32x4, _cmplte, Float32x4LessThanOrEqual, Int32x4, 0x061db061)         \
+  V(Float32x4, _cmpnequal, Float32x4NotEqual, Int32x4, 0x6fada13e)             \
+  V(Float32x4, _min, Float32x4Min, Float32x4, 0x4505ee78)                      \
+  V(Float32x4, _max, Float32x4Max, Float32x4, 0x071681c6)                      \
+  V(Float32x4, _scale, Float32x4Scale, Float32x4, 0x18c7f49d)                  \
+  V(Float32x4, _sqrt, Float32x4Sqrt, Float32x4, 0x734e6ad0)                    \
+  V(Float32x4, _reciprocalSqrt, Float32x4ReciprocalSqrt, Float32x4,            \
+    0x5e8a97f6)                                                                \
+  V(Float32x4, _reciprocal, Float32x4Reciprocal, Float32x4, 0x626f6106)        \
+  V(Float32x4, _negate, Float32x4Negate, Float32x4, 0x7fb3a154)                \
+  V(Float32x4, _abs, Float32x4Absolute, Float32x4, 0x1420f447)                 \
+  V(Float32x4, _clamp, Float32x4Clamp, Float32x4, 0x4200222d)                  \
+  V(Float32x4, withX, Float32x4WithX, Float32x4, 0x4e336aff)                   \
+  V(Float32x4, withY, Float32x4WithY, Float32x4, 0x0a72b910)                   \
+  V(Float32x4, withZ, Float32x4WithZ, Float32x4, 0x31e93658)                   \
+  V(Float32x4, withW, Float32x4WithW, Float32x4, 0x60ddc105)                   \
+  V(Float64x2, Float64x2., Float64x2Constructor, Float64x2, 0x193be61d)        \
+  V(Float64x2, Float64x2.zero, Float64x2Zero, Float64x2, 0x7b2ed5df)           \
+  V(Float64x2, Float64x2.splat, Float64x2Splat, Float64x2, 0x2abbfcb2)         \
+  V(Float64x2, Float64x2.fromFloat32x4, Float64x2FromFloat32x4, Float64x2,     \
+    0x2f43d3a6)                                                                \
+  V(Float64x2, get:x, Float64x2GetX, Double, 0x58bfb39a)                       \
+  V(Float64x2, get:y, Float64x2GetY, Double, 0x3cf4fcfa)                       \
+  V(Float64x2, _negate, Float64x2Negate, Float64x2, 0x64ef7b77)                \
+  V(Float64x2, abs, Float64x2Abs, Float64x2, 0x031f9e47)                       \
+  V(Float64x2, sqrt, Float64x2Sqrt, Float64x2, 0x77f711dd)                     \
+  V(Float64x2, get:signMask, Float64x2GetSignMask, Dynamic, 0x27ddf18d)        \
+  V(Float64x2, scale, Float64x2Scale, Float64x2, 0x26830a61)                   \
+  V(Float64x2, withX, Float64x2WithX, Float64x2, 0x1d2bcaf5)                   \
+  V(Float64x2, withY, Float64x2WithY, Float64x2, 0x383ed6ac)                   \
+  V(Float64x2, min, Float64x2Min, Float64x2, 0x28d7ddf6)                       \
+  V(Float64x2, max, Float64x2Max, Float64x2, 0x0bd74e5b)                       \
+  V(Int32x4, Int32x4., Int32x4Constructor, Int32x4, 0x26b199a7)                \
+  V(Int32x4, Int32x4.bool, Int32x4BoolConstructor, Int32x4, 0x1b55a5e1)        \
+  V(Int32x4, Int32x4.fromFloat32x4Bits, Int32x4FromFloat32x4Bits, Int32x4,     \
+    0x7e82564c)                                                                \
+  V(Int32x4, get:flagX, Int32x4GetFlagX, Bool, 0x563883c4)                     \
+  V(Int32x4, get:flagY, Int32x4GetFlagY, Bool, 0x446f5e7a)                     \
+  V(Int32x4, get:flagZ, Int32x4GetFlagZ, Bool, 0x20d61679)                     \
+  V(Int32x4, get:flagW, Int32x4GetFlagW, Bool, 0x504478ac)                     \
+  V(Int32x4, get:signMask, Int32x4GetSignMask, Dynamic, 0x2c1ec9e5)            \
+  V(Int32x4, shuffle, Int32x4Shuffle, Int32x4, 0x20bc0b16)                     \
+  V(Int32x4, shuffleMix, Int32x4ShuffleMix, Int32x4, 0x5c7056e1)               \
+  V(Int32x4, select, Int32x4Select, Float32x4, 0x518ee337)                     \
+  V(Int32x4, withFlagX, Int32x4WithFlagX, Int32x4, 0x0ef58fcf)                 \
+  V(Int32x4, withFlagY, Int32x4WithFlagY, Int32x4, 0x6485a9c4)                 \
+  V(Int32x4, withFlagZ, Int32x4WithFlagZ, Int32x4, 0x267acdfa)                 \
+  V(Int32x4, withFlagW, Int32x4WithFlagW, Int32x4, 0x345ac675)                 \
+  V(Float32List, [], Float32ArrayGetIndexed, Double, 0x5686528f)               \
+  V(Float32List, []=, Float32ArraySetIndexed, Dynamic, 0x1b0d90df)             \
+  V(Int8List, [], Int8ArrayGetIndexed, Smi, 0x069af8b3)                        \
+  V(Int8List, []=, Int8ArraySetIndexed, Dynamic, 0x33994cd7)                   \
+  V(Uint8ClampedList, [], Uint8ClampedArrayGetIndexed, Smi, 0x027603ed)        \
+  V(Uint8ClampedList, []=, Uint8ClampedArraySetIndexed, Dynamic, 0x28f5f058)   \
   V(_ExternalUint8ClampedArray, [], ExternalUint8ClampedArrayGetIndexed,       \
-    41288685)                                                                  \
+    Smi, 0x027603ed)                                                           \
   V(_ExternalUint8ClampedArray, []=, ExternalUint8ClampedArraySetIndexed,      \
-    687206488)                                                                 \
-  V(Int16List, [], Int16ArrayGetIndexed, 389863073)                            \
-  V(Int16List, []=, Int16ArraySetIndexed, 855133756)                           \
-  V(Uint16List, [], Uint16ArrayGetIndexed, 1053739567)                         \
-  V(Uint16List, []=, Uint16ArraySetIndexed, 1547307961)                        \
-  V(Int32List, [], Int32ArrayGetIndexed, 640610057)                            \
-  V(Int32List, []=, Int32ArraySetIndexed, 453358705)                           \
-  V(Int64List, [], Int64ArrayGetIndexed, 202150810)                            \
-  V(Int64List, []=, Int64ArraySetIndexed, 924110852)                           \
-  V(Float32x4List, [], Float32x4ArrayGetIndexed, 29819259)                     \
-  V(Float32x4List, []=, Float32x4ArraySetIndexed, 1458062250)                  \
-  V(Int32x4List, [], Int32x4ArrayGetIndexed, 137707405)                        \
-  V(Int32x4List, []=, Int32x4ArraySetIndexed, 496650149)                       \
-  V(Float64x2List, [], Float64x2ArrayGetIndexed, 1721439384)                   \
-  V(Float64x2List, []=, Float64x2ArraySetIndexed, 1994027006)                  \
-  V(_Bigint, get:_neg, Bigint_getNeg, 2079423063)                              \
-  V(_Bigint, get:_used, Bigint_getUsed, 1426329619)                            \
-  V(_Bigint, get:_digits, Bigint_getDigits, 1185333683)                        \
-  V(_HashVMBase, get:_index, LinkedHashMap_getIndex, 2104211307)               \
-  V(_HashVMBase, set:_index, LinkedHashMap_setIndex, 1273697266)               \
-  V(_HashVMBase, get:_data, LinkedHashMap_getData, 1274399923)                 \
-  V(_HashVMBase, set:_data, LinkedHashMap_setData, 1611093357)                 \
-  V(_HashVMBase, get:_usedData, LinkedHashMap_getUsedData, 367462469)          \
-  V(_HashVMBase, set:_usedData, LinkedHashMap_setUsedData, 1049390812)         \
-  V(_HashVMBase, get:_hashMask, LinkedHashMap_getHashMask, 902147072)          \
-  V(_HashVMBase, set:_hashMask, LinkedHashMap_setHashMask, 1236137630)         \
-  V(_HashVMBase, get:_deletedKeys, LinkedHashMap_getDeletedKeys, 812542585)    \
-  V(_HashVMBase, set:_deletedKeys, LinkedHashMap_setDeletedKeys, 1072259010)   \
+    Dynamic, 0x28f5f058)                                                       \
+  V(Int16List, [], Int16ArrayGetIndexed, Smi, 0x173cd6a1)                      \
+  V(Int16List, []=, Int16ArraySetIndexed, Dynamic, 0x32f84e3c)                 \
+  V(Uint16List, [], Uint16ArrayGetIndexed, Smi, 0x3ececa2f)                    \
+  V(Uint16List, []=, Uint16ArraySetIndexed, Dynamic, 0x5c3a0bb9)               \
+  V(Int32List, [], Int32ArrayGetIndexed, Dynamic, 0x262eef09)                  \
+  V(Int32List, []=, Int32ArraySetIndexed, Dynamic, 0x1b05b471)                 \
+  V(Int64List, [], Int64ArrayGetIndexed, Dynamic, 0x0c0c939a)                  \
+  V(Int64List, []=, Int64ArraySetIndexed, Dynamic, 0x3714d004)                 \
+  V(Float32x4List, [], Float32x4ArrayGetIndexed, Float32x4, 0x01c7017b)        \
+  V(Float32x4List, []=, Float32x4ArraySetIndexed, Dynamic, 0x56e843aa)         \
+  V(Int32x4List, [], Int32x4ArrayGetIndexed, Int32x4, 0x08353f8d)              \
+  V(Int32x4List, []=, Int32x4ArraySetIndexed, Dynamic, 0x1d9a47a5)             \
+  V(Float64x2List, [], Float64x2ArrayGetIndexed, Float64x2, 0x669b1498)        \
+  V(Float64x2List, []=, Float64x2ArraySetIndexed, Dynamic, 0x76da6ffe)         \
+  V(_Bigint, get:_neg, Bigint_getNeg, Bool, 0x7bf17a57)                        \
+  V(_Bigint, get:_used, Bigint_getUsed, Smi, 0x55041013)                       \
+  V(_Bigint, get:_digits, Bigint_getDigits, TypedDataUint32Array, 0x46a6c1b3)  \
+  V(_HashVMBase, get:_index, LinkedHashMap_getIndex, Dynamic, 0x7d6bb76b)      \
+  V(_HashVMBase, set:_index, LinkedHashMap_setIndex, Dynamic, 0x4beb13f2)      \
+  V(_HashVMBase, get:_data, LinkedHashMap_getData, Array, 0x4bf5ccb3)          \
+  V(_HashVMBase, set:_data, LinkedHashMap_setData, Dynamic, 0x6007556d)        \
+  V(_HashVMBase, get:_usedData, LinkedHashMap_getUsedData, Smi, 0x15e70845)    \
+  V(_HashVMBase, set:_usedData, LinkedHashMap_setUsedData, Dynamic, 0x3e8c6edc)\
+  V(_HashVMBase, get:_hashMask, LinkedHashMap_getHashMask, Smi, 0x35c5ac00)    \
+  V(_HashVMBase, set:_hashMask, LinkedHashMap_setHashMask, Dynamic, 0x49adf69e)\
+  V(_HashVMBase, get:_deletedKeys, LinkedHashMap_getDeletedKeys, Smi,          \
+    0x306e6a79)                                                                \
+  V(_HashVMBase, set:_deletedKeys, LinkedHashMap_setDeletedKeys, Dynamic,      \
+    0x3fe95fc2)                                                                \
 
 
 // List of intrinsics:
 // (class-name, function-name, intrinsification method, fingerprint).
 #define CORE_LIB_INTRINSIC_LIST(V)                                             \
-  V(_Smi, ~, Smi_bitNegate, 1673522705)                                        \
-  V(_Smi, get:bitLength, Smi_bitLength, 632480332)                             \
-  V(_Bigint, _lsh, Bigint_lsh, 1557746963)                                     \
-  V(_Bigint, _rsh, Bigint_rsh, 761843937)                                      \
-  V(_Bigint, _absAdd, Bigint_absAdd, 1227835493)                               \
-  V(_Bigint, _absSub, Bigint_absSub, 390740532)                                \
-  V(_Bigint, _mulAdd, Bigint_mulAdd, 617534446)                                \
-  V(_Bigint, _sqrAdd, Bigint_sqrAdd, 1623635507)                               \
-  V(_Bigint, _estQuotientDigit, Bigint_estQuotientDigit, 797340802)            \
-  V(_Montgomery, _mulMod, Montgomery_mulMod, 1947987219)                       \
-  V(_Double, >, Double_greaterThan, 1453001345)                                \
-  V(_Double, >=, Double_greaterEqualThan, 1815180096)                          \
-  V(_Double, <, Double_lessThan, 652059836)                                    \
-  V(_Double, <=, Double_lessEqualThan, 512138528)                              \
-  V(_Double, ==, Double_equal, 1468668497)                                     \
-  V(_Double, +, Double_add, 1269587413)                                        \
-  V(_Double, -, Double_sub, 1644506555)                                        \
-  V(_Double, *, Double_mul, 600860888)                                         \
-  V(_Double, /, Double_div, 1220198876)                                        \
-  V(_Double, get:isNaN, Double_getIsNaN, 184085483)                            \
-  V(_Double, get:isNegative, Double_getIsNegative, 978911030)                  \
-  V(_Double, _mulFromInteger, Double_mulFromInteger, 856594998)                \
-  V(_Double, .fromInteger, DoubleFromInteger, 2129942595)                      \
-  V(_List, []=, ObjectArraySetIndexed, 886228780)                              \
-  V(_GrowableList, .withData, GrowableArray_Allocate, 631736030)               \
-  V(_GrowableList, add, GrowableArray_add, 219371757)                          \
-  V(_RegExp, _ExecuteMatch, RegExp_ExecuteMatch, 1614206970)                   \
-  V(Object, ==, ObjectEquals, 291909336)                                       \
-  V(Object, get:runtimeType, ObjectRuntimeType, 15188587)                      \
-  V(_StringBase, get:hashCode, String_getHashCode, 2026040200)                 \
-  V(_StringBase, get:isEmpty, StringBaseIsEmpty, 1958879178)                   \
-  V(_StringBase, _substringMatches, StringBaseSubstringMatches, 797253099)     \
-  V(_StringBase, [], StringBaseCharAt, 754527301)                              \
-  V(_OneByteString, get:hashCode, OneByteString_getHashCode, 2026040200)       \
+  V(_Smi, ~, Smi_bitNegate, Smi, 0x63bfee11)                                   \
+  V(_Smi, get:bitLength, Smi_bitLength, Smi, 0x25b2e24c)                       \
+  V(_Smi, _bitAndFromSmi, Smi_bitAndFromSmi, Smi, 0x0df806ed)                  \
+  V(_Bigint, _lsh, Bigint_lsh, Dynamic, 0x5cd95513)                            \
+  V(_Bigint, _rsh, Bigint_rsh, Dynamic, 0x2d68d0e1)                            \
+  V(_Bigint, _absAdd, Bigint_absAdd, Dynamic, 0x492f4865)                      \
+  V(_Bigint, _absSub, Bigint_absSub, Dynamic, 0x174a3a34)                      \
+  V(_Bigint, _mulAdd, Bigint_mulAdd, Dynamic, 0x24ced3ee)                      \
+  V(_Bigint, _sqrAdd, Bigint_sqrAdd, Dynamic, 0x60c6b633)                      \
+  V(_Bigint, _estQuotientDigit, Bigint_estQuotientDigit, Dynamic, 0x2f867482)  \
+  V(_Montgomery, _mulMod, Montgomery_mulMod, Dynamic, 0x741bed13)              \
+  V(_Double, >, Double_greaterThan, Bool, 0x569b0a81)                          \
+  V(_Double, >=, Double_greaterEqualThan, Bool, 0x6c317340)                    \
+  V(_Double, <, Double_lessThan, Bool, 0x26dda4bc)                             \
+  V(_Double, <=, Double_lessEqualThan, Bool, 0x1e869d20)                       \
+  V(_Double, ==, Double_equal, Bool, 0x578a1a51)                               \
+  V(_Double, +, Double_add, Double, 0x4bac5dd5)                                \
+  V(_Double, -, Double_sub, Double, 0x62052dbb)                                \
+  V(_Double, *, Double_mul, Double, 0x23d068d8)                                \
+  V(_Double, /, Double_div, Double, 0x48bac1dc)                                \
+  V(_Double, get:isNaN, Double_getIsNaN, Bool, 0x0af8ebeb)                     \
+  V(_Double, get:isNegative, Double_getIsNegative, Bool, 0x3a58ff36)           \
+  V(_Double, _mulFromInteger, Double_mulFromInteger, Double, 0x330e9a36)       \
+  V(_Double, .fromInteger, DoubleFromInteger, Double, 0x7ef45843)              \
+  V(_List, []=, ObjectArraySetIndexed, Dynamic, 0x34d2c72c)                    \
+  V(_GrowableList, .withData, GrowableArray_Allocate, GrowableObjectArray,     \
+    0x25a786de)                                                                \
+  V(_GrowableList, add, GrowableArray_add, Dynamic, 0x0d1358ed)                \
+  V(_RegExp, _ExecuteMatch, RegExp_ExecuteMatch, Dynamic, 0x6036d7fa)          \
+  V(Object, ==, ObjectEquals, Bool, 0x11662ed8)                                \
+  V(Object, get:runtimeType, ObjectRuntimeType, Type, 0x00e7c26b)              \
+  V(_StringBase, get:hashCode, String_getHashCode, Smi, 0x78c2eb88)            \
+  V(_StringBase, get:isEmpty, StringBaseIsEmpty, Bool, 0x74c21fca)             \
+  V(_StringBase, _substringMatches, StringBaseSubstringMatches, Bool,          \
+    0x2f851deb)                                                                \
+  V(_StringBase, [], StringBaseCharAt, Dynamic, 0x2cf92c45)                    \
+  V(_OneByteString, get:hashCode, OneByteString_getHashCode, Smi, 0x78c2eb88)  \
   V(_OneByteString, _substringUncheckedNative,                                 \
-    OneByteString_substringUnchecked, 1670133538)                              \
-  V(_OneByteString, _setAt, OneByteStringSetAt, 1160066031)                    \
-  V(_OneByteString, _allocate, OneByteString_allocate, 1028631946)             \
-  V(_OneByteString, ==, OneByteString_equality, 1062844160)                    \
-  V(_TwoByteString, ==, TwoByteString_equality, 1062844160)                    \
+    OneByteString_substringUnchecked, OneByteString, 0x638c3722)               \
+  V(_OneByteString, _setAt, OneByteStringSetAt, Dynamic, 0x452533ef)           \
+  V(_OneByteString, _allocate, OneByteString_allocate, OneByteString,          \
+    0x3d4fad8a)                                                                \
+  V(_OneByteString, ==, OneByteString_equality, Bool, 0x3f59b700)              \
+  V(_TwoByteString, ==, TwoByteString_equality, Bool, 0x3f59b700)              \
 
 
 #define CORE_INTEGER_LIB_INTRINSIC_LIST(V)                                     \
   V(_IntegerImplementation, _addFromInteger, Integer_addFromInteger,           \
-    2042488139)                                                                \
-  V(_IntegerImplementation, +, Integer_add, 239272130)                         \
-  V(_IntegerImplementation, _subFromInteger, Integer_subFromInteger, 957923759)\
-  V(_IntegerImplementation, -, Integer_sub, 216175811)                         \
+    Dynamic, 0x79bde54b)                                                       \
+  V(_IntegerImplementation, +, Integer_add, Dynamic, 0x0e4300c2)               \
+  V(_IntegerImplementation, _subFromInteger, Integer_subFromInteger, Dynamic,  \
+    0x3918c1af)                                                                \
+  V(_IntegerImplementation, -, Integer_sub, Dynamic, 0x0ce294c3)               \
   V(_IntegerImplementation, _mulFromInteger, Integer_mulFromInteger,           \
-    2032062140)                                                                \
-  V(_IntegerImplementation, *, Integer_mul, 1301152164)                        \
+    Dynamic, 0x791ecebc)                                                       \
+  V(_IntegerImplementation, *, Integer_mul, Dynamic, 0x4d8e01a4)               \
   V(_IntegerImplementation, _moduloFromInteger, Integer_moduloFromInteger,     \
-    779285842)                                                                 \
-  V(_IntegerImplementation, ~/, Integer_truncDivide, 1018128256)               \
-  V(_IntegerImplementation, unary-, Integer_negate, 1507648892)                \
+    Dynamic, 0x2e72f552)                                                       \
+  V(_IntegerImplementation, ~/, Integer_truncDivide, Dynamic, 0x3caf6780)      \
+  V(_IntegerImplementation, unary-, Integer_negate, Dynamic, 0x59dce57c)       \
   V(_IntegerImplementation, _bitAndFromInteger, Integer_bitAndFromInteger,     \
-    503046514)                                                                 \
-  V(_IntegerImplementation, &, Integer_bitAnd, 1500136766)                     \
+    Dynamic, 0x1dfbe172)                                                       \
+  V(_IntegerImplementation, &, Integer_bitAnd, Dynamic, 0x596a453e)            \
   V(_IntegerImplementation, _bitOrFromInteger, Integer_bitOrFromInteger,       \
-    1031383580)                                                                \
-  V(_IntegerImplementation, |, Integer_bitOr, 119412028)                       \
+    Dynamic, 0x3d79aa1c)                                                       \
+  V(_IntegerImplementation, |, Integer_bitOr, Dynamic, 0x071e153c)             \
   V(_IntegerImplementation, _bitXorFromInteger, Integer_bitXorFromInteger,     \
-    1339506501)                                                                \
-  V(_IntegerImplementation, ^, Integer_bitXor, 210430781)                      \
+    Dynamic, 0x4fd73f45)                                                       \
+  V(_IntegerImplementation, ^, Integer_bitXor, Dynamic, 0x0c8aeb3d)            \
   V(_IntegerImplementation, _greaterThanFromInteger,                           \
-    Integer_greaterThanFromInt, 780147656)                                     \
-  V(_IntegerImplementation, >, Integer_greaterThan, 673741711)                 \
-  V(_IntegerImplementation, ==, Integer_equal, 272474439)                      \
-  V(_IntegerImplementation, _equalToInteger, Integer_equalToInteger,           \
-    2004079901)                                                                \
-  V(_IntegerImplementation, <, Integer_lessThan, 652059836)                    \
-  V(_IntegerImplementation, <=, Integer_lessEqualThan, 512138528)              \
-  V(_IntegerImplementation, >=, Integer_greaterEqualThan, 1815180096)          \
-  V(_IntegerImplementation, <<, Integer_shl, 1127538624)                       \
-  V(_IntegerImplementation, >>, Integer_sar, 1243972513)                       \
-  V(_Double, toInt, DoubleToInteger, 653210699)
+    Integer_greaterThanFromInt, Bool, 0x2e801bc8)                              \
+  V(_IntegerImplementation, >, Integer_greaterThan, Bool, 0x28287b8f)          \
+  V(_IntegerImplementation, ==, Integer_equal, Bool, 0x103da147)               \
+  V(_IntegerImplementation, _equalToInteger, Integer_equalToInteger, Bool,     \
+    0x7773d51d)                                                                \
+  V(_IntegerImplementation, <, Integer_lessThan, Bool, 0x26dda4bc)             \
+  V(_IntegerImplementation, <=, Integer_lessEqualThan, Bool, 0x1e869d20)       \
+  V(_IntegerImplementation, >=, Integer_greaterEqualThan, Bool, 0x6c317340)    \
+  V(_IntegerImplementation, <<, Integer_shl, Dynamic, 0x4334dfc0)              \
+  V(_IntegerImplementation, >>, Integer_sar, Dynamic, 0x4a2583a1)              \
+  V(_Double, toInt, DoubleToInteger, Dynamic, 0x26ef344b)
 
 
 #define MATH_LIB_INTRINSIC_LIST(V)                                             \
-  V(::, sqrt, MathSqrt, 417912310)                                             \
-  V(_Random, _nextState, Random_nextState, 508231939)                          \
+  V(::, sqrt, MathSqrt, Double, 0x18e8d5f6)                                    \
+  V(_Random, _nextState, Random_nextState, Dynamic, 0x1e4b0103)                \
 
 #define GRAPH_MATH_LIB_INTRINSIC_LIST(V)                                       \
-  V(::, sin, MathSin, 65032)                                                   \
-  V(::, cos, MathCos, 2006233918)                                              \
-  V(::, tan, MathTan, 1276867325)                                              \
-  V(::, asin, MathAsin, 1678592173)                                            \
-  V(::, acos, MathAcos, 1121218433)                                            \
-  V(::, atan, MathAtan, 1109653625)                                            \
-  V(::, atan2, MathAtan2, 894696289)                                           \
+  V(::, sin, MathSin, Double, 0x0000fe08)                                      \
+  V(::, cos, MathCos, Double, 0x7794b33e)                                      \
+  V(::, tan, MathTan, Double, 0x4c1b72fd)                                      \
+  V(::, asin, MathAsin, Double, 0x640d48ad)                                    \
+  V(::, acos, MathAcos, Double, 0x42d46f81)                                    \
+  V(::, atan, MathAtan, Double, 0x4223f879)                                    \
+  V(::, atan2, MathAtan2, Double, 0x3553fb61)                                  \
 
 #define TYPED_DATA_LIB_INTRINSIC_LIST(V)                                       \
-  V(Int8List, ., TypedData_Int8Array_factory, 779569635)                       \
-  V(Uint8List, ., TypedData_Uint8Array_factory, 1790399545)                    \
-  V(Uint8ClampedList, ., TypedData_Uint8ClampedArray_factory, 405875159)       \
-  V(Int16List, ., TypedData_Int16Array_factory, 347431914)                     \
-  V(Uint16List, ., TypedData_Uint16Array_factory, 121990116)                   \
-  V(Int32List, ., TypedData_Int32Array_factory, 1540657744)                    \
-  V(Uint32List, ., TypedData_Uint32Array_factory, 1012511652)                  \
-  V(Int64List, ., TypedData_Int64Array_factory, 1473796807)                    \
-  V(Uint64List, ., TypedData_Uint64Array_factory, 738799620)                   \
-  V(Float32List, ., TypedData_Float32Array_factory, 1938690635)                \
-  V(Float64List, ., TypedData_Float64Array_factory, 1344005361)                \
-  V(Float32x4List, ., TypedData_Float32x4Array_factory, 2055067416)            \
-  V(Int32x4List, ., TypedData_Int32x4Array_factory, 504220232)                 \
-  V(Float64x2List, ., TypedData_Float64x2Array_factory, 416019673)             \
+  V(Int8List, ., TypedData_Int8Array_factory, TypedDataInt8Array, 0x2e7749e3)  \
+  V(Uint8List, ., TypedData_Uint8Array_factory, TypedDataUint8Array,           \
+    0x6ab75439)                                                                \
+  V(Uint8ClampedList, ., TypedData_Uint8ClampedArray_factory,                  \
+    TypedDataUint8ClampedArray, 0x183129d7)                                    \
+  V(Int16List, ., TypedData_Int16Array_factory, TypedDataInt16Array,           \
+    0x14b563ea)                                                                \
+  V(Uint16List, ., TypedData_Uint16Array_factory, TypedDataUint16Array,        \
+    0x07456be4)                                                                \
+  V(Int32List, ., TypedData_Int32Array_factory, TypedDataInt32Array,           \
+    0x5bd49250)                                                                \
+  V(Uint32List, ., TypedData_Uint32Array_factory,                              \
+    TypedDataUint32Array, 0x3c59b3a4)                                          \
+  V(Int64List, ., TypedData_Int64Array_factory,                                \
+    TypedDataInt64Array, 0x57d85ac7)                                           \
+  V(Uint64List, ., TypedData_Uint64Array_factory,                              \
+    TypedDataUint64Array, 0x2c093004)                                          \
+  V(Float32List, ., TypedData_Float32Array_factory,                            \
+    TypedDataFloat32Array, 0x738e124b)                                         \
+  V(Float64List, ., TypedData_Float64Array_factory,                            \
+    TypedDataFloat64Array, 0x501be4f1)                                         \
+  V(Float32x4List, ., TypedData_Float32x4Array_factory,                        \
+    TypedDataFloat32x4Array, 0x7a7dd718)                                       \
+  V(Int32x4List, ., TypedData_Int32x4Array_factory,                            \
+    TypedDataInt32x4Array, 0x1e0dca48)                                         \
+  V(Float64x2List, ., TypedData_Float64x2Array_factory,                        \
+    TypedDataFloat64x2Array, 0x18cbf4d9)                                       \
 
 #define GRAPH_TYPED_DATA_INTRINSICS_LIST(V)                                    \
-  V(Uint8List, [], Uint8ArrayGetIndexed, 41288685)                             \
-  V(Uint8List, []=, Uint8ArraySetIndexed, 101536342)                           \
-  V(_ExternalUint8Array, [], ExternalUint8ArrayGetIndexed, 41288685)           \
-  V(_ExternalUint8Array, []=, ExternalUint8ArraySetIndexed, 101536342)         \
-  V(Uint32List, [], Uint32ArrayGetIndexed, 1614870523)                         \
-  V(Uint32List, []=, Uint32ArraySetIndexed, 978194713)                         \
-  V(Float64List, []=, Float64ArraySetIndexed, 328934501)                       \
-  V(Float64List, [], Float64ArrayGetIndexed, 2049378701)                       \
-  V(_TypedList, get:length, TypedDataLength, 546364442)                        \
-  V(Float32x4, get:x, Float32x4ShuffleX, 1674625343)                           \
-  V(Float32x4, get:y, Float32x4ShuffleY, 540293915)                            \
-  V(Float32x4, get:z, Float32x4ShuffleZ, 320347578)                            \
-  V(Float32x4, get:w, Float32x4ShuffleW, 1770606624)                           \
-  V(Float32x4, _mul, Float32x4Mul, 42807622)                                   \
-  V(Float32x4, _sub, Float32x4Sub, 103774455)                                  \
-  V(Float32x4, _add, Float32x4Add, 1352634374)                                 \
+  V(Uint8List, [], Uint8ArrayGetIndexed, Smi, 0x027603ed)                      \
+  V(Uint8List, []=, Uint8ArraySetIndexed, Dynamic, 0x060d5256)                 \
+  V(_ExternalUint8Array, [], ExternalUint8ArrayGetIndexed, Smi, 0x027603ed)    \
+  V(_ExternalUint8Array, []=, ExternalUint8ArraySetIndexed, Dynamic,           \
+    0x060d5256)                                                                \
+  V(Uint32List, [], Uint32ArrayGetIndexed, Dynamic, 0x6040f7fb)                \
+  V(Uint32List, []=, Uint32ArraySetIndexed, Dynamic, 0x3a4e1119)               \
+  V(Float64List, [], Float64ArrayGetIndexed, Double, 0x7a27098d)               \
+  V(Float64List, []=, Float64ArraySetIndexed, Dynamic, 0x139b2465)             \
+  V(_TypedList, get:length, TypedDataLength, Smi, 0x2090dc1a)                  \
+  V(Float32x4, get:x, Float32x4ShuffleX, Double, 0x63d0c13f)                   \
+  V(Float32x4, get:y, Float32x4ShuffleY, Double, 0x20343b1b)                   \
+  V(Float32x4, get:z, Float32x4ShuffleZ, Double, 0x13181dba)                   \
+  V(Float32x4, get:w, Float32x4ShuffleW, Double, 0x69895020)                   \
+  V(Float32x4, _mul, Float32x4Mul, Float32x4, 0x028d3146)                      \
+  V(Float32x4, _sub, Float32x4Sub, Float32x4, 0x062f78f7)                      \
+  V(Float32x4, _add, Float32x4Add, Float32x4, 0x509f9006)                      \
 
 #define GRAPH_CORE_INTRINSICS_LIST(V)                                          \
-  V(_List, get:length, ObjectArrayLength, 630471378)                           \
-  V(_List, [], ObjectArrayGetIndexed, 360400496)                               \
-  V(_ImmutableList, get:length, ImmutableArrayLength, 630471378)               \
-  V(_ImmutableList, [], ImmutableArrayGetIndexed, 360400496)                   \
-  V(_GrowableList, get:length, GrowableArrayLength, 417111542)                 \
-  V(_GrowableList, get:_capacity, GrowableArrayCapacity, 41110914)             \
-  V(_GrowableList, _setData, GrowableArraySetData, 210059283)                  \
-  V(_GrowableList, _setLength, GrowableArraySetLength, 335652822)              \
-  V(_GrowableList, [], GrowableArrayGetIndexed, 1957529650)                    \
-  V(_GrowableList, []=, GrowableArraySetIndexed, 225246870)                    \
-  V(_StringBase, get:length, StringBaseLength, 707533587)                      \
-  V(_OneByteString, codeUnitAt, OneByteStringCodeUnitAt, 1436590579)           \
-  V(_TwoByteString, codeUnitAt, TwoByteStringCodeUnitAt, 1436590579)           \
+  V(_List, get:length, ObjectArrayLength, Smi, 0x25943ad2)                     \
+  V(_List, [], ObjectArrayGetIndexed, Dynamic, 0x157b4670)                     \
+  V(_ImmutableList, get:length, ImmutableArrayLength, Smi, 0x25943ad2)         \
+  V(_ImmutableList, [], ImmutableArrayGetIndexed, Dynamic, 0x157b4670)         \
+  V(_GrowableList, get:length, GrowableArrayLength, Smi, 0x18dc9df6)           \
+  V(_GrowableList, get:_capacity, GrowableArrayCapacity, Smi, 0x02734d82)      \
+  V(_GrowableList, _setData, GrowableArraySetData, Dynamic, 0x0c854013)        \
+  V(_GrowableList, _setLength, GrowableArraySetLength, Dynamic, 0x1401a7d6)    \
+  V(_GrowableList, [], GrowableArrayGetIndexed, Dynamic, 0x74ad8832)           \
+  V(_GrowableList, []=, GrowableArraySetIndexed, Dynamic, 0x0d6cfe96)          \
+  V(_StringBase, get:length, StringBaseLength, Smi, 0x2a2c1b13)                \
+  V(_OneByteString, codeUnitAt, OneByteStringCodeUnitAt, Smi, 0x55a0a1f3)      \
+  V(_TwoByteString, codeUnitAt, TwoByteStringCodeUnitAt, Smi, 0x55a0a1f3)      \
   V(_ExternalOneByteString, codeUnitAt, ExternalOneByteStringCodeUnitAt,       \
-    1436590579)                                                                \
+    Smi, 0x55a0a1f3)                                                           \
   V(_ExternalTwoByteString, codeUnitAt, ExternalTwoByteStringCodeUnitAt,       \
-    1436590579)                                                                \
-  V(_Double, unary-, DoubleFlipSignBit, 1783281169)                            \
-  V(_Double, truncateToDouble, DoubleTruncate, 791143891)                      \
-  V(_Double, roundToDouble, DoubleRound, 797558034)                            \
-  V(_Double, floorToDouble, DoubleFloor, 1789426271)                           \
-  V(_Double, ceilToDouble, DoubleCeil, 453271198)                              \
-  V(_Double, _modulo, DoubleMod, 776062204)
+    Smi, 0x55a0a1f3)                                                           \
+  V(_Double, unary-, DoubleFlipSignBit, Double, 0x6a4ab611)                    \
+  V(_Double, truncateToDouble, DoubleTruncate, Double, 0x2f27e5d3)             \
+  V(_Double, roundToDouble, DoubleRound, Double, 0x2f89c512)                   \
+  V(_Double, floorToDouble, DoubleFloor, Double, 0x6aa87a5f)                   \
+  V(_Double, ceilToDouble, DoubleCeil, Double, 0x1b045e9e)                     \
+  V(_Double, _modulo, DoubleMod, Double, 0x2e41c4fc)
 
 
 #define GRAPH_INTRINSICS_LIST(V)                                               \
@@ -304,10 +331,11 @@
   GRAPH_MATH_LIB_INTRINSIC_LIST(V)                                             \
 
 #define DEVELOPER_LIB_INTRINSIC_LIST(V)                                        \
-  V(_UserTag, makeCurrent, UserTag_makeCurrent, 187721469)                     \
-  V(::, _getDefaultTag, UserTag_defaultTag, 350077879)                         \
-  V(::, _getCurrentTag, Profiler_getCurrentTag, 1215225901)                    \
-  V(::, _isDartStreamEnabled, Timeline_isDartStreamEnabled, 1072246292)        \
+  V(_UserTag, makeCurrent, UserTag_makeCurrent, Dynamic, 0x0b3066fd)           \
+  V(::, _getDefaultTag, UserTag_defaultTag, Dynamic, 0x14ddc3b7)               \
+  V(::, _getCurrentTag, Profiler_getCurrentTag, Dynamic, 0x486ee02d)           \
+  V(::, _isDartStreamEnabled, Timeline_isDartStreamEnabled, Dynamic,           \
+    0x3fe92e14)                                                                \
 
 #define ALL_INTRINSICS_NO_INTEGER_LIB_LIST(V)                                  \
   CORE_LIB_INTRINSIC_LIST(V)                                                   \
@@ -326,158 +354,158 @@
 
 // A list of core function that should always be inlined.
 #define INLINE_WHITE_LIST(V)                                                   \
-  V(Object, ==, ObjectEquals, 291909336)                                       \
-  V(_List, get:length, ObjectArrayLength, 630471378)                           \
-  V(_ImmutableList, get:length, ImmutableArrayLength, 630471378)               \
-  V(_TypedList, get:length, TypedDataLength, 546364442)                        \
-  V(_GrowableList, get:length, GrowableArrayLength, 417111542)                 \
-  V(_GrowableList, add, GrowableListAdd, 219371757)                            \
-  V(_GrowableList, removeLast, GrowableListRemoveLast, 324891524)              \
-  V(_StringBase, get:length, StringBaseLength, 707533587)                      \
-  V(ListIterator, moveNext, ListIteratorMoveNext, 1065954929)                  \
-  V(_FixedSizeArrayIterator, moveNext, FixedListIteratorMoveNext, 1451346178)  \
-  V(_GrowableList, get:iterator, GrowableArrayIterator, 1840323187)            \
-  V(_GrowableList, forEach, GrowableArrayForEach, 620771070)                   \
-  V(_List, ., ObjectArrayAllocate, 1661438741)                                 \
-  V(_List, [], ObjectArrayGetIndexed, 360400496)                               \
-  V(_List, []=, ObjectArraySetIndexed, 886228780)                              \
-  V(ListMixin, get:isEmpty, ListMixinIsEmpty, 2021497798)                      \
-  V(_List, get:iterator, ObjectArrayIterator, 295498778)                       \
-  V(_List, forEach, ObjectArrayForEach, 180150673)                             \
-  V(_List, _slice, ObjectArraySlice, 840558357)                                \
-  V(_ImmutableList, get:iterator, ImmutableArrayIterator, 295498778)           \
-  V(_ImmutableList, forEach, ImmutableArrayForEach, 180150673)                 \
-  V(_ImmutableList, [], ImmutableArrayGetIndexed, 360400496)                   \
-  V(_GrowableList, [], GrowableArrayGetIndexed, 1957529650)                    \
-  V(_GrowableList, []=, GrowableArraySetIndexed, 225246870)                    \
-  V(Float32List, [], Float32ArrayGetIndexed, 1451643535)                       \
-  V(Float32List, []=, Float32ArraySetIndexed, 453873887)                       \
-  V(Float64List, [], Float64ArrayGetIndexed, 2049378701)                       \
-  V(Float64List, []=, Float64ArraySetIndexed, 328934501)                       \
-  V(Int8List, [], Int8ArrayGetIndexed, 110819507)                              \
-  V(Int8List, []=, Int8ArraySetIndexed, 865684695)                             \
-  V(Uint8List, [], Uint8ArrayGetIndexed, 41288685)                             \
-  V(Uint8List, []=, Uint8ArraySetIndexed, 101536342)                           \
-  V(Uint8ClampedList, [], Uint8ClampedArrayGetIndexed, 41288685)               \
-  V(Uint8ClampedList, []=, Uint8ClampedArraySetIndexed, 687206488)             \
-  V(Uint16List, [], Uint16ArrayGetIndexed, 1053739567)                         \
-  V(Uint16List, []=, Uint16ArraySetIndexed, 1547307961)                        \
-  V(Int16List, [], Int16ArrayGetIndexed, 389863073)                            \
-  V(Int16List, []=, Int16ArraySetIndexed, 855133756)                           \
-  V(Int32List, [], Int32ArrayGetIndexed, 640610057)                            \
-  V(Int32List, []=, Int32ArraySetIndexed, 453358705)                           \
-  V(Int64List, [], Int64ArrayGetIndexed, 202150810)                            \
-  V(Int64List, []=, Int64ArraySetIndexed, 924110852)                           \
-  V(_Uint8ArrayView, [], Uint8ArrayViewGetIndexed, 1338422227)                 \
-  V(_Uint8ArrayView, []=, Uint8ArrayViewSetIndexed, 540212720)                 \
-  V(_Int8ArrayView, [], Int8ArrayViewGetIndexed, 302213458)                    \
-  V(_Int8ArrayView, []=, Int8ArrayViewSetIndexed, 1837635160)                  \
-  V(_ByteDataView, setInt8, ByteDataViewSetInt8, 660389322)                    \
-  V(_ByteDataView, setUint8, ByteDataViewSetUint8, 1651986039)                 \
-  V(_ByteDataView, setInt16, ByteDataViewSetInt16, 2051262146)                 \
-  V(_ByteDataView, setUint16, ByteDataViewSetUint16, 1692244111)               \
-  V(_ByteDataView, setInt32, ByteDataViewSetInt32, 862135882)                  \
-  V(_ByteDataView, setUint32, ByteDataViewSetUint32, 361732249)                \
-  V(_ByteDataView, setInt64, ByteDataViewSetInt64, 1208972197)                 \
-  V(_ByteDataView, setUint64, ByteDataViewSetUint64, 1545853836)               \
-  V(_ByteDataView, setFloat32, ByteDataViewSetFloat32, 1333183642)             \
-  V(_ByteDataView, setFloat64, ByteDataViewSetFloat64, 1579015503)             \
-  V(_ByteDataView, getInt8, ByteDataViewGetInt8, 29018237)                     \
-  V(_ByteDataView, getUint8, ByteDataViewGetUint8, 312322868)                  \
-  V(_ByteDataView, getInt16, ByteDataViewGetInt16, 1613243255)                 \
-  V(_ByteDataView, getUint16, ByteDataViewGetUint16, 284020105)                \
-  V(_ByteDataView, getInt32, ByteDataViewGetInt32, 2036535169)                 \
-  V(_ByteDataView, getUint32, ByteDataViewGetUint32, 571293096)                \
-  V(_ByteDataView, getInt64, ByteDataViewGetInt64, 1971181000)                 \
-  V(_ByteDataView, getUint64, ByteDataViewGetUint64, 799775022)                \
-  V(_ByteDataView, getFloat32, ByteDataViewGetFloat32, 947822534)              \
-  V(_ByteDataView, getFloat64, ByteDataViewGetFloat64, 1402356525)             \
-  V(::, asin, MathASin, 1678592173)                                            \
-  V(::, acos, MathACos, 1121218433)                                            \
-  V(::, atan, MathATan, 1109653625)                                            \
-  V(::, atan2, MathATan2, 894696289)                                           \
-  V(::, cos, MathCos, 2006233918)                                              \
-  V(::, exp, MathExp, 1500946333)                                              \
-  V(::, log, MathLog, 739403086)                                               \
-  V(::, max, MathMax, 1410473322)                                              \
-  V(::, min, MathMin, 1115051548)                                              \
-  V(::, pow, MathPow, 2058759335)                                              \
-  V(::, sin, MathSin, 65032)                                                   \
-  V(::, sqrt, MathSqrt, 417912310)                                             \
-  V(::, tan, MathTan, 1276867325)                                              \
-  V(Lists, copy, ListsCopy, 564237562)                                         \
-  V(_Bigint, get:_neg, Bigint_getNeg, 2079423063)                              \
-  V(_Bigint, get:_used, Bigint_getUsed, 1426329619)                            \
-  V(_Bigint, get:_digits, Bigint_getDigits, 1185333683)                        \
-  V(_HashVMBase, get:_index, LinkedHashMap_getIndex, 2104211307)               \
-  V(_HashVMBase, set:_index, LinkedHashMap_setIndex, 1273697266)               \
-  V(_HashVMBase, get:_data, LinkedHashMap_getData, 1274399923)                 \
-  V(_HashVMBase, set:_data, LinkedHashMap_setData, 1611093357)                 \
-  V(_HashVMBase, get:_usedData, LinkedHashMap_getUsedData, 367462469)          \
-  V(_HashVMBase, set:_usedData, LinkedHashMap_setUsedData, 1049390812)         \
-  V(_HashVMBase, get:_hashMask, LinkedHashMap_getHashMask, 902147072)          \
-  V(_HashVMBase, set:_hashMask, LinkedHashMap_setHashMask, 1236137630)         \
-  V(_HashVMBase, get:_deletedKeys, LinkedHashMap_getDeletedKeys, 812542585)    \
-  V(_HashVMBase, set:_deletedKeys, LinkedHashMap_setDeletedKeys, 1072259010)   \
+  V(Object, ==, ObjectEquals, 0x11662ed8)                                      \
+  V(_List, get:length, ObjectArrayLength, 0x25943ad2)                          \
+  V(_ImmutableList, get:length, ImmutableArrayLength, 0x25943ad2)              \
+  V(_TypedList, get:length, TypedDataLength, 0x2090dc1a)                       \
+  V(_GrowableList, get:length, GrowableArrayLength, 0x18dc9df6)                \
+  V(_GrowableList, add, GrowableListAdd, 0x0d1358ed)                           \
+  V(_GrowableList, removeLast, GrowableListRemoveLast, 0x135d7384)             \
+  V(_StringBase, get:length, StringBaseLength, 0x2a2c1b13)                     \
+  V(ListIterator, moveNext, ListIteratorMoveNext, 0x3f892e71)                  \
+  V(_FixedSizeArrayIterator, moveNext, FixedListIteratorMoveNext, 0x5681c902)  \
+  V(_GrowableList, get:iterator, GrowableArrayIterator, 0x6db11a73)            \
+  V(_GrowableList, forEach, GrowableArrayForEach, 0x250036fe)                  \
+  V(_List, ., ObjectArrayAllocate, 0x63078b15)                                 \
+  V(_List, [], ObjectArrayGetIndexed, 0x157b4670)                              \
+  V(_List, []=, ObjectArraySetIndexed, 0x34d2c72c)                             \
+  V(ListMixin, get:isEmpty, ListMixinIsEmpty, 0x787d9bc6)                      \
+  V(_List, get:iterator, ObjectArrayIterator, 0x119cf41a)                      \
+  V(_List, forEach, ObjectArrayForEach, 0x0abce191)                            \
+  V(_List, _slice, ObjectArraySlice, 0x3219e715)                               \
+  V(_ImmutableList, get:iterator, ImmutableArrayIterator, 0x119cf41a)          \
+  V(_ImmutableList, forEach, ImmutableArrayForEach, 0x0abce191)                \
+  V(_ImmutableList, [], ImmutableArrayGetIndexed, 0x157b4670)                  \
+  V(_GrowableList, [], GrowableArrayGetIndexed, 0x74ad8832)                    \
+  V(_GrowableList, []=, GrowableArraySetIndexed, 0x0d6cfe96)                   \
+  V(Float32List, [], Float32ArrayGetIndexed, 0x5686528f)                       \
+  V(Float32List, []=, Float32ArraySetIndexed, 0x1b0d90df)                      \
+  V(Float64List, [], Float64ArrayGetIndexed, 0x7a27098d)                       \
+  V(Float64List, []=, Float64ArraySetIndexed, 0x139b2465)                      \
+  V(Int8List, [], Int8ArrayGetIndexed, 0x069af8b3)                             \
+  V(Int8List, []=, Int8ArraySetIndexed, 0x33994cd7)                            \
+  V(Uint8List, [], Uint8ArrayGetIndexed, 0x027603ed)                           \
+  V(Uint8List, []=, Uint8ArraySetIndexed, 0x060d5256)                          \
+  V(Uint8ClampedList, [], Uint8ClampedArrayGetIndexed, 0x027603ed)             \
+  V(Uint8ClampedList, []=, Uint8ClampedArraySetIndexed, 0x28f5f058)            \
+  V(Uint16List, [], Uint16ArrayGetIndexed, 0x3ececa2f)                         \
+  V(Uint16List, []=, Uint16ArraySetIndexed, 0x5c3a0bb9)                        \
+  V(Int16List, [], Int16ArrayGetIndexed, 0x173cd6a1)                           \
+  V(Int16List, []=, Int16ArraySetIndexed, 0x32f84e3c)                          \
+  V(Int32List, [], Int32ArrayGetIndexed, 0x262eef09)                           \
+  V(Int32List, []=, Int32ArraySetIndexed, 0x1b05b471)                          \
+  V(Int64List, [], Int64ArrayGetIndexed, 0x0c0c939a)                           \
+  V(Int64List, []=, Int64ArraySetIndexed, 0x3714d004)                          \
+  V(_Uint8ArrayView, [], Uint8ArrayViewGetIndexed, 0x4fc6b3d3)                 \
+  V(_Uint8ArrayView, []=, Uint8ArrayViewSetIndexed, 0x2032fdf0)                \
+  V(_Int8ArrayView, [], Int8ArrayViewGetIndexed, 0x12036952)                   \
+  V(_Int8ArrayView, []=, Int8ArrayViewSetIndexed, 0x6d881658)                  \
+  V(_ByteDataView, setInt8, ByteDataViewSetInt8, 0x275cbdca)                   \
+  V(_ByteDataView, setUint8, ByteDataViewSetUint8, 0x62774e77)                 \
+  V(_ByteDataView, setInt16, ByteDataViewSetInt16, 0x7a43c6c2)                 \
+  V(_ByteDataView, setUint16, ByteDataViewSetUint16, 0x64dd988f)               \
+  V(_ByteDataView, setInt32, ByteDataViewSetInt32, 0x3363264a)                 \
+  V(_ByteDataView, setUint32, ByteDataViewSetUint32, 0x158f9899)               \
+  V(_ByteDataView, setInt64, ByteDataViewSetInt64, 0x480f73a5)                 \
+  V(_ByteDataView, setUint64, ByteDataViewSetUint64, 0x5c23db8c)               \
+  V(_ByteDataView, setFloat32, ByteDataViewSetFloat32, 0x4f76c49a)             \
+  V(_ByteDataView, setFloat64, ByteDataViewSetFloat64, 0x5e1ddd4f)             \
+  V(_ByteDataView, getInt8, ByteDataViewGetInt8, 0x01bac87d)                   \
+  V(_ByteDataView, getUint8, ByteDataViewGetUint8, 0x129dab34)                 \
+  V(_ByteDataView, getInt16, ByteDataViewGetInt16, 0x60282377)                 \
+  V(_ByteDataView, getUint16, ByteDataViewGetUint16, 0x10edcd89)               \
+  V(_ByteDataView, getInt32, ByteDataViewGetInt32, 0x79630f81)                 \
+  V(_ByteDataView, getUint32, ByteDataViewGetUint32, 0x220d3da8)               \
+  V(_ByteDataView, getInt64, ByteDataViewGetInt64, 0x757dd5c8)                 \
+  V(_ByteDataView, getUint64, ByteDataViewGetUint64, 0x2fab992e)               \
+  V(_ByteDataView, getFloat32, ByteDataViewGetFloat32, 0x387e9fc6)             \
+  V(_ByteDataView, getFloat64, ByteDataViewGetFloat64, 0x5396432d)             \
+  V(::, asin, MathAsin, 0x640d48ad)                                            \
+  V(::, acos, MathAcos, 0x42d46f81)                                            \
+  V(::, atan, MathAtan, 0x4223f879)                                            \
+  V(::, atan2, MathAtan2, 0x3553fb61)                                          \
+  V(::, cos, MathCos, 0x7794b33e)                                              \
+  V(::, exp, MathExp, 0x59769f9d)                                              \
+  V(::, log, MathLog, 0x2c12654e)                                              \
+  V(::, max, MathMax, 0x54121d6a)                                              \
+  V(::, min, MathMin, 0x4276561c)                                              \
+  V(::, pow, MathPow, 0x7ab62ca7)                                              \
+  V(::, sin, MathSin, 0x0000fe08)                                              \
+  V(::, sqrt, MathSqrt, 0x18e8d5f6)                                            \
+  V(::, tan, MathTan, 0x4c1b72fd)                                              \
+  V(Lists, copy, ListsCopy, 0x21a194fa)                                        \
+  V(_Bigint, get:_neg, Bigint_getNeg, 0x7bf17a57)                              \
+  V(_Bigint, get:_used, Bigint_getUsed, 0x55041013)                            \
+  V(_Bigint, get:_digits, Bigint_getDigits, 0x46a6c1b3)                        \
+  V(_HashVMBase, get:_index, LinkedHashMap_getIndex, 0x7d6bb76b)               \
+  V(_HashVMBase, set:_index, LinkedHashMap_setIndex, 0x4beb13f2)               \
+  V(_HashVMBase, get:_data, LinkedHashMap_getData, 0x4bf5ccb3)                 \
+  V(_HashVMBase, set:_data, LinkedHashMap_setData, 0x6007556d)                 \
+  V(_HashVMBase, get:_usedData, LinkedHashMap_getUsedData, 0x15e70845)         \
+  V(_HashVMBase, set:_usedData, LinkedHashMap_setUsedData, 0x3e8c6edc)         \
+  V(_HashVMBase, get:_hashMask, LinkedHashMap_getHashMask, 0x35c5ac00)         \
+  V(_HashVMBase, set:_hashMask, LinkedHashMap_setHashMask, 0x49adf69e)         \
+  V(_HashVMBase, get:_deletedKeys, LinkedHashMap_getDeletedKeys, 0x306e6a79)   \
+  V(_HashVMBase, set:_deletedKeys, LinkedHashMap_setDeletedKeys, 0x3fe95fc2)   \
 
 // A list of core functions that should never be inlined.
 #define INLINE_BLACK_LIST(V)                                                   \
-  V(_Bigint, _lsh, Bigint_lsh, 1557746963)                                     \
-  V(_Bigint, _rsh, Bigint_rsh, 761843937)                                      \
-  V(_Bigint, _absAdd, Bigint_absAdd, 1227835493)                               \
-  V(_Bigint, _absSub, Bigint_absSub, 390740532)                                \
-  V(_Bigint, _mulAdd, Bigint_mulAdd, 617534446)                                \
-  V(_Bigint, _sqrAdd, Bigint_sqrAdd, 1623635507)                               \
-  V(_Bigint, _estQuotientDigit, Bigint_estQuotientDigit, 797340802)            \
-  V(_Montgomery, _mulMod, Montgomery_mulMod, 1947987219)                       \
-  V(_Double, >, Double_greaterThan, 1453001345)                                \
-  V(_Double, >=, Double_greaterEqualThan, 1815180096)                          \
-  V(_Double, <, Double_lessThan, 652059836)                                    \
-  V(_Double, <=, Double_lessEqualThan, 512138528)                              \
-  V(_Double, ==, Double_equal, 1468668497)                                     \
-  V(_Double, +, Double_add, 1269587413)                                        \
-  V(_Double, -, Double_sub, 1644506555)                                        \
-  V(_Double, *, Double_mul, 600860888)                                         \
-  V(_Double, /, Double_div, 1220198876)                                        \
-  V(_IntegerImplementation, +, Integer_add, 239272130)                         \
-  V(_IntegerImplementation, -, Integer_sub, 216175811)                         \
-  V(_IntegerImplementation, *, Integer_mul, 1301152164)                        \
-  V(_IntegerImplementation, ~/, Integer_truncDivide, 1018128256)               \
-  V(_IntegerImplementation, unary-, Integer_negate, 1507648892)                \
-  V(_IntegerImplementation, &, Integer_bitAnd, 1500136766)                     \
-  V(_IntegerImplementation, |, Integer_bitOr, 119412028)                       \
-  V(_IntegerImplementation, ^, Integer_bitXor, 210430781)                      \
-  V(_IntegerImplementation, >, Integer_greaterThan, 673741711)                 \
-  V(_IntegerImplementation, ==, Integer_equal, 272474439)                      \
-  V(_IntegerImplementation, <, Integer_lessThan, 652059836)                    \
-  V(_IntegerImplementation, <=, Integer_lessEqualThan, 512138528)              \
-  V(_IntegerImplementation, >=, Integer_greaterEqualThan, 1815180096)          \
-  V(_IntegerImplementation, <<, Integer_shl, 1127538624)                       \
-  V(_IntegerImplementation, >>, Integer_sar, 1243972513)                       \
+  V(_Bigint, _lsh, Bigint_lsh, 0x5cd95513)                                     \
+  V(_Bigint, _rsh, Bigint_rsh, 0x2d68d0e1)                                     \
+  V(_Bigint, _absAdd, Bigint_absAdd, 0x492f4865)                               \
+  V(_Bigint, _absSub, Bigint_absSub, 0x174a3a34)                               \
+  V(_Bigint, _mulAdd, Bigint_mulAdd, 0x24ced3ee)                               \
+  V(_Bigint, _sqrAdd, Bigint_sqrAdd, 0x60c6b633)                               \
+  V(_Bigint, _estQuotientDigit, Bigint_estQuotientDigit, 0x2f867482)           \
+  V(_Montgomery, _mulMod, Montgomery_mulMod, 0x741bed13)                       \
+  V(_Double, >, Double_greaterThan, 0x569b0a81)                                \
+  V(_Double, >=, Double_greaterEqualThan, 0x6c317340)                          \
+  V(_Double, <, Double_lessThan, 0x26dda4bc)                                   \
+  V(_Double, <=, Double_lessEqualThan, 0x1e869d20)                             \
+  V(_Double, ==, Double_equal, 0x578a1a51)                                     \
+  V(_Double, +, Double_add, 0x4bac5dd5)                                        \
+  V(_Double, -, Double_sub, 0x62052dbb)                                        \
+  V(_Double, *, Double_mul, 0x23d068d8)                                        \
+  V(_Double, /, Double_div, 0x48bac1dc)                                        \
+  V(_IntegerImplementation, +, Integer_add, 0x0e4300c2)                        \
+  V(_IntegerImplementation, -, Integer_sub, 0x0ce294c3)                        \
+  V(_IntegerImplementation, *, Integer_mul, 0x4d8e01a4)                        \
+  V(_IntegerImplementation, ~/, Integer_truncDivide, 0x3caf6780)               \
+  V(_IntegerImplementation, unary-, Integer_negate, 0x59dce57c)                \
+  V(_IntegerImplementation, &, Integer_bitAnd, 0x596a453e)                     \
+  V(_IntegerImplementation, |, Integer_bitOr, 0x071e153c)                      \
+  V(_IntegerImplementation, ^, Integer_bitXor, 0x0c8aeb3d)                     \
+  V(_IntegerImplementation, >, Integer_greaterThan, 0x28287b8f)                \
+  V(_IntegerImplementation, ==, Integer_equal, 0x103da147)                     \
+  V(_IntegerImplementation, <, Integer_lessThan, 0x26dda4bc)                   \
+  V(_IntegerImplementation, <=, Integer_lessEqualThan, 0x1e869d20)             \
+  V(_IntegerImplementation, >=, Integer_greaterEqualThan, 0x6c317340)          \
+  V(_IntegerImplementation, <<, Integer_shl, 0x4334dfc0)                       \
+  V(_IntegerImplementation, >>, Integer_sar, 0x4a2583a1)                       \
 
 // A list of core functions that internally dispatch based on the receiver's class id.
 #define POLYMORPHIC_TARGET_LIST(V)                                             \
-  V(_StringBase, [], StringBaseCharAt, 754527301)                              \
-  V(_TypedList, _getInt8, ByteArrayBaseGetInt8, 1508321565)                    \
-  V(_TypedList, _getUint8, ByteArrayBaseGetUint8, 953411007)                   \
-  V(_TypedList, _getInt16, ByteArrayBaseGetInt16, 433971756)                   \
-  V(_TypedList, _getUint16, ByteArrayBaseGetUint16, 1329446488)                \
-  V(_TypedList, _getInt32, ByteArrayBaseGetInt32, 137212209)                   \
-  V(_TypedList, _getUint32, ByteArrayBaseGetUint32, 499907480)                 \
-  V(_TypedList, _getFloat32, ByteArrayBaseGetFloat32, 1672834581)              \
-  V(_TypedList, _getFloat64, ByteArrayBaseGetFloat64, 966634744)               \
-  V(_TypedList, _getFloat32x4, ByteArrayBaseGetFloat32x4, 1197581758)          \
-  V(_TypedList, _getInt32x4, ByteArrayBaseGetInt32x4, 810805548)               \
-  V(_TypedList, _setInt8, ByteArrayBaseSetInt8, 1317196265)                    \
-  V(_TypedList, _setUint8, ByteArrayBaseSetInt8, 1328908284)                   \
-  V(_TypedList, _setInt16, ByteArrayBaseSetInt16, 1827614958)                  \
-  V(_TypedList, _setUint16, ByteArrayBaseSetInt16, 1694054572)                 \
-  V(_TypedList, _setInt32, ByteArrayBaseSetInt32, 915652649)                   \
-  V(_TypedList, _setUint32, ByteArrayBaseSetUint32, 1958474336)                \
-  V(_TypedList, _setFloat32, ByteArrayBaseSetFloat32, 1853026980)              \
-  V(_TypedList, _setFloat64, ByteArrayBaseSetFloat64, 1197862362)              \
-  V(_TypedList, _setFloat32x4, ByteArrayBaseSetFloat32x4, 2093630771)          \
-  V(_TypedList, _setInt32x4, ByteArrayBaseSetInt32x4, 1982971324)              \
+  V(_StringBase, [], StringBaseCharAt, 0x2cf92c45)                             \
+  V(_TypedList, _getInt8, ByteArrayBaseGetInt8, 0x59e7291d)                    \
+  V(_TypedList, _getUint8, ByteArrayBaseGetUint8, 0x38d3e5bf)                  \
+  V(_TypedList, _getInt16, ByteArrayBaseGetInt16, 0x19dde22c)                  \
+  V(_TypedList, _getUint16, ByteArrayBaseGetUint16, 0x4f3dbe58)                \
+  V(_TypedList, _getInt32, ByteArrayBaseGetInt32, 0x082db131)                  \
+  V(_TypedList, _getUint32, ByteArrayBaseGetUint32, 0x1dcbfb98)                \
+  V(_TypedList, _getFloat32, ByteArrayBaseGetFloat32, 0x63b56e15)              \
+  V(_TypedList, _getFloat64, ByteArrayBaseGetFloat64, 0x399dacf8)              \
+  V(_TypedList, _getFloat32x4, ByteArrayBaseGetFloat32x4, 0x4761a5be)          \
+  V(_TypedList, _getInt32x4, ByteArrayBaseGetInt32x4, 0x3053e92c)              \
+  V(_TypedList, _setInt8, ByteArrayBaseSetInt8, 0x4e82d1e9)                    \
+  V(_TypedList, _setUint8, ByteArrayBaseSetInt8, 0x4f3587fc)                   \
+  V(_TypedList, _setInt16, ByteArrayBaseSetInt16, 0x6cef30ee)                  \
+  V(_TypedList, _setUint16, ByteArrayBaseSetInt16, 0x64f938ac)                 \
+  V(_TypedList, _setInt32, ByteArrayBaseSetInt32, 0x3693c029)                  \
+  V(_TypedList, _setUint32, ByteArrayBaseSetUint32, 0x74bbf260)                \
+  V(_TypedList, _setFloat32, ByteArrayBaseSetFloat32, 0x6e72f2a4)              \
+  V(_TypedList, _setFloat64, ByteArrayBaseSetFloat64, 0x4765edda)              \
+  V(_TypedList, _setFloat32x4, ByteArrayBaseSetFloat32x4, 0x7cca4533)          \
+  V(_TypedList, _setInt32x4, ByteArrayBaseSetInt32x4, 0x7631bdbc)              \
 
 // Forward declarations.
 class Function;
@@ -489,7 +517,8 @@
  public:
   enum Kind {
     kUnknown,
-#define DEFINE_ENUM_LIST(class_name, function_name, enum_name, fp) k##enum_name,
+#define DEFINE_ENUM_LIST(class_name, function_name, enum_name, type, fp) \
+    k##enum_name,
     RECOGNIZED_LIST(DEFINE_ENUM_LIST)
 #undef DEFINE_ENUM_LIST
     kNumRecognizedMethods
@@ -498,6 +527,7 @@
   static Kind RecognizeKind(const Function& function);
   static bool AlwaysInline(const Function& function);
   static bool PolymorphicTarget(const Function& function);
+  static intptr_t ResultCid(const Function& function);
   static const char* KindToCString(Kind kind);
 #if defined(DART_NO_SNAPSHOT)
   static void InitializeState();
@@ -517,21 +547,21 @@
 // List of recognized list factories:
 // (factory-name-symbol, result-cid, fingerprint).
 #define RECOGNIZED_LIST_FACTORY_LIST(V)                                        \
-  V(_ListFactory, kArrayCid, 1661438741)                                       \
-  V(_GrowableListWithData, kGrowableObjectArrayCid, 631736030)                 \
-  V(_GrowableListFactory, kGrowableObjectArrayCid, 1330464656)                 \
-  V(_Int8ArrayFactory, kTypedDataInt8ArrayCid, 779569635)                      \
-  V(_Uint8ArrayFactory, kTypedDataUint8ArrayCid, 1790399545)                   \
-  V(_Uint8ClampedArrayFactory, kTypedDataUint8ClampedArrayCid, 405875159)      \
-  V(_Int16ArrayFactory, kTypedDataInt16ArrayCid, 347431914)                    \
-  V(_Uint16ArrayFactory, kTypedDataUint16ArrayCid, 121990116)                  \
-  V(_Int32ArrayFactory, kTypedDataInt32ArrayCid, 1540657744)                   \
-  V(_Uint32ArrayFactory, kTypedDataUint32ArrayCid, 1012511652)                 \
-  V(_Int64ArrayFactory, kTypedDataInt64ArrayCid, 1473796807)                   \
-  V(_Uint64ArrayFactory, kTypedDataUint64ArrayCid, 738799620)                  \
-  V(_Float64ArrayFactory, kTypedDataFloat64ArrayCid, 1344005361)               \
-  V(_Float32ArrayFactory, kTypedDataFloat32ArrayCid, 1938690635)               \
-  V(_Float32x4ArrayFactory, kTypedDataFloat32x4ArrayCid, 2055067416)           \
+  V(_ListFactory, kArrayCid, 0x63078b15)                                       \
+  V(_GrowableListWithData, kGrowableObjectArrayCid, 0x25a786de)                \
+  V(_GrowableListFactory, kGrowableObjectArrayCid, 0x4f4d4790)                 \
+  V(_Int8ArrayFactory, kTypedDataInt8ArrayCid, 0x2e7749e3)                     \
+  V(_Uint8ArrayFactory, kTypedDataUint8ArrayCid, 0x6ab75439)                   \
+  V(_Uint8ClampedArrayFactory, kTypedDataUint8ClampedArrayCid, 0x183129d7)     \
+  V(_Int16ArrayFactory, kTypedDataInt16ArrayCid, 0x14b563ea)                   \
+  V(_Uint16ArrayFactory, kTypedDataUint16ArrayCid, 0x07456be4)                 \
+  V(_Int32ArrayFactory, kTypedDataInt32ArrayCid, 0x5bd49250)                   \
+  V(_Uint32ArrayFactory, kTypedDataUint32ArrayCid, 0x3c59b3a4)                 \
+  V(_Int64ArrayFactory, kTypedDataInt64ArrayCid, 0x57d85ac7)                   \
+  V(_Uint64ArrayFactory, kTypedDataUint64ArrayCid, 0x2c093004)                 \
+  V(_Float64ArrayFactory, kTypedDataFloat64ArrayCid, 0x501be4f1)               \
+  V(_Float32ArrayFactory, kTypedDataFloat32ArrayCid, 0x738e124b)               \
+  V(_Float32x4ArrayFactory, kTypedDataFloat32x4ArrayCid, 0x7a7dd718)           \
 
 
 // Class that recognizes factories and returns corresponding result cid.
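For readers unfamiliar with the pattern above: each V(...) row in these lists is an X-macro entry that a consumer macro expands once per row. The DEFINE_ENUM_LIST change earlier in this diff adds a fifth `type` column to the lists it consumes, and the new MethodRecognizer::ResultCid() presumably reads that column to report the class id of a recognized method's result. The sketch below is a hypothetical, self-contained illustration of the technique only; the DEMO_* names, the two-row list, and the cid enum are invented, while the fingerprints are borrowed from the table above.

// Hypothetical, self-contained sketch of the X-macro pattern; only the
// expansion technique mirrors method_recognizer.h, all DEMO_* names are
// invented and the result-cid mapping is an assumption.
#include <cstdio>

enum DemoCid { kDemoSmiCid, kDemoDoubleCid };

#define DEMO_RECOGNIZED_LIST(V)                                                \
  V(_List, getLength, ObjectArrayLength, Smi, 0x25943ad2)                      \
  V(_Double, add, Double_add, Double, 0x4bac5dd5)

// One expansion per row: declare an enumerator (cf. DEFINE_ENUM_LIST, which
// now carries the extra 'type' column).
enum DemoKind {
  kDemoUnknown,
#define DEFINE_DEMO_ENUM(class_name, function_name, enum_name, type, fp)       \
  kDemo##enum_name,
  DEMO_RECOGNIZED_LIST(DEFINE_DEMO_ENUM)
#undef DEFINE_DEMO_ENUM
  kDemoNumRecognizedMethods
};

// A second expansion over the same rows maps each recognized method to the
// class id of its result, in the spirit of the new ResultCid() accessor.
static DemoCid DemoResultCid(DemoKind kind) {
  switch (kind) {
#define DEMO_RESULT_CID(class_name, function_name, enum_name, type, fp)        \
    case kDemo##enum_name: return kDemo##type##Cid;
    DEMO_RECOGNIZED_LIST(DEMO_RESULT_CID)
#undef DEMO_RESULT_CID
    default: return kDemoSmiCid;
  }
}

int main() {
  std::printf("%d\n", static_cast<int>(DemoResultCid(kDemoDouble_add)));  // 1, i.e. kDemoDoubleCid
  return 0;
}

Expanding the same list twice keeps the enum and the result-cid lookup from drifting apart, which appears to be the motivation for threading the extra column through DEFINE_ENUM_LIST.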
diff --git a/runtime/vm/native_symbol_fuchsia.cc b/runtime/vm/native_symbol_fuchsia.cc
new file mode 100644
index 0000000..cb0f02a
--- /dev/null
+++ b/runtime/vm/native_symbol_fuchsia.cc
@@ -0,0 +1,36 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/native_symbol.h"
+
+#include "platform/assert.h"
+
+namespace dart {
+
+void NativeSymbolResolver::InitOnce() {
+  UNIMPLEMENTED();
+}
+
+
+void NativeSymbolResolver::ShutdownOnce() {
+  UNIMPLEMENTED();
+}
+
+
+char* NativeSymbolResolver::LookupSymbolName(uintptr_t pc, uintptr_t* start) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+void NativeSymbolResolver::FreeSymbolName(char* name) {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 0d5ca5f..9b2e319 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -7,6 +7,7 @@
 #include "include/dart_api.h"
 #include "platform/assert.h"
 #include "vm/assembler.h"
+#include "vm/become.h"
 #include "vm/cpu.h"
 #include "vm/bit_vector.h"
 #include "vm/bootstrap.h"
@@ -44,7 +45,6 @@
 #include "vm/timer.h"
 #include "vm/type_table.h"
 #include "vm/unicode.h"
-#include "vm/verified_memory.h"
 #include "vm/weak_code.h"
 
 namespace dart {
@@ -562,6 +562,13 @@
   cls.set_is_finalized();
   cls.set_is_type_finalized();
 
+  // Allocate and initialize the forwarding corpse class.
+  cls = Class::New<ForwardingCorpse::FakeInstance>(kForwardingCorpse);
+  cls.set_num_type_arguments(0);
+  cls.set_num_own_type_arguments(0);
+  cls.set_is_finalized();
+  cls.set_is_type_finalized();
+
   // Allocate and initialize the sentinel values of Null class.
   {
     *sentinel_ ^=
@@ -929,6 +936,8 @@
   void VisitObject(RawObject* obj) {
     // Free list elements should never be marked.
     ASSERT(!obj->IsMarked());
+    // No forwarding corpses in the VM isolate.
+    ASSERT(!obj->IsForwardingCorpse());
     if (!obj->IsFreeListElement()) {
       ASSERT(obj->IsVMHeapObject());
       obj->SetMarkBitUnsynchronized();
@@ -1000,6 +1009,13 @@
   cls = isolate->object_store()->one_byte_string_class();
   cls.set_name(Symbols::OneByteString());
 
+  // Set up names for the pseudo-classes for free list elements and forwarding
+  // corpses. Mainly this makes VM debugging easier.
+  cls = isolate->class_table()->At(kFreeListElement);
+  cls.set_name(Symbols::FreeListElement());
+  cls = isolate->class_table()->At(kForwardingCorpse);
+  cls.set_name(Symbols::ForwardingCorpse());
+
   {
     ASSERT(isolate == Dart::vm_isolate());
     WritableVMIsolateScope scope(Thread::Current());
@@ -1011,9 +1027,9 @@
 }
 
 
-void Object::InitVmIsolateSnapshotObjectTable(intptr_t len) {
+void Object::set_vm_isolate_snapshot_object_table(const Array& table) {
   ASSERT(Isolate::Current() == Dart::vm_isolate());
-  *vm_isolate_snapshot_object_table_ = Array::New(len, Heap::kOld);
+  *vm_isolate_snapshot_object_table_ = table.raw();
 }
 
 
@@ -1048,7 +1064,7 @@
 
       intptr_t leftover_len = (leftover_size - TypedData::InstanceSize(0));
       ASSERT(TypedData::InstanceSize(leftover_len) == leftover_size);
-      raw->InitializeSmi(&(raw->ptr()->length_), Smi::New(leftover_len));
+      raw->StoreSmi(&(raw->ptr()->length_), Smi::New(leftover_len));
     } else {
       // Update the leftover space as a basic object.
       ASSERT(leftover_size == Object::InstanceSize());
@@ -1080,6 +1096,7 @@
     }
   }
   ASSERT(builtin_vtables_[kFreeListElement] == 0);
+  ASSERT(builtin_vtables_[kForwardingCorpse] == 0);
 #endif
 }
 
@@ -1673,8 +1690,8 @@
 #define REGISTER_TYPED_DATA_VIEW_CLASS(clazz)                                  \
   cls = Class::NewTypedDataViewClass(kTypedData##clazz##ViewCid);
   CLASS_LIST_TYPED_DATA(REGISTER_TYPED_DATA_VIEW_CLASS);
-  cls = Class::NewTypedDataViewClass(kByteDataViewCid);
 #undef REGISTER_TYPED_DATA_VIEW_CLASS
+  cls = Class::NewTypedDataViewClass(kByteDataViewCid);
 #define REGISTER_EXT_TYPED_DATA_CLASS(clazz)                                   \
   cls = Class::NewExternalTypedDataClass(kExternalTypedData##clazz##Cid);
   CLASS_LIST_TYPED_DATA(REGISTER_EXT_TYPED_DATA_CLASS);
@@ -1781,7 +1798,6 @@
   tags = RawObject::VMHeapObjectTag::update(is_vm_object, tags);
   reinterpret_cast<RawObject*>(address)->tags_ = tags;
   ASSERT(is_vm_object == RawObject::IsVMHeapObject(tags));
-  VerifiedMemory::Accept(address, size);
 }
 
 
@@ -1900,7 +1916,6 @@
   memmove(reinterpret_cast<uint8_t*>(clone_addr + kHeaderSizeInBytes),
           reinterpret_cast<uint8_t*>(orig_addr + kHeaderSizeInBytes),
           size - kHeaderSizeInBytes);
-  VerifiedMemory::Accept(clone_addr, size);
   // Add clone to store buffer, if needed.
   if (!raw_clone->IsOldObject()) {
     // No need to remember an object in new space.
@@ -1916,8 +1931,6 @@
 
 
 RawString* Class::Name() const {
-  // TODO(turnidge): This assert fails for the fake kFreeListElement class.
-  // Fix this.
   ASSERT(raw_ptr()->name_ != String::null());
   return raw_ptr()->name_;
 }
@@ -4432,6 +4445,7 @@
   while (it.MoveNext()) {
     constant ^= set.GetKey(it.Current());
     ASSERT(!constant.IsNull());
+    ASSERT(constant.IsCanonical());
     InsertCanonicalConstant(zone, constant);
   }
   set.Release();
@@ -7123,11 +7137,10 @@
       // This output can be copied into a file, then used with sed
       // to replace the old values.
       // sed -i .bak -f /tmp/newkeys runtime/vm/method_recognizer.h
-      THR_Print("s/V(%s, %d)/V(%s, %d)/\n",
-                prefix, fp, prefix, SourceFingerprint());
+      THR_Print("s/0x%08x/0x%08x/\n", fp, SourceFingerprint());
     } else {
       THR_Print("FP mismatch while recognizing method %s:"
-                " expecting %d found %d\n",
+                " expecting 0x%08x found 0x%08x\n",
                 ToFullyQualifiedCString(),
                 fp,
                 SourceFingerprint());
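With the fingerprints now stored as fixed-width hex, the mismatch path above emits plain sed substitutions keyed on the value alone instead of on the whole V(...) row. For example, if the fingerprint paired with ObjectEquals in the list above (0x11662ed8) stopped matching, the VM would print a line of roughly this shape, where the replacement value is invented for illustration:

    s/0x11662ed8/0x2f9c01ab/

Collecting such lines into /tmp/newkeys and running the sed command quoted in the comment above then rewrites every stale constant in method_recognizer.h in a single pass.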
@@ -10349,6 +10362,8 @@
   Thread* thread = Thread::Current();
   Zone* zone = thread->zone();
   ASSERT(thread->IsMutatorThread());
+  // Force the url to have a hash code.
+  url.Hash();
   const Library& result = Library::Handle(zone, Library::New());
   result.StorePointer(&result.raw_ptr()->name_, Symbols::Empty().raw());
   result.StorePointer(&result.raw_ptr()->url_, url.raw());
@@ -10536,6 +10551,18 @@
   Zone* zone = thread->zone();
   Isolate* isolate = thread->isolate();
 
+  if (FLAG_support_reload && isolate->IsReloading()) {
+    // When reloading, we need to make sure we use the original private key
+    // if this library previously existed.
+    IsolateReloadContext* reload_context = isolate->reload_context();
+    const String& original_key =
+        String::Handle(reload_context->FindLibraryPrivateKey(*this));
+    if (!original_key.IsNull()) {
+      StorePointer(&raw_ptr()->private_key_, original_key.raw());
+      return;
+    }
+  }
+
   // Format of the private key is: "@<sequence number><6 digits of hash>
   const intptr_t hash_mask = 0x7FFFF;
 
@@ -10921,12 +10948,17 @@
     pending_deferred_loads.Add(deferred_lib);
     const String& lib_url = String::Handle(zone, deferred_lib.url());
     Dart_LibraryTagHandler handler = isolate->library_tag_handler();
+    Object& obj = Object::Handle(zone);
     {
       TransitionVMToNative transition(thread);
       Api::Scope api_scope(thread);
-      handler(Dart_kImportTag,
-              Api::NewHandle(thread, importer()),
-              Api::NewHandle(thread, lib_url.raw()));
+      obj = Api::UnwrapHandle(
+              handler(Dart_kImportTag,
+                      Api::NewHandle(thread, importer()),
+                      Api::NewHandle(thread, lib_url.raw())));
+    }
+    if (obj.IsError()) {
+      Exceptions::PropagateError(Error::Cast(obj));
     }
   } else {
     // Another load request is in flight.
@@ -11315,33 +11347,39 @@
     OS::Print("Function not found %s.%s\n", #class_name, #function_name);      \
   } else {                                                                     \
     CHECK_FINGERPRINT3(func, class_name, function_name, dest, fp);             \
-  }                                                                            \
+  }
+
+#define CHECK_FINGERPRINTS2(class_name, function_name, dest, type, fp) \
+  CHECK_FINGERPRINTS(class_name, function_name, dest, fp)
 
   all_libs.Add(&Library::ZoneHandle(Library::CoreLibrary()));
-  CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS);
-  CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS);
+  CORE_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
+  CORE_INTEGER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
 
   all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
   all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
   all_libs.Add(&Library::ZoneHandle(Library::CollectionLibrary()));
-  OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS);
+  OTHER_RECOGNIZED_LIST(CHECK_FINGERPRINTS2);
   INLINE_WHITE_LIST(CHECK_FINGERPRINTS);
   INLINE_BLACK_LIST(CHECK_FINGERPRINTS);
   POLYMORPHIC_TARGET_LIST(CHECK_FINGERPRINTS);
 
   all_libs.Clear();
   all_libs.Add(&Library::ZoneHandle(Library::DeveloperLibrary()));
-  DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS);
+  DEVELOPER_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
 
   all_libs.Clear();
   all_libs.Add(&Library::ZoneHandle(Library::MathLibrary()));
-  MATH_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS);
+  MATH_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
 
   all_libs.Clear();
   all_libs.Add(&Library::ZoneHandle(Library::TypedDataLibrary()));
-  TYPED_DATA_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS);
+  TYPED_DATA_LIB_INTRINSIC_LIST(CHECK_FINGERPRINTS2);
 
 #undef CHECK_FINGERPRINTS
+#undef CHECK_FINGERPRINTS2
+
+
 
 Class& cls = Class::Handle();
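The CHECK_FINGERPRINTS2 wrapper above exists only to adapt arity: lists that now carry the extra result-type column are checked with the same four-argument CHECK_FINGERPRINTS by discarding `type`. A hypothetical, self-contained sketch of that forwarding trick follows; DemoCheck and all DEMO_* names are invented.

#include <cstdio>

// Stand-in for the real fingerprint check; prints instead of comparing.
static void DemoCheck(const char* name, unsigned fp) {
  std::printf("checking %s against 0x%08x\n", name, fp);
}

#define DEMO_CHECK4(class_name, function_name, dest, fp)                       \
  DemoCheck(#class_name "." #function_name, fp);

// Five-argument rows (with the extra result-type column) reuse the same
// checker by simply dropping 'type', in the spirit of CHECK_FINGERPRINTS2.
#define DEMO_CHECK5(class_name, function_name, dest, type, fp)                 \
  DEMO_CHECK4(class_name, function_name, dest, fp)

int main() {
  DEMO_CHECK5(_Double, add, Double_add, Double, 0x4bac5dd5)
  return 0;
}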
 
@@ -12532,6 +12570,17 @@
 }
 
 
+void ICData::SetIsStaticCall(bool static_call) const {
+  StoreNonPointer(&raw_ptr()->state_bits_,
+                  StaticCallBit::update(static_call, raw_ptr()->state_bits_));
+}
+
+
+bool ICData::is_static_call() const {
+  return StaticCallBit::decode(raw_ptr()->state_bits_);
+}
+
+
 void ICData::set_state_bits(uint32_t bits) const {
   StoreNonPointer(&raw_ptr()->state_bits_, bits);
 }
@@ -13344,7 +13393,8 @@
                                  const String& target_name,
                                  const Array& arguments_descriptor,
                                  intptr_t deopt_id,
-                                 intptr_t num_args_tested) {
+                                 intptr_t num_args_tested,
+                                 bool is_static_call) {
   ASSERT(!owner.IsNull());
   ASSERT(!target_name.IsNull());
   ASSERT(!arguments_descriptor.IsNull());
@@ -13367,6 +13417,7 @@
 #if defined(TAG_IC_DATA)
   result.set_tag(-1);
 #endif
+  result.SetIsStaticCall(is_static_call);
   result.SetNumArgsTested(num_args_tested);
   return result.raw();
 }
@@ -13411,7 +13462,8 @@
                        const String& target_name,
                        const Array& arguments_descriptor,
                        intptr_t deopt_id,
-                       intptr_t num_args_tested) {
+                       intptr_t num_args_tested,
+                       bool is_static_call) {
   Zone* zone = Thread::Current()->zone();
   const ICData& result = ICData::Handle(zone,
                                         NewDescriptor(zone,
@@ -13419,7 +13471,8 @@
                                                       target_name,
                                                       arguments_descriptor,
                                                       deopt_id,
-                                                      num_args_tested));
+                                                      num_args_tested,
+                                                      is_static_call));
   result.set_ic_data_array(
       Array::Handle(zone, NewEmptyICDataArray(num_args_tested)));
   return result.raw();
@@ -13432,7 +13485,8 @@
       String::Handle(from.target_name()),
       Array::Handle(from.arguments_descriptor()),
       from.deopt_id(),
-      num_args_tested));
+      num_args_tested,
+      from.is_static_call()));
   // Copy deoptimization reasons.
   result.SetDeoptReasons(from.DeoptReasons());
   return result.raw();
@@ -13447,7 +13501,8 @@
       String::Handle(zone, from.target_name()),
       Array::Handle(zone, from.arguments_descriptor()),
       from.deopt_id(),
-      from.NumArgsTested()));
+      from.NumArgsTested(),
+      from.is_static_call()));
   // Clone entry array.
   const Array& from_array = Array::Handle(zone, from.ic_data());
   const intptr_t len = from_array.Length();
@@ -13980,7 +14035,6 @@
   MemoryRegion region(reinterpret_cast<void*>(instrs.EntryPoint()),
                       instrs.size());
   assembler->FinalizeInstructions(region);
-  VerifiedMemory::Accept(region.start(), region.size());
   CPU::FlushICache(instrs.EntryPoint(), instrs.size());
 
   code.set_compile_timestamp(OS::GetCurrentMonotonicMicros());
@@ -18018,7 +18072,7 @@
   // on new heap.
   RawObject* raw = Object::Allocate(MixinAppType::kClassId,
                                     MixinAppType::InstanceSize(),
-                                    Heap::kNew);
+                                    Heap::kOld);
   return reinterpret_cast<RawMixinAppType*>(raw);
 }
 
@@ -21283,8 +21337,6 @@
                          space));
     NoSafepointScope no_safepoint;
     raw->StoreSmi(&(raw->ptr()->length_), Smi::New(len));
-    VerifiedMemory::Accept(reinterpret_cast<uword>(raw->ptr()),
-                           Array::InstanceSize(len));
     return raw;
   }
 }
@@ -21550,7 +21602,6 @@
     }
   }
 };
-typedef EnumIndexHashMap<DefaultHashTraits> EnumIndexDefaultMap;
 
 
 RawLinkedHashMap* LinkedHashMap::NewDefault(Heap::Space space) {
@@ -21962,6 +22013,12 @@
 
 
 const char* TypedData::ToCString() const {
+  switch (GetClassId()) {
+#define CASE_TYPED_DATA_CLASS(clazz)                                           \
+  case kTypedData##clazz##Cid: return #clazz;
+  CLASS_LIST_TYPED_DATA(CASE_TYPED_DATA_CLASS);
+#undef CASE_TYPED_DATA_CLASS
+  }
   return "TypedData";
 }
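The switch added above is generated from the CLASS_LIST_TYPED_DATA X-macro, so each listed class contributes one case. For an entry such as Int8Array (the exact list is not shown in this hunk), CASE_TYPED_DATA_CLASS would expand to roughly

    case kTypedDataInt8ArrayCid: return "Int8Array";

so ToCString() now names the concrete typed-data class and only falls back to the generic "TypedData" string for class ids outside the list.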
 
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 30cc72d..143cf9e 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -25,7 +25,6 @@
 #include "vm/tags.h"
 #include "vm/thread.h"
 #include "vm/token_position.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -504,7 +503,7 @@
     return *void_type_;
   }
 
-  static void InitVmIsolateSnapshotObjectTable(intptr_t len);
+  static void set_vm_isolate_snapshot_object_table(const Array& table);
 
   static RawClass* class_class() { return class_class_; }
   static RawClass* dynamic_class() { return dynamic_class_; }
@@ -655,7 +654,6 @@
     ASSERT(Contains(reinterpret_cast<uword>(to)));
     if (raw()->IsNewObject()) {
       memmove(const_cast<RawObject**>(to), from, count * kWordSize);
-      VerifiedMemory::Accept(reinterpret_cast<uword>(to), count * kWordSize);
     } else {
       for (intptr_t i = 0; i < count; ++i) {
         StorePointer(&to[i], from[i]);
@@ -835,6 +833,7 @@
   friend void RawObject::Validate(Isolate* isolate) const;
   friend class Closure;
   friend class SnapshotReader;
+  friend class InstanceDeserializationCluster;
   friend class OneByteString;
   friend class TwoByteString;
   friend class ExternalOneByteString;
@@ -1842,7 +1841,7 @@
 
   bool IsImmutable() const;
 
-  void Reset(bool is_static_call) const;
+  void Reset() const;
   void ResetData() const;
 
   // Note: only deopts with reasons before Unknown in this list are recorded in
@@ -2028,7 +2027,8 @@
                         const String& target_name,
                         const Array& arguments_descriptor,
                         intptr_t deopt_id,
-                        intptr_t num_args_tested);
+                        intptr_t num_args_tested,
+                        bool is_static_call);
   static RawICData* NewFrom(const ICData& from, intptr_t num_args_tested);
 
   // Generates a new ICData with descriptor and data array copied (deep clone).
@@ -2146,6 +2146,9 @@
   intptr_t tag() const { return raw_ptr()->tag_; }
 #endif
 
+  void SetIsStaticCall(bool static_call) const;
+  bool is_static_call() const;
+
  private:
   static RawICData* New();
 
@@ -2167,7 +2170,9 @@
     kDeoptReasonPos = kNumArgsTestedPos + kNumArgsTestedSize,
     kDeoptReasonSize = kLastRecordedDeoptReason + 1,
     kRangeFeedbackPos = kDeoptReasonPos + kDeoptReasonSize,
-    kRangeFeedbackSize = kBitsPerRangeFeedback * kRangeFeedbackSlots
+    kRangeFeedbackSize = kBitsPerRangeFeedback * kRangeFeedbackSlots,
+    kStaticCallPos = kRangeFeedbackPos + kRangeFeedbackSize,
+    kStaticCallSize = 1,
   };
 
   class NumArgsTestedBits : public BitField<uint32_t,
@@ -2183,6 +2188,10 @@
                                             ICData::kRangeFeedbackPos,
                                             ICData::kRangeFeedbackSize> {};
 
+  class StaticCallBit : public BitField<uint32_t,
+                                        bool,
+                                        ICData::kStaticCallPos,
+                                        ICData::kStaticCallSize> {};
 #if defined(DEBUG)
   // Used in asserts to verify that a check is not added twice.
   bool HasCheck(const GrowableArray<intptr_t>& cids) const;
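StaticCallBit above carves a one-bit flag out of ICData::state_bits_, directly after the range-feedback bits, and the SetIsStaticCall()/is_static_call() pair earlier in this patch writes and reads it through the BitField update/decode helpers. Below is a minimal, self-contained sketch of that pack/unpack idea; the bit position, the Demo* names, and the free functions are invented and are not the VM's BitField template.

#include <cassert>
#include <cstdint>

// Minimal stand-in for the update/decode pattern behind StaticCallBit.
// The bit position here is illustrative only; the real kStaticCallPos is
// determined by the preceding bit ranges in ICData::state_bits_.
constexpr uint32_t kDemoStaticCallPos = 28;
constexpr uint32_t kDemoStaticCallSize = 1;
constexpr uint32_t kDemoStaticCallMask =
    ((1u << kDemoStaticCallSize) - 1u) << kDemoStaticCallPos;

constexpr uint32_t DemoUpdateStaticCall(bool value, uint32_t bits) {
  return (bits & ~kDemoStaticCallMask) |
         (static_cast<uint32_t>(value) << kDemoStaticCallPos);
}

constexpr bool DemoDecodeStaticCall(uint32_t bits) {
  return (bits & kDemoStaticCallMask) != 0;
}

int main() {
  uint32_t state_bits = 0;
  state_bits = DemoUpdateStaticCall(true, state_bits);  // cf. SetIsStaticCall(true)
  assert(DemoDecodeStaticCall(state_bits));             // cf. is_static_call()
  state_bits = DemoUpdateStaticCall(false, state_bits);
  assert(!DemoDecodeStaticCall(state_bits));
  return 0;
}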
@@ -2196,7 +2205,8 @@
                                   const String& target_name,
                                   const Array& arguments_descriptor,
                                   intptr_t deopt_id,
-                                  intptr_t num_args_tested);
+                                  intptr_t num_args_tested,
+                                  bool is_static_call);
 
   static void WriteSentinel(const Array& data, intptr_t test_entry_length);
 
@@ -2206,6 +2216,8 @@
   FINAL_HEAP_OBJECT_IMPLEMENTATION(ICData, Object);
   friend class Class;
   friend class SnapshotWriter;
+  friend class Serializer;
+  friend class Deserializer;
 };
 
 
@@ -2275,7 +2287,6 @@
   // Reloading support:
   void Reparent(const Class& new_cls) const;
   void ZeroEdgeCounters() const;
-  void FillICDataWithSentinels(const Code& code) const;
 
   RawClass* Owner() const;
   RawClass* origin() const;
@@ -3325,6 +3336,7 @@
   friend class Class;
   friend class HeapProfiler;
   friend class RawField;
+  friend class FieldSerializationCluster;
 };
 
 
@@ -3849,6 +3861,7 @@
   friend class DictionaryIterator;
   friend class Namespace;
   friend class Object;
+  friend class LibraryDeserializationCluster;
 };
 
 
@@ -4539,6 +4552,10 @@
     StorePointer(&raw_ptr()->code_source_map_, code_source_map.raw());
   }
 
+  // Used during reloading (see object_reload.cc). Calls Reset on all ICDatas
+  // that are embedded inside the Code object.
+  void ResetICDatas() const;
+
   TokenPosition GetTokenPositionAt(intptr_t offset) const;
 
   // Array of DeoptInfo objects.
@@ -4846,6 +4863,8 @@
   FINAL_HEAP_OBJECT_IMPLEMENTATION(Code, Object);
   friend class Class;
   friend class SnapshotWriter;
+  friend class FunctionSerializationCluster;
+  friend class CodeSerializationCluster;
   friend class CodePatcher;  // for set_instructions
   friend class Precompiler;  // for set_instructions
   // So that the RawFunction pointer visitor can determine whether code the
@@ -5399,6 +5418,9 @@
   friend class SnapshotWriter;
   friend class StubCode;
   friend class TypedDataView;
+  friend class InstanceSerializationCluster;
+  friend class InstanceDeserializationCluster;
+  friend class ClassDeserializationCluster;  // vtable
 };
 
 
@@ -6772,6 +6794,7 @@
   // So that SkippedCodeFunctions can print a debug string from a NoHandleScope.
   friend class SkippedCodeFunctions;
   friend class RawOneByteString;
+  friend class RODataSerializationCluster;  // SetHash
 };
 
 
@@ -7415,7 +7438,7 @@
     ASSERT(index < Length());
 
     // TODO(iposva): Add storing NoSafepointScope.
-    DataStorePointer(ObjectAddr(index), value.raw());
+    data()->StorePointer(ObjectAddr(index), value.raw());
   }
 
   void Add(const Object& value, Heap::Space space = Heap::kNew) const;
@@ -7485,9 +7508,6 @@
     ASSERT((index >= 0) && (index < Length()));
     return &(DataArray()->data()[index]);
   }
-  void DataStorePointer(RawObject** addr, RawObject* value) const {
-    data()->StorePointer(addr, value);
-  }
 
   static const int kDefaultInitialCapacity = 4;
 
@@ -8118,6 +8138,7 @@
   static RawLinkedHashMap* NewUninitialized(Heap::Space space = Heap::kNew);
 
   friend class Class;
+  friend class LinkedHashMapDeserializationCluster;
 };
 
 
@@ -8521,6 +8542,7 @@
   intptr_t cid = value->GetClassId();
   // Free-list elements cannot be wrapped in a handle.
   ASSERT(cid != kFreeListElement);
+  ASSERT(cid != kForwardingCorpse);
   if (cid >= kNumPredefinedCids) {
     cid = kInstanceCid;
   }
diff --git a/runtime/vm/object_arm64_test.cc b/runtime/vm/object_arm64_test.cc
index 4349bb9..a883609 100644
--- a/runtime/vm/object_arm64_test.cc
+++ b/runtime/vm/object_arm64_test.cc
@@ -17,7 +17,7 @@
 // Generate a simple dart code sequence.
 // This is used to test Code and Instruction object creation.
 void GenerateIncrement(Assembler* assembler) {
-  __ mov(SP, CSP);
+  __ EnterFrame(1 * kWordSize);
   __ movz(R0, Immediate(0), 0);
   __ Push(R0);
   __ add(R0, R0, Operand(1));
@@ -26,6 +26,7 @@
   __ add(R1, R1, Operand(1));
   __ Pop(R0);
   __ mov(R0, R1);
+  __ LeaveFrame();
   __ ret();
 }
 
diff --git a/runtime/vm/object_dbc_test.cc b/runtime/vm/object_dbc_test.cc
index 464350d..ecb1fa0 100644
--- a/runtime/vm/object_dbc_test.cc
+++ b/runtime/vm/object_dbc_test.cc
@@ -16,14 +16,12 @@
 
 // Generate a simple dart code sequence.
 // This is used to test Code and Instruction object creation.
+// For other architectures, this sequence does do an increment, hence the name.
+// On DBC, we don't do an increment because generating an instance call here
+// would be too complex.
 void GenerateIncrement(Assembler* assembler) {
   __ Frame(1);
-  __ LoadConstant(0, Smi::Handle(Smi::New(0)));
-  __ PushConstant(Smi::Handle(Smi::New(1)));
-  __ Push(0);
-  __ AddTOS();
-  __ Trap();
-  __ PopLocal(0);
+  __ LoadConstant(0, Smi::Handle(Smi::New(1)));
   __ Return(0);
 }
 
diff --git a/runtime/vm/object_reload.cc b/runtime/vm/object_reload.cc
index 405854d..f9c5ad9 100644
--- a/runtime/vm/object_reload.cc
+++ b/runtime/vm/object_reload.cc
@@ -56,38 +56,48 @@
 }
 
 
-static void ClearICs(const Function& function, const Code& code) {
-  if (function.ic_data_array() == Array::null()) {
-    return;  // Already reset in an earlier round.
-  }
-
-  Thread* thread = Thread::Current();
-  Zone* zone = thread->zone();
-
-  ZoneGrowableArray<const ICData*>* ic_data_array =
-      new(zone) ZoneGrowableArray<const ICData*>();
-  function.RestoreICDataMap(ic_data_array, false /* clone ic-data */);
-  if (ic_data_array->length() == 0) {
+void Code::ResetICDatas() const {
+  // Iterate over the Code's object pool and reset all ICDatas.
+#ifdef TARGET_ARCH_IA32
+  // IA32 does not have an object pool, but we can iterate over all
+  // embedded objects by using the variable length data section.
+  if (!is_alive()) {
     return;
   }
-  const PcDescriptors& descriptors =
-      PcDescriptors::Handle(code.pc_descriptors());
-  PcDescriptors::Iterator iter(descriptors, RawPcDescriptors::kIcCall |
-                                            RawPcDescriptors::kUnoptStaticCall);
-  while (iter.MoveNext()) {
-    const ICData* ic_data = (*ic_data_array)[iter.DeoptId()];
-    if (ic_data == NULL) {
+  const Instructions& instrs = Instructions::Handle(instructions());
+  ASSERT(!instrs.IsNull());
+  uword base_address = instrs.EntryPoint();
+  Object& object = Object::Handle();
+  intptr_t offsets_length = pointer_offsets_length();
+  const int32_t* offsets = raw_ptr()->data();
+  for (intptr_t i = 0; i < offsets_length; i++) {
+    int32_t offset = offsets[i];
+    RawObject** object_ptr =
+        reinterpret_cast<RawObject**>(base_address + offset);
+    RawObject* raw_object = *object_ptr;
+    if (!raw_object->IsHeapObject()) {
       continue;
     }
-    bool is_static_call = iter.Kind() == RawPcDescriptors::kUnoptStaticCall;
-    ic_data->Reset(is_static_call);
+    object = raw_object;
+    if (object.IsICData()) {
+      ICData::Cast(object).Reset();
+    }
   }
-}
-
-
-void Function::FillICDataWithSentinels(const Code& code) const {
-  ASSERT(code.raw() == CurrentCode());
-  ClearICs(*this, code);
+#else
+  const ObjectPool& pool = ObjectPool::Handle(object_pool());
+  Object& object = Object::Handle();
+  ASSERT(!pool.IsNull());
+  for (intptr_t i = 0; i < pool.Length(); i++) {
+    ObjectPool::EntryType entry_type = pool.InfoAt(i);
+    if (entry_type != ObjectPool::kTaggedObject) {
+      continue;
+    }
+    object = pool.ObjectAt(i);
+    if (object.IsICData()) {
+      ICData::Cast(object).Reset();
+    }
+  }
+#endif
 }
 
 
@@ -423,6 +433,9 @@
     TIR_Print("Finalized replacement class for %s\n", ToCString());
   }
 
+  // At this point the original and replacement must be in the same state.
+  ASSERT(is_finalized() == replacement.is_finalized());
+
   if (is_finalized()) {
     // Get the field maps for both classes. These field maps walk the class
     // hierarchy.
@@ -431,13 +444,22 @@
     const Array& replacement_fields =
         Array::Handle(replacement.OffsetToFieldMap());
 
-    // Check that we have the same number of fields.
+    // Check that the size of the instance is the same.
     if (fields.Length() != replacement_fields.Length()) {
       IRC->ReportError(String::Handle(String::NewFormatted(
           "Number of instance fields changed in %s", ToCString())));
       return false;
     }
 
+    // Check that we have the same next field offset. This check is not
+    // redundant with the one above because the instance OffsetToFieldMap
+    // array length is based on the instance size (which may be aligned up).
+    if (next_field_offset() != replacement.next_field_offset()) {
+      IRC->ReportError(String::Handle(String::NewFormatted(
+          "Number of instance fields changed in %s", ToCString())));
+      return false;
+    }
+
     if (NumTypeArguments() != replacement.NumTypeArguments()) {
       IRC->ReportError(String::Handle(String::NewFormatted(
           "Number of type arguments changed in %s", ToCString())));
@@ -470,7 +492,8 @@
   } else if (is_prefinalized()) {
     if (!replacement.is_prefinalized()) {
       IRC->ReportError(String::Handle(String::NewFormatted(
-          "Original class ('%s') is prefinalized and replacement class ('%s')",
+          "Original class ('%s') is prefinalized and replacement class "
+          "('%s') is not ",
           ToCString(), replacement.ToCString())));
       return false;
     }
@@ -510,10 +533,8 @@
 
 static const Function* static_call_target = NULL;
 
-void ICData::Reset(bool is_static_call) const {
-  // TODO(johnmccutchan): ICData should know whether or not it's for a
-  // static call.
-  if (is_static_call) {
+void ICData::Reset() const {
+  if (is_static_call()) {
     const Function& old_target = Function::Handle(GetTargetAt(0));
     if (old_target.IsNull()) {
       FATAL("old_target is NULL.\n");
diff --git a/runtime/vm/object_store.cc b/runtime/vm/object_store.cc
index 9affdcb..e1ca9dd 100644
--- a/runtime/vm/object_store.cc
+++ b/runtime/vm/object_store.cc
@@ -120,6 +120,24 @@
 }
 
 
+void ObjectStore::PrintToJSONObject(JSONObject* jsobj) {
+  if (!FLAG_support_service) {
+    return;
+  }
+  jsobj->AddProperty("type", "_ObjectStore");
+
+  {
+    JSONObject fields(jsobj, "fields");
+    Object& value = Object::Handle();
+#define PRINT_OBJECT_STORE_FIELD(type, name)                                   \
+    value = name;                                                              \
+    fields.AddProperty(#name, value);
+OBJECT_STORE_FIELD_LIST(PRINT_OBJECT_STORE_FIELD);
+#undef PRINT_OBJECT_STORE_FIELD
+  }
+}
+
+
 RawError* ObjectStore::PreallocateObjects() {
   Thread* thread = Thread::Current();
   Isolate* isolate = thread->isolate();
diff --git a/runtime/vm/object_store.h b/runtime/vm/object_store.h
index 56ee2b0..6ab93de 100644
--- a/runtime/vm/object_store.h
+++ b/runtime/vm/object_store.h
@@ -498,92 +498,100 @@
 
   static void Init(Isolate* isolate);
 
+  void PrintToJSONObject(JSONObject* jsobj);
+
  private:
   ObjectStore();
 
+#define OBJECT_STORE_FIELD_LIST(V)                                             \
+  V(RawClass*, object_class_)                                                  \
+  V(RawType*, object_type_)                                                    \
+  V(RawClass*, null_class_)                                                    \
+  V(RawType*, null_type_)                                                      \
+  V(RawType*, function_type_)                                                  \
+  V(RawClass*, closure_class_)                                                 \
+  V(RawType*, number_type_)                                                    \
+  V(RawType*, int_type_)                                                       \
+  V(RawClass*, integer_implementation_class_)                                  \
+  V(RawClass*, smi_class_)                                                     \
+  V(RawType*, smi_type_)                                                       \
+  V(RawClass*, mint_class_)                                                    \
+  V(RawType*, mint_type_)                                                      \
+  V(RawClass*, bigint_class_)                                                  \
+  V(RawClass*, double_class_)                                                  \
+  V(RawType*, double_type_)                                                    \
+  V(RawType*, float32x4_type_)                                                 \
+  V(RawType*, int32x4_type_)                                                   \
+  V(RawType*, float64x2_type_)                                                 \
+  V(RawType*, string_type_)                                                    \
+  V(RawClass*, future_class_)                                                  \
+  V(RawClass*, completer_class_)                                               \
+  V(RawClass*, stream_iterator_class_)                                         \
+  V(RawClass*, symbol_class_)                                                  \
+  V(RawClass*, one_byte_string_class_)                                         \
+  V(RawClass*, two_byte_string_class_)                                         \
+  V(RawClass*, external_one_byte_string_class_)                                \
+  V(RawClass*, external_two_byte_string_class_)                                \
+  V(RawType*, bool_type_)                                                      \
+  V(RawClass*, bool_class_)                                                    \
+  V(RawClass*, array_class_)                                                   \
+  V(RawType*, array_type_)                                                     \
+  V(RawClass*, immutable_array_class_)                                         \
+  V(RawClass*, growable_object_array_class_)                                   \
+  V(RawClass*, linked_hash_map_class_)                                         \
+  V(RawClass*, float32x4_class_)                                               \
+  V(RawClass*, int32x4_class_)                                                 \
+  V(RawClass*, float64x2_class_)                                               \
+  V(RawClass*, error_class_)                                                   \
+  V(RawClass*, weak_property_class_)                                           \
+  V(RawArray*, symbol_table_)                                                  \
+  V(RawArray*, canonical_types_)                                               \
+  V(RawArray*, canonical_type_arguments_)                                      \
+  V(RawLibrary*, async_library_)                                               \
+  V(RawLibrary*, builtin_library_)                                             \
+  V(RawLibrary*, core_library_)                                                \
+  V(RawLibrary*, collection_library_)                                          \
+  V(RawLibrary*, convert_library_)                                             \
+  V(RawLibrary*, developer_library_)                                           \
+  V(RawLibrary*, internal_library_)                                            \
+  V(RawLibrary*, isolate_library_)                                             \
+  V(RawLibrary*, math_library_)                                                \
+  V(RawLibrary*, mirrors_library_)                                             \
+  V(RawLibrary*, native_wrappers_library_)                                     \
+  V(RawLibrary*, profiler_library_)                                            \
+  V(RawLibrary*, root_library_)                                                \
+  V(RawLibrary*, typed_data_library_)                                          \
+  V(RawLibrary*, vmservice_library_)                                           \
+  V(RawGrowableObjectArray*, libraries_)                                       \
+  V(RawArray*, libraries_map_)                                                 \
+  V(RawGrowableObjectArray*, closure_functions_)                               \
+  V(RawGrowableObjectArray*, pending_classes_)                                 \
+  V(RawGrowableObjectArray*, pending_deferred_loads_)                          \
+  V(RawGrowableObjectArray*, resume_capabilities_)                             \
+  V(RawGrowableObjectArray*, exit_listeners_)                                  \
+  V(RawGrowableObjectArray*, error_listeners_)                                 \
+  V(RawContext*, empty_context_)                                               \
+  V(RawInstance*, stack_overflow_)                                             \
+  V(RawInstance*, out_of_memory_)                                              \
+  V(RawUnhandledException*, preallocated_unhandled_exception_)                 \
+  V(RawStacktrace*, preallocated_stack_trace_)                                 \
+  V(RawFunction*, lookup_port_handler_)                                        \
+  V(RawTypedData*, empty_uint32_array_)                                        \
+  V(RawFunction*, handle_message_function_)                                    \
+  V(RawArray*, library_load_error_table_)                                      \
+  V(RawArray*, compile_time_constants_)                                        \
+  V(RawArray*, unique_dynamic_targets_)                                        \
+  V(RawGrowableObjectArray*, token_objects_)                                   \
+  V(RawArray*, token_objects_map_)                                             \
+  V(RawGrowableObjectArray*, megamorphic_cache_table_)                         \
+  V(RawCode*, megamorphic_miss_code_)                                          \
+  V(RawFunction*, megamorphic_miss_function_)                                  \
+
   RawObject** from() { return reinterpret_cast<RawObject**>(&object_class_); }
-  RawClass* object_class_;
-  RawType* object_type_;
-  RawClass* null_class_;
-  RawType* null_type_;
-  RawType* function_type_;
-  RawClass* closure_class_;
-  RawType* number_type_;
-  RawType* int_type_;
-  RawClass* integer_implementation_class_;
-  RawClass* smi_class_;
-  RawType* smi_type_;
-  RawClass* mint_class_;
-  RawType* mint_type_;
-  RawClass* bigint_class_;
-  RawClass* double_class_;
-  RawType* double_type_;
-  RawType* float32x4_type_;
-  RawType* int32x4_type_;
-  RawType* float64x2_type_;
-  RawType* string_type_;
-  RawClass* future_class_;
-  RawClass* completer_class_;
-  RawClass* stream_iterator_class_;
-  RawClass* symbol_class_;
-  RawClass* one_byte_string_class_;
-  RawClass* two_byte_string_class_;
-  RawClass* external_one_byte_string_class_;
-  RawClass* external_two_byte_string_class_;
-  RawType* bool_type_;
-  RawClass* bool_class_;
-  RawClass* array_class_;
-  RawType* array_type_;
-  RawClass* immutable_array_class_;
-  RawClass* growable_object_array_class_;
-  RawClass* linked_hash_map_class_;
-  RawClass* float32x4_class_;
-  RawClass* int32x4_class_;
-  RawClass* float64x2_class_;
-  RawClass* error_class_;
-  RawClass* weak_property_class_;
-  RawArray* symbol_table_;
-  RawArray* canonical_types_;
-  RawArray* canonical_type_arguments_;
-  RawLibrary* async_library_;
-  RawLibrary* builtin_library_;
-  RawLibrary* core_library_;
-  RawLibrary* collection_library_;
-  RawLibrary* convert_library_;
-  RawLibrary* developer_library_;
-  RawLibrary* internal_library_;
-  RawLibrary* isolate_library_;
-  RawLibrary* math_library_;
-  RawLibrary* mirrors_library_;
-  RawLibrary* native_wrappers_library_;
-  RawLibrary* profiler_library_;
-  RawLibrary* root_library_;
-  RawLibrary* typed_data_library_;
-  RawLibrary* vmservice_library_;
-  RawGrowableObjectArray* libraries_;
-  RawArray* libraries_map_;
-  RawGrowableObjectArray* closure_functions_;
-  RawGrowableObjectArray* pending_classes_;
-  RawGrowableObjectArray* pending_deferred_loads_;
-  RawGrowableObjectArray* resume_capabilities_;
-  RawGrowableObjectArray* exit_listeners_;
-  RawGrowableObjectArray* error_listeners_;
-  RawContext* empty_context_;
-  RawInstance* stack_overflow_;
-  RawInstance* out_of_memory_;
-  RawUnhandledException* preallocated_unhandled_exception_;
-  RawStacktrace* preallocated_stack_trace_;
-  RawFunction* lookup_port_handler_;
-  RawTypedData* empty_uint32_array_;
-  RawFunction* handle_message_function_;
-  RawArray* library_load_error_table_;
-  RawArray* compile_time_constants_;
-  RawArray* unique_dynamic_targets_;
-  RawGrowableObjectArray* token_objects_;
-  RawArray* token_objects_map_;
-  RawGrowableObjectArray* megamorphic_cache_table_;
-  RawCode* megamorphic_miss_code_;
-  RawFunction* megamorphic_miss_function_;
+#define DECLARE_OBJECT_STORE_FIELD(type, name)                                 \
+  type name;
+OBJECT_STORE_FIELD_LIST(DECLARE_OBJECT_STORE_FIELD)
+#undef DECLARE_OBJECT_STORE_FIELD
   RawObject** to() {
     return reinterpret_cast<RawObject**>(&megamorphic_miss_function_);
   }
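The hunk above folds the hand-maintained member declarations into a single OBJECT_STORE_FIELD_LIST, which this patch expands twice: DECLARE_OBJECT_STORE_FIELD lays out the raw pointers between from() and to(), and PRINT_OBJECT_STORE_FIELD in object_store.cc dumps every field into the service JSON. Keeping one list presumably guarantees the two stay in sync as fields are added. Below is a hypothetical, self-contained sketch of the same two-expansion idea; DemoStore, its int fields, and the printf-based dump are invented for illustration.

#include <cstdio>

// One row list, expanded once for the member declarations and once for the
// dump, in the spirit of OBJECT_STORE_FIELD_LIST above.
#define DEMO_STORE_FIELD_LIST(V)                                               \
  V(int, symbol_table_)                                                        \
  V(int, libraries_)

struct DemoStore {
#define DEMO_DECLARE_FIELD(type, name) type name;
  DEMO_STORE_FIELD_LIST(DEMO_DECLARE_FIELD)
#undef DEMO_DECLARE_FIELD

  void Print() const {
#define DEMO_PRINT_FIELD(type, name) std::printf(#name ": %d\n", name);
    DEMO_STORE_FIELD_LIST(DEMO_PRINT_FIELD)
#undef DEMO_PRINT_FIELD
  }
};

int main() {
  DemoStore store = {1, 2};
  store.Print();  // prints "symbol_table_: 1" and "libraries_: 2"
  return 0;
}

Any field added to the list automatically shows up both as a member and in the dump, with no second place to forget.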
@@ -604,9 +612,8 @@
     return NULL;
   }
 
-  friend class FullSnapshotWriter;
-  friend class SnapshotReader;
-  friend class VmIsolateSnapshotReader;
+  friend class Serializer;
+  friend class Deserializer;
 
   DISALLOW_COPY_AND_ASSIGN(ObjectStore);
 };
diff --git a/runtime/vm/object_test.cc b/runtime/vm/object_test.cc
index 9ad5f70..05ddd32 100644
--- a/runtime/vm/object_test.cc
+++ b/runtime/vm/object_test.cc
@@ -3038,7 +3038,8 @@
   const Array& args_descriptor =
       Array::Handle(ArgumentsDescriptor::New(1, Object::null_array()));
   ICData& o1 = ICData::Handle();
-  o1 = ICData::New(function, target_name, args_descriptor, id, num_args_tested);
+  o1 = ICData::New(function, target_name, args_descriptor, id,
+                   num_args_tested, false);
   EXPECT_EQ(1, o1.NumArgsTested());
   EXPECT_EQ(id, o1.deopt_id());
   EXPECT_EQ(function.raw(), o1.Owner());
@@ -3077,7 +3078,7 @@
   EXPECT_EQ(2, o1.NumberOfUsedChecks());
 
   ICData& o2 = ICData::Handle();
-  o2 = ICData::New(function, target_name, args_descriptor, 57, 2);
+  o2 = ICData::New(function, target_name, args_descriptor, 57, 2, false);
   EXPECT_EQ(2, o2.NumArgsTested());
   EXPECT_EQ(57, o2.deopt_id());
   EXPECT_EQ(function.raw(), o2.Owner());
@@ -3096,7 +3097,8 @@
   // Check ICData for unoptimized static calls.
   const intptr_t kNumArgsChecked = 0;
   const ICData& scall_icdata = ICData::Handle(
-      ICData::New(function, target_name, args_descriptor, 57, kNumArgsChecked));
+      ICData::New(function, target_name, args_descriptor, 57,
+                  kNumArgsChecked, false));
   scall_icdata.AddTarget(target1);
   EXPECT_EQ(target1.raw(), scall_icdata.GetTargetAt(0));
 }
@@ -4125,9 +4127,8 @@
       : objects_(objects) { }
   virtual ~ObjectAccumulator() { }
   virtual void VisitObject(RawObject* obj) {
-    // Free-list elements cannot even be wrapped in handles.
-    if (obj->IsFreeListElement()) {
-      return;
+    if (obj->IsPseudoObject()) {
+      return;  // Cannot be wrapped in handles.
     }
     Object& handle = Object::Handle(obj);
     // Skip some common simple objects to run in reasonable time.
diff --git a/runtime/vm/os.h b/runtime/vm/os.h
index ff61aaa..a86889b4 100644
--- a/runtime/vm/os.h
+++ b/runtime/vm/os.h
@@ -25,8 +25,9 @@
   // Returns the current process id.
   static intptr_t ProcessId();
 
-  // Returns the abbreviated time-zone name for the given instant.
-  // For example "CET" or "CEST".
+  // Returns a time-zone name for the given instant.
+  // The name is provided by the underlying platform.
+  // The returned string may be Zone allocated.
   static const char* GetTimeZoneName(int64_t seconds_since_epoch);
 
   // Returns the difference in seconds between local time and UTC for the given
diff --git a/runtime/vm/os_fuchsia.cc b/runtime/vm/os_fuchsia.cc
new file mode 100644
index 0000000..10faf1c
--- /dev/null
+++ b/runtime/vm/os_fuchsia.cc
@@ -0,0 +1,223 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/os.h"
+
+#include <magenta/syscalls.h>
+#include <magenta/types.h>
+
+#include "platform/assert.h"
+
+namespace dart {
+
+const char* OS::Name() {
+  return "fuchsia";
+}
+
+
+intptr_t OS::ProcessId() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+const char* OS::GetTimeZoneName(int64_t seconds_since_epoch) {
+  UNIMPLEMENTED();
+  return "";
+}
+
+
+int OS::GetTimeZoneOffsetInSeconds(int64_t seconds_since_epoch) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int OS::GetLocalTimeZoneAdjustmentInSeconds() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentTimeMillis() {
+  return GetCurrentTimeMicros() / 1000;
+}
+
+
+int64_t OS::GetCurrentTimeMicros() {
+  return _magenta_current_time() / 1000;
+}
+
+
+int64_t OS::GetCurrentMonotonicTicks() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentMonotonicFrequency() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentMonotonicMicros() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int64_t OS::GetCurrentThreadCPUMicros() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void* OS::AlignedAllocate(intptr_t size, intptr_t alignment) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+void OS::AlignedFree(void* ptr) {
+  UNIMPLEMENTED();
+}
+
+
+// TODO(5411554):  May need to hoist these architecture dependent code
+// into a architecture specific file e.g: os_ia32_linux.cc
+intptr_t OS::ActivationFrameAlignment() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+intptr_t OS::PreferredCodeAlignment() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+bool OS::AllowStackFrameIteratorFromAnotherThread() {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+int OS::NumberOfAvailableProcessors() {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void OS::Sleep(int64_t millis) {
+  UNIMPLEMENTED();
+}
+
+
+void OS::SleepMicros(int64_t micros) {
+  UNIMPLEMENTED();
+}
+
+
+void OS::DebugBreak() {
+  UNIMPLEMENTED();
+}
+
+
+char* OS::StrNDup(const char* s, intptr_t n) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+intptr_t OS::StrNLen(const char* s, intptr_t n) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void OS::Print(const char* format, ...) {
+  UNIMPLEMENTED();
+}
+
+
+void OS::VFPrint(FILE* stream, const char* format, va_list args) {
+  vfprintf(stream, format, args);
+  fflush(stream);
+}
+
+
+int OS::SNPrint(char* str, size_t size, const char* format, ...) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+int OS::VSNPrint(char* str, size_t size, const char* format, va_list args) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+char* OS::SCreate(Zone* zone, const char* format, ...) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+char* OS::VSCreate(Zone* zone, const char* format, va_list args) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+bool OS::StringToInt64(const char* str, int64_t* value) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+void OS::RegisterCodeObservers() {
+  UNIMPLEMENTED();
+}
+
+
+void OS::PrintErr(const char* format, ...) {
+  va_list args;
+  va_start(args, format);
+  VFPrint(stderr, format, args);
+  va_end(args);
+}
+
+
+void OS::InitOnce() {
+  // TODO(5411554): For now we check that InitOnce is called only once.
+  // Once there is a more formal mechanism to call InitOnce we can move
+  // this check there.
+  static bool init_once_called = false;
+  ASSERT(init_once_called == false);
+  init_once_called = true;
+}
+
+
+void OS::Shutdown() {
+}
+
+
+void OS::Abort() {
+  abort();
+}
+
+
+void OS::Exit(int code) {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
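
The only fully implemented functions in this new port derive milliseconds and microseconds from the platform's nanosecond clock by successive division. A portable sketch of the same layering, with std::chrono standing in for _magenta_current_time():

// Sketch only: std::chrono::system_clock stands in for the Magenta clock.
#include <chrono>
#include <cstdint>
#include <cstdio>

static int64_t CurrentTimeNanos() {
  return std::chrono::duration_cast<std::chrono::nanoseconds>(
             std::chrono::system_clock::now().time_since_epoch())
      .count();
}

// Each coarser clock is derived from the finer one, as in the port above.
static int64_t CurrentTimeMicros() { return CurrentTimeNanos() / 1000; }
static int64_t CurrentTimeMillis() { return CurrentTimeMicros() / 1000; }

int main() {
  std::printf("millis since epoch: %lld\n",
              static_cast<long long>(CurrentTimeMillis()));
  return 0;
}
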
diff --git a/runtime/vm/os_thread.cc b/runtime/vm/os_thread.cc
index 75a7a31..3700c9f 100644
--- a/runtime/vm/os_thread.cc
+++ b/runtime/vm/os_thread.cc
@@ -26,7 +26,9 @@
 #if defined(DEBUG)
     join_id_(kInvalidThreadJoinId),
 #endif
+#ifndef PRODUCT
     trace_id_(OSThread::GetCurrentThreadTraceId()),
+#endif
     name_(NULL),
     timeline_block_lock_(new Mutex()),
     timeline_block_(NULL),
diff --git a/runtime/vm/os_thread.h b/runtime/vm/os_thread.h
index 415cff89..e950832 100644
--- a/runtime/vm/os_thread.h
+++ b/runtime/vm/os_thread.h
@@ -12,6 +12,8 @@
 // Declare the OS-specific types ahead of defining the generic classes.
 #if defined(TARGET_OS_ANDROID)
 #include "vm/os_thread_android.h"
+#elif defined(TARGET_OS_FUCHSIA)
+#include "vm/os_thread_fuchsia.h"
 #elif defined(TARGET_OS_LINUX)
 #include "vm/os_thread_linux.h"
 #elif defined(TARGET_OS_MACOS)
@@ -61,10 +63,12 @@
     return id_;
   }
 
+#ifndef PRODUCT
   ThreadId trace_id() const {
     ASSERT(trace_id_ != OSThread::kInvalidThreadId);
     return trace_id_;
   }
+#endif
 
   const char* name() const {
     return name_;
@@ -203,7 +207,9 @@
   }
 
   static void Cleanup();
+#ifndef PRODUCT
   static ThreadId GetCurrentThreadTraceId();
+#endif  // PRODUCT
   static OSThread* GetOSThreadFromThread(Thread* thread);
   static void AddThreadToListLocked(OSThread* thread);
   static void RemoveThreadFromList(OSThread* thread);
@@ -217,7 +223,9 @@
   // only called once per OSThread.
   ThreadJoinId join_id_;
 #endif
+#ifndef PRODUCT
   const ThreadId trace_id_;  // Used to interface with tracing tools.
+#endif
   char* name_;  // A name for this thread.
 
   Mutex* timeline_block_lock_;
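
These PRODUCT guards compile the trace id, its accessor, and GetCurrentThreadTraceId() out of product builds entirely. A small self-contained sketch of the conditional-member pattern, using a stand-in class rather than the real OSThread:

// Minimal sketch: a member that only exists outside PRODUCT builds, mirroring
// how trace_id_ is guarded above (stand-in class, not the real OSThread).
#include <cstdint>
#include <cstdio>

class Worker {
 public:
#ifndef PRODUCT
  int64_t trace_id() const { return trace_id_; }
#endif
  const char* name() const { return name_; }

 private:
#ifndef PRODUCT
  const int64_t trace_id_ = 42;  // Only used to interface with tracing tools.
#endif
  const char* name_ = "worker";
};

int main() {
  Worker worker;
  std::printf("name: %s\n", worker.name());
#ifndef PRODUCT
  std::printf("trace id: %lld\n",
              static_cast<long long>(worker.trace_id()));
#endif
  return 0;
}

Building with -DPRODUCT drops both the field and the accessor, so product binaries carry no tracing overhead.
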
diff --git a/runtime/vm/os_thread_android.cc b/runtime/vm/os_thread_android.cc
index 5a54c59..0ea90e6 100644
--- a/runtime/vm/os_thread_android.cc
+++ b/runtime/vm/os_thread_android.cc
@@ -188,9 +188,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return GetCurrentThreadId();
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_thread_fuchsia.cc b/runtime/vm/os_thread_fuchsia.cc
new file mode 100644
index 0000000..c3e8d25
--- /dev/null
+++ b/runtime/vm/os_thread_fuchsia.cc
@@ -0,0 +1,436 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "platform/globals.h"  // NOLINT
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/os_thread.h"
+#include "vm/os_thread_fuchsia.h"
+
+#include <errno.h>  // NOLINT
+#include <magenta/syscalls.h>
+#include <magenta/types.h>
+
+#include "platform/assert.h"
+
+namespace dart {
+
+#define VALIDATE_PTHREAD_RESULT(result) \
+  if (result != 0) { \
+    FATAL1("pthread error: %d", result); \
+  }
+
+
+#if defined(DEBUG)
+#define ASSERT_PTHREAD_SUCCESS(result) VALIDATE_PTHREAD_RESULT(result)
+#else
+// NOTE: This (currently) expands to a no-op.
+#define ASSERT_PTHREAD_SUCCESS(result) ASSERT(result == 0)
+#endif
+
+
+#if defined(DEBUG)
+#define RETURN_ON_PTHREAD_FAILURE(result) \
+  if (result != 0) { \
+    fprintf(stderr, "%s:%d: pthread error: %d\n", \
+            __FILE__, __LINE__, result); \
+    return result; \
+  }
+#else
+#define RETURN_ON_PTHREAD_FAILURE(result) \
+  if (result != 0) return result;
+#endif
+
+
+static void ComputeTimeSpecMicros(struct timespec* ts, int64_t micros) {
+  // _magenta_current_time() returns the current time in nanoseconds.
+  mx_time_t now = _magenta_current_time();
+  mx_time_t target = now + (micros * kNanosecondsPerMicrosecond);
+  int64_t secs = target / kNanosecondsPerSecond;
+  int64_t nanos = target - (secs * kNanosecondsPerSecond);
+
+  // |ts| is an output parameter; assign rather than accumulate so that
+  // callers do not have to zero-initialize it first. By construction
+  // 0 <= nanos < kNanosecondsPerSecond, so no carry is needed.
+  ts->tv_sec = secs;
+  ts->tv_nsec = nanos;
+}
+
+
+class ThreadStartData {
+ public:
+  ThreadStartData(const char* name,
+                  OSThread::ThreadStartFunction function,
+                  uword parameter)
+      : name_(name), function_(function), parameter_(parameter) {}
+
+  const char* name() const { return name_; }
+  OSThread::ThreadStartFunction function() const { return function_; }
+  uword parameter() const { return parameter_; }
+
+ private:
+  const char* name_;
+  OSThread::ThreadStartFunction function_;
+  uword parameter_;
+
+  DISALLOW_COPY_AND_ASSIGN(ThreadStartData);
+};
+
+
+// Dispatch to the thread start function provided by the caller. This trampoline
+// is used to ensure that the thread is properly destroyed if the thread just
+// exits.
+static void* ThreadStart(void* data_ptr) {
+  ThreadStartData* data = reinterpret_cast<ThreadStartData*>(data_ptr);
+
+  const char* name = data->name();
+  OSThread::ThreadStartFunction function = data->function();
+  uword parameter = data->parameter();
+  delete data;
+
+  // Create new OSThread object and set as TLS for new thread.
+  OSThread* thread = OSThread::CreateOSThread();
+  if (thread != NULL) {
+    OSThread::SetCurrent(thread);
+    thread->set_name(name);
+    // Call the supplied thread start function handing it its parameters.
+    function(parameter);
+  }
+
+  return NULL;
+}
+
+
+int OSThread::Start(const char* name,
+                    ThreadStartFunction function,
+                    uword parameter) {
+  pthread_attr_t attr;
+  int result = pthread_attr_init(&attr);
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  result = pthread_attr_setstacksize(&attr, OSThread::GetMaxStackSize());
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  ThreadStartData* data = new ThreadStartData(name, function, parameter);
+
+  pthread_t tid;
+  result = pthread_create(&tid, &attr, ThreadStart, data);
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  result = pthread_attr_destroy(&attr);
+  RETURN_ON_PTHREAD_FAILURE(result);
+
+  return 0;
+}
+
+
+const ThreadId OSThread::kInvalidThreadId = static_cast<ThreadId>(0);
+const ThreadJoinId OSThread::kInvalidThreadJoinId =
+    static_cast<ThreadJoinId>(0);
+
+
+ThreadLocalKey OSThread::CreateThreadLocal(ThreadDestructor destructor) {
+  pthread_key_t key = kUnsetThreadLocalKey;
+  int result = pthread_key_create(&key, destructor);
+  VALIDATE_PTHREAD_RESULT(result);
+  ASSERT(key != kUnsetThreadLocalKey);
+  return key;
+}
+
+
+void OSThread::DeleteThreadLocal(ThreadLocalKey key) {
+  ASSERT(key != kUnsetThreadLocalKey);
+  int result = pthread_key_delete(key);
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+void OSThread::SetThreadLocal(ThreadLocalKey key, uword value) {
+  ASSERT(key != kUnsetThreadLocalKey);
+  int result = pthread_setspecific(key, reinterpret_cast<void*>(value));
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+intptr_t OSThread::GetMaxStackSize() {
+  const int kStackSize = (128 * kWordSize * KB);
+  return kStackSize;
+}
+
+
+ThreadId OSThread::GetCurrentThreadId() {
+  return pthread_self();
+}
+
+
+#ifndef PRODUCT
+ThreadId OSThread::GetCurrentThreadTraceId() {
+  UNIMPLEMENTED();
+  return 0;
+}
+#endif  // PRODUCT
+
+
+ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
+  ASSERT(thread != NULL);
+  // Make sure we're filling in the join id for the current thread.
+  ASSERT(thread->id() == GetCurrentThreadId());
+  // Make sure the join_id_ hasn't been set, yet.
+  DEBUG_ASSERT(thread->join_id_ == kInvalidThreadJoinId);
+  pthread_t id = pthread_self();
+#if defined(DEBUG)
+  thread->join_id_ = id;
+#endif
+  return id;
+}
+
+
+void OSThread::Join(ThreadJoinId id) {
+  int result = pthread_join(id, NULL);
+  ASSERT(result == 0);
+}
+
+
+intptr_t OSThread::ThreadIdToIntPtr(ThreadId id) {
+  ASSERT(sizeof(id) == sizeof(intptr_t));
+  return static_cast<intptr_t>(id);
+}
+
+
+ThreadId OSThread::ThreadIdFromIntPtr(intptr_t id) {
+  return static_cast<ThreadId>(id);
+}
+
+
+bool OSThread::Compare(ThreadId a, ThreadId b) {
+  return pthread_equal(a, b) != 0;
+}
+
+
+Mutex::Mutex() {
+  pthread_mutexattr_t attr;
+  int result = pthread_mutexattr_init(&attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  result = pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_ERRORCHECK);
+  VALIDATE_PTHREAD_RESULT(result);
+#endif  // defined(DEBUG)
+
+  result = pthread_mutex_init(data_.mutex(), &attr);
+  // Verify that creating a pthread_mutex succeeded.
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_mutexattr_destroy(&attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+}
+
+
+Mutex::~Mutex() {
+  int result = pthread_mutex_destroy(data_.mutex());
+  // Verify that the pthread_mutex was destroyed.
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+#endif  // defined(DEBUG)
+}
+
+
+void Mutex::Lock() {
+  int result = pthread_mutex_lock(data_.mutex());
+  // Specifically check for dead lock to help debugging.
+  ASSERT(result != EDEADLK);
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+}
+
+
+bool Mutex::TryLock() {
+  int result = pthread_mutex_trylock(data_.mutex());
+  // Return false if the lock is busy and locking failed.
+  if (result == EBUSY) {
+    return false;
+  }
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+  return true;
+}
+
+
+void Mutex::Unlock() {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+  int result = pthread_mutex_unlock(data_.mutex());
+  // Specifically check for wrong thread unlocking to aid debugging.
+  ASSERT(result != EPERM);
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+}
+
+
+Monitor::Monitor() {
+  pthread_mutexattr_t mutex_attr;
+  int result = pthread_mutexattr_init(&mutex_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  result = pthread_mutexattr_settype(&mutex_attr, PTHREAD_MUTEX_ERRORCHECK);
+  VALIDATE_PTHREAD_RESULT(result);
+#endif  // defined(DEBUG)
+
+  result = pthread_mutex_init(data_.mutex(), &mutex_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_mutexattr_destroy(&mutex_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  pthread_condattr_t cond_attr;
+  result = pthread_condattr_init(&cond_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_condattr_setclock(&cond_attr, CLOCK_MONOTONIC);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_cond_init(data_.cond(), &cond_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_condattr_destroy(&cond_attr);
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+}
+
+
+Monitor::~Monitor() {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+#endif  // defined(DEBUG)
+
+  int result = pthread_mutex_destroy(data_.mutex());
+  VALIDATE_PTHREAD_RESULT(result);
+
+  result = pthread_cond_destroy(data_.cond());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+bool Monitor::TryEnter() {
+  int result = pthread_mutex_trylock(data_.mutex());
+  // Return false if the lock is busy and locking failed.
+  if (result == EBUSY) {
+    return false;
+  }
+  ASSERT_PTHREAD_SUCCESS(result);  // Verify no other errors.
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+  return true;
+}
+
+
+void Monitor::Enter() {
+  int result = pthread_mutex_lock(data_.mutex());
+  VALIDATE_PTHREAD_RESULT(result);
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+  owner_ = OSThread::GetCurrentThreadId();
+#endif  // defined(DEBUG)
+}
+
+
+void Monitor::Exit() {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+
+  int result = pthread_mutex_unlock(data_.mutex());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+Monitor::WaitResult Monitor::Wait(int64_t millis) {
+  Monitor::WaitResult retval = WaitMicros(millis * kMicrosecondsPerMillisecond);
+  return retval;
+}
+
+
+Monitor::WaitResult Monitor::WaitMicros(int64_t micros) {
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  ThreadId saved_owner = owner_;
+  owner_ = OSThread::kInvalidThreadId;
+#endif  // defined(DEBUG)
+
+  Monitor::WaitResult retval = kNotified;
+  if (micros == kNoTimeout) {
+    // Wait forever.
+    int result = pthread_cond_wait(data_.cond(), data_.mutex());
+    VALIDATE_PTHREAD_RESULT(result);
+  } else {
+    struct timespec ts;
+    ComputeTimeSpecMicros(&ts, micros);
+    int result = pthread_cond_timedwait(data_.cond(), data_.mutex(), &ts);
+    ASSERT((result == 0) || (result == ETIMEDOUT));
+    if (result == ETIMEDOUT) {
+      retval = kTimedOut;
+    }
+  }
+
+#if defined(DEBUG)
+  // When running with assertions enabled we track the owner.
+  ASSERT(owner_ == OSThread::kInvalidThreadId);
+  owner_ = OSThread::GetCurrentThreadId();
+  ASSERT(owner_ == saved_owner);
+#endif  // defined(DEBUG)
+  return retval;
+}
+
+
+void Monitor::Notify() {
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  int result = pthread_cond_signal(data_.cond());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+
+void Monitor::NotifyAll() {
+  // When running with assertions enabled we track the owner.
+  ASSERT(IsOwnedByCurrentThread());
+  int result = pthread_cond_broadcast(data_.cond());
+  VALIDATE_PTHREAD_RESULT(result);
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
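
WaitMicros above depends on ComputeTimeSpecMicros producing an absolute deadline on the same clock that the condition variable was configured with (CLOCK_MONOTONIC, via pthread_condattr_setclock in the Monitor constructor). A generic POSIX sketch of that pairing, assuming clock_gettime instead of the Magenta syscalls:

// POSIX sketch (clock_gettime instead of Magenta syscalls): build an absolute
// CLOCK_MONOTONIC deadline |micros| into the future and wait on a condition
// variable configured for the same clock.
#include <errno.h>
#include <pthread.h>
#include <stdint.h>
#include <stdio.h>
#include <time.h>

static const int64_t kNanosPerSecond = 1000000000;
static const int64_t kNanosPerMicrosecond = 1000;

static void DeadlineMicrosFromNow(struct timespec* ts, int64_t micros) {
  clock_gettime(CLOCK_MONOTONIC, ts);  // Start from "now" on the wait clock.
  ts->tv_sec += micros / 1000000;
  ts->tv_nsec += (micros % 1000000) * kNanosPerMicrosecond;
  if (ts->tv_nsec >= kNanosPerSecond) {  // Carry overflow into whole seconds.
    ts->tv_sec += 1;
    ts->tv_nsec -= kNanosPerSecond;
  }
}

int main() {
  pthread_mutex_t mutex = PTHREAD_MUTEX_INITIALIZER;
  pthread_cond_t cond;
  pthread_condattr_t attr;
  pthread_condattr_init(&attr);
  pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);  // Match the deadline.
  pthread_cond_init(&cond, &attr);
  pthread_condattr_destroy(&attr);

  struct timespec deadline;
  DeadlineMicrosFromNow(&deadline, 200 * 1000);  // 200 ms from now.

  pthread_mutex_lock(&mutex);
  int result = 0;
  while (result != ETIMEDOUT) {  // Loop over spurious wakeups.
    result = pthread_cond_timedwait(&cond, &mutex, &deadline);
  }
  pthread_mutex_unlock(&mutex);
  printf("timed out as expected\n");

  pthread_cond_destroy(&cond);
  pthread_mutex_destroy(&mutex);
  return 0;
}

Compile with -pthread; since nothing signals the condition variable here, the wait ends on the timeout path, which is the case the deadline math has to get right.
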
diff --git a/runtime/vm/os_thread_fuchsia.h b/runtime/vm/os_thread_fuchsia.h
new file mode 100644
index 0000000..b45207b
--- /dev/null
+++ b/runtime/vm/os_thread_fuchsia.h
@@ -0,0 +1,80 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_OS_THREAD_FUCHSIA_H_
+#define VM_OS_THREAD_FUCHSIA_H_
+
+#if !defined(VM_OS_THREAD_H_)
+#error Do not include os_thread_fuchsia.h directly; use os_thread.h instead.
+#endif
+
+#include <pthread.h>
+
+#include "platform/assert.h"
+#include "platform/globals.h"
+
+namespace dart {
+
+typedef pthread_key_t ThreadLocalKey;
+typedef pthread_t ThreadId;
+typedef pthread_t ThreadJoinId;
+
+
+static const ThreadLocalKey kUnsetThreadLocalKey =
+    static_cast<pthread_key_t>(-1);
+
+
+class ThreadInlineImpl {
+ private:
+  ThreadInlineImpl() {}
+  ~ThreadInlineImpl() {}
+
+  static uword GetThreadLocal(ThreadLocalKey key) {
+    ASSERT(key != kUnsetThreadLocalKey);
+    return reinterpret_cast<uword>(pthread_getspecific(key));
+  }
+
+  friend class OSThread;
+
+  DISALLOW_ALLOCATION();
+  DISALLOW_COPY_AND_ASSIGN(ThreadInlineImpl);
+};
+
+
+class MutexData {
+ private:
+  MutexData() {}
+  ~MutexData() {}
+
+  pthread_mutex_t* mutex() { return &mutex_; }
+
+  pthread_mutex_t mutex_;
+
+  friend class Mutex;
+
+  DISALLOW_ALLOCATION();
+  DISALLOW_COPY_AND_ASSIGN(MutexData);
+};
+
+
+class MonitorData {
+ private:
+  MonitorData() {}
+  ~MonitorData() {}
+
+  pthread_mutex_t* mutex() { return &mutex_; }
+  pthread_cond_t* cond() { return &cond_; }
+
+  pthread_mutex_t mutex_;
+  pthread_cond_t cond_;
+
+  friend class Monitor;
+
+  DISALLOW_ALLOCATION();
+  DISALLOW_COPY_AND_ASSIGN(MonitorData);
+};
+
+}  // namespace dart
+
+#endif  // VM_OS_THREAD_FUCHSIA_H_
diff --git a/runtime/vm/os_thread_linux.cc b/runtime/vm/os_thread_linux.cc
index 9695042..0f8f958 100644
--- a/runtime/vm/os_thread_linux.cc
+++ b/runtime/vm/os_thread_linux.cc
@@ -189,9 +189,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return syscall(__NR_gettid);
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_thread_macos.cc b/runtime/vm/os_thread_macos.cc
index b5fd6f9..53034be 100644
--- a/runtime/vm/os_thread_macos.cc
+++ b/runtime/vm/os_thread_macos.cc
@@ -163,9 +163,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return ThreadIdFromIntPtr(pthread_mach_thread_np(pthread_self()));
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_thread_win.cc b/runtime/vm/os_thread_win.cc
index ed8de4af..6be6804 100644
--- a/runtime/vm/os_thread_win.cc
+++ b/runtime/vm/os_thread_win.cc
@@ -125,9 +125,11 @@
 }
 
 
+#ifndef PRODUCT
 ThreadId OSThread::GetCurrentThreadTraceId() {
   return ::GetCurrentThreadId();
 }
+#endif  // PRODUCT
 
 
 ThreadJoinId OSThread::GetCurrentThreadJoinId(OSThread* thread) {
diff --git a/runtime/vm/os_win.cc b/runtime/vm/os_win.cc
index 2ea8c1d..7236a91 100644
--- a/runtime/vm/os_win.cc
+++ b/runtime/vm/os_win.cc
@@ -34,7 +34,9 @@
 // As a side-effect sets the globals _timezone, _daylight and _tzname.
 static bool LocalTime(int64_t seconds_since_epoch, tm* tm_result) {
   time_t seconds = static_cast<time_t>(seconds_since_epoch);
-  if (seconds != seconds_since_epoch) return false;
+  if (seconds != seconds_since_epoch) {
+    return false;
+  }
   // localtime_s implicitly sets _timezone, _daylight and _tzname.
   errno_t error_code = localtime_s(tm_result, &seconds);
   return error_code == 0;
@@ -54,17 +56,38 @@
 
 
 const char* OS::GetTimeZoneName(int64_t seconds_since_epoch) {
-  tm decomposed;
-  // LocalTime will set _tzname.
-  bool succeeded = LocalTime(seconds_since_epoch, &decomposed);
-  if (succeeded) {
-    int inDaylightSavingsTime = decomposed.tm_isdst;
-    ASSERT(inDaylightSavingsTime == 0 || inDaylightSavingsTime == 1);
-    return _tzname[inDaylightSavingsTime];
-  } else {
-    // Return an empty string like V8 does.
+  TIME_ZONE_INFORMATION zone_information;
+  memset(&zone_information, 0, sizeof(zone_information));
+
+  // Initialize and grab the time zone data.
+  _tzset();
+  DWORD status = GetTimeZoneInformation(&zone_information);
+  if (status == TIME_ZONE_ID_INVALID) {
+    // If we can't get the time zone data, the Windows docs indicate that we
+    // are probably out of memory. Return an empty string.
     return "";
   }
+
+  // Figure out whether we're in standard or daylight.
+  bool daylight_savings = (status == TIME_ZONE_ID_DAYLIGHT);
+  if (status == TIME_ZONE_ID_UNKNOWN) {
+    tm local_time;
+    if (LocalTime(seconds_since_epoch, &local_time)) {
+      daylight_savings = (local_time.tm_isdst == 1);
+    }
+  }
+
+  // Convert the wchar string to a null-terminated utf8 string.
+  wchar_t* wchar_name = daylight_savings
+                      ? zone_information.DaylightName
+                      : zone_information.StandardName;
+  intptr_t utf8_len = WideCharToMultiByte(
+      CP_UTF8, 0, wchar_name, -1, NULL, 0, NULL, NULL);
+  char* name = Thread::Current()->zone()->Alloc<char>(utf8_len + 1);
+  WideCharToMultiByte(
+      CP_UTF8, 0, wchar_name, -1, name, utf8_len, NULL, NULL);
+  name[utf8_len] = '\0';
+  return name;
 }
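
The rewritten GetTimeZoneName uses the standard two-call WideCharToMultiByte idiom: the first call, with a null output buffer, sizes the UTF-8 result; the second performs the conversion. A Windows-only sketch of that conversion, with plain malloc standing in for the VM's zone allocator:

// Windows-only sketch of the wide-char to UTF-8 conversion above; malloc
// stands in for the VM's zone allocator.
#include <windows.h>

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

static char* WideToUtf8(const wchar_t* wide) {
  // With a source length of -1 the returned size already includes the
  // terminating '\0', so no extra byte is needed.
  int utf8_len =
      WideCharToMultiByte(CP_UTF8, 0, wide, -1, NULL, 0, NULL, NULL);
  if (utf8_len == 0) {
    return NULL;
  }
  char* utf8 = static_cast<char*>(malloc(utf8_len));
  WideCharToMultiByte(CP_UTF8, 0, wide, -1, utf8, utf8_len, NULL, NULL);
  return utf8;
}

int main() {
  TIME_ZONE_INFORMATION zone_information;
  memset(&zone_information, 0, sizeof(zone_information));
  DWORD status = GetTimeZoneInformation(&zone_information);
  const wchar_t* wide_name = (status == TIME_ZONE_ID_DAYLIGHT)
                                 ? zone_information.DaylightName
                                 : zone_information.StandardName;
  char* name = WideToUtf8(wide_name);
  printf("current time zone: %s\n", (name != NULL) ? name : "(unknown)");
  free(name);
  return 0;
}

Note that with a source length of -1 the returned size already includes the terminating NUL, so the extra byte and explicit terminator in the change above are harmless but redundant.
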
 
 
diff --git a/runtime/vm/pages.cc b/runtime/vm/pages.cc
index e5d1e65..a10d097 100644
--- a/runtime/vm/pages.cc
+++ b/runtime/vm/pages.cc
@@ -12,7 +12,6 @@
 #include "vm/object.h"
 #include "vm/os_thread.h"
 #include "vm/safepoint.h"
-#include "vm/verified_memory.h"
 #include "vm/virtual_memory.h"
 
 namespace dart {
@@ -57,7 +56,7 @@
 
 HeapPage* HeapPage::Allocate(intptr_t size_in_words, PageType type) {
   VirtualMemory* memory =
-      VerifiedMemory::Reserve(size_in_words << kWordSizeLog2);
+      VirtualMemory::Reserve(size_in_words << kWordSizeLog2);
   if (memory == NULL) {
     return NULL;
   }
@@ -672,6 +671,7 @@
 }
 
 
+#ifndef PRODUCT
 void PageSpace::PrintToJSONObject(JSONObject* object) const {
   if (!FLAG_support_service) {
     return;
@@ -755,6 +755,7 @@
     }
   }
 }
+#endif  // PRODUCT
 
 
 bool PageSpace::ShouldCollectCode() {
@@ -1055,23 +1056,6 @@
 }
 
 
-uword PageSpace::TryAllocateSmiInitializedLocked(intptr_t size,
-                                                 GrowthPolicy growth_policy) {
-  uword result = TryAllocateDataBumpLocked(size, growth_policy);
-  if (collections() != 0) {
-    FATAL1("%" Pd " GCs before TryAllocateSmiInitializedLocked", collections());
-  }
-#if defined(DEBUG)
-  RawObject** begin = reinterpret_cast<RawObject**>(result);
-  RawObject** end = reinterpret_cast<RawObject**>(result + size);
-  for (RawObject** current = begin; current < end; ++current) {
-    ASSERT(!(*current)->IsHeapObject());
-  }
-#endif
-  return result;
-}
-
-
 void PageSpace::SetupExternalPage(void* pointer,
                                   uword size,
                                   bool is_executable) {
diff --git a/runtime/vm/pages.h b/runtime/vm/pages.h
index ff1ca15..ae32c87 100644
--- a/runtime/vm/pages.h
+++ b/runtime/vm/pages.h
@@ -314,8 +314,10 @@
     return collections_;
   }
 
+#ifndef PRODUCT
   void PrintToJSONObject(JSONObject* object) const;
   void PrintHeapMapToJSONStream(Isolate* isolate, JSONStream* stream) const;
+#endif  // PRODUCT
 
   void AllocateExternal(intptr_t size);
   void FreeExternal(intptr_t size);
@@ -345,11 +347,6 @@
   uword TryAllocateDataBumpLocked(intptr_t size, GrowthPolicy growth_policy);
   // Prefer small freelist blocks, then chip away at the bump block.
   uword TryAllocatePromoLocked(intptr_t size, GrowthPolicy growth_policy);
-  // Allocates memory where every word is guaranteed to be a Smi. Calling this
-  // method after the first garbage collection is inefficient in release mode
-  // and illegal in debug mode.
-  uword TryAllocateSmiInitializedLocked(intptr_t size,
-                                        GrowthPolicy growth_policy);
 
   // Bump block allocation from generated code.
   uword* TopAddress() { return &bump_top_; }
diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc
index 6668731..c902639 100644
--- a/runtime/vm/parser.cc
+++ b/runtime/vm/parser.cc
@@ -365,6 +365,7 @@
   void exit_finally() { inside_finally_ = false; }
 
   void AddNodeForFinallyInlining(AstNode* node);
+  void RemoveJumpToLabel(SourceLabel* label);
   AstNode* GetNodeToInlineFinally(int index) {
     if (0 <= index && index < inlined_finally_nodes_.length()) {
       return inlined_finally_nodes_[index];
@@ -390,6 +391,25 @@
 }
 
 
+void Parser::TryStack::RemoveJumpToLabel(SourceLabel* label) {
+  int i = 0;
+  while (i < inlined_finally_nodes_.length()) {
+    if (inlined_finally_nodes_[i]->IsJumpNode()) {
+      JumpNode* jump = inlined_finally_nodes_[i]->AsJumpNode();
+      if (jump->label() == label) {
+        // Shift remaining entries left and delete last entry.
+        for (int j = i + 1; j < inlined_finally_nodes_.length(); j++) {
+          inlined_finally_nodes_[j - 1] = inlined_finally_nodes_[j];
+        }
+        inlined_finally_nodes_.RemoveLast();
+        continue;
+      }
+    }
+    i++;
+  }
+}
+
+
 // For parsing a compilation unit.
 Parser::Parser(const Script& script,
                const Library& library,
@@ -508,6 +528,14 @@
 }
 
 
+int Parser::FunctionLevel() const {
+  if (current_block_ != NULL) {
+    return current_block_->scope->function_level();
+  }
+  return 0;
+}
+
+
 const Class& Parser::current_class() const {
   return current_class_;
 }
@@ -953,7 +981,7 @@
     const int param_cnt = params.num_fixed_parameters +
                           params.num_optional_parameters;
     const Array& param_descriptor =
-        Array::Handle(Array::New(param_cnt * kParameterEntrySize));
+        Array::Handle(Array::New(param_cnt * kParameterEntrySize, Heap::kOld));
     for (int i = 0, j = 0; i < param_cnt; i++, j += kParameterEntrySize) {
       param_descriptor.SetAt(j + kParameterIsFinalOffset,
                              param[i].is_final ? Bool::True() : Bool::False());
@@ -1445,7 +1473,8 @@
 
   EnsureExpressionTemp();
   StoreInstanceFieldNode* store_field =
-      new StoreInstanceFieldNode(ident_pos, receiver, field, value);
+      new StoreInstanceFieldNode(ident_pos, receiver, field, value,
+                                 /* is_initializer = */ false);
   current_block_->statements->Add(store_field);
   current_block_->statements->Add(new ReturnNode(ST(ident_pos)));
   return CloseBlock();
@@ -1490,7 +1519,7 @@
 
   if (func.HasOptionalNamedParameters()) {
     const Array& arg_names =
-        Array::ZoneHandle(Array::New(func.NumOptionalParameters()));
+        Array::ZoneHandle(Array::New(func.NumOptionalParameters(), Heap::kOld));
     for (intptr_t i = 0; i < arg_names.Length(); i++) {
       intptr_t index = func.num_fixed_parameters() + i;
       arg_names.SetAt(i, String::Handle(func.ParameterNameAt(index)));
@@ -2045,6 +2074,9 @@
                                    bool evaluate_metadata,
                                    ParamList* params) {
   TRACE_PARSER("ParseFormalParameters");
+  // A trailing comma is only accepted after at least one parameter has been
+  // parsed. Note that optional parameter lists cannot be empty, and the
+  // completely empty parameter list is handled before getting here.
+  bool has_seen_parameter = false;
   do {
     ConsumeToken();
     if (!params->has_optional_positional_parameters &&
@@ -2061,9 +2093,18 @@
       params->has_optional_named_parameters = true;
       return;
     }
+    Token::Kind terminator =
+        params->has_optional_positional_parameters ? Token::kRBRACK :
+        params->has_optional_named_parameters ? Token::kRBRACE :
+        Token::kRPAREN;
+    if (has_seen_parameter && CurrentToken() == terminator) {
+      // Allow a trailing comma.
+      break;
+    }
     ParseFormalParameter(allow_explicit_default_values,
                          evaluate_metadata,
                          params);
+    has_seen_parameter = true;
   } while (CurrentToken() == Token::kCOMMA);
 }
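
The terminator selection and has_seen_parameter flag above (together with the matching kRPAREN check added to argument-list parsing later in this file) implement trailing-comma tolerance: once at least one element has been parsed, a comma followed directly by the closing delimiter ends the list. A toy, self-contained sketch of the same rule over a plain token vector (hypothetical tokens, not the real parser):

// Toy sketch of trailing-comma tolerance. Assumes a well-formed, non-empty
// list such as (a, b) or (a, b,).
#include <iostream>
#include <string>
#include <vector>

std::vector<std::string> ParseList(const std::vector<std::string>& tokens,
                                   const std::string& terminator) {
  std::vector<std::string> elements;
  int i = 0;  // tokens[0] is the opening delimiter.
  do {
    ++i;  // Consume the opening delimiter or the comma.
    if (!elements.empty() && tokens[i] == terminator) {
      break;  // A trailing comma: the list ends here.
    }
    elements.push_back(tokens[i]);
    ++i;
  } while (tokens[i] == ",");
  return elements;
}

int main() {
  // "(a, b,)" with a trailing comma parses to exactly two elements.
  std::vector<std::string> tokens = {"(", "a", ",", "b", ",", ")"};
  for (const std::string& element : ParseList(tokens, ")")) {
    std::cout << element << "\n";
  }
  return 0;
}
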
 
@@ -2369,7 +2410,7 @@
     // If we create an implicit instance closure from inside a closure of a
     // parameterized class, make sure that the receiver is captured as
     // instantiator.
-    if (current_block_->scope->function_level() > 0) {
+    if (FunctionLevel() > 0) {
       const Type& signature_type = Type::Handle(Z,
           implicit_closure_function.SignatureType());
       const Class& scope_class = Class::Handle(Z, signature_type.type_class());
@@ -2581,7 +2622,8 @@
       initialized_fields, instance, &field, init_expr);
   if (initializer == NULL) {
     initializer =
-        new(Z) StoreInstanceFieldNode(field_pos, instance, field, init_expr);
+        new(Z) StoreInstanceFieldNode(field_pos, instance, field, init_expr,
+                                      /* is_initializer = */ true);
   }
   return initializer;
 }
@@ -2620,17 +2662,18 @@
 
 AstNode* Parser::ParseExternalInitializedField(const Field& field) {
   // Only use this function if the initialized field originates
-  // from a different class. We need to save and restore current
-  // class, library, and token stream (script).
+  // from a different class. We need to save and restore the
+  // library and token stream (script).
+  // The current_class remains unchanged, so that type arguments
+  // are resolved in the correct scope class.
   ASSERT(current_class().raw() != field.Origin());
-  const Class& saved_class = Class::Handle(Z, current_class().raw());
   const Library& saved_library = Library::Handle(Z, library().raw());
   const Script& saved_script = Script::Handle(Z, script().raw());
   const TokenPosition saved_token_pos = TokenPos();
 
-  set_current_class(Class::Handle(Z, field.Origin()));
-  set_library(Library::Handle(Z, current_class().library()));
-  SetScript(Script::Handle(Z, current_class().script()), field.token_pos());
+  const Class& origin_class = Class::Handle(Z, field.Origin());
+  set_library(Library::Handle(Z, origin_class.library()));
+  SetScript(Script::Handle(Z, origin_class.script()), field.token_pos());
 
   ASSERT(IsIdentifier());
   ConsumeToken();
@@ -2650,7 +2693,6 @@
       init_expr = new(Z) LiteralNode(field.token_pos(), expr_value);
     }
   }
-  set_current_class(saved_class);
   set_library(saved_library);
   SetScript(saved_script, saved_token_pos);
   return init_expr;
@@ -2707,7 +2749,8 @@
           new StoreInstanceFieldNode(field.token_pos(),
                                      instance,
                                      field,
-                                     init_expr);
+                                     init_expr,
+                                     /* is_initializer = */ true);
       current_block_->statements->Add(field_init);
     }
   }
@@ -3170,7 +3213,8 @@
                                     value);
         if (initializer == NULL) {
           initializer = new(Z) StoreInstanceFieldNode(
-              param.name_pos, instance, field, value);
+              param.name_pos, instance, field, value,
+              /* is_initializer = */ true);
         }
         current_block_->statements->Add(initializer);
       }
@@ -3358,7 +3402,7 @@
     AddFormalParamsToScope(&params, current_block_->scope);
 
     if (I->type_checks() &&
-        (current_block_->scope->function_level() > 0)) {
+        (FunctionLevel() > 0)) {
       // We are parsing, but not compiling, a local function.
       // The instantiator may be required at run time for generic type checks.
       if (IsInstantiatorRequired()) {
@@ -4649,6 +4693,11 @@
   }
   ExpectToken(Token::kRBRACE);
 
+  if (cls.LookupTypeParameter(class_name) != TypeParameter::null()) {
+    ReportError(class_pos,
+                "class name conflicts with type parameter '%s'",
+                class_name.ToCString());
+  }
   CheckConstructors(&members);
 
   // Need to compute this here since MakeArray() will clear the
@@ -5312,7 +5361,7 @@
   TRACE_PARSER("ParseInterfaceList");
   ASSERT(CurrentToken() == Token::kIMPLEMENTS);
   const GrowableObjectArray& all_interfaces =
-      GrowableObjectArray::Handle(Z, GrowableObjectArray::New());
+      GrowableObjectArray::Handle(Z, GrowableObjectArray::New(Heap::kOld));
   AbstractType& interface = AbstractType::Handle(Z);
   // First get all the interfaces already implemented by class.
   Array& cls_interfaces = Array::Handle(Z, cls.interfaces());
@@ -5341,7 +5390,7 @@
   TRACE_PARSER("ParseMixins");
   ASSERT(CurrentToken() == Token::kWITH);
   const GrowableObjectArray& mixin_types =
-      GrowableObjectArray::Handle(Z, GrowableObjectArray::New());
+      GrowableObjectArray::Handle(Z, GrowableObjectArray::New(Heap::kOld));
   AbstractType& mixin_type = AbstractType::Handle(Z);
   do {
     ConsumeToken();
@@ -5846,6 +5895,7 @@
 
 void Parser::ParseLibraryImportExport(const Object& tl_owner,
                                       TokenPosition metadata_pos) {
+  ASSERT(Thread::Current()->IsMutatorThread());
   bool is_import = (CurrentToken() == Token::kIMPORT);
   bool is_export = (CurrentToken() == Token::kEXPORT);
   ASSERT(is_import || is_export);
@@ -6028,6 +6078,9 @@
 void Parser::ParseLibraryPart() {
   const TokenPosition source_pos = TokenPos();
   ConsumeToken();  // Consume "part".
+  if (IsSymbol(Symbols::Of())) {
+    ReportError("part of declarations are not allowed in script files");
+  }
   CheckToken(Token::kSTRING, "url expected");
   AstNode* url_literal = ParseStringLiteral(false);
   ASSERT(url_literal->IsLiteralNode());
@@ -6792,6 +6845,13 @@
   OpenFunctionBlock(closure);
   AddFormalParamsToScope(&closure_params, current_block_->scope);
   async_temp_scope_ = current_block_->scope;
+
+  // Capture the instantiator in case it is needed to generate the type
+  // check of the return value. (Cf. the handling of Token::kRETURN.)
+  ASSERT(FunctionLevel() > 0);
+  if (I->type_checks() && IsInstantiatorRequired()) {
+    CaptureInstantiator();
+  }
   return closure.raw();
 }
 
@@ -7406,7 +7466,7 @@
 
 
 void Parser::CaptureInstantiator() {
-  ASSERT(current_block_->scope->function_level() > 0);
+  ASSERT(FunctionLevel() > 0);
   const String* variable_name = current_function().IsInFactoryScope() ?
       &Symbols::TypeArgumentsParameter() : &Symbols::This();
   current_block_->scope->CaptureVariable(
@@ -7643,6 +7703,7 @@
   // Note that we cannot share the same closure function between the closurized
   // and non-closurized versions of the same parent function.
   Function& function = Function::ZoneHandle(Z);
+  bool found_func = true;
   // TODO(hausner): There could be two different closures at the given
   // function_pos, one enclosed in a closurized function and one enclosed in the
   // non-closurized version of this same function.
@@ -7651,6 +7712,7 @@
     // The function will be registered in the lookup table by the
     // EffectGraphVisitor::VisitClosureNode when the newly allocated closure
     // function has been properly setup.
+    found_func = false;
     function = Function::NewClosureFunction(*function_name,
                                             innermost_function(),
                                             function_pos);
@@ -7700,15 +7762,40 @@
     }
   }
 
-  // Parse the local function.
-  SequenceNode* statements = Parser::ParseFunc(function, !is_literal);
-  INC_STAT(thread(), num_functions_parsed, 1);
+  Type& signature_type = Type::ZoneHandle(Z);
+  SequenceNode* statements = NULL;
+  if (!found_func) {
+    // Parse the local function. As a side effect of the parsing, the
+    // variables of this function's scope that are referenced by the local
+    // function (and its inner nested functions) will be marked as captured.
 
-  // Now that the local function has formal parameters, lookup the signature
-  Type& signature_type = Type::ZoneHandle(Z, function.SignatureType());
-  signature_type ^= ClassFinalizer::FinalizeType(
-      current_class(), signature_type, ClassFinalizer::kCanonicalize);
-  function.SetSignatureType(signature_type);
+    statements = Parser::ParseFunc(function, !is_literal);
+    INC_STAT(thread(), num_functions_parsed, 1);
+
+    // Now that the local function has formal parameters, lookup the signature
+    signature_type = function.SignatureType();
+    signature_type ^= ClassFinalizer::FinalizeType(
+        current_class(), signature_type, ClassFinalizer::kCanonicalize);
+    function.SetSignatureType(signature_type);
+  } else {
+    // The local function was parsed before. The captured variables are
+    // saved in the function's context scope. Iterate over the context scope
+    // and mark its variables as captured.
+    const ContextScope& context_scope =
+        ContextScope::Handle(Z, function.context_scope());
+    ASSERT(!context_scope.IsNull());
+    String& var_name = String::Handle(Z);
+    for (int i = 0; i < context_scope.num_variables(); i++) {
+      var_name = context_scope.NameAt(i);
+      // We need to look up the name in a way that returns even hidden
+      // variables, e.g. 'this' in an initializer list.
+      LocalVariable* v = current_block_->scope->LookupVariable(var_name, true);
+      ASSERT(v != NULL);
+      current_block_->scope->CaptureVariable(v);
+    }
+    SkipFunctionLiteral();
+    signature_type = function.SignatureType();
+  }
 
   // Local functions are registered in the enclosing class, but
   // ignored during class finalization. The enclosing class has
@@ -7717,7 +7804,7 @@
   ASSERT(signature_type.IsFinalized());
 
   // Make sure that the instantiator is captured.
-  if ((current_block_->scope->function_level() > 0) &&
+  if ((FunctionLevel() > 0) &&
       Class::Handle(signature_type.type_class()).IsGeneric()) {
     CaptureInstantiator();
   }
@@ -7763,8 +7850,9 @@
   // variables are not relevant for the compilation of the enclosing function.
   // This pruning is done by omitting to hook the local scope in its parent
   // scope in the constructor of LocalScope.
-  AstNode* closure = new(Z) ClosureNode(
-      function_pos, function, NULL, statements->scope());
+  AstNode* closure =
+      new(Z) ClosureNode(function_pos, function, NULL,
+                         statements != NULL ? statements->scope() : NULL);
 
   if (function_variable == NULL) {
     ASSERT(is_literal);
@@ -8329,7 +8417,8 @@
         ArgumentListNode* arguments = new(Z) ArgumentListNode(TokenPos());
         arguments->Add(new(Z) LiteralNode(
             TokenPos(),
-            Integer::ZoneHandle(Z, Integer::New(TokenPos().value()))));
+            Integer::ZoneHandle(Z, Integer::New(TokenPos().value(),
+                                                Heap::kOld))));
         current_block_->statements->Add(
             MakeStaticCall(Symbols::FallThroughError(),
                            Library::PrivateCoreLibName(Symbols::ThrowNew()),
@@ -8405,6 +8494,7 @@
         // We have seen a 'continue' with this label name. Resolve
         // the forward reference.
         case_label->ResolveForwardReference();
+        RemoveNodesForFinallyInlining(case_label);
       } else {
         ReportError(label_pos, "label '%s' already exists in scope",
                     label_name->ToCString());
@@ -8548,7 +8638,7 @@
   if (try_stack_ != NULL) {
     LocalScope* scope = try_stack_->try_block()->scope;
     uint16_t try_index = try_stack_->try_index();
-    const int current_function_level = current_block_->scope->function_level();
+    const int current_function_level = FunctionLevel();
     if (scope->function_level() == current_function_level) {
       // The block declaring :saved_try_ctx_var variable is the parent of the
       // pushed try block.
@@ -9104,36 +9194,6 @@
 }
 
 
-AstNode* Parser::MakeAssertCall(TokenPosition begin, TokenPosition end) {
-  ArgumentListNode* arguments = new(Z) ArgumentListNode(begin);
-  arguments->Add(new(Z) LiteralNode(begin,
-      Integer::ZoneHandle(Z, Integer::New(begin.value()))));
-  arguments->Add(new(Z) LiteralNode(end,
-      Integer::ZoneHandle(Z, Integer::New(end.value()))));
-  return MakeStaticCall(Symbols::AssertionError(),
-                        Library::PrivateCoreLibName(Symbols::ThrowNew()),
-                        arguments);
-}
-
-
-AstNode* Parser::InsertClosureCallNodes(AstNode* condition) {
-  if (condition->IsClosureNode() ||
-      (condition->IsStoreLocalNode() &&
-       condition->AsStoreLocalNode()->value()->IsClosureNode())) {
-    // Function literal in assert implies a call.
-    const TokenPosition pos = condition->token_pos();
-    condition = BuildClosureCall(pos,
-                                 condition,
-                                 new(Z) ArgumentListNode(pos));
-  } else if (condition->IsConditionalExprNode()) {
-    ConditionalExprNode* cond_expr = condition->AsConditionalExprNode();
-    cond_expr->set_true_expr(InsertClosureCallNodes(cond_expr->true_expr()));
-    cond_expr->set_false_expr(InsertClosureCallNodes(cond_expr->false_expr()));
-  }
-  return condition;
-}
-
-
 AstNode* Parser::ParseAssertStatement() {
   TRACE_PARSER("ParseAssertStatement");
   ConsumeToken();  // Consume assert keyword.
@@ -9147,14 +9207,16 @@
   AstNode* condition = ParseAwaitableExpr(kAllowConst, kConsumeCascades, NULL);
   const TokenPosition condition_end = TokenPos();
   ExpectToken(Token::kRPAREN);
-  condition = InsertClosureCallNodes(condition);
-  condition = new(Z) UnaryOpNode(condition_pos, Token::kNOT, condition);
-  AstNode* assert_throw = MakeAssertCall(condition_pos, condition_end);
-  return new(Z) IfNode(
-      condition_pos,
-      condition,
-      NodeAsSequenceNode(condition_pos, assert_throw, NULL),
-      NULL);
+
+  ArgumentListNode* arguments = new(Z) ArgumentListNode(condition_pos);
+  arguments->Add(condition);
+  arguments->Add(new(Z) LiteralNode(condition_pos,
+      Integer::ZoneHandle(Z, Integer::New(condition_pos.value(), Heap::kOld))));
+  arguments->Add(new(Z) LiteralNode(condition_end,
+      Integer::ZoneHandle(Z, Integer::New(condition_end.value(), Heap::kOld))));
+  return MakeStaticCall(Symbols::AssertionError(),
+                        Library::PrivateCoreLibName(Symbols::CheckAssertion()),
+                        arguments);
 }
 
 
@@ -9299,7 +9361,7 @@
     return;
   }
   ASSERT(node->IsReturnNode() || node->IsJumpNode());
-  const intptr_t func_level = current_block_->scope->function_level();
+  const intptr_t func_level = FunctionLevel();
   TryStack* iterator = try_stack_;
   while ((iterator != NULL) &&
       (iterator->try_block()->scope->function_level() == func_level)) {
@@ -9313,7 +9375,12 @@
       // so we do not need to inline the finally code. Otherwise we need
       // to inline the finally code of this try block and then move on to the
       // next outer try block.
-      if (label->owner()->IsNestedWithin(try_scope)) {
+      // For unresolved forward jumps to switch cases, we don't yet know
+      // to which scope the label will be resolved. Tentatively add the
+      // jump to all nested try statements and remove the outermost ones
+      // when we know the exact jump target. (See
+      // RemoveNodesForFinallyInlining below.)
+      if (!label->IsUnresolved() && label->owner()->IsNestedWithin(try_scope)) {
         break;
       }
     }
@@ -9323,13 +9390,26 @@
 }
 
 
+void Parser::RemoveNodesForFinallyInlining(SourceLabel* label) {
+  TryStack* iterator = try_stack_;
+  const intptr_t func_level = FunctionLevel();
+  while ((iterator != NULL) &&
+         (iterator->try_block()->scope->function_level() == func_level)) {
+    iterator->RemoveJumpToLabel(label);
+    iterator = iterator->outer_try();
+  }
+}
+
+
 // Add the inlined finally clause to the specified node.
 void Parser::AddFinallyClauseToNode(bool is_async,
                                     AstNode* node,
                                     InlinedFinallyNode* finally_clause) {
   ReturnNode* return_node = node->AsReturnNode();
   if (return_node != NULL) {
-    parsed_function()->EnsureFinallyReturnTemp(is_async);
+    if (FunctionLevel() == 0) {
+      parsed_function()->EnsureFinallyReturnTemp(is_async);
+    }
     return_node->AddInlinedFinallyNode(finally_clause);
     return;
   }
@@ -9444,7 +9524,7 @@
       // Has a type specification that is not malformed or malbounded.  Now
       // form an 'if type check' to guard the catch handler code.
       if (!exception_param.type->IsInstantiated() &&
-          (current_block_->scope->function_level() > 0)) {
+          (FunctionLevel() > 0)) {
         // Make sure that the instantiator is captured.
         CaptureInstantiator();
       }
@@ -9859,7 +9939,7 @@
   if (jump_kind == Token::kBREAK && target->kind() == SourceLabel::kCase) {
     ReportError(jump_pos, "'break' to case clause label is illegal");
   }
-  if (target->FunctionLevel() != current_block_->scope->function_level()) {
+  if (target->FunctionLevel() != FunctionLevel()) {
     ReportError(jump_pos, "'%s' target must be in same function context",
                 Token::Str(jump_kind));
   }
@@ -10057,7 +10137,7 @@
     ConsumeToken();
     if (CurrentToken() != Token::kSEMICOLON) {
       const TokenPosition expr_pos = TokenPos();
-      const int function_level = current_block_->scope->function_level();
+      const int function_level = FunctionLevel();
       if (current_function().IsGenerativeConstructor() &&
           (function_level == 0)) {
         ReportError(expr_pos,
@@ -10068,17 +10148,14 @@
       }
       AstNode* expr = ParseAwaitableExpr(kAllowConst, kConsumeCascades, NULL);
       if (I->type_checks() &&
-          (((function_level == 0) && current_function().IsAsyncClosure()) ||
-           ((function_level > 0) && current_function().IsAsyncFunction()))) {
+          ((function_level == 0) && current_function().IsAsyncClosure())) {
         // In checked mode, when the declared result type is Future<T>, verify
         // that the returned expression is of type T or Future<T> as follows:
         // return temp = expr, temp is Future ? temp as Future<T> : temp as T;
         // In case of a mismatch, we need a TypeError and not a CastError, so
         // we do not actually implement an "as" test, but an "assignable" test.
-        Function& async_func = Function::Handle(Z, current_function().raw());
-        if (function_level == 0) {
-          async_func = async_func.parent_function();
-        }
+        Function& async_func =
+            Function::Handle(Z, current_function().parent_function());
         const AbstractType& result_type =
             AbstractType::ZoneHandle(Z, async_func.result_type());
         const Class& future_class =
@@ -10090,48 +10167,39 @@
           if (!result_type_args.IsNull() && (result_type_args.Length() == 1)) {
             const AbstractType& result_type_arg =
                 AbstractType::ZoneHandle(Z, result_type_args.TypeAt(0));
-            if (function_level == 0) {
-              // Parsing and generating code for async closure.
-              LetNode* checked_expr = new(Z) LetNode(expr_pos);
-              LocalVariable* temp = checked_expr->AddInitializer(expr);
-              temp->set_is_final();
-              const AbstractType& future_type =
-                  AbstractType::ZoneHandle(Z, future_class.RareType());
-              AstNode* is_future = new(Z) LoadLocalNode(expr_pos, temp);
-              is_future = new(Z) ComparisonNode(expr_pos,
-                                                Token::kIS,
-                                                is_future,
-                                                new(Z) TypeNode(expr_pos,
-                                                                future_type));
-              AstNode* as_future_t = new(Z) LoadLocalNode(expr_pos, temp);
-              as_future_t = new(Z) AssignableNode(expr_pos,
-                                                  as_future_t,
-                                                  result_type,
-                                                  Symbols::FunctionResult());
-              AstNode* as_t = new(Z) LoadLocalNode(expr_pos, temp);
-              as_t = new(Z) AssignableNode(expr_pos,
-                                           as_t,
-                                           result_type_arg,
-                                           Symbols::FunctionResult());
-              checked_expr->AddNode(new(Z) ConditionalExprNode(expr_pos,
-                                                               is_future,
-                                                               as_future_t,
-                                                               as_t));
-              expr = checked_expr;
-            } else {
-              // Parsing async function, but not generating async closure code.
-              if (!result_type_arg.IsInstantiated()) {
-                // Make sure that the instantiator is captured.
-                CaptureInstantiator();
-              }
-            }
+            LetNode* checked_expr = new(Z) LetNode(expr_pos);
+            LocalVariable* temp = checked_expr->AddInitializer(expr);
+            temp->set_is_final();
+            const AbstractType& future_type =
+                AbstractType::ZoneHandle(Z, future_class.RareType());
+            AstNode* is_future = new(Z) LoadLocalNode(expr_pos, temp);
+            is_future = new(Z) ComparisonNode(expr_pos,
+                                              Token::kIS,
+                                              is_future,
+                                              new(Z) TypeNode(expr_pos,
+                                                              future_type));
+            AstNode* as_future_t = new(Z) LoadLocalNode(expr_pos, temp);
+            as_future_t = new(Z) AssignableNode(expr_pos,
+                                                as_future_t,
+                                                result_type,
+                                                Symbols::FunctionResult());
+            AstNode* as_t = new(Z) LoadLocalNode(expr_pos, temp);
+            as_t = new(Z) AssignableNode(expr_pos,
+                                         as_t,
+                                         result_type_arg,
+                                         Symbols::FunctionResult());
+            checked_expr->AddNode(new(Z) ConditionalExprNode(expr_pos,
+                                                             is_future,
+                                                             as_future_t,
+                                                             as_t));
+            expr = checked_expr;
           }
         }
       }
       statement = new(Z) ReturnNode(statement_pos, expr);
     } else {
       if (current_function().IsSyncGenClosure() &&
-          (current_block_->scope->function_level() == 0)) {
+          (FunctionLevel() == 0)) {
         // In a synchronous generator, return without an expression
         // returns false, signaling that the iterator terminates and
         // did not yield a value.
@@ -10187,8 +10255,13 @@
     // Rethrow of current exception.
     ConsumeToken();
     ExpectSemicolon();
-    // Check if it is ok to do a rethrow.
-    if ((try_stack_ == NULL) || !try_stack_->inside_catch()) {
+    // Check if it is ok to do a rethrow. Find the innermost enclosing
+    // catch block.
+    TryStack* try_statement = try_stack_;
+    while ((try_statement != NULL) && !try_statement->inside_catch()) {
+      try_statement = try_statement->outer_try();
+    }
+    if (try_statement == NULL) {
       ReportError(statement_pos, "rethrow of an exception is not valid here");
     }
 
@@ -10196,7 +10269,7 @@
     // instead of :exception_var and :stack_trace_var.
     // These variables are bound in the block containing the try.
     // Look in the try scope directly.
-    LocalScope* scope = try_stack_->try_block()->scope->parent();
+    LocalScope* scope = try_statement->try_block()->scope->parent();
     ASSERT(scope != NULL);
     LocalVariable* excp_var;
     LocalVariable* trace_var;
@@ -10407,7 +10480,8 @@
   }
   // Location argument.
   arguments->Add(new(Z) LiteralNode(
-      type_pos, Integer::ZoneHandle(Z, Integer::New(type_pos.value()))));
+      type_pos, Integer::ZoneHandle(Z, Integer::New(type_pos.value(),
+                                                    Heap::kOld))));
   // Src value argument.
   arguments->Add(new(Z) LiteralNode(type_pos, Object::null_instance()));
   // Dst type argument.
@@ -10541,8 +10615,7 @@
         const TokenPosition type_pos = TokenPos();
         const AbstractType& type = AbstractType::ZoneHandle(Z,
             ParseType(ClassFinalizer::kCanonicalize));
-        if (!type.IsInstantiated() &&
-            (current_block_->scope->function_level() > 0)) {
+        if (!type.IsInstantiated() && (FunctionLevel() > 0)) {
           // Make sure that the instantiator is captured.
           CaptureInstantiator();
         }
@@ -11169,6 +11242,10 @@
       ASSERT((CurrentToken() == Token::kLPAREN) ||
              (CurrentToken() == Token::kCOMMA));
       ConsumeToken();
+      if (CurrentToken() == Token::kRPAREN) {
+        // Allow trailing comma.
+        break;
+      }
       if (IsIdentifier() && (LookaheadToken(1) == Token::kCOLON)) {
         named_argument_seen = true;
         // The canonicalization of the arguments descriptor array built in
@@ -11463,7 +11540,7 @@
                         "from static function",
                         name.ToCString());
           }
-          if (current_block_->scope->function_level() > 0) {
+          if (FunctionLevel() > 0) {
             // Make sure that the instantiator is captured.
             CaptureInstantiator();
           }
@@ -11559,7 +11636,7 @@
                         "from static function",
                         name.ToCString());
           }
-          if (current_block_->scope->function_level() > 0) {
+          if (FunctionLevel() > 0) {
             // Make sure that the instantiator is captured.
             CaptureInstantiator();
           }
@@ -11636,7 +11713,7 @@
         } else if (primary_node->primary().IsClass()) {
           const Class& type_class = Class::Cast(primary_node->primary());
           AbstractType& type = Type::ZoneHandle(Z, Type::New(
-              type_class, TypeArguments::Handle(Z), primary_pos));
+              type_class, TypeArguments::Handle(Z), primary_pos, Heap::kOld));
           type ^= ClassFinalizer::FinalizeType(
               current_class(), type, ClassFinalizer::kCanonicalize);
           // Type may be malbounded, but not malformed.
@@ -11662,7 +11739,7 @@
         } else if (primary_node->primary().IsClass()) {
           const Class& type_class = Class::Cast(primary_node->primary());
           AbstractType& type = Type::ZoneHandle(Z, Type::New(
-              type_class, TypeArguments::Handle(Z), primary_pos));
+              type_class, TypeArguments::Handle(Z), primary_pos, Heap::kOld));
           type = ClassFinalizer::FinalizeType(
               current_class(), type, ClassFinalizer::kCanonicalize);
           // Type may be malbounded, but not malformed.
@@ -11677,7 +11754,7 @@
                         "from static function",
                         name.ToCString());
           }
-          if (current_block_->scope->function_level() > 0) {
+          if (FunctionLevel() > 0) {
             // Make sure that the instantiator is captured.
             CaptureInstantiator();
           }
@@ -12035,8 +12112,8 @@
   if (!type.IsNull()) {
     return &type;
   }
-  type = Type::New(cls,
-      TypeArguments::Handle(Z, cls.type_parameters()), cls.token_pos());
+  type = Type::New(cls, TypeArguments::Handle(Z, cls.type_parameters()),
+                   cls.token_pos(), Heap::kOld);
   if (cls.is_type_finalized()) {
     type ^= ClassFinalizer::FinalizeType(
         cls, type, ClassFinalizer::kCanonicalizeWellFormed);
@@ -12060,6 +12137,7 @@
 void Parser::InsertCachedConstantValue(const String& url,
                                        TokenPosition token_pos,
                                        const Instance& value) {
+  ASSERT(Thread::Current()->IsMutatorThread());
   Isolate* isolate = Isolate::Current();
   ConstantPosKey key(url, token_pos);
   if (isolate->object_store()->compile_time_constants() == Array::null()) {
@@ -12510,7 +12588,7 @@
       TypeParameter& type_parameter = TypeParameter::ZoneHandle(Z,
           current_class().LookupTypeParameter(ident));
       if (!type_parameter.IsNull()) {
-        if (current_block_->scope->function_level() > 0) {
+        if (FunctionLevel() > 0) {
           // Make sure that the instantiator is captured.
           CaptureInstantiator();
         }
@@ -12556,7 +12634,8 @@
     } else if (primary->primary().IsClass()) {
       const Class& type_class = Class::Cast(primary->primary());
       AbstractType& type = Type::ZoneHandle(Z,
-          Type::New(type_class, TypeArguments::Handle(Z), primary_pos));
+          Type::New(type_class, TypeArguments::Handle(Z), primary_pos,
+                    Heap::kOld));
       type ^= ClassFinalizer::FinalizeType(
           current_class(), type, ClassFinalizer::kCanonicalize);
       // Type may be malbounded, but not malformed.
@@ -12687,7 +12766,7 @@
     return Type::DynamicType();
   }
   AbstractType& type = AbstractType::Handle(
-      Z, Type::New(type_class, type_arguments, ident_pos));
+      Z, Type::New(type_class, type_arguments, ident_pos, Heap::kOld));
   if (finalization >= ClassFinalizer::kResolveTypeParameters) {
     ResolveTypeFromClass(current_class(), finalization, &type);
     if (finalization >= ClassFinalizer::kCanonicalize) {
@@ -12770,7 +12849,7 @@
   ASSERT(list_type_arguments.IsNull() || (list_type_arguments.Length() == 1));
   const Class& array_class = Class::Handle(Z, I->object_store()->array_class());
   Type& type = Type::ZoneHandle(Z,
-      Type::New(array_class, list_type_arguments, type_pos));
+      Type::New(array_class, list_type_arguments, type_pos, Heap::kOld));
   type ^= ClassFinalizer::FinalizeType(
       current_class(), type, ClassFinalizer::kCanonicalize);
   GrowableArray<AstNode*> element_list;
@@ -12851,7 +12930,7 @@
     ASSERT(!factory_method.IsNull());
     if (!list_type_arguments.IsNull() &&
         !list_type_arguments.IsInstantiated() &&
-        (current_block_->scope->function_level() > 0)) {
+        (FunctionLevel() > 0)) {
       // Make sure that the instantiator is captured.
       CaptureInstantiator();
     }
@@ -12862,7 +12941,7 @@
     if (!factory_type_args.IsNull() && (factory_class.NumTypeArguments() > 1)) {
       ASSERT(factory_type_args.Length() == 1);
       Type& factory_type = Type::Handle(Z, Type::New(
-          factory_class, factory_type_args, type_pos, Heap::kNew));
+          factory_class, factory_type_args, type_pos, Heap::kOld));
       factory_type ^= ClassFinalizer::FinalizeType(
           current_class(), factory_type, ClassFinalizer::kFinalize);
       factory_type_args = factory_type.arguments();
@@ -13113,7 +13192,7 @@
     ASSERT(!factory_method.IsNull());
     if (!map_type_arguments.IsNull() &&
         !map_type_arguments.IsInstantiated() &&
-        (current_block_->scope->function_level() > 0)) {
+        (FunctionLevel() > 0)) {
       // Make sure that the instantiator is captured.
       CaptureInstantiator();
     }
@@ -13124,7 +13203,7 @@
     if (!factory_type_args.IsNull() && (factory_class.NumTypeArguments() > 2)) {
       ASSERT(factory_type_args.Length() == 2);
       Type& factory_type = Type::Handle(Z, Type::New(
-          factory_class, factory_type_args, type_pos, Heap::kNew));
+          factory_class, factory_type_args, type_pos, Heap::kOld));
       factory_type ^= ClassFinalizer::FinalizeType(
           current_class(), factory_type, ClassFinalizer::kFinalize);
       factory_type_args = factory_type.arguments();
@@ -13568,6 +13647,12 @@
   }
   ASSERT(!constructor.IsNull());
 
+  // It is a compile time error to instantiate a const instance of an
+  // abstract class. Factory methods are ok.
+  if (is_const && type_class.is_abstract() && !constructor.IsFactory()) {
+    ReportError(new_pos, "cannot instantiate abstract class");
+  }
+
   // It is ok to call a factory method of an abstract class, but it is
   // a dynamic error to instantiate an abstract class.
   if (type_class.is_abstract() && !constructor.IsFactory()) {
@@ -13578,7 +13663,8 @@
     }
     ArgumentListNode* error_arguments = new(Z) ArgumentListNode(type_pos);
     error_arguments->Add(new(Z) LiteralNode(
-        TokenPos(), Integer::ZoneHandle(Z, Integer::New(type_pos.value()))));
+        TokenPos(), Integer::ZoneHandle(Z, Integer::New(type_pos.value(),
+                                                        Heap::kOld))));
     error_arguments->Add(new(Z) LiteralNode(
         TokenPos(), String::ZoneHandle(Z, type_class_name.raw())));
     result->AddNode(
@@ -13697,7 +13783,7 @@
     CheckConstructorCallTypeArguments(new_pos, constructor, type_arguments);
     if (!type_arguments.IsNull() &&
         !type_arguments.IsInstantiated() &&
-        (current_block_->scope->function_level() > 0)) {
+        (FunctionLevel() > 0)) {
       // Make sure that the instantiator is captured.
       CaptureInstantiator();
     }
@@ -14364,6 +14450,7 @@
 void Parser::SkipPostfixExpr() {
   SkipPrimary();
   if (CurrentToken() == Token::kHASH) {
+    ConsumeToken();
     if (IsIdentifier()) {
       ConsumeToken();
       SkipIf(Token::kASSIGN);
diff --git a/runtime/vm/parser.h b/runtime/vm/parser.h
index 84301f2..815f978 100644
--- a/runtime/vm/parser.h
+++ b/runtime/vm/parser.h
@@ -338,6 +338,10 @@
   // current_function(), but is greater than zero while parsing the body of
   // local functions nested in current_function().
 
+  // FunctionLevel is 0 when parsing code of current_function(), and denotes
+  // the relative nesting level when parsing a nested function.
+  int FunctionLevel() const;
+
   // The class being parsed.
   const Class& current_class() const;
   void set_current_class(const Class& value);
@@ -739,6 +743,7 @@
   // Add specified node to try block list so that it can be patched with
   // inlined finally code if needed.
   void AddNodeForFinallyInlining(AstNode* node);
+  void RemoveNodesForFinallyInlining(SourceLabel* label);
   // Add the inlined finally clause to the specified node.
   void AddFinallyClauseToNode(bool is_async,
                               AstNode* node,
@@ -869,7 +874,6 @@
                           const String& func_name,
                           ArgumentListNode* arguments);
   String& Interpolate(const GrowableArray<AstNode*>& values);
-  AstNode* MakeAssertCall(TokenPosition begin, TokenPosition end);
   AstNode* ThrowTypeError(TokenPosition type_pos,
                           const AbstractType& type,
                           LibraryPrefix* prefix = NULL);
@@ -893,7 +897,6 @@
                                 const String* left_ident,
                                 TokenPosition left_pos,
                                 bool is_compound = false);
-  AstNode* InsertClosureCallNodes(AstNode* condition);
 
   ConstructorCallNode* CreateConstructorCallNode(
       TokenPosition token_pos,
diff --git a/runtime/vm/port.cc b/runtime/vm/port.cc
index 3df0a63..8ef15c5 100644
--- a/runtime/vm/port.cc
+++ b/runtime/vm/port.cc
@@ -326,4 +326,20 @@
   }
 }
 
+
+void PortMap::DebugDumpForMessageHandler(MessageHandler* handler) {
+  SafepointMutexLocker ml(mutex_);
+  Object& msg_handler = Object::Handle();
+  for (intptr_t i = 0; i < capacity_; i++) {
+    if (map_[i].handler == handler) {
+      if (map_[i].state == kLivePort) {
+        OS::Print("Live Port = %" Pd64 "\n", map_[i].port);
+        msg_handler = DartLibraryCalls::LookupHandler(map_[i].port);
+        OS::Print("Handler = %s\n", msg_handler.ToCString());
+      }
+    }
+  }
+}
+
+
 }  // namespace dart
diff --git a/runtime/vm/port.h b/runtime/vm/port.h
index 9ec7e7b..6e41e3a 100644
--- a/runtime/vm/port.h
+++ b/runtime/vm/port.h
@@ -59,6 +59,8 @@
   static void PrintPortsForMessageHandler(MessageHandler* handler,
                                           JSONStream* stream);
 
+  static void DebugDumpForMessageHandler(MessageHandler* handler);
+
  private:
   friend class dart::PortMapTestPeer;
 
diff --git a/runtime/vm/precompiler.cc b/runtime/vm/precompiler.cc
index 52c6e88..35df46b 100644
--- a/runtime/vm/precompiler.cc
+++ b/runtime/vm/precompiler.cc
@@ -366,7 +366,8 @@
     }
     if ((cid == kDynamicCid) ||
         (cid == kVoidCid) ||
-        (cid == kFreeListElement)) {
+        (cid == kFreeListElement) ||
+        (cid == kForwardingCorpse)) {
       continue;
     }
     cls = isolate()->class_table()->At(cid);
@@ -407,11 +408,12 @@
     { "dart:typed_data", "ByteData", "ByteData." },
     { "dart:typed_data", "ByteData", "ByteData._view" },
     { "dart:typed_data", "ByteBuffer", "ByteBuffer._New" },
-#if !defined(PRODUCT)
     { "dart:_vmservice", "::", "_registerIsolate" },
     { "dart:_vmservice", "::", "boot" },
+#if !defined(PRODUCT)
     { "dart:developer", "Metrics", "_printMetrics" },
     { "dart:developer", "::", "_runExtension" },
+    { "dart:isolate", "::", "_runPendingImmediateCallback" },
 #endif  // !PRODUCT
     // Fields
     { "dart:core", "Error", "_stackTrace" },
@@ -1718,12 +1720,26 @@
       it.GetNext();
       entries++;
     }
-    // The root library might have no surviving members if it only exports main
-    // from another library. It will still be referenced from the object store,
-    // so retain it.
-    bool retain = (entries > 0) ||
-                  lib.is_dart_scheme() ||
-                  (lib.raw() == root_lib.raw());
+    bool retain = false;
+    if (entries > 0) {
+      retain = true;
+    } else if (lib.is_dart_scheme()) {
+      // The core libraries are referenced from the object store.
+      retain = true;
+    } else if (lib.raw() == root_lib.raw()) {
+      // The root library might have no surviving members if it only exports
+      // main from another library. It will still be referenced from the object
+      // store, so retain it.
+      retain = true;
+    } else {
+      // A type for a top-level class may be referenced from an object pool as
+      // part of an error message.
+      const Class& top = Class::Handle(Z, lib.toplevel_class());
+      if (classes_to_retain_.Lookup(&top) != NULL) {
+        retain = true;
+      }
+    }
+
     if (retain) {
       lib.set_index(retained_libraries.Length());
       retained_libraries.Add(lib);
@@ -1906,7 +1922,7 @@
 
     RawStackmap* DedupStackmap(const Stackmap& stackmap) {
       const Stackmap* canonical_stackmap =
-          canonical_stackmaps_.Lookup(&stackmap);
+          canonical_stackmaps_.LookupValue(&stackmap);
       if (canonical_stackmap == NULL) {
         canonical_stackmaps_.Insert(
             &Stackmap::ZoneHandle(zone_, stackmap.raw()));
@@ -1954,7 +1970,7 @@
 
     RawArray* DedupStackmapList(const Array& stackmaps) {
       const Array* canonical_stackmap_list =
-          canonical_stackmap_lists_.Lookup(&stackmaps);
+          canonical_stackmap_lists_.LookupValue(&stackmaps);
       if (canonical_stackmap_list == NULL) {
         canonical_stackmap_lists_.Insert(
             &Array::ZoneHandle(zone_, stackmaps.raw()));
@@ -2002,7 +2018,7 @@
 
     RawInstructions* DedupOneInstructions(const Instructions& instructions) {
       const Instructions* canonical_instructions =
-          canonical_instructions_set_.Lookup(&instructions);
+          canonical_instructions_set_.LookupValue(&instructions);
       if (canonical_instructions == NULL) {
         canonical_instructions_set_.Insert(
             &Instructions::ZoneHandle(zone_, instructions.raw()));
@@ -2708,8 +2724,11 @@
         // The return value of setjmp is the deopt id of the check instruction
         // that caused the bailout.
         done = false;
+        if (!use_speculative_inlining) {
+          // Assert that we don't repeatedly retry speculation.
+          UNREACHABLE();
+        }
 #if defined(DEBUG)
-        ASSERT(use_speculative_inlining);
         for (intptr_t i = 0; i < inlining_black_list.length(); ++i) {
           ASSERT(inlining_black_list[i] != val);
         }
diff --git a/runtime/vm/profiler.h b/runtime/vm/profiler.h
index 32b4ec4..b6b2417 100644
--- a/runtime/vm/profiler.h
+++ b/runtime/vm/profiler.h
@@ -356,8 +356,8 @@
     kTruncatedTraceBit = 5,
     kClassAllocationSampleBit = 6,
     kContinuationSampleBit = 7,
-    kThreadTaskBit = 8,  // 4 bits.
-    kNextFreeBit = 12,
+    kThreadTaskBit = 8,  // 5 bits.
+    kNextFreeBit = 13,
   };
   class HeadSampleBit : public BitField<uword, bool, kHeadSampleBit, 1> {};
   class LeafFrameIsDart :
@@ -373,7 +373,7 @@
   class ContinuationSampleBit
       : public BitField<uword, bool, kContinuationSampleBit, 1> {};
   class ThreadTaskBit
-      : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 4> {};
+      : public BitField<uword, Thread::TaskKind, kThreadTaskBit, 5> {};
 
   int64_t timestamp_;
   ThreadId tid_;
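
The profiler.h hunks above widen the per-sample thread-task field from 4 to 5 bits, which is why kNextFreeBit moves from 12 to 13: every field packed after it shifts up by the extra bit. As a rough illustration of that packing (a minimal stand-in template with a plain unsigned payload, not the VM's actual BitField class or Thread::TaskKind type):

// Sketch only: simplified BitField-style packing, illustrating why widening
// one field pushes the next free bit position up by one.
#include <cstdint>
#include <iostream>

template <typename S, typename T, int position, int size>
class BitField {
 public:
  static constexpr S mask() { return (static_cast<S>(1) << size) - 1; }
  static constexpr S encode(T value) {
    return (static_cast<S>(value) & mask()) << position;
  }
  static constexpr T decode(S word) {
    return static_cast<T>((word >> position) & mask());
  }
  static constexpr S update(T value, S word) {
    return (word & ~(mask() << position)) | encode(value);
  }
};

enum {
  kThreadTaskBit = 8,  // Field now spans bits 8..12 (5 bits).
  kNextFreeBit = 13,   // Was 12 while the field was only 4 bits wide.
};

using ThreadTaskField = BitField<uintptr_t, unsigned, kThreadTaskBit, 5>;

int main() {
  uintptr_t state = 0;
  state = ThreadTaskField::update(17u, state);  // 17 needs 5 bits to encode.
  std::cout << "task = " << ThreadTaskField::decode(state)
            << ", next free bit = " << kNextFreeBit << "\n";
  return 0;
}
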
diff --git a/runtime/vm/profiler_service.cc b/runtime/vm/profiler_service.cc
index 195149f..6b3d839 100644
--- a/runtime/vm/profiler_service.cc
+++ b/runtime/vm/profiler_service.cc
@@ -542,7 +542,7 @@
 
   ProfileFunction* Lookup(const Function& function) {
     ASSERT(!function.IsNull());
-    return function_hash_.Lookup(&function);
+    return function_hash_.LookupValue(&function);
   }
 
   ProfileFunction* GetUnknown() {
@@ -1255,7 +1255,9 @@
 
   void Build() {
     ScopeTimer sw("ProfileBuilder::Build", FLAG_trace_profiler);
-    FilterSamples();
+    if (!FilterSamples()) {
+      return;
+    }
 
     Setup();
     BuildCodeTable();
@@ -1297,15 +1299,16 @@
     RegisterProfileCodeTag(VMTag::kInlineEndCodeTagId);
   }
 
-  void FilterSamples() {
+  bool FilterSamples() {
     ScopeTimer sw("ProfileBuilder::FilterSamples", FLAG_trace_profiler);
     SampleBuffer* sample_buffer = Profiler::sample_buffer();
     if (sample_buffer == NULL) {
-      return;
+      return false;
     }
     samples_ = sample_buffer->BuildProcessedSampleBuffer(filter_);
     profile_->samples_ = samples_;
     profile_->sample_count_ = samples_->length();
+    return true;
   }
 
   void UpdateMinMaxTimes(int64_t timestamp) {
@@ -2518,7 +2521,7 @@
 
 
 ProfileFunction* Profile::FindFunction(const Function& function) {
-  return functions_->Lookup(function);
+  return (functions_ != NULL) ? functions_->Lookup(function) : NULL;
 }
 
 
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index cc880e9..73070ac 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -4,6 +4,7 @@
 
 #include "vm/raw_object.h"
 
+#include "vm/become.h"
 #include "vm/class_table.h"
 #include "vm/dart.h"
 #include "vm/freelist.h"
@@ -14,11 +15,6 @@
 
 namespace dart {
 
-#if defined(DEBUG)
-DEFINE_FLAG(bool, validate_overwrite, true, "Verify overwritten fields.");
-#endif  // DEBUG
-
-
 void RawObject::Validate(Isolate* isolate) const {
   if (Object::void_class_ == reinterpret_cast<RawClass*>(kHeapObjectTag)) {
     // Validation relies on properly initialized class classes. Skip if the
@@ -177,6 +173,12 @@
       instance_size = element->Size();
       break;
     }
+    case kForwardingCorpse: {
+      uword addr = RawObject::ToAddr(this);
+      ForwardingCorpse* element = reinterpret_cast<ForwardingCorpse*>(addr);
+      instance_size = element->Size();
+      break;
+    }
     default: {
       // Get the (constant) instance size out of the class object.
       // TODO(koda): Add Size(ClassTable*) interface to allow caching in loops.
@@ -219,22 +221,6 @@
 }
 
 
-#if defined(DEBUG)
-void RawObject::ValidateOverwrittenPointer(RawObject* raw) {
-  if (FLAG_validate_overwrite) {
-    raw->Validate(Isolate::Current());
-  }
-}
-
-
-void RawObject::ValidateOverwrittenSmi(RawSmi* raw) {
-  if (FLAG_validate_overwrite && raw->IsHeapObject() && raw != Object::null()) {
-    FATAL1("Expected smi/null, found: %" Px "\n", reinterpret_cast<uword>(raw));
-  }
-}
-#endif  // DEBUG
-
-
 intptr_t RawObject::VisitPointers(ObjectPointerVisitor* visitor) {
   intptr_t size = 0;
 
@@ -288,6 +274,12 @@
         size = element->Size();
         break;
       }
+      case kForwardingCorpse: {
+        uword addr = RawObject::ToAddr(this);
+        ForwardingCorpse* forwarder = reinterpret_cast<ForwardingCorpse*>(addr);
+        size = forwarder->Size();
+        break;
+      }
       case kNullCid:
         size = Size();
         break;
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index 74cf176..dd8ff93 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -11,7 +11,6 @@
 #include "vm/snapshot.h"
 #include "vm/token.h"
 #include "vm/token_position.h"
-#include "vm/verified_memory.h"
 
 namespace dart {
 
@@ -181,9 +180,12 @@
   kDynamicCid,
   kVoidCid,
 
-  // The following entry does not describe a real class, but instead it is an
-  // id which is used to identify free list elements in the heap.
+  // The following entries describe classes for pseudo-objects in the heap
+  // that should never be reachable from live objects. Free list elements
+  // maintain the free list for old space, and forwarding corpses are used to
+  // implement one-way become.
   kFreeListElement,
+  kForwardingCorpse,
 
   kNumPredefinedCids,
 };
@@ -247,9 +249,11 @@
     }                                                                          \
     SNAPSHOT_WRITER_SUPPORT()                                                  \
     HEAP_PROFILER_SUPPORT()                                                    \
+    friend class object##SerializationCluster;                                 \
+    friend class object##DeserializationCluster;                               \
 
-// RawObject is the base class of all raw objects, even though it carries the
-// class_ field not all raw objects are allocated in the heap and thus cannot
+// RawObject is the base class of all raw objects; even though it carries the
+// tags_ field, not all raw objects are allocated in the heap and thus cannot
 // be dereferenced (e.g. RawSmi).
 class RawObject {
  public:
@@ -442,6 +446,12 @@
   bool IsFreeListElement() const {
     return ((GetClassId() == kFreeListElement));
   }
+  bool IsForwardingCorpse() const {
+    return ((GetClassId() == kForwardingCorpse));
+  }
+  bool IsPseudoObject() const {
+    return IsFreeListElement() || IsForwardingCorpse();
+  }
 
   intptr_t Size() const {
     uword tags = ptr()->tags_;
@@ -561,10 +571,7 @@
 
   template<typename type>
   void StorePointer(type const* addr, type value) {
-#if defined(DEBUG)
-    ValidateOverwrittenPointer(*addr);
-#endif  // DEBUG
-    VerifiedMemory::Write(const_cast<type*>(addr), value);
+    *const_cast<type*>(addr) = value;
     // Filter stores based on source and target.
     if (!value->IsHeapObject()) return;
     if (value->IsNewObject() && this->IsOldObject() &&
@@ -577,29 +584,14 @@
   // Use for storing into an explicitly Smi-typed field of an object
   // (i.e., both the previous and new value are Smis).
   void StoreSmi(RawSmi* const* addr, RawSmi* value) {
-#if defined(DEBUG)
-    ValidateOverwrittenSmi(*addr);
-#endif  // DEBUG
     // Can't use Contains, as array length is initialized through this method.
     ASSERT(reinterpret_cast<uword>(addr) >= RawObject::ToAddr(this));
-    VerifiedMemory::Write(const_cast<RawSmi**>(addr), value);
+    *const_cast<RawSmi**>(addr) = value;
   }
 
-  void InitializeSmi(RawSmi* const* addr, RawSmi* value) {
-    // Can't use Contains, as array length is initialized through this method.
-    ASSERT(reinterpret_cast<uword>(addr) >= RawObject::ToAddr(this));
-    // This is an initializing store, so any previous content is OK.
-    VerifiedMemory::Accept(reinterpret_cast<uword>(addr), kWordSize);
-    VerifiedMemory::Write(const_cast<RawSmi**>(addr), value);
-  }
-
-#if defined(DEBUG)
-  static void ValidateOverwrittenPointer(RawObject* raw);
-  static void ValidateOverwrittenSmi(RawSmi* raw);
-#endif  // DEBUG
-
   friend class Api;
   friend class ApiMessageReader;  // GetClassId
+  friend class Serializer;  // GetClassId
   friend class Array;
   friend class Bigint;
   friend class ByteBuffer;
@@ -635,6 +627,7 @@
   friend class AssemblyInstructionsWriter;
   friend class BlobInstructionsWriter;
   friend class SnapshotReader;
+  friend class Deserializer;
   friend class SnapshotWriter;
   friend class String;
   friend class Type;  // GetClassId
@@ -721,6 +714,7 @@
   friend class RawInstance;
   friend class RawInstructions;
   friend class SnapshotReader;
+  friend class InstanceSerializationCluster;
 };
 
 
@@ -820,7 +814,9 @@
   // So that the SkippedCodeFunctions::DetachCode can null out the code fields.
   friend class SkippedCodeFunctions;
   friend class Class;
+
   RAW_HEAP_OBJECT_IMPLEMENTATION(Function);
+
   static bool ShouldVisitCode(RawCode* raw_code);
   static bool CheckUsageCounter(RawFunction* raw_fun);
 
@@ -1152,6 +1148,7 @@
   friend class SkippedCodeFunctions;
   friend class StackFrame;
   friend class Profiler;
+  friend class FunctionDeserializationCluster;
 };
 
 
@@ -1841,6 +1838,7 @@
 
   friend class ApiMessageReader;
   friend class SnapshotReader;
+  friend class RODataSerializationCluster;
 };
 
 
@@ -1852,6 +1850,7 @@
   const uint16_t* data() const { OPEN_ARRAY_START(uint16_t, uint16_t); }
 
   friend class SnapshotReader;
+  friend class RODataSerializationCluster;
 };
 
 
@@ -1927,6 +1926,9 @@
     return reinterpret_cast<RawObject**>(&ptr()->data()[length - 1]);
   }
 
+  friend class LinkedHashMapSerializationCluster;
+  friend class LinkedHashMapDeserializationCluster;
+  friend class Deserializer;
   friend class RawCode;
   friend class RawImmutableArray;
   friend class SnapshotReader;
@@ -1979,7 +1981,6 @@
     return reinterpret_cast<RawObject**>(&ptr()->deleted_keys_);
   }
 
-
   friend class SnapshotReader;
 };
 
@@ -1987,41 +1988,47 @@
 class RawFloat32x4 : public RawInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Float32x4);
 
-  float value_[4];
+  ALIGN8 float value_[4];
 
   friend class SnapshotReader;
+
  public:
   float x() const { return value_[0]; }
   float y() const { return value_[1]; }
   float z() const { return value_[2]; }
   float w() const { return value_[3]; }
 };
+COMPILE_ASSERT(sizeof(RawFloat32x4) == 24);
 
 
 class RawInt32x4 : public RawInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Int32x4);
 
-  int32_t value_[4];
+  ALIGN8 int32_t value_[4];
 
   friend class SnapshotReader;
+
  public:
   int32_t x() const { return value_[0]; }
   int32_t y() const { return value_[1]; }
   int32_t z() const { return value_[2]; }
   int32_t w() const { return value_[3]; }
 };
+COMPILE_ASSERT(sizeof(RawInt32x4) == 24);
 
 
 class RawFloat64x2 : public RawInstance {
   RAW_HEAP_OBJECT_IMPLEMENTATION(Float64x2);
 
-  double value_[2];
+  ALIGN8 double value_[2];
 
   friend class SnapshotReader;
+
  public:
   double x() const { return value_[0]; }
   double y() const { return value_[1]; }
 };
+COMPILE_ASSERT(sizeof(RawFloat64x2) == 24);
 
 
 // Define an aliases for intptr_t.
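
The ALIGN8 annotations plus the new COMPILE_ASSERT(sizeof(...) == 24) checks above force each SIMD payload onto an 8-byte boundary, so a boxed Float32x4/Int32x4/Float64x2 is 24 bytes on both 32-bit and 64-bit builds. A minimal standard-C++ sketch of the same idea, using a hypothetical 4-byte stand-in header rather than the real RawObject header layout:

// Sketch only: an over-aligned payload pads the header to 8 bytes, so the
// total size is 8 (header + padding) + 16 (payload) = 24 on any platform.
#include <cstdint>

struct FakeFloat32x4Box {
  uint32_t tags;              // Hypothetical 4-byte stand-in for the header.
  alignas(8) float value[4];  // Same effect as ALIGN8 in the patch.
};

// Mirrors COMPILE_ASSERT(sizeof(RawFloat32x4) == 24) from the patch.
static_assert(sizeof(FakeFloat32x4Box) == 24, "unexpected boxed SIMD size");

int main() { return 0; }
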
@@ -2053,6 +2060,8 @@
   friend class SnapshotReader;
   friend class ObjectPool;
   friend class RawObjectPool;
+  friend class ObjectPoolSerializationCluster;
+  friend class ObjectPoolDeserializationCluster;
 };
 
 
@@ -2269,10 +2278,7 @@
                  kTwoByteStringCid == kStringCid + 2 &&
                  kExternalOneByteStringCid == kStringCid + 3 &&
                  kExternalTwoByteStringCid == kStringCid + 4);
-  return (index == kOneByteStringCid ||
-          index == kTwoByteStringCid ||
-          index == kExternalOneByteStringCid ||
-          index == kExternalTwoByteStringCid);
+  return (index == kTwoByteStringCid || index == kExternalTwoByteStringCid);
 }
 
 
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index b245a94..c2448a3 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -1374,94 +1374,13 @@
 }
 
 
-#if defined(DEBUG)
-static uword Checksum(uword entry, intptr_t size) {
-  uword sum = 0;
-  uword* start = reinterpret_cast<uword*>(entry);
-  uword* end = reinterpret_cast<uword*>(entry + size);
-  for (uword* cursor = start; cursor < end; cursor++) {
-    sum ^= *cursor;
-  }
-  return sum;
-}
-#endif
-
-
 RawCode* Code::ReadFrom(SnapshotReader* reader,
                         intptr_t object_id,
                         intptr_t tags,
                         Snapshot::Kind kind,
                         bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  Code& result = Code::ZoneHandle(reader->zone(), NEW_OBJECT_WITH_LEN(Code, 0));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  result.set_compile_timestamp(0);
-  result.set_state_bits(reader->Read<int32_t>());
-  result.set_lazy_deopt_pc_offset(-1);
-
-  int32_t text_offset = reader->Read<int32_t>();
-  RawInstructions* instr = reinterpret_cast<RawInstructions*>(
-      reader->GetInstructionsAt(text_offset) + kHeapObjectTag);
-  uword entry_point = Instructions::EntryPoint(instr);
-
-#if defined(DEBUG)
-  ASSERT(instr->IsMarked());
-  ASSERT(instr->IsVMHeapObject());
-  uword expected_check = reader->Read<uword>();
-  intptr_t instructions_size = Utils::RoundUp(instr->size_,
-                                              OS::PreferredCodeAlignment());
-  uword actual_check = Checksum(entry_point, instructions_size);
-  ASSERT(expected_check == actual_check);
-#endif
-
-  result.StoreNonPointer(&result.raw_ptr()->entry_point_, entry_point);
-
-  result.StorePointer(&result.raw_ptr()->active_instructions_, instr);
-  result.StorePointer(&result.raw_ptr()->instructions_, instr);
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->object_pool_),
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(&result.raw_ptr()->owner_,
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->exception_handlers_),
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->pc_descriptors_),
-                      reader->PassiveObjectHandle()->raw());
-
-  (*reader->PassiveObjectHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(reinterpret_cast<RawObject*const*>(
-                          &result.raw_ptr()->stackmaps_),
-                      reader->PassiveObjectHandle()->raw());
-
-  result.StorePointer(&result.raw_ptr()->deopt_info_array_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->static_calls_target_table_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->var_descriptors_,
-                      LocalVarDescriptors::null());
-  result.StorePointer(&result.raw_ptr()->inlined_metadata_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->code_source_map_,
-                      CodeSourceMap::null());
-  result.StorePointer(&result.raw_ptr()->comments_,
-                      Array::null());
-  result.StorePointer(&result.raw_ptr()->return_address_metadata_,
-                      Object::null());
-
-  return result.raw();
+  UNREACHABLE();
+  return Code::null();
 }
 
 
@@ -1469,55 +1388,7 @@
                       intptr_t object_id,
                       Snapshot::Kind kind,
                       bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t pointer_offsets_length =
-      Code::PtrOffBits::decode(ptr()->state_bits_);
-  if (pointer_offsets_length != 0) {
-    FATAL("Cannot serialize code with embedded pointers");
-  }
-  if (kind == Snapshot::kAppNoJIT) {
-    // No disabled code in precompilation.
-    ASSERT(ptr()->instructions_ == ptr()->active_instructions_);
-  } else {
-    ASSERT(kind == Snapshot::kAppWithJIT);
-    // We never include optimized code in JIT precompilation. Deoptimization
-    // requires code patching and we cannot patch code that is shared between
-    // isolates and should not mutate memory allocated by the embedder.
-    bool is_optimized = Code::PtrOffBits::decode(ptr()->state_bits_);
-    if (is_optimized) {
-      FATAL("Cannot include optimized code in a JIT snapshot");
-    }
-  }
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-
-  // Write out the class and tags information.
-  writer->WriteVMIsolateObject(kCodeCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  // Write out all the non object fields.
-  writer->Write<int32_t>(ptr()->state_bits_);
-
-  RawInstructions* instr = ptr()->instructions_;
-  int32_t text_offset = writer->GetInstructionsId(instr, this);
-  writer->Write<int32_t>(text_offset);
-
-#if defined(DEBUG)
-  uword entry = ptr()->entry_point_;
-  intptr_t instructions_size = Utils::RoundUp(instr->size_,
-                                              OS::PreferredCodeAlignment());
-  uword check = Checksum(entry, instructions_size);
-  writer->Write<uword>(check);
-#endif
-
-  writer->WriteObjectImpl(ptr()->object_pool_, kAsReference);
-  writer->WriteObjectImpl(ptr()->owner_, kAsReference);
-  writer->WriteObjectImpl(ptr()->exception_handlers_, kAsReference);
-  writer->WriteObjectImpl(ptr()->pc_descriptors_, kAsReference);
-  writer->WriteObjectImpl(ptr()->stackmaps_, kAsReference);
+  UNREACHABLE();
 }
 
 
@@ -1544,62 +1415,8 @@
                                     intptr_t tags,
                                     Snapshot::Kind kind,
                                     bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t len = reader->Read<intptr_t>();
-  ObjectPool* result = NULL;
-  DeserializeState state;
-  if (!as_reference) {
-    result = reinterpret_cast<ObjectPool*>(reader->GetBackRef(object_id));
-    state = kIsDeserialized;
-  } else {
-    state = kIsNotDeserialized;
-  }
-  if (result == NULL) {
-    result = &(ObjectPool::ZoneHandle(
-        reader->zone(), NEW_OBJECT_WITH_LEN(ObjectPool, len)));
-    reader->AddBackRef(object_id, result, state);
-  }
-  if (!as_reference) {
-    // Read all the individual elements for inlined objects.
-    const TypedData& info_array =
-        TypedData::Handle(reader->NewTypedData(kTypedDataInt8ArrayCid, len));
-    result->set_info_array(info_array);
-
-    NoSafepointScope no_safepoint;
-    for (intptr_t i = 0; i < len; i++) {
-      ObjectPool::EntryType entry_type =
-          static_cast<ObjectPool::EntryType>(reader->Read<int8_t>());
-      *reinterpret_cast<int8_t*>(info_array.DataAddr(i)) = entry_type;
-      switch (entry_type) {
-        case ObjectPool::kTaggedObject: {
-          (*reader->PassiveObjectHandle()) =
-              reader->ReadObjectImpl(kAsReference);
-          result->SetObjectAt(i, *(reader->PassiveObjectHandle()));
-          break;
-        }
-        case ObjectPool::kImmediate: {
-          intptr_t raw_value = reader->Read<intptr_t>();
-          result->SetRawValueAt(i, raw_value);
-          break;
-        }
-        case ObjectPool::kNativeEntry: {
-#if !defined(TARGET_ARCH_DBC)
-          // Read nothing. Initialize with the lazy link entry.
-          uword new_entry = NativeEntry::LinkNativeCallEntry();
-          result->SetRawValueAt(i, static_cast<intptr_t>(new_entry));
-#else
-          UNREACHABLE();  // DBC does not support lazy native call linking.
-#endif
-          break;
-        }
-        default:
-          UNREACHABLE();
-      }
-    }
-  }
-  return result->raw();
+  UNREACHABLE();
+  return ObjectPool::null();
 }
 
 
@@ -1607,68 +1424,7 @@
                             intptr_t object_id,
                             Snapshot::Kind kind,
                             bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-  intptr_t tags = writer->GetObjectTags(this);
-  intptr_t length = ptr()->length_;
-
-  if (as_reference) {
-    // Write out the serialization header value for this object.
-    writer->WriteInlinedObjectHeader(kOmittedObjectId);
-
-    // Write out the class information.
-    writer->WriteVMIsolateObject(kObjectPoolCid);
-    writer->WriteTags(tags);
-
-    // Write out the length field.
-    writer->Write<intptr_t>(length);
-  } else {
-    // Write out the serialization header value for this object.
-    writer->WriteInlinedObjectHeader(object_id);
-
-    // Write out the class and tags information.
-    writer->WriteVMIsolateObject(kObjectPoolCid);
-    writer->WriteTags(tags);
-
-    RawTypedData* info_array = ptr()->info_array_->ptr();
-    ASSERT(info_array != TypedData::null());
-
-    writer->Write<intptr_t>(length);
-    for (intptr_t i = 0; i < length; i++) {
-      ObjectPool::EntryType entry_type =
-          static_cast<ObjectPool::EntryType>(info_array->data()[i]);
-      writer->Write<int8_t>(entry_type);
-      Entry& entry = ptr()->data()[i];
-      switch (entry_type) {
-        case ObjectPool::kTaggedObject: {
-#if !defined(TARGET_ARCH_DBC)
-          if (entry.raw_obj_ == StubCode::CallNativeCFunction_entry()->code()) {
-            // Natives can run while precompiling, becoming linked and switching
-            // their stub. Reset to the initial stub used for lazy-linking.
-            writer->WriteObjectImpl(
-                StubCode::CallBootstrapCFunction_entry()->code(), kAsReference);
-            break;
-          }
-#endif
-          writer->WriteObjectImpl(entry.raw_obj_, kAsReference);
-          break;
-        }
-        case ObjectPool::kImmediate: {
-          writer->Write<intptr_t>(entry.raw_value_);
-          break;
-        }
-        case ObjectPool::kNativeEntry: {
-          // Write nothing. Will initialize with the lazy link entry.
-#if defined(TARGET_ARCH_DBC)
-          UNREACHABLE();   // DBC does not support lazy native call linking.
-#endif
-          break;
-        }
-        default:
-          UNREACHABLE();
-      }
-    }
-  }
+  UNREACHABLE();
 }
 
 
@@ -1677,15 +1433,8 @@
                                           intptr_t tags,
                                           Snapshot::Kind kind,
                                           bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t offset = reader->Read<int32_t>();
-  PcDescriptors& result = PcDescriptors::ZoneHandle(reader->zone());
-  result ^= reader->GetObjectAt(offset);
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  return result.raw();
+  UNREACHABLE();
+  return PcDescriptors::null();
 }
 
 
@@ -1693,15 +1442,7 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kPcDescriptorsCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  writer->Write<int32_t>(writer->GetObjectId(this));
+  UNREACHABLE();
 }
 
 
@@ -1710,23 +1451,8 @@
                                           intptr_t tags,
                                           Snapshot::Kind kind,
                                           bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  const int32_t length = reader->Read<int32_t>();
-  CodeSourceMap& result =
-      CodeSourceMap::ZoneHandle(reader->zone(),
-                                NEW_OBJECT_WITH_LEN(CodeSourceMap, length));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  if (result.Length() > 0) {
-    NoSafepointScope no_safepoint;
-    intptr_t len = result.Length();
-    uint8_t* data = result.UnsafeMutableNonPointer(result.raw_ptr()->data());
-    reader->ReadBytes(data, len);
-  }
-
-  return result.raw();
+  UNREACHABLE();
+  return CodeSourceMap::null();
 }
 
 
@@ -1734,19 +1460,7 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kCodeSourceMapCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-  writer->Write<int32_t>(ptr()->length_);
-  if (ptr()->length_ > 0) {
-    intptr_t len = ptr()->length_;
-    uint8_t* data = reinterpret_cast<uint8_t*>(ptr()->data());
-    writer->WriteBytes(data, len);
-  }
+  UNREACHABLE();
 }
 
 
@@ -1755,15 +1469,8 @@
                                 intptr_t tags,
                                 Snapshot::Kind kind,
                                 bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  intptr_t offset = reader->Read<int32_t>();
-  Stackmap& result = Stackmap::ZoneHandle(reader->zone());
-  result ^= reader->GetObjectAt(offset);
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  return result.raw();
+  UNREACHABLE();
+  return Stackmap::null();
 }
 
 
@@ -1771,15 +1478,7 @@
                           intptr_t object_id,
                           Snapshot::Kind kind,
                           bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kStackmapCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  writer->Write<int32_t>(writer->GetObjectId(this));
+  UNREACHABLE();
 }
 
 
@@ -1788,32 +1487,8 @@
                                                       intptr_t tags,
                                                       Snapshot::Kind kind,
                                                       bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  const int32_t num_entries = reader->Read<int32_t>();
-
-  LocalVarDescriptors& result =
-      LocalVarDescriptors::ZoneHandle(reader->zone(),
-                                      NEW_OBJECT_WITH_LEN(LocalVarDescriptors,
-                                                          num_entries));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  for (intptr_t i = 0; i < num_entries; i++) {
-    (*reader->StringHandle()) ^= reader->ReadObjectImpl(kAsReference);
-    result.StorePointer(result.raw()->nameAddrAt(i),
-                        reader->StringHandle()->raw());
-  }
-
-  if (num_entries > 0) {
-    NoSafepointScope no_safepoint;
-    intptr_t len = num_entries * sizeof(RawLocalVarDescriptors::VarInfo);
-    uint8_t* data = result.UnsafeMutableNonPointer(
-        reinterpret_cast<const uint8_t*>(result.raw()->data()));
-    reader->ReadBytes(data, len);
-  }
-
-  return result.raw();
+  UNREACHABLE();
+  return LocalVarDescriptors::null();
 }
 
 
@@ -1821,22 +1496,7 @@
                                      intptr_t object_id,
                                      Snapshot::Kind kind,
                                      bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kLocalVarDescriptorsCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-  writer->Write<int32_t>(ptr()->num_entries_);
-  for (intptr_t i = 0; i < ptr()->num_entries_; i++) {
-    writer->WriteObjectImpl(ptr()->names()[i], kAsReference);
-  }
-  if (ptr()->num_entries_ > 0) {
-    intptr_t len = ptr()->num_entries_ * sizeof(VarInfo);
-    uint8_t* data = reinterpret_cast<uint8_t*>(this->data());
-    writer->WriteBytes(data, len);
-  }
+  UNREACHABLE();
 }
 
 
@@ -1845,30 +1505,8 @@
                                                   intptr_t tags,
                                                   Snapshot::Kind kind,
                                                   bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  const int32_t num_entries = reader->Read<int32_t>();
-  ExceptionHandlers& result =
-      ExceptionHandlers::ZoneHandle(reader->zone(),
-                                    NEW_OBJECT_WITH_LEN(ExceptionHandlers,
-                                                        num_entries));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  if (result.num_entries() > 0) {
-    NoSafepointScope no_safepoint;
-    const intptr_t len =
-        result.num_entries() * sizeof(RawExceptionHandlers::HandlerInfo);
-    uint8_t* data = result.UnsafeMutableNonPointer(
-        reinterpret_cast<const uint8_t*>(result.raw_ptr()->data()));
-    reader->ReadBytes(data, len);
-  }
-
-  *(reader->ArrayHandle()) ^= reader->ReadObjectImpl(kAsInlinedObject);
-  result.StorePointer(&result.raw_ptr()->handled_types_data_,
-                      reader->ArrayHandle()->raw());
-
-  return result.raw();
+  UNREACHABLE();
+  return ExceptionHandlers::null();
 }
 
 
@@ -1876,22 +1514,7 @@
                                    intptr_t object_id,
                                    Snapshot::Kind kind,
                                    bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-  writer->WriteIndexedObject(kExceptionHandlersCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-  writer->Write<int32_t>(ptr()->num_entries_);
-
-  if (ptr()->num_entries_ > 0) {
-    intptr_t len = ptr()->num_entries_ * sizeof(HandlerInfo);
-    uint8_t* data = reinterpret_cast<uint8_t*>(ptr()->data());
-    writer->WriteBytes(data, len);
-  }
-
-  writer->WriteObjectImpl(ptr()->handled_types_data_, kAsInlinedObject);
+  UNREACHABLE();
 }
 
 
@@ -2106,22 +1729,8 @@
                                                 intptr_t tags,
                                                 Snapshot::Kind kind,
                                                 bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  MegamorphicCache& result =
-      MegamorphicCache::ZoneHandle(reader->zone(),
-                                   NEW_OBJECT(MegamorphicCache));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  result.set_filled_entry_count(reader->Read<int32_t>());
-
-  // Set all the object fields.
-  READ_OBJECT_FIELDS(result,
-                     result.raw()->from(), result.raw()->to(),
-                     kAsReference);
-
-  return result.raw();
+  UNREACHABLE();
+  return MegamorphicCache::null();
 }
 
 
@@ -2129,22 +1738,7 @@
                                   intptr_t object_id,
                                   Snapshot::Kind kind,
                                   bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-
-  // Write out the class and tags information.
-  writer->WriteVMIsolateObject(kMegamorphicCacheCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  // Write out all the non object fields.
-  writer->Write<int32_t>(ptr()->filled_entry_count_);
-
-  // Write out all the object pointer fields.
-  SnapshotWriterVisitor visitor(writer, kAsReference);
-  visitor.VisitPointers(from(), to());
+  UNREACHABLE();
 }
 
 
@@ -2153,22 +1747,8 @@
                                                 intptr_t tags,
                                                 Snapshot::Kind kind,
                                                 bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  SubtypeTestCache& result =
-      SubtypeTestCache::ZoneHandle(reader->zone(),
-                                   NEW_OBJECT(SubtypeTestCache));
-  reader->AddBackRef(object_id, &result, kIsDeserialized);
-
-  // Set all the object fields.
-  // TODO(5411462): Need to assert No GC can happen here, even though
-  // allocations may happen.
-  (*reader->ArrayHandle()) ^= reader->ReadObjectImpl(kAsReference);
-  result.StorePointer(&result.raw_ptr()->cache_,
-                      reader->ArrayHandle()->raw());
-
-  return result.raw();
+  UNREACHABLE();
+  return SubtypeTestCache::null();
 }
 
 
@@ -2176,18 +1756,7 @@
                                   intptr_t object_id,
                                   Snapshot::Kind kind,
                                   bool as_reference) {
-  ASSERT(Snapshot::IncludesCode(kind));
-  ASSERT(Snapshot::IsFull(kind));
-
-  // Write out the serialization header value for this object.
-  writer->WriteInlinedObjectHeader(object_id);
-
-  // Write out the class and tags information.
-  writer->WriteVMIsolateObject(kSubtypeTestCacheCid);
-  writer->WriteTags(writer->GetObjectTags(this));
-
-  // Write out all the object pointer fields.
-  writer->WriteObjectImpl(ptr()->cache_, kAsReference);
+  UNREACHABLE();
 }
 
 
@@ -2621,14 +2190,6 @@
                                           intptr_t tags,
                                           Snapshot::Kind kind,
                                           bool as_reference) {
-  if (Snapshot::IncludesCode(kind)) {
-    ASSERT(Snapshot::IsFull(kind));
-    intptr_t offset = reader->Read<int32_t>();
-    String& result = String::ZoneHandle(reader->zone());
-    result ^= reader->GetObjectAt(offset);
-    reader->AddBackRef(object_id, &result, kIsDeserialized);
-    return raw(result);
-  }
   // Read the length so that we can determine instance size to allocate.
   ASSERT(reader != NULL);
   intptr_t len = reader->ReadSmiValue();
@@ -2736,22 +2297,6 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  if (Snapshot::IncludesCode(kind)) {
-    ASSERT(Snapshot::IncludesCode(kind));
-    ASSERT(Snapshot::IsFull(kind));
-    // Assert that hash is computed.
-    if (ptr()->hash_ == NULL) {
-      ptr()->hash_ = Smi::New(String::Hash(ptr()->data(),
-                                           Smi::Value(ptr()->length_)));
-    }
-    ASSERT(ptr()->hash_ != NULL);
-    // Write out the serialization header value for this object.
-    writer->WriteInlinedObjectHeader(object_id);
-    writer->WriteIndexedObject(kOneByteStringCid);
-    writer->WriteTags(writer->GetObjectTags(this));
-    writer->Write<int32_t>(writer->GetObjectId(this));
-    return;
-  }
   StringWriteTo(writer,
                 object_id,
                 kind,
@@ -3017,8 +2562,7 @@
 
   LinkedHashMap& map = LinkedHashMap::ZoneHandle(
       reader->zone(), LinkedHashMap::null());
-  if ((Snapshot::IsFull(kind) && !Snapshot::IncludesCode(kind)) ||
-      kind == Snapshot::kScript) {
+  if (Snapshot::IsFull(kind) || kind == Snapshot::kScript) {
     // The immutable maps that seed map literals are not yet VM-internal, so
     // we don't reach this.
     UNREACHABLE();
@@ -3076,8 +2620,7 @@
                                intptr_t object_id,
                                Snapshot::Kind kind,
                                bool as_reference) {
-  if ((Snapshot::IsFull(kind) && !Snapshot::IncludesCode(kind)) ||
-      kind == Snapshot::kScript) {
+  if (Snapshot::IsFull(kind) || kind == Snapshot::kScript) {
     // The immutable maps that seed map literals are not yet VM-internal, so
     // we don't reach this.
   }
@@ -3552,18 +3095,14 @@
                                 intptr_t tags,
                                 Snapshot::Kind kind,
                                 bool as_reference) {
-  ASSERT(kind == Snapshot::kMessage || Snapshot::IncludesCode(kind));
+  ASSERT(kind == Snapshot::kMessage);
 
   uint64_t id = reader->Read<uint64_t>();
   uint64_t origin_id = reader->Read<uint64_t>();
 
-  SendPort& result = SendPort::ZoneHandle(reader->zone());
-  if (Snapshot::IncludesCode(kind)) {
-    // TODO(rmacnak): Reset fields in precompiled snapshots and assert
-    // this is unreachable.
-  } else {
-    result = SendPort::New(id, origin_id);
-  }
+  SendPort& result =
+      SendPort::ZoneHandle(reader->zone(),
+                           SendPort::New(id, origin_id));
   reader->AddBackRef(object_id, &result, kIsDeserialized);
   return result.raw();
 }
diff --git a/runtime/vm/redundancy_elimination.cc b/runtime/vm/redundancy_elimination.cc
index 93b3951..32fe07b 100644
--- a/runtime/vm/redundancy_elimination.cc
+++ b/runtime/vm/redundancy_elimination.cc
@@ -45,7 +45,7 @@
   }
 
   Instruction* Lookup(Instruction* other) const {
-    return GetMapFor(other)->Lookup(other);
+    return GetMapFor(other)->LookupValue(other);
   }
 
   void Insert(Instruction* instr) {
@@ -707,7 +707,7 @@
   }
 
   intptr_t LookupAliasId(const Place& alias) {
-    const Place* result = aliases_map_.Lookup(&alias);
+    const Place* result = aliases_map_.LookupValue(&alias);
     return (result != NULL) ? result->id() : static_cast<intptr_t>(kNoAlias);
   }
 
@@ -725,7 +725,7 @@
   }
 
   Place* LookupCanonical(Place* place) const {
-    return places_map_->Lookup(place);
+    return places_map_->LookupValue(place);
   }
 
   void PrintSet(BitVector* set) {
@@ -851,14 +851,14 @@
   }
 
   const Place* CanonicalizeAlias(const Place& alias) {
-    const Place* canonical = aliases_map_.Lookup(&alias);
+    const Place* canonical = aliases_map_.LookupValue(&alias);
     if (canonical == NULL) {
       canonical = Place::Wrap(zone_,
                               alias,
                               kAnyInstanceAnyIndexAlias + aliases_.length());
       InsertAlias(canonical);
     }
-    ASSERT(aliases_map_.Lookup(&alias) == canonical);
+    ASSERT(aliases_map_.LookupValue(&alias) == canonical);
     return canonical;
   }
 
@@ -1234,7 +1234,7 @@
       for (intptr_t j = 0; j < phi->InputCount(); j++) {
         input_place.set_instance(phi->InputAt(j)->definition());
 
-        Place* result = map->Lookup(&input_place);
+        Place* result = map->LookupValue(&input_place);
         if (result == NULL) {
           result = Place::Wrap(zone, input_place, places->length());
           map->Insert(result);
@@ -1289,7 +1289,7 @@
         continue;
       }
 
-      Place* result = map->Lookup(&place);
+      Place* result = map->LookupValue(&place);
       if (result == NULL) {
         result = Place::Wrap(zone, place, places->length());
         map->Insert(result);
@@ -2539,8 +2539,7 @@
       case Instruction::kStoreInstanceField: {
         StoreInstanceFieldInstr* store_instance = instr->AsStoreInstanceField();
         // Can't eliminate stores that initialize fields.
-        return !(store_instance->is_potential_unboxed_initialization() ||
-                 store_instance->is_object_reference_initialization());
+        return !store_instance->is_initialization();
       }
       case Instruction::kStoreIndexed:
       case Instruction::kStoreStaticField:
diff --git a/runtime/vm/safepoint.h b/runtime/vm/safepoint.h
index 9477425..58afb49 100644
--- a/runtime/vm/safepoint.h
+++ b/runtime/vm/safepoint.h
@@ -313,10 +313,45 @@
   }
 
  private:
-  int16_t execution_state_;
+  uint32_t execution_state_;
   DISALLOW_COPY_AND_ASSIGN(TransitionToGenerated);
 };
 
+
+// TransitionToVM is used to transition the safepoint state of a
+// thread from "running native code" to "running vm code"
+// and ensures that the state is reverted back to "running native code"
+// when exiting the scope/frame.
+// This transition helper is mainly used in the error path of the
+// Dart API implementations where we sometimes do not have an explicit
+// transition set up.
+class TransitionToVM : public TransitionSafepointState {
+ public:
+  explicit TransitionToVM(Thread* T) : TransitionSafepointState(T),
+                                       execution_state_(T->execution_state()) {
+    ASSERT(T == Thread::Current());
+    ASSERT((execution_state_ == Thread::kThreadInVM) ||
+           (execution_state_ == Thread::kThreadInNative));
+    if (execution_state_ == Thread::kThreadInNative) {
+      T->ExitSafepoint();
+      T->set_execution_state(Thread::kThreadInVM);
+    }
+    ASSERT(T->execution_state() == Thread::kThreadInVM);
+  }
+
+  ~TransitionToVM() {
+    ASSERT(thread()->execution_state() == Thread::kThreadInVM);
+    if (execution_state_ == Thread::kThreadInNative) {
+      thread()->set_execution_state(Thread::kThreadInNative);
+      thread()->EnterSafepoint();
+    }
+  }
+
+ private:
+  uint32_t execution_state_;
+  DISALLOW_COPY_AND_ASSIGN(TransitionToVM);
+};
+
 }  // namespace dart
 
 #endif  // VM_SAFEPOINT_H_
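
A stand-alone C++ mirror of the RAII idea behind TransitionToVM, assuming only the behavior documented in the hunk above: the constructor records whether the thread was in native code and moves it to VM state, and the destructor restores the native state and safepoint membership. ExecutionState and g_state are illustrative stand-ins for the Thread execution-state plumbing.

#include <cassert>

enum class ExecutionState { kNative, kVM };
static ExecutionState g_state = ExecutionState::kNative;

class ScopedTransitionToVM {
 public:
  ScopedTransitionToVM() : saved_(g_state) {
    if (saved_ == ExecutionState::kNative) {
      g_state = ExecutionState::kVM;  // the real class also calls ExitSafepoint()
    }
    assert(g_state == ExecutionState::kVM);
  }
  ~ScopedTransitionToVM() {
    if (saved_ == ExecutionState::kNative) {
      g_state = ExecutionState::kNative;  // ...and EnterSafepoint() on the way out
    }
  }

 private:
  ExecutionState saved_;
};

int main() {
  {
    ScopedTransitionToVM to_vm;  // native -> VM for the duration of this block
    assert(g_state == ExecutionState::kVM);
  }
  assert(g_state == ExecutionState::kNative);  // restored when the scope ends
  return 0;
}
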
diff --git a/runtime/vm/scavenger.cc b/runtime/vm/scavenger.cc
index 03f0912..d1bfd3f 100644
--- a/runtime/vm/scavenger.cc
+++ b/runtime/vm/scavenger.cc
@@ -15,7 +15,6 @@
 #include "vm/store_buffer.h"
 #include "vm/thread_registry.h"
 #include "vm/timeline.h"
-#include "vm/verified_memory.h"
 #include "vm/verifier.h"
 #include "vm/visitor.h"
 #include "vm/weak_table.h"
@@ -161,7 +160,6 @@
       memmove(reinterpret_cast<void*>(new_addr),
               reinterpret_cast<void*>(raw_addr),
               size);
-      VerifiedMemory::Accept(new_addr, size);
       // Remember forwarding address.
       ForwardTo(raw_addr, new_addr);
     }
@@ -170,7 +168,6 @@
     *p = new_obj;
     // Update the store buffer as needed.
     if (visiting_old_object_ != NULL) {
-      VerifiedMemory::Accept(reinterpret_cast<uword>(p), sizeof(*p));
       UpdateStoreBuffer(p, new_obj);
     }
   }
@@ -193,10 +190,13 @@
 
 class ScavengerWeakVisitor : public HandleVisitor {
  public:
-  explicit ScavengerWeakVisitor(Scavenger* scavenger)
-      :  HandleVisitor(Thread::Current()),
-         scavenger_(scavenger) {
-    ASSERT(scavenger->heap_->isolate() == Thread::Current()->isolate());
+  ScavengerWeakVisitor(Thread* thread,
+                       Scavenger* scavenger,
+                       FinalizationQueue* finalization_queue) :
+      HandleVisitor(thread),
+      scavenger_(scavenger),
+      queue_(finalization_queue) {
+    ASSERT(scavenger->heap_->isolate() == thread->isolate());
   }
 
   void VisitHandle(uword addr) {
@@ -204,7 +204,7 @@
       reinterpret_cast<FinalizablePersistentHandle*>(addr);
     RawObject** p = handle->raw_addr();
     if (scavenger_->IsUnreachable(p)) {
-      handle->UpdateUnreachable(thread()->isolate());
+      handle->UpdateUnreachable(thread()->isolate(), queue_);
     } else {
       handle->UpdateRelocated(thread()->isolate());
     }
@@ -212,6 +212,7 @@
 
  private:
   Scavenger* scavenger_;
+  FinalizationQueue* queue_;
 
   DISALLOW_COPY_AND_ASSIGN(ScavengerWeakVisitor);
 };
@@ -285,7 +286,7 @@
     return new SemiSpace(NULL);
   } else {
     intptr_t size_in_bytes = size_in_words << kWordSizeLog2;
-    VirtualMemory* reserved = VerifiedMemory::Reserve(size_in_bytes);
+    VirtualMemory* reserved = VirtualMemory::Reserve(size_in_bytes);
     if ((reserved == NULL) || !reserved->Commit(false)) {  // Not executable.
       // TODO(koda): If cache_ is not empty, we could try to delete it.
       delete reserved;
@@ -293,7 +294,6 @@
     }
 #if defined(DEBUG)
     memset(reserved->address(), Heap::kZapByte, size_in_bytes);
-    VerifiedMemory::Accept(reserved->start(), size_in_bytes);
 #endif  // defined(DEBUG)
     return new SemiSpace(reserved);
   }
@@ -305,7 +305,6 @@
   if (reserved_ != NULL) {
     const intptr_t size_in_bytes = size_in_words() << kWordSizeLog2;
     memset(reserved_->address(), Heap::kZapByte, size_in_bytes);
-    VerifiedMemory::Accept(reserved_->start(), size_in_bytes);
   }
 #endif
   SemiSpace* old_cache = NULL;
@@ -425,7 +424,6 @@
     // objects candidates for promotion next time.
     survivor_end_ = end_;
   }
-  VerifiedMemory::Accept(to_->start(), to_->end() - to_->start());
 #if defined(DEBUG)
   // We can only safely verify the store buffers from old space if there is no
   // concurrent old space task. At the same time we prevent new tasks from
@@ -464,11 +462,9 @@
     total_count += count;
     while (!pending->IsEmpty()) {
       RawObject* raw_object = pending->Pop();
-      if (raw_object->IsFreeListElement()) {
-        // TODO(rmacnak): Forwarding corpse from become. Probably we should also
-        // visit the store buffer blocks during become, and mark any forwardees
-        // as remembered if their forwarders are remembered to satisfy the
-        // following assert.
+      if (raw_object->IsForwardingCorpse()) {
+        // A source object of a become operation was a remembered object, but
+        // we do not visit the store buffer during become to remove it.
         continue;
       }
       ASSERT(raw_object->IsRemembered());
@@ -814,8 +810,19 @@
     int64_t middle = OS::GetCurrentTimeMicros();
     {
       TIMELINE_FUNCTION_GC_DURATION(thread, "WeakHandleProcessing");
-      ScavengerWeakVisitor weak_visitor(this);
-      IterateWeakRoots(isolate, &weak_visitor);
+      if (FLAG_background_finalization) {
+        FinalizationQueue* queue = new FinalizationQueue();
+        ScavengerWeakVisitor weak_visitor(thread, this, queue);
+        IterateWeakRoots(isolate, &weak_visitor);
+        if (queue->length() > 0) {
+          Dart::thread_pool()->Run(new BackgroundFinalizer(isolate, queue));
+        } else {
+          delete queue;
+        }
+      } else {
+        ScavengerWeakVisitor weak_visitor(thread, this, NULL);
+        IterateWeakRoots(isolate, &weak_visitor);
+      }
     }
     ProcessWeakReferences();
     page_space->ReleaseDataLock();
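
A stand-alone sketch of the hand-off pattern used for weak-handle finalization above: collect work into a queue during the scavenge and only hand it to a background worker when there is something to finalize. Finalizer, Queue, and the std::thread worker are illustrative stand-ins for the VM's FinalizationQueue and thread-pool BackgroundFinalizer task.

#include <memory>
#include <thread>
#include <vector>

using Finalizer = void (*)();
using Queue = std::vector<Finalizer>;

static void RunFinalizers(std::unique_ptr<Queue> queue) {
  for (Finalizer finalize : *queue) finalize();
}  // The worker owns (and frees) the queue, as BackgroundFinalizer does.

static void NoopFinalizer() {}

int main() {
  auto queue = std::make_unique<Queue>();
  queue->push_back(NoopFinalizer);  // pretend the scavenge found work
  if (!queue->empty()) {
    std::thread worker(RunFinalizers, std::move(queue));
    worker.join();  // the VM queues this on Dart::thread_pool() instead
  }
  // When the queue stays empty the diff simply deletes it; here the
  // unique_ptr does the same by going out of scope.
  return 0;
}
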
diff --git a/runtime/vm/scopes.cc b/runtime/vm/scopes.cc
index 6060e7a..be2cdda 100644
--- a/runtime/vm/scopes.cc
+++ b/runtime/vm/scopes.cc
@@ -98,6 +98,14 @@
 }
 
 
+void LocalScope::MoveLabel(SourceLabel* label) {
+  ASSERT(LocalLookupLabel(label->name()) == NULL);
+  ASSERT(label->kind() == SourceLabel::kForward);
+  labels_.Add(label);
+  label->set_owner(this);
+}
+
+
 NameReference* LocalScope::FindReference(const String& name) const {
   ASSERT(name.IsSymbol());
   intptr_t num_references = referenced_.length();
@@ -489,7 +497,7 @@
       if (outer_switch == NULL) {
         return label;
       } else {
-        outer_switch->AddLabel(label);
+        outer_switch->MoveLabel(label);
       }
     }
   }
diff --git a/runtime/vm/scopes.h b/runtime/vm/scopes.h
index aaf120d..c2fbfe0 100644
--- a/runtime/vm/scopes.h
+++ b/runtime/vm/scopes.h
@@ -182,7 +182,6 @@
   const String& name() const { return name_; }
   LocalScope* owner() const { return owner_; }
   void set_owner(LocalScope* owner) {
-    ASSERT(owner_ == NULL);
     owner_ = owner;
   }
 
@@ -191,6 +190,7 @@
   // Returns the function level of the scope in which the label is defined.
   int FunctionLevel() const;
 
+  bool IsUnresolved() { return kind_ == kForward; }
   void ResolveForwardReference() { kind_ = kCase; }
 
  private:
@@ -255,6 +255,9 @@
   // is already present.
   bool AddLabel(SourceLabel* label);
 
+  // Move an unresolved switch case label to an outer switch.
+  void MoveLabel(SourceLabel* label);
+
   // Lookup a variable in this scope only.
   LocalVariable* LocalLookupVariable(const String& name) const;
 
diff --git a/runtime/vm/service.cc b/runtime/vm/service.cc
index fc78a48..e209d3b 100644
--- a/runtime/vm/service.cc
+++ b/runtime/vm/service.cc
@@ -413,6 +413,10 @@
     return true;
   }
 
+  virtual bool ValidateObject(const Object& value) const {
+    return true;
+  }
+
   const char* name() const {
     return name_;
   }
@@ -427,12 +431,42 @@
     PrintInvalidParamError(js, name);
   }
 
+  virtual void PrintErrorObject(const char* name,
+                                const Object& value,
+                                JSONStream* js) const {
+    PrintInvalidParamError(js, name);
+  }
+
  private:
   const char* name_;
   bool required_;
 };
 
 
+class DartStringParameter : public MethodParameter {
+ public:
+  DartStringParameter(const char* name, bool required)
+      : MethodParameter(name, required) {
+  }
+
+  virtual bool ValidateObject(const Object& value) const {
+    return value.IsString();
+  }
+};
+
+
+class DartListParameter : public MethodParameter {
+ public:
+  DartListParameter(const char* name, bool required)
+      : MethodParameter(name, required) {
+  }
+
+  virtual bool ValidateObject(const Object& value) const {
+    return value.IsArray() || value.IsGrowableObjectArray();
+  }
+};
+
+
 class NoSuchParameter : public MethodParameter {
  public:
   explicit NoSuchParameter(const char* name)
@@ -442,6 +476,10 @@
   virtual bool Validate(const char* value) const {
     return (value == NULL);
   }
+
+  virtual bool ValidateObject(const Object& value) const {
+    return value.IsNull();
+  }
 };
 
 
@@ -737,19 +775,38 @@
   if (parameters == NULL) {
     return true;
   }
-  for (intptr_t i = 0; parameters[i] != NULL; i++) {
-    const MethodParameter* parameter = parameters[i];
-    const char* name = parameter->name();
-    const bool required = parameter->required();
-    const char* value = js->LookupParam(name);
-    const bool has_parameter = (value != NULL);
-    if (required && !has_parameter) {
-      PrintMissingParamError(js, name);
-      return false;
+  if (js->NumObjectParameters() > 0) {
+    Object& value = Object::Handle();
+    for (intptr_t i = 0; parameters[i] != NULL; i++) {
+      const MethodParameter* parameter = parameters[i];
+      const char* name = parameter->name();
+      const bool required = parameter->required();
+      value = js->LookupObjectParam(name);
+      const bool has_parameter = !value.IsNull();
+      if (required && !has_parameter) {
+        PrintMissingParamError(js, name);
+        return false;
+      }
+      if (has_parameter && !parameter->ValidateObject(value)) {
+        parameter->PrintErrorObject(name, value, js);
+        return false;
+      }
     }
-    if (has_parameter && !parameter->Validate(value)) {
-      parameter->PrintError(name, value, js);
-      return false;
+  } else {
+    for (intptr_t i = 0; parameters[i] != NULL; i++) {
+      const MethodParameter* parameter = parameters[i];
+      const char* name = parameter->name();
+      const bool required = parameter->required();
+      const char* value = js->LookupParam(name);
+      const bool has_parameter = (value != NULL);
+      if (required && !has_parameter) {
+        PrintMissingParamError(js, name);
+        return false;
+      }
+      if (has_parameter && !parameter->Validate(value)) {
+        parameter->PrintError(name, value, js);
+        return false;
+      }
     }
   }
   return true;
@@ -775,7 +832,9 @@
 }
 
 
-void Service::InvokeMethod(Isolate* I, const Array& msg) {
+void Service::InvokeMethod(Isolate* I,
+                           const Array& msg,
+                           bool parameters_are_dart_objects) {
   Thread* T = Thread::Current();
   ASSERT(I == T->isolate());
   ASSERT(I != NULL);
@@ -809,7 +868,11 @@
 
     JSONStream js;
     js.Setup(zone.GetZone(), SendPort::Cast(reply_port).Id(),
-             seq, method_name, param_keys, param_values);
+             seq,
+             method_name,
+             param_keys,
+             param_values,
+             parameters_are_dart_objects);
 
     // RPC came in with a custom service id zone.
     const char* id_zone_param = js.LookupParam("_idZone");
@@ -859,7 +922,6 @@
 
     if (handler != NULL) {
       EmbedderHandleMessage(handler, &js);
-      js.PostReply();
       return;
     }
 
@@ -890,6 +952,12 @@
 }
 
 
+void Service::HandleObjectRootMessage(const Array& msg_instance) {
+  Isolate* isolate = Isolate::Current();
+  InvokeMethod(isolate, msg_instance, true);
+}
+
+
 void Service::HandleIsolateMessage(Isolate* isolate, const Array& msg) {
   ASSERT(isolate != NULL);
   InvokeMethod(isolate, msg);
@@ -1126,16 +1194,16 @@
   ASSERT(handler != NULL);
   Dart_ServiceRequestCallback callback = handler->callback();
   ASSERT(callback != NULL);
-  const char* r = NULL;
-  const char* method = js->method();
-  const char** keys = js->param_keys();
-  const char** values = js->param_values();
-  r = callback(method, keys, values, js->num_params(), handler->user_data());
-  ASSERT(r != NULL);
-  // TODO(johnmccutchan): Allow for NULL returns?
-  TextBuffer* buffer = js->buffer();
-  buffer->AddString(r);
-  free(const_cast<char*>(r));
+  const char* response = NULL;
+  bool success = callback(js->method(), js->param_keys(), js->param_values(),
+                          js->num_params(), handler->user_data(), &response);
+  ASSERT(response != NULL);
+  if (!success) {
+    js->SetupError();
+  }
+  js->buffer()->AddString(response);
+  js->PostReply();
+  free(const_cast<char*>(response));
 }
 
 
@@ -2217,7 +2285,7 @@
 
   virtual Direction VisitObject(ObjectGraph::StackIterator* it) {
     RawObject* raw_obj = it->Get();
-    if (raw_obj->IsFreeListElement()) {
+    if (raw_obj->IsPseudoObject()) {
       return kProceed;
     }
     Thread* thread = Thread::Current();
@@ -2415,19 +2483,24 @@
                    "This isolate is being reloaded.");
     return true;
   }
-  DebuggerStackTrace* stack = isolate->debugger()->StackTrace();
-  ASSERT(isolate->CanReload());
-
-  if (stack->Length() > 0) {
-    // TODO(turnidge): We need to support this case.
+  if (!isolate->CanReload()) {
     js->PrintError(kFeatureDisabled,
-                   "Source can only be reloaded when stack is empty.");
+                   "This isolate cannot reload sources right now.");
     return true;
-  } else {
-    isolate->ReloadSources();
   }
 
-  PrintSuccess(js);
+  isolate->ReloadSources();
+
+  const Error& error = Error::Handle(isolate->sticky_reload_error());
+
+  if (error.IsNull()) {
+    PrintSuccess(js);
+  } else {
+    // Clear the sticky error.
+    isolate->clear_sticky_reload_error();
+    js->PrintError(kIsolateReloadFailed,
+                   "Isolate reload failed: %s", error.ToErrorCString());
+  }
   return true;
 }
 
@@ -3341,8 +3414,7 @@
   virtual uword filter_addr() const { return addr_; }
 
   virtual bool FindObject(RawObject* obj) const {
-    // Free list elements are not real objects, so skip them.
-    if (obj->IsFreeListElement()) {
+    if (obj->IsPseudoObject()) {
       return false;
     }
     uword obj_begin = RawObject::ToAddr(obj);
@@ -3597,6 +3669,19 @@
 }
 
 
+static const MethodParameter* get_object_store_params[] = {
+  RUNNABLE_ISOLATE_PARAMETER,
+  NULL,
+};
+
+
+static bool GetObjectStore(Thread* thread, JSONStream* js) {
+  JSONObject jsobj(js);
+  thread->isolate()->object_store()->PrintToJSONObject(&jsobj);
+  return true;
+}
+
+
 static const MethodParameter* get_class_list_params[] = {
   RUNNABLE_ISOLATE_PARAMETER,
   NULL,
@@ -3872,6 +3957,7 @@
 
 
 static const MethodParameter* set_vm_name_params[] = {
+  NO_ISOLATE_PARAMETER,
   new MethodParameter("name", true),
   NULL,
 };
@@ -3973,6 +4059,8 @@
     get_isolate_metric_list_params },
   { "getObject", GetObject,
     get_object_params },
+  { "_getObjectStore", GetObjectStore,
+    get_object_store_params },
   { "_getObjectByAddress", GetObjectByAddress,
     get_object_by_address_params },
   { "_getPersistentHandles", GetPersistentHandles,
diff --git a/runtime/vm/service.h b/runtime/vm/service.h
index 084b218..420ff62 100644
--- a/runtime/vm/service.h
+++ b/runtime/vm/service.h
@@ -82,6 +82,10 @@
   // Handles a message which is not directed to an isolate.
   static void HandleRootMessage(const Array& message);
 
+  // Handles a message which is not directed to an isolate and also
+  // expects the parameter keys and values to be actual Dart objects.
+  static void HandleObjectRootMessage(const Array& message);
+
   // Handles a message which is directed to a particular isolate.
   static void HandleIsolateMessage(Isolate* isolate, const Array& message);
 
@@ -161,7 +165,9 @@
   static void PrintJSONForVM(JSONStream* js, bool ref);
 
  private:
-  static void InvokeMethod(Isolate* isolate, const Array& message);
+  static void InvokeMethod(Isolate* isolate,
+                           const Array& message,
+                           bool parameters_are_dart_objects = false);
 
   static void EmbedderHandleMessage(EmbedderServiceHandler* handler,
                                     JSONStream* js);
diff --git a/runtime/vm/service_isolate.cc b/runtime/vm/service_isolate.cc
index e65c539..e85a5c0 100644
--- a/runtime/vm/service_isolate.cc
+++ b/runtime/vm/service_isolate.cc
@@ -363,6 +363,9 @@
 
  protected:
   static void ShutdownIsolate(uword parameter) {
+    if (FLAG_trace_service) {
+      OS::Print("vm-service: ShutdownIsolate\n");
+    }
     Isolate* I = reinterpret_cast<Isolate*>(parameter);
     ASSERT(ServiceIsolate::IsServiceIsolate(I));
     ServiceIsolate::SetServiceIsolate(NULL);
@@ -382,6 +385,10 @@
       if (!error.IsNull() && !error.IsUnwindError()) {
         OS::PrintErr("vm-service: Error: %s\n", error.ToErrorCString());
       }
+      error = I->sticky_error();
+      if (!error.IsNull() && !error.IsUnwindError()) {
+        OS::PrintErr("vm-service: Error: %s\n", error.ToErrorCString());
+      }
       Dart::RunShutdownCallback();
     }
     // Shut the isolate down.
@@ -523,4 +530,8 @@
   ServiceIsolate::SetServicePort(port);
 }
 
+
+void ServiceIsolate::VisitObjectPointers(ObjectPointerVisitor* visitor) {
+}
+
 }  // namespace dart
diff --git a/runtime/vm/service_isolate.h b/runtime/vm/service_isolate.h
index dc2e4be..9def85c 100644
--- a/runtime/vm/service_isolate.h
+++ b/runtime/vm/service_isolate.h
@@ -12,6 +12,8 @@
 
 namespace dart {
 
+class ObjectPointerVisitor;
+
 class ServiceIsolate : public AllStatic {
  public:
   static const char* kName;
@@ -41,6 +43,8 @@
     return server_address_;
   }
 
+  static void VisitObjectPointers(ObjectPointerVisitor* visitor);
+
  private:
   static void KillServiceIsolate();
 
diff --git a/runtime/vm/service_test.cc b/runtime/vm/service_test.cc
index 5071bce..c272b4c 100644
--- a/runtime/vm/service_test.cc
+++ b/runtime/vm/service_test.cc
@@ -571,23 +571,27 @@
 }
 
 
-static const char* alpha_callback(
+static bool alpha_callback(
     const char* name,
     const char** option_keys,
     const char** option_values,
     intptr_t num_options,
-    void* user_data) {
-  return strdup("alpha");
+    void* user_data,
+    const char** result) {
+  *result = strdup("alpha");
+  return true;
 }
 
 
-static const char* beta_callback(
+static bool beta_callback(
     const char* name,
     const char** option_keys,
     const char** option_values,
     intptr_t num_options,
-    void* user_data) {
-  return strdup("beta");
+    void* user_data,
+    const char** result) {
+  *result = strdup("beta");
+  return false;
 }
 
 
@@ -626,7 +630,7 @@
   service_msg = Eval(lib, "[0, port, 1, 'beta', [], []]");
   Service::HandleRootMessage(service_msg);
   EXPECT_EQ(MessageHandler::kOK, handler.HandleNextMessage());
-  EXPECT_STREQ("{\"jsonrpc\":\"2.0\", \"result\":beta,\"id\":1}",
+  EXPECT_STREQ("{\"jsonrpc\":\"2.0\", \"error\":beta,\"id\":1}",
                handler.msg());
 }
 
@@ -666,7 +670,7 @@
   service_msg = Eval(lib, "[0, port, '0', 'beta', [], []]");
   Service::HandleIsolateMessage(isolate, service_msg);
   EXPECT_EQ(MessageHandler::kOK, handler.HandleNextMessage());
-  EXPECT_STREQ("{\"jsonrpc\":\"2.0\", \"result\":beta,\"id\":\"0\"}",
+  EXPECT_STREQ("{\"jsonrpc\":\"2.0\", \"error\":beta,\"id\":\"0\"}",
                handler.msg());
 }
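
An illustrative embedder handler (EchoHandler is not from the patch) written against the callback shape exercised by alpha_callback and beta_callback above: success is reported through the bool return value, and the JSON payload is handed back through |result|, which the VM frees after posting the reply.

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

static bool EchoHandler(const char* method,
                        const char** param_keys,
                        const char** param_values,
                        intptr_t num_params,
                        void* user_data,
                        const char** result) {
  (void)method; (void)param_keys; (void)param_values;
  (void)num_params; (void)user_data;
  *result = strdup("{\"type\":\"Echo\"}");
  return true;  // false would make the VM report |result| as an RPC error
}

int main() {
  const char* response = NULL;
  const bool ok = EchoHandler("echo", NULL, NULL, 0, NULL, &response);
  free(const_cast<char*>(response));
  return ok ? 0 : 1;
}
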
 
diff --git a/runtime/vm/signal_handler.h b/runtime/vm/signal_handler.h
index ecdfe02..615e584 100644
--- a/runtime/vm/signal_handler.h
+++ b/runtime/vm/signal_handler.h
@@ -34,6 +34,9 @@
 struct mcontext_t;
 struct sigset_t {
 };
+#elif defined(TARGET_OS_FUCHSIA)
+#include <signal.h>  // NOLINT
+#include <ucontext.h>  // NOLINT
 #endif
 
 
@@ -84,7 +87,8 @@
   // that no actual instructions are skipped and then branch to the actual
   // signal handler.
   //
-  // For the kernel patch that fixes the issue see: http://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=6ecf830e5029598732e04067e325d946097519cb
+  // For the kernel patch that fixes the issue see:
+  // http://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/commit/?id=6ecf830e5029598732e04067e325d946097519cb
   //
   // Note: this function is marked "naked" because we must guarantee that
   // our NOPs occur before any compiler generated prologue.
diff --git a/runtime/vm/signal_handler_fuchsia.cc b/runtime/vm/signal_handler_fuchsia.cc
new file mode 100644
index 0000000..1b704eb
--- /dev/null
+++ b/runtime/vm/signal_handler_fuchsia.cc
@@ -0,0 +1,55 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/signal_handler.h"
+
+#include "platform/assert.h"
+
+namespace dart {
+
+uintptr_t SignalHandler::GetProgramCounter(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetFramePointer(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetCStackPointer(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetDartStackPointer(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+uintptr_t SignalHandler::GetLinkRegister(const mcontext_t& mcontext) {
+  UNIMPLEMENTED();
+  return 0;
+}
+
+
+void SignalHandler::InstallImpl(SignalAction action) {
+  UNIMPLEMENTED();
+}
+
+
+void SignalHandler::Remove() {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/simulator_arm64.cc b/runtime/vm/simulator_arm64.cc
index 36f5f9c..b3cac8b 100644
--- a/runtime/vm/simulator_arm64.cc
+++ b/runtime/vm/simulator_arm64.cc
@@ -1218,7 +1218,7 @@
 void Simulator::ClearExclusive() {
   MutexLocker ml(exclusive_access_lock_);
   // Remove the reservation for this thread.
-  SetExclusiveAccess(NULL);
+  SetExclusiveAccess(0);
 }
 
 
diff --git a/runtime/vm/simulator_dbc.cc b/runtime/vm/simulator_dbc.cc
index ffe4e92..b1c648d 100644
--- a/runtime/vm/simulator_dbc.cc
+++ b/runtime/vm/simulator_dbc.cc
@@ -28,6 +28,7 @@
 #include "vm/object_store.h"
 #include "vm/os_thread.h"
 #include "vm/stack_frame.h"
+#include "vm/symbols.h"
 
 namespace dart {
 
@@ -442,7 +443,7 @@
       : "r"(rhs), "r"(out)
       : "cc", "r12");
 #elif defined(HOST_ARCH_ARM64)
-  int64_t prod_lo;
+  int64_t prod_lo = 0;
   asm volatile(
       "mul %1, %2, %3\n"
       "smulh %2, %2, %3\n"
@@ -451,7 +452,7 @@
       "mov %0, #0;\n"
       "str %1, [%4, #0]\n"
       "1:"
-      : "+r"(res), "=r"(prod_lo), "+r"(lhs)
+      : "=r"(res), "+r"(prod_lo), "+r"(lhs)
       : "r"(rhs), "r"(out)
       : "cc");
 #else
@@ -477,6 +478,7 @@
 #define SMI_GT(lhs, rhs, pres) SMI_COND(>, lhs, rhs, pres)
 #define SMI_BITOR(lhs, rhs, pres) ((*(pres) = (lhs | rhs)), false)
 #define SMI_BITAND(lhs, rhs, pres) ((*(pres) = ((lhs) & (rhs))), false)
+#define SMI_BITXOR(lhs, rhs, pres) ((*(pres) = ((lhs) ^ (rhs))), false)
 
 
 void Simulator::CallRuntime(Thread* thread,
@@ -754,6 +756,20 @@
 #define DECLARE_A_X int32_t rD; USE(rD)
 #define DECODE_A_X rD = (static_cast<int32_t>(op) >> Bytecode::kDShift);
 
+
+#define SMI_FASTPATH_ICDATA_INC                                                \
+  do {                                                                         \
+    ASSERT(Bytecode::IsCallOpcode(*pc));                                       \
+    const uint16_t kidx = Bytecode::DecodeD(*pc);                              \
+    const RawICData* icdata = RAW_CAST(ICData, LOAD_CONSTANT(kidx));           \
+    RawObject** data = icdata->ptr()->ic_data_->ptr()->data();                 \
+    const intptr_t count_offset = ICData::CountIndexFor(2);                    \
+    const intptr_t raw_smi_old =                                               \
+        reinterpret_cast<intptr_t>(data[count_offset]);                        \
+    const intptr_t raw_smi_new = raw_smi_old + Smi::RawValue(1);               \
+    *reinterpret_cast<intptr_t*>(&data[count_offset]) = raw_smi_new;           \
+  } while (0);                                                                 \
+
 // Declare bytecode handler for a smi operation (e.g. AddTOS) with the
 // given result type and the given behavior specified as a function
 // that takes left and right operands and result slot and returns
@@ -764,6 +780,7 @@
     const intptr_t rhs = reinterpret_cast<intptr_t>(SP[-0]);                   \
     ResultT* slot = reinterpret_cast<ResultT*>(SP - 1);                        \
     if (LIKELY(AreBothSmis(lhs, rhs) && !Func(lhs, rhs, slot))) {              \
+      SMI_FASTPATH_ICDATA_INC;                                                 \
       /* Fast path succeeded. Skip the generic call that follows. */           \
       pc++;                                                                    \
       /* We dropped 2 arguments and push result                   */           \
@@ -771,6 +788,28 @@
     }                                                                          \
   }
 
+// Skip the next instruction if there is no overflow.
+#define SMI_OP_CHECK(ResultT, Func)                                            \
+  {                                                                            \
+    const intptr_t lhs = reinterpret_cast<intptr_t>(FP[rB]);                   \
+    const intptr_t rhs = reinterpret_cast<intptr_t>(FP[rC]);                   \
+    ResultT* slot = reinterpret_cast<ResultT*>(&FP[rA]);                       \
+    if (LIKELY(!Func(lhs, rhs, slot))) {                                       \
+      /* Success. Skip the instruction that follows. */                        \
+      pc++;                                                                    \
+    }                                                                          \
+  }
+
+// Do not check for overflow.
+#define SMI_OP_NOCHECK(ResultT, Func)                                          \
+  {                                                                            \
+    const intptr_t lhs = reinterpret_cast<intptr_t>(FP[rB]);                   \
+    const intptr_t rhs = reinterpret_cast<intptr_t>(FP[rC]);                   \
+    ResultT* slot = reinterpret_cast<ResultT*>(&FP[rA]);                       \
+    Func(lhs, rhs, slot);                                                      \
+  }                                                                            \
+
+
 // Exception handling helper. Gets handler FP and PC from the Simulator where
 // they were stored by Simulator::Longjmp and proceeds to execute the handler.
 // Corner case: handler PC can be a fake marker that marks entry frame, which
@@ -1487,6 +1526,28 @@
   }
 
   {
+    BYTECODE(OneByteStringFromCharCode, A_X);
+    const intptr_t char_code = Smi::Value(RAW_CAST(Smi, FP[rD]));
+    ASSERT(char_code >= 0);
+    ASSERT(char_code <= 255);
+    RawString** strings = Symbols::PredefinedAddress();
+    const intptr_t index = char_code + Symbols::kNullCharCodeSymbolOffset;
+    FP[rA] = strings[index];
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(StringToCharCode, A_X);
+    RawOneByteString* str = RAW_CAST(OneByteString, FP[rD]);
+    if (str->ptr()->length_ == Smi::New(1)) {
+      FP[rA] = Smi::New(str->ptr()->data()[0]);
+    } else {
+      FP[rA] = Smi::New(-1);
+    }
+    DISPATCH();
+  }
+
+  {
     BYTECODE(AddTOS, A_B_C);
     SMI_FASTPATH_TOS(intptr_t, SignedAddWithOverflow);
     DISPATCH();
@@ -1526,6 +1587,112 @@
     SMI_FASTPATH_TOS(RawObject*, SMI_GT);
     DISPATCH();
   }
+  {
+    BYTECODE(Add, A_B_C);
+    SMI_OP_CHECK(intptr_t, SignedAddWithOverflow);
+    DISPATCH();
+  }
+  {
+    BYTECODE(Sub, A_B_C);
+    SMI_OP_CHECK(intptr_t, SignedSubWithOverflow);
+    DISPATCH();
+  }
+  {
+    BYTECODE(Mul, A_B_C);
+    SMI_OP_CHECK(intptr_t, SMI_MUL);
+    DISPATCH();
+  }
+  {
+    BYTECODE(Neg, A_D);
+    const intptr_t value = reinterpret_cast<intptr_t>(FP[rD]);
+    intptr_t* out = reinterpret_cast<intptr_t*>(&FP[rA]);
+    if (LIKELY(!SignedSubWithOverflow(0, value, out))) {
+      pc++;
+    }
+    DISPATCH();
+  }
+  {
+    BYTECODE(BitOr, A_B_C);
+    SMI_OP_NOCHECK(intptr_t, SMI_BITOR);
+    DISPATCH();
+  }
+  {
+    BYTECODE(BitAnd, A_B_C);
+    SMI_OP_NOCHECK(intptr_t, SMI_BITAND);
+    DISPATCH();
+  }
+  {
+    BYTECODE(BitXor, A_B_C);
+    SMI_OP_NOCHECK(intptr_t, SMI_BITXOR);
+    DISPATCH();
+  }
+  {
+    BYTECODE(BitNot, A_D);
+    const intptr_t value = reinterpret_cast<intptr_t>(FP[rD]);
+    *reinterpret_cast<intptr_t*>(&FP[rA]) = ~value & (~kSmiTagMask);
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(Div, A_B_C);
+    const intptr_t rhs = reinterpret_cast<intptr_t>(FP[rC]);
+    if (rhs != 0) {
+      const intptr_t lhs = reinterpret_cast<intptr_t>(FP[rB]);
+      const intptr_t res = (lhs >> kSmiTagSize) / (rhs >> kSmiTagSize);
+#if defined(ARCH_IS_64_BIT)
+      const intptr_t untaggable = 0x4000000000000000LL;
+#else
+      const intptr_t untaggable = 0x40000000L;
+#endif  // defined(ARCH_IS_64_BIT)
+      if (res != untaggable) {
+        *reinterpret_cast<intptr_t*>(&FP[rA]) = res << kSmiTagSize;
+        pc++;
+      }
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(Mod, A_B_C);
+    const intptr_t rhs = reinterpret_cast<intptr_t>(FP[rC]);
+    if (rhs != 0) {
+      const intptr_t lhs = reinterpret_cast<intptr_t>(FP[rB]);
+      const intptr_t res =
+          ((lhs >> kSmiTagSize) % (rhs >> kSmiTagSize)) << kSmiTagSize;
+      *reinterpret_cast<intptr_t*>(&FP[rA]) =
+          (res < 0) ? ((rhs < 0) ? (res - rhs) : (res + rhs)) : res;
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(Shl, A_B_C);
+    const intptr_t rhs = reinterpret_cast<intptr_t>(FP[rC]) >> kSmiTagSize;
+    if (rhs >= 0) {
+      const intptr_t lhs = reinterpret_cast<intptr_t>(FP[rB]);
+      const intptr_t res = lhs << rhs;
+      if (lhs == (res >> rhs)) {
+        *reinterpret_cast<intptr_t*>(&FP[rA]) = res;
+        pc++;
+      }
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(Shr, A_B_C);
+    const intptr_t rhs = reinterpret_cast<intptr_t>(FP[rC]) >> kSmiTagSize;
+    if (rhs >= 0) {
+      const intptr_t shift_amount =
+          (rhs >= kBitsPerWord) ? (kBitsPerWord - 1) : rhs;
+      const intptr_t lhs = reinterpret_cast<intptr_t>(FP[rB]) >> kSmiTagSize;
+      *reinterpret_cast<intptr_t*>(&FP[rA]) =
+          (lhs >> shift_amount) << kSmiTagSize;
+      pc++;
+    }
+    DISPATCH();
+  }
 
   // Return and return-like instructions (Intrinsic).
   {
@@ -1710,6 +1877,73 @@
   }
 
   {
+    BYTECODE(InstanceOf, A);  // Stack: instance, type args, type, cache
+    RawInstance* instance = static_cast<RawInstance*>(SP[-3]);
+    RawTypeArguments* instantiator_type_arguments =
+        static_cast<RawTypeArguments*>(SP[-2]);
+    RawAbstractType* type = static_cast<RawAbstractType*>(SP[-1]);
+    RawSubtypeTestCache* cache = static_cast<RawSubtypeTestCache*>(SP[0]);
+
+    if (cache != null_value) {
+      const intptr_t cid = SimulatorHelpers::GetClassId(instance);
+
+      RawTypeArguments* instance_type_arguments =
+          static_cast<RawTypeArguments*>(null_value);
+      RawObject* instance_cid_or_function;
+      if (cid == kClosureCid) {
+        RawClosure* closure = static_cast<RawClosure*>(instance);
+        instance_type_arguments = closure->ptr()->type_arguments_;
+        instance_cid_or_function = closure->ptr()->function_;
+      } else {
+        instance_cid_or_function = Smi::New(cid);
+
+        RawClass* instance_class =
+            thread->isolate()->class_table()->At(cid);
+        if (instance_class->ptr()->num_type_arguments_ < 0) {
+          goto InstanceOfCallRuntime;
+        } else if (instance_class->ptr()->num_type_arguments_ > 0) {
+          instance_type_arguments = reinterpret_cast<RawTypeArguments**>(
+              instance
+                  ->ptr())[instance_class->ptr()
+                               ->type_arguments_field_offset_in_words_];
+        }
+      }
+
+      for (RawObject** entries = cache->ptr()->cache_->ptr()->data();
+           entries[0] != null_value;
+           entries += SubtypeTestCache::kTestEntryLength) {
+        if ((entries[SubtypeTestCache::kInstanceClassIdOrFunction] ==
+                instance_cid_or_function) &&
+            (entries[SubtypeTestCache::kInstanceTypeArguments] ==
+                instance_type_arguments) &&
+            (entries[SubtypeTestCache::kInstantiatorTypeArguments] ==
+                instantiator_type_arguments)) {
+          SP[-3] = entries[SubtypeTestCache::kTestResult];
+          goto InstanceOfOk;
+        }
+      }
+    }
+
+  InstanceOfCallRuntime:
+    {
+      SP[1] = instance;
+      SP[2] = type;
+      SP[3] = instantiator_type_arguments;
+      SP[4] = cache;
+      Exit(thread, FP, SP + 5, pc);
+      NativeArguments native_args(thread, 4, SP + 1, SP - 3);
+      INVOKE_RUNTIME(DRT_Instanceof, native_args);
+    }
+
+  InstanceOfOk:
+    SP -= 3;
+    if (rA) {  // Negate result.
+      SP[0] = (SP[0] == true_value) ? false_value : true_value;
+    }
+    DISPATCH();
+  }
+
+  {
     BYTECODE(AssertAssignable, A_D);  // Stack: instance, type args, type, name
     RawObject** args = SP - 3;
     if (args[0] != null_value) {
@@ -1720,8 +1954,8 @@
         SP[2] = args[3];  // name.
         SP[3] = args[2];  // type.
         Exit(thread, FP, SP + 4, pc);
-        NativeArguments args(thread, 3, SP + 1, SP - 3);
-        INVOKE_RUNTIME(DRT_BadTypeError, args);
+        NativeArguments native_args(thread, 3, SP + 1, SP - 3);
+        INVOKE_RUNTIME(DRT_BadTypeError, native_args);
         UNREACHABLE();
       }
 
@@ -1781,8 +2015,8 @@
       SP[4] = args[3];  // name
       SP[5] = cache;
       Exit(thread, FP, SP + 6, pc);
-      NativeArguments args(thread, 5, SP + 1, SP - 3);
-      INVOKE_RUNTIME(DRT_TypeCheck, args);
+      NativeArguments native_args(thread, 5, SP + 1, SP - 3);
+      INVOKE_RUNTIME(DRT_TypeCheck, native_args);
     }
 
   AssertAssignableOk:
@@ -1814,6 +2048,84 @@
   }
 
   {
+    BYTECODE(TestSmi, A_D);
+    intptr_t left = reinterpret_cast<intptr_t>(RAW_CAST(Smi, FP[rA]));
+    intptr_t right = reinterpret_cast<intptr_t>(RAW_CAST(Smi, FP[rD]));
+    if ((left & right) != 0) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(CheckSmi, 0);
+    intptr_t obj = reinterpret_cast<intptr_t>(FP[rA]);
+    if ((obj & kSmiTagMask) == kSmiTag) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(CheckClassId, A_D);
+    const intptr_t actual_cid = SimulatorHelpers::GetClassId(FP[rA]);
+    const intptr_t desired_cid = rD;
+    pc += (actual_cid == desired_cid) ? 1 : 0;
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(CheckDenseSwitch, A_D);
+    const intptr_t raw_value = reinterpret_cast<intptr_t>(FP[rA]);
+    const bool is_smi = ((raw_value & kSmiTagMask) == kSmiTag);
+    const intptr_t cid_min = Bytecode::DecodeD(*pc);
+    const intptr_t cid_mask =
+        Smi::Value(RAW_CAST(Smi, LOAD_CONSTANT(Bytecode::DecodeD(*(pc + 1)))));
+    if (LIKELY(!is_smi)) {
+      const intptr_t cid_max = Utils::HighestBit(cid_mask) + cid_min;
+      const intptr_t cid = SimulatorHelpers::GetClassId(FP[rA]);
+      // The cid is in-bounds, and the bit is set in the mask.
+      if ((cid >= cid_min) && (cid <= cid_max) &&
+          ((cid_mask & (1 << (cid - cid_min))) != 0)) {
+        pc += 3;
+      } else {
+        pc += 2;
+      }
+    } else {
+      const bool may_be_smi = (rD == 1);
+      pc += (may_be_smi ? 3 : 2);
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(CheckCids, A_B_C);
+    const intptr_t raw_value = reinterpret_cast<intptr_t>(FP[rA]);
+    const bool is_smi = ((raw_value & kSmiTagMask) == kSmiTag);
+    const bool may_be_smi = (rB == 1);
+    const intptr_t cids_length = rC;
+    if (LIKELY(!is_smi)) {
+      const intptr_t cid = SimulatorHelpers::GetClassId(FP[rA]);
+      for (intptr_t i = 0; i < cids_length; i++) {
+        const intptr_t desired_cid = Bytecode::DecodeD(*(pc + i));
+        if (cid == desired_cid) {
+          pc++;
+          break;
+        }
+        // The cids are sorted.
+        if (cid < desired_cid) {
+          break;
+        }
+      }
+      pc += cids_length;
+    } else {
+      pc += cids_length;
+      pc += (may_be_smi ? 1 : 0);
+    }
+    DISPATCH();
+  }
+
+  {
     BYTECODE(IfEqStrictTOS, 0);
     SP -= 2;
     if (SP[1] != SP[2]) {
@@ -1902,6 +2214,22 @@
   }
 
   {
+    BYTECODE(IfEqNull, A);
+    if (FP[rA] != null_value) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(IfNeNull, A_D);
+    if (FP[rA] == null_value) {
+      pc++;
+    }
+    DISPATCH();
+  }
+
+  {
     BYTECODE(Jump, 0);
     const int32_t target = static_cast<int32_t>(op) >> 8;
     pc += (target - 1);
@@ -1950,56 +2278,75 @@
 
   {
     BYTECODE(Deopt, A_D);
-    const uint16_t deopt_id = rD;
-    if (deopt_id == 0) {  // Lazy deoptimization.
-      // Preserve result of the previous call.
-      // TODO(vegorov) we could have actually included result into the
-      // deoptimization environment because it is passed through the stack.
-      // If we do then we could remove special result handling from this code.
-      RawObject* result = SP[0];
+    const bool is_lazy = rD == 0;
 
-      // Leaf runtime function DeoptimizeCopyFrame expects a Dart frame.
-      // The code in this frame may not cause GC.
-      // DeoptimizeCopyFrame and DeoptimizeFillFrame are leaf runtime calls.
-      EnterSyntheticFrame(&FP, &SP, pc - 1);
-      const intptr_t frame_size_in_bytes =
-          DLRT_DeoptimizeCopyFrame(reinterpret_cast<uword>(FP),
-                                   /*is_lazy_deopt=*/1);
-      LeaveSyntheticFrame(&FP, &SP);
+    // Preserve result of the previous call.
+    // TODO(vegorov) we could have actually included result into the
+    // deoptimization environment because it is passed through the stack.
+    // If we do then we could remove special result handling from this code.
+    RawObject* result = SP[0];
 
-      SP = FP + (frame_size_in_bytes / kWordSize);
-      EnterSyntheticFrame(&FP, &SP, pc - 1);
-      DLRT_DeoptimizeFillFrame(reinterpret_cast<uword>(FP));
-
-      // We are now inside a valid frame.
-      {
-        *++SP = result;  // Preserve result (call below can cause GC).
-        *++SP = 0;  // Space for the result: number of materialization args.
-        Exit(thread, FP, SP + 1, /*pc=*/0);
-        NativeArguments native_args(thread, 0, SP, SP);
-        INVOKE_RUNTIME(DRT_DeoptimizeMaterialize, native_args);
-      }
-      const intptr_t materialization_arg_count =
-          Smi::Value(RAW_CAST(Smi, *SP--));
-      result = *SP--;  // Reload the result. It might have been relocated by GC.
-
-      // Restore caller PC.
-      pc = SavedCallerPC(FP);
-
-      // Check if it is a fake PC marking the entry frame.
-      ASSERT((reinterpret_cast<uword>(pc) & 2) == 0);
-
-      // Restore SP, FP and PP. Push result and dispatch.
-      // Note: unlike in a normal return sequence we don't need to drop
-      // arguments - those are not part of the innermost deoptimization
-      // environment they were dropped by FlowGraphCompiler::RecordAfterCall.
-      SP = FrameArguments(FP, materialization_arg_count);
-      FP = SavedCallerFP(FP);
-      pp = SimulatorHelpers::FrameCode(FP)->ptr()->object_pool_->ptr();
-      *SP = result;
-    } else {
-      UNIMPLEMENTED();
+    // When not preserving the result, we still need to preserve SP[0] as it
+    // contains some temporary expression.
+    if (!is_lazy) {
+      SP++;
     }
+
+    // Leaf runtime function DeoptimizeCopyFrame expects a Dart frame.
+    // The code in this frame may not cause GC.
+    // DeoptimizeCopyFrame and DeoptimizeFillFrame are leaf runtime calls.
+    EnterSyntheticFrame(&FP, &SP, pc - (is_lazy ? 1 : 0));
+    const intptr_t frame_size_in_bytes =
+        DLRT_DeoptimizeCopyFrame(reinterpret_cast<uword>(FP), is_lazy ? 1 : 0);
+    LeaveSyntheticFrame(&FP, &SP);
+
+    SP = FP + (frame_size_in_bytes / kWordSize);
+    EnterSyntheticFrame(&FP, &SP, pc - (is_lazy ? 1 : 0));
+    DLRT_DeoptimizeFillFrame(reinterpret_cast<uword>(FP));
+
+    // We are now inside a valid frame.
+    {
+      if (is_lazy) {
+        *++SP = result;  // Preserve result (call below can cause GC).
+      }
+      *++SP = 0;  // Space for the result: number of materialization args.
+      Exit(thread, FP, SP + 1, /*pc=*/0);
+      NativeArguments native_args(thread, 0, SP, SP);
+      INVOKE_RUNTIME(DRT_DeoptimizeMaterialize, native_args);
+    }
+    const intptr_t materialization_arg_count =
+        Smi::Value(RAW_CAST(Smi, *SP--)) / kWordSize;
+    if (is_lazy) {
+      // Reload the result. It might have been relocated by GC.
+      result = *SP--;
+    }
+
+    // Restore caller PC.
+    pc = SavedCallerPC(FP);
+
+    // Check if it is a fake PC marking the entry frame.
+    ASSERT((reinterpret_cast<uword>(pc) & 2) == 0);
+
+    // Restore SP, FP and PP. Push result and dispatch.
+    // Note: unlike in a normal return sequence we don't need to drop
+    // arguments - those are not part of the innermost deoptimization
+    // environment; they were dropped by FlowGraphCompiler::RecordAfterCall.
+
+    // If the result is not preserved, the unoptimized frame ends at the
+    // next slot.
+    SP = FrameArguments(FP, materialization_arg_count);
+    FP = SavedCallerFP(FP);
+    pp = SimulatorHelpers::FrameCode(FP)->ptr()->object_pool_->ptr();
+    if (is_lazy) {
+      SP[0] = result;  // Put the result on the stack.
+    } else {
+      SP--;  // No result to push.
+    }
+    DISPATCH();
+  }
+
+  {
+    BYTECODE(Nop, 0);
     DISPATCH();
   }
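
The new Smi fast-path bytecodes above operate on tagged values directly. The stand-alone sketch below works through that tag arithmetic, assuming the usual one-bit Smi tag; the constants are local stand-ins rather than the VM's globals.h definitions.

#include <cassert>
#include <stdint.h>

static const intptr_t kSmiTag = 0;
static const intptr_t kSmiTagSize = 1;
static const intptr_t kSmiTagMask = 1;

static bool IsSmi(intptr_t raw) { return (raw & kSmiTagMask) == kSmiTag; }
static intptr_t UntagSmi(intptr_t raw) { return raw >> kSmiTagSize; }
static intptr_t TagSmi(intptr_t value) { return value << kSmiTagSize; }

int main() {
  const intptr_t lhs = TagSmi(21);
  const intptr_t rhs = TagSmi(3);
  assert(IsSmi(lhs) && IsSmi(rhs));

  // Div untags both operands, divides, and only retags when the quotient is
  // re-taggable (hence the untaggable 0x4000... check in the bytecode).
  const intptr_t quotient = UntagSmi(lhs) / UntagSmi(rhs);
  assert(TagSmi(quotient) == TagSmi(7));

  // BitNot inverts and then clears the tag bits, exactly like the BitNot
  // bytecode: ~value & ~kSmiTagMask keeps the result a valid Smi.
  const intptr_t inverted = ~lhs & ~kSmiTagMask;
  assert(IsSmi(inverted) && UntagSmi(inverted) == ~UntagSmi(lhs));
  return 0;
}
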
 
diff --git a/runtime/vm/simulator_dbc.h b/runtime/vm/simulator_dbc.h
index 9865ca9..7172bbe 100644
--- a/runtime/vm/simulator_dbc.h
+++ b/runtime/vm/simulator_dbc.h
@@ -76,7 +76,7 @@
   }
 
   enum IntrinsicId {
-#define V(test_class_name, test_function_name, enum_name, fp) \
+#define V(test_class_name, test_function_name, enum_name, type, fp) \
     k##enum_name##Intrinsic,
   ALL_INTRINSICS_LIST(V)
   GRAPH_INTRINSICS_LIST(V)
diff --git a/runtime/vm/simulator_mips.cc b/runtime/vm/simulator_mips.cc
index d23c755..d980ca1 100644
--- a/runtime/vm/simulator_mips.cc
+++ b/runtime/vm/simulator_mips.cc
@@ -1156,7 +1156,7 @@
 void Simulator::ClearExclusive() {
   MutexLocker ml(exclusive_access_lock_);
   // Remove the reservation for this thread.
-  SetExclusiveAccess(NULL);
+  SetExclusiveAccess(0);
 }
 
 
diff --git a/runtime/vm/snapshot.cc b/runtime/vm/snapshot.cc
index e5c5b0b..4195995 100644
--- a/runtime/vm/snapshot.cc
+++ b/runtime/vm/snapshot.cc
@@ -19,7 +19,6 @@
 #include "vm/stub_code.h"
 #include "vm/symbols.h"
 #include "vm/timeline.h"
-#include "vm/verified_memory.h"
 #include "vm/version.h"
 
 // We currently only expect the Dart mutator to read snapshots.
@@ -29,8 +28,6 @@
 
 namespace dart {
 
-static const int kNumVmIsolateSnapshotReferences = 32 * KB;
-static const int kNumInitialReferencesInFullSnapshot = 160 * KB;
 static const int kNumInitialReferences = 64;
 
 
@@ -586,82 +583,6 @@
 };
 
 
-RawApiError* SnapshotReader::ReadFullSnapshot() {
-  ASSERT(Snapshot::IsFull(kind_));
-  Thread* thread = Thread::Current();
-  Isolate* isolate = thread->isolate();
-  ASSERT(isolate != NULL);
-  ObjectStore* object_store = isolate->object_store();
-  ASSERT(object_store != NULL);
-
-  // First read the version string, and check that it matches.
-  RawApiError* error = VerifyVersionAndFeatures();
-  if (error != ApiError::null()) {
-    return error;
-  }
-
-  // The version string matches. Read the rest of the snapshot.
-
-  // TODO(asiva): Add a check here to ensure we have the right heap
-  // size for the full snapshot being read.
-  {
-    NoSafepointScope no_safepoint;
-    HeapLocker hl(thread, old_space());
-
-    // Read in all the objects stored in the object store.
-    intptr_t num_flds =
-        (object_store->to_snapshot(kind_) - object_store->from());
-    for (intptr_t i = 0; i <= num_flds; i++) {
-      *(object_store->from() + i) = ReadObjectImpl(kAsInlinedObject);
-    }
-    for (intptr_t i = 0; i < backward_references_->length(); i++) {
-      if (!(*backward_references_)[i].is_deserialized()) {
-        ReadObjectImpl(kAsInlinedObject);
-        (*backward_references_)[i].set_state(kIsDeserialized);
-      }
-    }
-
-    if (kind_ == Snapshot::kAppNoJIT) {
-      ICData& ic = ICData::Handle(thread->zone());
-      Object& funcOrCode = Object::Handle(thread->zone());
-      Code& code = Code::Handle(thread->zone());
-      Smi& entry_point = Smi::Handle(thread->zone());
-      for (intptr_t i = 0; i < backward_references_->length(); i++) {
-        if ((*backward_references_)[i].reference()->IsICData()) {
-          ic ^= (*backward_references_)[i].reference()->raw();
-          for (intptr_t j = 0; j < ic.NumberOfChecks(); j++) {
-            funcOrCode = ic.GetTargetOrCodeAt(j);
-            if (funcOrCode.IsCode()) {
-              code ^= funcOrCode.raw();
-              entry_point = Smi::FromAlignedAddress(code.EntryPoint());
-              ic.SetEntryPointAt(j, entry_point);
-            }
-          }
-        }
-      }
-    }
-
-    // Validate the class table.
-#if defined(DEBUG)
-    isolate->ValidateClassTable();
-#endif
-
-    // Setup native resolver for bootstrap impl.
-    Bootstrap::SetupNativeResolver();
-  }
-
-  Class& cls = Class::Handle(thread->zone());
-  for (intptr_t i = 0; i < backward_references_->length(); i++) {
-    if ((*backward_references_)[i].reference()->IsClass()) {
-      cls ^= (*backward_references_)[i].reference()->raw();
-      cls.RehashConstants(thread->zone());
-    }
-  }
-
-  return ApiError::null();
-}
-
-
 RawObject* SnapshotReader::ReadScriptSnapshot() {
   ASSERT(kind_ == Snapshot::kScript);
 
@@ -1506,12 +1427,8 @@
   ASSERT_NO_SAFEPOINT_SCOPE();
   ASSERT(Utils::IsAligned(size, kObjectAlignment));
 
-  // Allocate memory where all words look like smis. This is currently
-  // only needed for DEBUG-mode validation in StorePointer/StoreSmi, but will
-  // be essential with the upcoming deletion barrier.
   uword address =
-      old_space()->TryAllocateSmiInitializedLocked(size,
-                                                   PageSpace::kForceGrowth);
+      old_space()->TryAllocateDataBumpLocked(size, PageSpace::kForceGrowth);
   if (address == 0) {
     // Use the preallocated out of memory exception to avoid calling
     // into dart code or allocating any code.
@@ -1521,7 +1438,6 @@
         object_store()->preallocated_unhandled_exception());
     thread()->long_jump_base()->Jump(1, error);
   }
-  VerifiedMemory::Accept(address, size);
 
   RawObject* raw_obj = reinterpret_cast<RawObject*>(address + kHeapObjectTag);
   uword tags = 0;
@@ -1702,108 +1618,6 @@
 }
 
 
-VmIsolateSnapshotReader::VmIsolateSnapshotReader(
-    Snapshot::Kind kind,
-    const uint8_t* buffer,
-    intptr_t size,
-    const uint8_t* instructions_buffer,
-    const uint8_t* data_buffer,
-    Thread* thread)
-      : SnapshotReader(buffer,
-                       size,
-                       instructions_buffer,
-                       data_buffer,
-                       kind,
-                       new ZoneGrowableArray<BackRefNode>(
-                           kNumVmIsolateSnapshotReferences),
-                       thread) {
-  ASSERT(Snapshot::IsFull(kind));
-}
-
-
-VmIsolateSnapshotReader::~VmIsolateSnapshotReader() {
-  intptr_t len = GetBackwardReferenceTable()->length();
-  Object::InitVmIsolateSnapshotObjectTable(len);
-  ZoneGrowableArray<BackRefNode>* backrefs = GetBackwardReferenceTable();
-  for (intptr_t i = 0; i < len; i++) {
-    Object::vm_isolate_snapshot_object_table().SetAt(
-        i, *(backrefs->At(i).reference()));
-  }
-  ResetBackwardReferenceTable();
-  Dart::set_instructions_snapshot_buffer(instructions_buffer_);
-  Dart::set_data_snapshot_buffer(data_buffer_);
-}
-
-
-RawApiError* VmIsolateSnapshotReader::ReadVmIsolateSnapshot() {
-  ASSERT(Snapshot::IsFull(kind()));
-  Thread* thread = Thread::Current();
-  Isolate* isolate = thread->isolate();
-  ASSERT(isolate != NULL);
-  ASSERT(isolate == Dart::vm_isolate());
-  ObjectStore* object_store = isolate->object_store();
-  ASSERT(object_store != NULL);
-
-  // First read the version string, and check that it matches.
-  RawApiError* error = VerifyVersionAndFeatures();
-  if (error != ApiError::null()) {
-    return error;
-  }
-
-  // The version string matches. Read the rest of the snapshot.
-
-  {
-    NoSafepointScope no_safepoint;
-    HeapLocker hl(thread, old_space());
-
-    // Read in the symbol table.
-    object_store->symbol_table_ = reinterpret_cast<RawArray*>(ReadObject());
-
-    Symbols::InitOnceFromSnapshot(isolate);
-
-    // Read in all the script objects and the accompanying token streams
-    // for bootstrap libraries so that they are in the VM isolate's read
-    // only memory.
-    *(ArrayHandle()) ^= ReadObject();
-
-    if (Snapshot::IncludesCode(kind())) {
-      StubCode::ReadFrom(this);
-    }
-
-    // Validate the class table.
-#if defined(DEBUG)
-    isolate->ValidateClassTable();
-#endif
-
-    return ApiError::null();
-  }
-}
-
-
-IsolateSnapshotReader::IsolateSnapshotReader(Snapshot::Kind kind,
-                                             const uint8_t* buffer,
-                                             intptr_t size,
-                                             const uint8_t* instructions_buffer,
-                                             const uint8_t* data_buffer,
-                                             Thread* thread)
-    : SnapshotReader(buffer,
-                     size,
-                     instructions_buffer,
-                     data_buffer,
-                     kind,
-                     new ZoneGrowableArray<BackRefNode>(
-                         kNumInitialReferencesInFullSnapshot),
-                     thread) {
-  isolate()->set_compilation_allowed(kind != Snapshot::kAppNoJIT);
-  ASSERT(Snapshot::IsFull(kind));
-}
-
-
-IsolateSnapshotReader::~IsolateSnapshotReader() {
-  ResetBackwardReferenceTable();
-}
-
-
 ScriptSnapshotReader::ScriptSnapshotReader(const uint8_t* buffer,
                                            intptr_t size,
                                            Thread* thread)
@@ -2039,212 +1853,6 @@
 };
 
 
-FullSnapshotWriter::FullSnapshotWriter(Snapshot::Kind kind,
-                                       uint8_t** vm_isolate_snapshot_buffer,
-                                       uint8_t** isolate_snapshot_buffer,
-                                       ReAlloc alloc,
-                                       InstructionsWriter* instructions_writer)
-    : thread_(Thread::Current()),
-      kind_(kind),
-      vm_isolate_snapshot_buffer_(vm_isolate_snapshot_buffer),
-      isolate_snapshot_buffer_(isolate_snapshot_buffer),
-      alloc_(alloc),
-      vm_isolate_snapshot_size_(0),
-      isolate_snapshot_size_(0),
-      forward_list_(NULL),
-      instructions_writer_(instructions_writer),
-      scripts_(Array::Handle(zone())),
-      saved_symbol_table_(Array::Handle(zone())),
-      new_vm_symbol_table_(Array::Handle(zone())) {
-  ASSERT(isolate_snapshot_buffer_ != NULL);
-  ASSERT(alloc_ != NULL);
-  ASSERT(isolate() != NULL);
-  ASSERT(ClassFinalizer::AllClassesFinalized());
-  ASSERT(isolate() != NULL);
-  ASSERT(heap() != NULL);
-  ObjectStore* object_store = isolate()->object_store();
-  ASSERT(object_store != NULL);
-
-#if defined(DEBUG)
-  // Ensure the class table is valid.
-  isolate()->ValidateClassTable();
-#endif
-  // Can't have any mutation happening while we're serializing.
-  ASSERT(isolate()->background_compiler() == NULL);
-
-  intptr_t first_object_id = -1;
-  if (vm_isolate_snapshot_buffer != NULL) {
-    NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
-        Timeline::GetIsolateStream(), "PrepareNewVMIsolate"));
-
-    // Collect all the script objects and their accompanying token stream
-    // objects into an array so that we can write it out as part of the VM
-    // isolate snapshot. We first count the number of script objects, allocate
-    // an array and then fill it up with the script objects.
-    ScriptVisitor scripts_counter(thread());
-    heap()->IterateOldObjects(&scripts_counter);
-    Dart::vm_isolate()->heap()->IterateOldObjects(&scripts_counter);
-    intptr_t count = scripts_counter.count();
-    scripts_ = Array::New(count, Heap::kOld);
-    ScriptVisitor script_visitor(thread(), &scripts_);
-    heap()->IterateOldObjects(&script_visitor);
-    Dart::vm_isolate()->heap()->IterateOldObjects(&script_visitor);
-    ASSERT(script_visitor.count() == count);
-
-    // Tuck away the current symbol table.
-    saved_symbol_table_ = object_store->symbol_table();
-
-    // Create a unified symbol table that will be written as the vm isolate's
-    // symbol table.
-    new_vm_symbol_table_ = Symbols::UnifiedSymbolTable();
-
-    // Create an empty symbol table that will be written as the isolate's symbol
-    // table.
-    Symbols::SetupSymbolTable(isolate());
-
-    first_object_id = kMaxPredefinedObjectIds;
-  } else {
-    intptr_t max_vm_isolate_object_id =
-        Object::vm_isolate_snapshot_object_table().Length();
-    first_object_id = kMaxPredefinedObjectIds + max_vm_isolate_object_id;
-  }
-
-  forward_list_ = new ForwardList(thread(), first_object_id);
-  ASSERT(forward_list_ != NULL);
-}
-
-
-FullSnapshotWriter::~FullSnapshotWriter() {
-  delete forward_list_;
-  // We may run Dart code afterwards, restore the symbol table if needed.
-  if (!saved_symbol_table_.IsNull()) {
-    isolate()->object_store()->set_symbol_table(saved_symbol_table_);
-    saved_symbol_table_ = Array::null();
-  }
-  new_vm_symbol_table_ = Array::null();
-  scripts_ = Array::null();
-}
-
-
-void FullSnapshotWriter::WriteVmIsolateSnapshot() {
-  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
-      Timeline::GetIsolateStream(), "WriteVmIsolateSnapshot"));
-
-  ASSERT(vm_isolate_snapshot_buffer_ != NULL);
-  SnapshotWriter writer(thread(),
-                        kind_,
-                        vm_isolate_snapshot_buffer_,
-                        alloc_,
-                        kInitialSize,
-                        forward_list_,
-                        instructions_writer_,
-                        true, /* can_send_any_object */
-                        true /* writing_vm_isolate */);
-  // Write full snapshot for the VM isolate.
-  // Setup for long jump in case there is an exception while writing
-  // the snapshot.
-  LongJumpScope jump;
-  if (setjmp(*jump.Set()) == 0) {
-    // Reserve space in the output buffer for a snapshot header.
-    writer.ReserveHeader();
-
-    // Write out the version string.
-    writer.WriteVersionAndFeatures();
-
-    /*
-     * Now Write out the following
-     * - the symbol table
-     * - all the scripts and token streams for these scripts
-     * - the stub code (precompiled snapshots only)
-     **/
-    // Write out the symbol table.
-    writer.WriteObject(new_vm_symbol_table_.raw());
-
-    // Write out all the script objects and the accompanying token streams
-    // for the bootstrap libraries so that they are in the VM isolate
-    // read only memory.
-    writer.WriteObject(scripts_.raw());
-
-    if (Snapshot::IncludesCode(kind_)) {
-      StubCode::WriteTo(&writer);
-    }
-
-    writer.FillHeader(writer.kind());
-
-    vm_isolate_snapshot_size_ = writer.BytesWritten();
-  } else {
-    writer.ThrowException(writer.exception_type(), writer.exception_msg());
-  }
-}
-
-
-void FullSnapshotWriter::WriteIsolateFullSnapshot() {
-  NOT_IN_PRODUCT(TimelineDurationScope tds(thread(),
-      Timeline::GetIsolateStream(), "WriteIsolateFullSnapshot"));
-
-  SnapshotWriter writer(thread(),
-                        kind_,
-                        isolate_snapshot_buffer_,
-                        alloc_,
-                        kInitialSize,
-                        forward_list_,
-                        instructions_writer_,
-                        true, /* can_send_any_object */
-                        false /* writing_vm_isolate */);
-  ObjectStore* object_store = isolate()->object_store();
-  ASSERT(object_store != NULL);
-
-  // Write full snapshot for a regular isolate.
-  // Setup for long jump in case there is an exception while writing
-  // the snapshot.
-  LongJumpScope jump;
-  if (setjmp(*jump.Set()) == 0) {
-    // Reserve space in the output buffer for a snapshot header.
-    writer.ReserveHeader();
-
-    // Write out the version string.
-    writer.WriteVersionAndFeatures();
-
-    // Write out the full snapshot.
-
-    // Write out all the objects in the object store of the isolate which
-    // is the root set for all dart allocated objects at this point.
-    SnapshotWriterVisitor visitor(&writer, false);
-    visitor.VisitPointers(object_store->from(),
-                          object_store->to_snapshot(kind_));
-
-    // Write out all forwarded objects.
-    writer.WriteForwardedObjects();
-
-    writer.FillHeader(writer.kind());
-
-    isolate_snapshot_size_ = writer.BytesWritten();
-  } else {
-    writer.ThrowException(writer.exception_type(), writer.exception_msg());
-  }
-}
-
-
-void FullSnapshotWriter::WriteFullSnapshot() {
-  if (vm_isolate_snapshot_buffer() != NULL) {
-    WriteVmIsolateSnapshot();
-  }
-  WriteIsolateFullSnapshot();
-  if (Snapshot::IncludesCode(kind_)) {
-    instructions_writer_->Write();
-
-    OS::Print("VMIsolate(CodeSize): %" Pd "\n", VmIsolateSnapshotSize());
-    OS::Print("Isolate(CodeSize): %" Pd "\n", IsolateSnapshotSize());
-    OS::Print("Instructions(CodeSize): %" Pd "\n",
-              instructions_writer_->binary_size());
-    intptr_t total = VmIsolateSnapshotSize() +
-                     IsolateSnapshotSize() +
-                     instructions_writer_->binary_size();
-    OS::Print("Total(CodeSize): %" Pd "\n", total);
-  }
-}
-
-
 ForwardList::ForwardList(Thread* thread, intptr_t first_object_id)
     : thread_(thread),
       first_object_id_(first_object_id),
diff --git a/runtime/vm/snapshot.h b/runtime/vm/snapshot.h
index 601eba5..1d8f585 100644
--- a/runtime/vm/snapshot.h
+++ b/runtime/vm/snapshot.h
@@ -429,10 +429,7 @@
   // Get an object from the backward references list.
   Object* GetBackRef(intptr_t id);
 
-  // Read a full snap shot.
-  RawApiError* ReadFullSnapshot();
-
-  // Read a script snap shot.
+  // Read a script snapshot.
   RawObject* ReadScriptSnapshot();
 
   // Read version number of snapshot and verify.
@@ -650,38 +647,6 @@
 };
 
 
-class VmIsolateSnapshotReader : public SnapshotReader {
- public:
-  VmIsolateSnapshotReader(Snapshot::Kind kind,
-                          const uint8_t* buffer,
-                          intptr_t size,
-                          const uint8_t* instructions_buffer,
-                          const uint8_t* data_buffer,
-                          Thread* thread);
-  ~VmIsolateSnapshotReader();
-
-  RawApiError* ReadVmIsolateSnapshot();
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(VmIsolateSnapshotReader);
-};
-
-
-class IsolateSnapshotReader : public SnapshotReader {
- public:
-  IsolateSnapshotReader(Snapshot::Kind kind,
-                        const uint8_t* buffer,
-                        intptr_t size,
-                        const uint8_t* instructions_buffer,
-                        const uint8_t* data_buffer,
-                        Thread* thread);
-  ~IsolateSnapshotReader();
-
- private:
-  DISALLOW_COPY_AND_ASSIGN(IsolateSnapshotReader);
-};
-
-
 class ScriptSnapshotReader : public SnapshotReader {
  public:
   ScriptSnapshotReader(const uint8_t* buffer,
@@ -849,7 +814,6 @@
   GrowableArray<Node*> nodes_;
   intptr_t first_unprocessed_object_id_;
 
-  friend class FullSnapshotWriter;
   DISALLOW_COPY_AND_ASSIGN(ForwardList);
 };
 
@@ -1075,7 +1039,6 @@
   bool can_send_any_object_;  // True if any Dart instance can be sent.
   bool writing_vm_isolate_;
 
-  friend class FullSnapshotWriter;
   friend class RawArray;
   friend class RawClass;
   friend class RawClosureData;
@@ -1109,63 +1072,6 @@
 };
 
 
-class FullSnapshotWriter {
- public:
-  static const intptr_t kInitialSize = 64 * KB;
-  FullSnapshotWriter(Snapshot::Kind kind,
-                     uint8_t** vm_isolate_snapshot_buffer,
-                     uint8_t** isolate_snapshot_buffer,
-                     ReAlloc alloc,
-                     InstructionsWriter* instructions_writer);
-  ~FullSnapshotWriter();
-
-  uint8_t** vm_isolate_snapshot_buffer() {
-    return vm_isolate_snapshot_buffer_;
-  }
-
-  uint8_t** isolate_snapshot_buffer() {
-    return isolate_snapshot_buffer_;
-  }
-
-  Thread* thread() const { return thread_; }
-  Zone* zone() const { return thread_->zone(); }
-  Isolate* isolate() const { return thread_->isolate(); }
-  Heap* heap() const { return isolate()->heap(); }
-
-  // Writes a full snapshot of the Isolate.
-  void WriteFullSnapshot();
-
-  intptr_t VmIsolateSnapshotSize() const {
-    return vm_isolate_snapshot_size_;
-  }
-  intptr_t IsolateSnapshotSize() const {
-    return isolate_snapshot_size_;
-  }
-
- private:
-  // Writes a snapshot of the VM Isolate.
-  void WriteVmIsolateSnapshot();
-
-  // Writes a full snapshot of a regular Dart Isolate.
-  void WriteIsolateFullSnapshot();
-
-  Thread* thread_;
-  Snapshot::Kind kind_;
-  uint8_t** vm_isolate_snapshot_buffer_;
-  uint8_t** isolate_snapshot_buffer_;
-  ReAlloc alloc_;
-  intptr_t vm_isolate_snapshot_size_;
-  intptr_t isolate_snapshot_size_;
-  ForwardList* forward_list_;
-  InstructionsWriter* instructions_writer_;
-  Array& scripts_;
-  Array& saved_symbol_table_;
-  Array& new_vm_symbol_table_;
-
-  DISALLOW_COPY_AND_ASSIGN(FullSnapshotWriter);
-};
-
-
 class ScriptSnapshotWriter : public SnapshotWriter {
  public:
   static const intptr_t kInitialSize = 64 * KB;
diff --git a/runtime/vm/snapshot_test.cc b/runtime/vm/snapshot_test.cc
index 493607a..c5862e4 100644
--- a/runtime/vm/snapshot_test.cc
+++ b/runtime/vm/snapshot_test.cc
@@ -7,6 +7,7 @@
 #include "include/dart_tools_api.h"
 #include "platform/assert.h"
 #include "vm/class_finalizer.h"
+#include "vm/clustered_snapshot.h"
 #include "vm/dart_api_impl.h"
 #include "vm/dart_api_message.h"
 #include "vm/dart_api_state.h"
diff --git a/runtime/vm/source_report.cc b/runtime/vm/source_report.cc
index ee8858d6..8601b85 100644
--- a/runtime/vm/source_report.cc
+++ b/runtime/vm/source_report.cc
@@ -30,6 +30,22 @@
 }
 
 
+SourceReport::~SourceReport() {
+  ClearScriptTable();
+}
+
+
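+// Deletes the heap-allocated script table entries and resets the table to
+// its initial empty state.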
+void SourceReport::ClearScriptTable() {
+  for (intptr_t i = 0; i < script_table_entries_.length(); i++) {
+    delete script_table_entries_[i];
+    script_table_entries_[i] = NULL;
+  }
+  script_table_entries_.Clear();
+  script_table_.Clear();
+  next_script_index_ = 0;
+}
+
+
 void SourceReport::Init(Thread* thread,
                         const Script* script,
                         TokenPosition start_pos,
@@ -38,9 +54,7 @@
   script_ = script;
   start_pos_ = start_pos;
   end_pos_ = end_pos;
-  script_table_entries_.Clear();
-  script_table_.Clear();
-  next_script_index_ = 0;
+  ClearScriptTable();
   if (IsReportRequested(kProfile)) {
     // Build the profile.
     SampleFilter samplesForIsolate(thread_->isolate(),
@@ -99,21 +113,40 @@
 
 intptr_t SourceReport::GetScriptIndex(const Script& script) {
   const String& url = String::Handle(zone(), script.url());
-  ScriptTableEntry* pair = script_table_.Lookup(&url);
+  ScriptTableEntry* pair = script_table_.LookupValue(&url);
   if (pair != NULL) {
     return pair->index;
   }
-
-  ScriptTableEntry tmp;
-  tmp.key = &url;
-  tmp.index = next_script_index_++;
-  tmp.script = &script;
+  ScriptTableEntry* tmp = new ScriptTableEntry();
+  tmp->key = &url;
+  tmp->index = next_script_index_++;
+  tmp->script = &Script::Handle(zone(), script.raw());
   script_table_entries_.Add(tmp);
-  script_table_.Insert(&(script_table_entries_.Last()));
-  return tmp.index;
+  script_table_.Insert(tmp);
+  ASSERT(script_table_entries_.length() == next_script_index_);
+#if defined(DEBUG)
+  VerifyScriptTable();
+#endif
+  return tmp->index;
 }
 
 
+#if defined(DEBUG)
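+// Checks that entry indices match insertion order and that looking up each
+// entry's script url in the hash map yields the same entry.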
+void SourceReport::VerifyScriptTable() {
+  for (intptr_t i = 0; i < script_table_entries_.length(); i++) {
+    const String* url = script_table_entries_[i]->key;
+    const Script* script = script_table_entries_[i]->script;
+    intptr_t index = script_table_entries_[i]->index;
+    ASSERT(i == index);
+    const String& url2 = String::Handle(zone(), script->url());
+    ASSERT(url2.Equals(*url));
+    ScriptTableEntry* pair = script_table_.LookupValue(&url2);
+    ASSERT(i == pair->index);
+  }
+}
+#endif
+
+
 bool SourceReport::ScriptIsLoadedByLibrary(const Script& script,
                                            const Library& lib) {
   const Array& scripts = Array::Handle(zone(), lib.LoadedScripts());
@@ -337,8 +370,8 @@
 
 
 void SourceReport::PrintScriptTable(JSONArray* scripts) {
-  for (int i = 0; i < script_table_entries_.length(); i++) {
-    const Script* script = script_table_entries_[i].script;
+  for (intptr_t i = 0; i < script_table_entries_.length(); i++) {
+    const Script* script = script_table_entries_[i]->script;
     scripts->AddValue(*script);
   }
 }
diff --git a/runtime/vm/source_report.h b/runtime/vm/source_report.h
index cd2ad3a..dec18cfa 100644
--- a/runtime/vm/source_report.h
+++ b/runtime/vm/source_report.h
@@ -40,6 +40,7 @@
   // (e.g. kCallSites | kCoverage).
   explicit SourceReport(intptr_t report_set,
                         CompileMode compile = kNoCompile);
+  ~SourceReport();
 
   // Generate a source report for (some subrange of) a script.
   //
@@ -50,6 +51,7 @@
                  TokenPosition end_pos = TokenPosition::kNoSource);
 
  private:
+  void ClearScriptTable();
   void Init(Thread* thread, const Script* script,
             TokenPosition start_pos, TokenPosition end_pos);
 
@@ -69,6 +71,9 @@
   void PrintPossibleBreakpointsData(JSONObject* jsobj,
                                     const Function& func, const Code& code);
   void PrintProfileData(JSONObject* jsobj, ProfileFunction* profile_function);
+#if defined(DEBUG)
+  void VerifyScriptTable();
+#endif
   void PrintScriptTable(JSONArray* jsarr);
 
   void VisitFunction(JSONArray* jsarr, const Function& func);
@@ -114,7 +119,7 @@
   TokenPosition start_pos_;
   TokenPosition end_pos_;
   Profile profile_;
-  GrowableArray<ScriptTableEntry> script_table_entries_;
+  GrowableArray<ScriptTableEntry*> script_table_entries_;
   DirectChainedHashMap<ScriptTableTrait> script_table_;
   intptr_t next_script_index_;
 };
diff --git a/runtime/vm/source_report_test.cc b/runtime/vm/source_report_test.cc
index ad00dec..2d3f5bc 100644
--- a/runtime/vm/source_report_test.cc
+++ b/runtime/vm/source_report_test.cc
@@ -425,6 +425,49 @@
 }
 
 
+TEST_CASE(SourceReport_Coverage_AllFunctions_ForceCompile) {
+  const char* kScript =
+      "helper0() {}\n"
+      "helper1() {}\n"
+      "main() {\n"
+      "  if (true) {\n"
+      "    helper0();\n"
+      "  } else {\n"
+      "    helper1();\n"
+      "  }\n"
+      "}";
+
+  Library& lib = Library::Handle();
+  lib ^= ExecuteScript(kScript);
+  ASSERT(!lib.IsNull());
+
+  SourceReport report(SourceReport::kCoverage, SourceReport::kForceCompile);
+  JSONStream js;
+
+  // We generate a report with all functions in the VM.
+  Script& null_script = Script::Handle();
+  {
+    TransitionNativeToVM transition(Thread::Current());
+    report.PrintJSON(&js, null_script);
+  }
+  const char* result = js.ToCString();
+
+  // Sanity check the header.
+  EXPECT_SUBSTRING("{\"type\":\"SourceReport\",\"ranges\":[", result);
+
+  // Make sure that the main function was found.
+  EXPECT_SUBSTRING(
+      "\"startPos\":12,\"endPos\":39,\"compiled\":true,"
+      "\"coverage\":{\"hits\":[23],\"misses\":[32]}",
+      result);
+
+  // More than one script is referenced in the report.
+  EXPECT_SUBSTRING("\"scriptIndex\":0", result);
+  EXPECT_SUBSTRING("\"scriptIndex\":1", result);
+  EXPECT_SUBSTRING("\"scriptIndex\":2", result);
+}
+
+
 TEST_CASE(SourceReport_CallSites_SimpleCall) {
   char buffer[1024];
   const char* kScript =
diff --git a/runtime/vm/store_buffer.cc b/runtime/vm/store_buffer.cc
index b257c0a..67f0fcb 100644
--- a/runtime/vm/store_buffer.cc
+++ b/runtime/vm/store_buffer.cc
@@ -104,7 +104,6 @@
     // Sanity check: it makes no sense to schedule the GC in another isolate.
     // (If Isolate ever gets multiple store buffers, we should avoid this
     // coupling by passing in an explicit callback+parameter at construction.)
-    ASSERT(thread->isolate()->mutator_thread() == thread);
     ASSERT(thread->isolate()->store_buffer() == this);
     thread->ScheduleInterrupts(Thread::kVMInterrupt);
   }
diff --git a/runtime/vm/stub_code.cc b/runtime/vm/stub_code.cc
index 04ca4f4..a6992bf 100644
--- a/runtime/vm/stub_code.cc
+++ b/runtime/vm/stub_code.cc
@@ -14,6 +14,7 @@
 #include "vm/snapshot.h"
 #include "vm/virtual_memory.h"
 #include "vm/visitor.h"
+#include "vm/clustered_snapshot.h"
 
 namespace dart {
 
@@ -59,22 +60,31 @@
 #undef STUB_CODE_GENERATE
 
 
-void StubCode::ReadFrom(SnapshotReader* reader) {
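+// Pushes the Code object of each VM stub onto the serializer.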
+void StubCode::Push(Serializer* serializer) {
+#define WRITE_STUB(name)                                                       \
+  serializer->Push(StubCode::name##_entry()->code());
+  VM_STUB_CODE_LIST(WRITE_STUB);
+#undef WRITE_STUB
+}
+
+
+void StubCode::WriteRef(Serializer* serializer) {
+#define WRITE_STUB(name)                                                       \
+  serializer->WriteRef(StubCode::name##_entry()->code());
+  VM_STUB_CODE_LIST(WRITE_STUB);
+#undef WRITE_STUB
+}
+
+
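+// Reads back a reference for each VM stub and recreates its StubEntry.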
+void StubCode::ReadRef(Deserializer* deserializer) {
+  Code& code = Code::Handle();
 #define READ_STUB(name)                                                        \
-  *(reader->CodeHandle()) ^= reader->ReadObject();                             \
-  name##_entry_ = new StubEntry(*(reader->CodeHandle()));
+  code ^= deserializer->ReadRef();                                             \
+  name##_entry_ = new StubEntry(code);
   VM_STUB_CODE_LIST(READ_STUB);
 #undef READ_STUB
 }
 
-void StubCode::WriteTo(SnapshotWriter* writer) {
-  // TODO(rmacnak): Consider writing only the instructions to avoid
-  // vm_isolate_is_symbolic.
-#define WRITE_STUB(name)                                                       \
-  writer->WriteObject(StubCode::name##_entry()->code());
-  VM_STUB_CODE_LIST(WRITE_STUB);
-#undef WRITE_STUB
-}
 
 
 void StubCode::Init(Isolate* isolate) { }
diff --git a/runtime/vm/stub_code.h b/runtime/vm/stub_code.h
index 8232a4b..cecec5d 100644
--- a/runtime/vm/stub_code.h
+++ b/runtime/vm/stub_code.h
@@ -17,6 +17,8 @@
 class RawCode;
 class SnapshotReader;
 class SnapshotWriter;
+class Serializer;
+class Deserializer;
 
 // List of stubs created in the VM isolate, these stubs are shared by different
 // isolates running in this dart process.
@@ -71,6 +73,7 @@
   V(FixCallersTarget)                                                          \
   V(Deoptimize)                                                                \
   V(DeoptimizeLazy)                                                            \
+  V(FrameAwaitingMaterialization)                                              \
 
 #endif  // !defined(TARGET_ARCH_DBC)
 
@@ -112,8 +115,9 @@
   // only once and the stub code resides in the vm_isolate heap.
   static void InitOnce();
 
-  static void ReadFrom(SnapshotReader* reader);
-  static void WriteTo(SnapshotWriter* writer);
+  static void Push(Serializer* serializer);
+  static void WriteRef(Serializer* serializer);
+  static void ReadRef(Deserializer* deserializer);
 
   // Generate all stubs which are generated on a per isolate basis as they
   // have embedded objects which are isolate specific.
diff --git a/runtime/vm/stub_code_arm.cc b/runtime/vm/stub_code_arm.cc
index 538cd33..9a7429b 100644
--- a/runtime/vm/stub_code_arm.cc
+++ b/runtime/vm/stub_code_arm.cc
@@ -371,7 +371,8 @@
   Label loop;
   __ Bind(&loop);
   __ ldr(IP, Address(R1, kWordSize, Address::PreIndex));
-  __ InitializeFieldNoBarrier(R0, Address(R3, R2, LSL, 1), IP);
+  // Generational barrier is needed, array is not necessarily in new space.
+  __ StoreIntoObject(R0, Address(R3, R2, LSL, 1), IP);
   __ Bind(&enter);
   __ subs(R2, R2, Operand(Smi::RawValue(1)));  // R2 is Smi.
   __ b(&loop, PL);
@@ -625,7 +626,7 @@
   __ b(&slow_case, GT);
 
   const intptr_t cid = kArrayCid;
-  __ MaybeTraceAllocation(cid, R4, &slow_case);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(cid, R4, &slow_case));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ LoadImmediate(R9, fixed_size);
@@ -634,7 +635,7 @@
   __ bic(R9, R9, Operand(kObjectAlignment - 1));
 
   // R9: Allocation size.
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ LoadIsolate(R8);
   __ ldr(R8, Address(R8, Isolate::heap_offset()));
   // Potential new object start.
@@ -652,7 +653,7 @@
 
   // Successfully allocated the object(s), now update top to point to
   // next object start and initialize the object.
-  __ LoadAllocationStatsAddress(R3, cid);
+  NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R3, cid));
   __ str(NOTFP, Address(R8, Heap::TopOffset(space)));
   __ add(R0, R0, Operand(kHeapObjectTag));
 
@@ -678,12 +679,12 @@
   // R0: new object start as a tagged pointer.
   // NOTFP: new object end address.
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::type_arguments_offset()),
                               R1);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(R0,
+  __ StoreIntoObjectNoBarrier(R0,
                               FieldAddress(R0, Array::length_offset()),
                               R2);
 
@@ -695,7 +696,7 @@
   // data area to be initialized.
   // NOTFP: new object end address.
   // R9: allocation size.
-  __ IncrementAllocationStatsWithSize(R3, R9, space);
+  NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R3, R9, space));
 
   __ LoadObject(R8, Object::null_object());
   __ mov(R9, Operand(R8));
@@ -858,12 +859,12 @@
     ASSERT(kSmiTagShift == 1);
     __ bic(R2, R2, Operand(kObjectAlignment - 1));
 
-    __ MaybeTraceAllocation(kContextCid, R8, &slow_case);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, R8, &slow_case));
     // Now allocate the object.
     // R1: number of context variables.
     // R2: object size.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ LoadIsolate(R9);
     __ ldr(R9, Address(R9, Isolate::heap_offset()));
     __ ldr(R0, Address(R9, Heap::TopOffset(space)));
@@ -889,7 +890,7 @@
     // R2: object size.
     // R3: next object start.
     // R9: heap.
-    __ LoadAllocationStatsAddress(R4, cid);
+    NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R4, cid));
     __ str(R3, Address(R9, Heap::TopOffset(space)));
     __ add(R0, R0, Operand(kHeapObjectTag));
 
@@ -926,7 +927,7 @@
     // R3: next object start.
     // R4: allocation stats address.
     __ LoadObject(R8, Object::null_object());
-    __ InitializeFieldNoBarrier(R0, FieldAddress(R0, Context::parent_offset()),
+    __ StoreIntoObjectNoBarrier(R0, FieldAddress(R0, Context::parent_offset()),
                                 R8);
 
     // Initialize the context variables.
@@ -939,7 +940,7 @@
     Label loop;
     __ AddImmediate(NOTFP, R0, Context::variable_offset(0) - kHeapObjectTag);
     __ InitializeFieldsNoBarrier(R0, NOTFP, R3, R8, R9);
-    __ IncrementAllocationStatsWithSize(R4, R2, space);
+    NOT_IN_PRODUCT(__ IncrementAllocationStatsWithSize(R4, R2, space));
 
     // Done allocating and initializing the context.
     // R0: new object.
@@ -1062,7 +1063,7 @@
     Label slow_case;
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ ldr(R9, Address(THR, Thread::heap_offset()));
     __ ldr(R0, Address(R9, Heap::TopOffset(space)));
     __ AddImmediate(R1, R0, instance_size);
@@ -1081,7 +1082,7 @@
 
     // Load the address of the allocation stats table. We split up the load
     // and the increment so that the dependent load is not too nearby.
-    __ LoadAllocationStatsAddress(R9, cls.id());
+    NOT_IN_PRODUCT(__ LoadAllocationStatsAddress(R9, cls.id()));
 
     // R0: new object start.
     // R1: next object start.
@@ -1130,7 +1131,7 @@
       // Set the type arguments in the new object.
       __ ldr(R4, Address(SP, 0));
       FieldAddress type_args(R0, cls.type_arguments_field_offset());
-      __ InitializeFieldNoBarrier(R0, type_args, R4);
+      __ StoreIntoObjectNoBarrier(R0, type_args, R4);
     }
 
     // Done allocating and initializing the instance.
@@ -1138,7 +1139,7 @@
     // R9: allocation stats table.
 
     // Update allocation stats.
-    __ IncrementAllocationStats(R9, cls.id(), space);
+    NOT_IN_PRODUCT(__ IncrementAllocationStats(R9, cls.id(), space));
 
     // R0: new object (tagged).
     __ Ret();
diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc
index 15b7e0f..04dd0f4 100644
--- a/runtime/vm/stub_code_arm64.cc
+++ b/runtime/vm/stub_code_arm64.cc
@@ -652,9 +652,9 @@
   __ b(&slow_case, GT);
 
   const intptr_t cid = kArrayCid;
-  __ MaybeTraceAllocation(kArrayCid, R4, &slow_case);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, R4, &slow_case));
 
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ LoadIsolate(R8);
   __ ldr(R8, Address(R8, Isolate::heap_offset()));
 
@@ -693,7 +693,7 @@
   // R8: heap.
   __ StoreToOffset(R7, R8, Heap::TopOffset(space));
   __ add(R0, R0, Operand(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, R3, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R3, space));
 
   // R0: new object start as a tagged pointer.
   // R1: array element type.
@@ -780,8 +780,8 @@
   __ Comment("InvokeDartCodeStub");
 
   // Copy the C stack pointer (R31) into the stack pointer we'll actually use
-  // to access the stack, and put the C stack pointer at the stack limit.
-  __ SetupDartSP(OSThread::GetSpecifiedStackSize());
+  // to access the stack.
+  __ SetupDartSP();
   __ EnterFrame(0);
 
   // Push code object to PC marker slot.
@@ -896,7 +896,7 @@
 
   // Restore the frame pointer and C stack pointer and return.
   __ LeaveFrame();
-  __ mov(CSP, SP);
+  __ RestoreCSP();
   __ ret();
 }
 
@@ -917,12 +917,12 @@
     ASSERT(kSmiTagShift == 1);
     __ andi(R2, R2, Immediate(~(kObjectAlignment - 1)));
 
-    __ MaybeTraceAllocation(kContextCid, R4, &slow_case);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, R4, &slow_case));
     // Now allocate the object.
     // R1: number of context variables.
     // R2: object size.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ LoadIsolate(R5);
     __ ldr(R5, Address(R5, Isolate::heap_offset()));
     __ ldr(R0, Address(R5, Heap::TopOffset(space)));
@@ -950,7 +950,7 @@
     // R5: heap.
     __ str(R3, Address(R5, Heap::TopOffset(space)));
     __ add(R0, R0, Operand(kHeapObjectTag));
-    __ UpdateAllocationStatsWithSize(cid, R2, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R2, space));
 
     // Calculate the size tag.
     // R0: new object.
@@ -1108,7 +1108,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // R1: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ ldr(R5, Address(THR, Thread::heap_offset()));
     __ ldr(R2, Address(R5, Heap::TopOffset(space)));
     __ AddImmediate(R3, R2, instance_size);
@@ -1124,7 +1124,7 @@
       __ b(&slow_case, CS);  // Unsigned higher or equal.
     }
     __ str(R3, Address(R5, Heap::TopOffset(space)));
-    __ UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space));
 
     // R2: new object start.
     // R3: next object start.
diff --git a/runtime/vm/stub_code_dbc.cc b/runtime/vm/stub_code_dbc.cc
index 36e44b3c..66a46bd 100644
--- a/runtime/vm/stub_code_dbc.cc
+++ b/runtime/vm/stub_code_dbc.cc
@@ -64,13 +64,17 @@
 }
 
 
+void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) {
+  __ Trap();
+}
+
+
 // Print the stop message.
 DEFINE_LEAF_RUNTIME_ENTRY(void, PrintStopMessage, 1, const char* message) {
   OS::Print("Stop message: %s\n", message);
 }
 END_LEAF_RUNTIME_ENTRY
 
-
 }  // namespace dart
 
 #endif  // defined TARGET_ARCH_DBC
diff --git a/runtime/vm/stub_code_ia32.cc b/runtime/vm/stub_code_ia32.cc
index 836e4b5..139a4a2 100644
--- a/runtime/vm/stub_code_ia32.cc
+++ b/runtime/vm/stub_code_ia32.cc
@@ -323,8 +323,8 @@
   __ jmp(&loop_condition, Assembler::kNearJump);
   __ Bind(&loop);
   __ movl(EDI, Address(EBX, 0));
-  // No generational barrier needed, since array is in new space.
-  __ InitializeFieldNoBarrier(EAX, Address(ECX, 0), EDI);
+  // Generational barrier is needed, array is not necessarily in new space.
+  __ StoreIntoObject(EAX, Address(ECX, 0), EDI);
   __ AddImmediate(ECX, Immediate(kWordSize));
   __ AddImmediate(EBX, Immediate(-kWordSize));
   __ Bind(&loop_condition);
@@ -561,10 +561,10 @@
   __ cmpl(EDX, max_len);
   __ j(GREATER, &slow_case);
 
-  __ MaybeTraceAllocation(kArrayCid,
-                          EAX,
-                          &slow_case,
-                          Assembler::kFarJump);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid,
+                                         EAX,
+                                         &slow_case,
+                                         Assembler::kFarJump));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ leal(EBX, Address(EDX, TIMES_2, fixed_size));  // EDX is Smi.
@@ -576,7 +576,7 @@
   // EBX: allocation size.
 
   const intptr_t cid = kArrayCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movl(EDI, Address(THR, Thread::heap_offset()));
   __ movl(EAX, Address(EDI, Heap::TopOffset(space)));
   __ addl(EBX, EAX);
@@ -596,7 +596,7 @@
   __ movl(Address(EDI, Heap::TopOffset(space)), EBX);
   __ subl(EBX, EAX);
   __ addl(EAX, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, EBX, EDI, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EBX, EDI, space));
 
   // Initialize the tags.
   // EAX: new object start as a tagged pointer.
@@ -624,12 +624,13 @@
   // ECX: array element type.
   // EDX: Array length as Smi (preserved).
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(EAX,
+  // No generational barrier needed, since we store into a new object.
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::type_arguments_offset()),
                               ECX);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(EAX,
+  __ StoreIntoObjectNoBarrier(EAX,
                               FieldAddress(EAX, Array::length_offset()),
                               EDX);
 
@@ -648,7 +649,7 @@
   __ cmpl(EDI, EBX);
   __ j(ABOVE_EQUAL, &done, Assembler::kNearJump);
   // No generational barrier needed, since we are storing null.
-  __ InitializeFieldNoBarrier(EAX, Address(EDI, 0), Object::null_object());
+  __ StoreIntoObjectNoBarrier(EAX, Address(EDI, 0), Object::null_object());
   __ addl(EDI, Immediate(kWordSize));
   __ jmp(&init_loop, Assembler::kNearJump);
   __ Bind(&done);
@@ -798,15 +799,15 @@
     __ leal(EBX, Address(EDX, TIMES_4, fixed_size));
     __ andl(EBX, Immediate(-kObjectAlignment));
 
-    __ MaybeTraceAllocation(kContextCid,
-                            EAX,
-                            &slow_case,
-                            Assembler::kFarJump);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid,
+                                           EAX,
+                                           &slow_case,
+                                           Assembler::kFarJump));
 
     // Now allocate the object.
     // EDX: number of context variables.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ movl(ECX, Address(THR, Thread::heap_offset()));
     __ movl(EAX, Address(ECX, Heap::TopOffset(space)));
     __ addl(EBX, EAX);
@@ -836,7 +837,7 @@
     __ subl(EBX, EAX);
     __ addl(EAX, Immediate(kHeapObjectTag));
     // Generate isolate-independent code to allow sharing between isolates.
-    __ UpdateAllocationStatsWithSize(cid, EBX, EDI, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, EBX, EDI, space));
 
     // Calculate the size tag.
     // EAX: new object.
@@ -872,7 +873,7 @@
     // EAX: new object.
     // EDX: number of context variables.
     // No generational barrier needed, since we are storing null.
-    __ InitializeFieldNoBarrier(EAX,
+    __ StoreIntoObjectNoBarrier(EAX,
                                 FieldAddress(EAX, Context::parent_offset()),
                                 Object::null_object());
 
@@ -887,7 +888,7 @@
       __ Bind(&loop);
       __ decl(EDX);
       // No generational barrier needed, since we are storing null.
-      __ InitializeFieldNoBarrier(EAX,
+      __ StoreIntoObjectNoBarrier(EAX,
                                   Address(EBX, EDX, TIMES_4, 0),
                                   Object::null_object());
       __ Bind(&entry);
@@ -1018,7 +1019,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // EDX: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ movl(EDI, Address(THR, Thread::heap_offset()));
     __ movl(EAX, Address(EDI, Heap::TopOffset(space)));
     __ leal(EBX, Address(EAX, instance_size));
@@ -1033,7 +1034,7 @@
       __ j(ABOVE_EQUAL, &slow_case);
     }
     __ movl(Address(EDI, Heap::TopOffset(space)), EBX);
-    __ UpdateAllocationStats(cls.id(), ECX, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), ECX, space));
 
     // EAX: new object start (untagged).
     // EBX: next object start.
@@ -1058,7 +1059,7 @@
       for (intptr_t current_offset = Instance::NextFieldOffset();
            current_offset < instance_size;
            current_offset += kWordSize) {
-        __ InitializeFieldNoBarrier(EAX,
+        __ StoreIntoObjectNoBarrier(EAX,
                                     FieldAddress(EAX, current_offset),
                                     Object::null_object());
       }
@@ -1074,7 +1075,7 @@
       __ Bind(&init_loop);
       __ cmpl(ECX, EBX);
       __ j(ABOVE_EQUAL, &done, Assembler::kNearJump);
-      __ InitializeFieldNoBarrier(EAX,
+      __ StoreIntoObjectNoBarrier(EAX,
                                   Address(ECX, 0),
                                   Object::null_object());
       __ addl(ECX, Immediate(kWordSize));
@@ -1082,11 +1083,11 @@
       __ Bind(&done);
     }
     if (is_cls_parameterized) {
+      // EAX: new object (tagged).
       // EDX: new object type arguments.
       // Set the type arguments in the new object.
       intptr_t offset = cls.type_arguments_field_offset();
-      // TODO(koda): Figure out why previous content is sometimes null here.
-      __ InitializeFieldNoBarrier(EAX, FieldAddress(EAX, offset), EDX);
+      __ StoreIntoObjectNoBarrier(EAX, FieldAddress(EAX, offset), EDX);
     }
     // Done allocating and initializing the instance.
     // EAX: new object (tagged).
diff --git a/runtime/vm/stub_code_mips.cc b/runtime/vm/stub_code_mips.cc
index 27aa782..ef36894 100644
--- a/runtime/vm/stub_code_mips.cc
+++ b/runtime/vm/stub_code_mips.cc
@@ -656,7 +656,7 @@
   __ BranchUnsignedGreater(T3, Immediate(max_len), &slow_case);
 
   const intptr_t cid = kArrayCid;
-  __ MaybeTraceAllocation(kArrayCid, T4, &slow_case);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid, T4, &slow_case));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ LoadImmediate(T2, fixed_size);
@@ -668,7 +668,7 @@
 
   // T2: Allocation size.
 
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ LoadIsolate(T3);
   __ lw(T3, Address(T3, Isolate::heap_offset()));
   // Potential new object start.
@@ -690,7 +690,7 @@
   // T3: heap.
   __ sw(T1, Address(T3, Heap::TopOffset(space)));
   __ addiu(T0, T0, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, T2, T4, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T4, space));
 
   // Initialize the tags.
   // T0: new object start as a tagged pointer.
@@ -935,12 +935,12 @@
     __ LoadImmediate(T0, ~((kObjectAlignment) - 1));
     __ and_(T2, T2, T0);
 
-    __ MaybeTraceAllocation(kContextCid, T4, &slow_case);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid, T4, &slow_case));
     // Now allocate the object.
     // T1: number of context variables.
     // T2: object size.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ LoadIsolate(T5);
     __ lw(T5, Address(T5, Isolate::heap_offset()));
     __ lw(V0, Address(T5, Heap::TopOffset(space)));
@@ -968,7 +968,7 @@
     // T5: heap.
     __ sw(T3, Address(T5, Heap::TopOffset(space)));
     __ addiu(V0, V0, Immediate(kHeapObjectTag));
-    __ UpdateAllocationStatsWithSize(cid, T2, T5, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, T2, T5, space));
 
     // Calculate the size tag.
     // V0: new object.
@@ -1135,7 +1135,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // T1: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ lw(T5, Address(THR, Thread::heap_offset()));
     __ lw(T2, Address(T5, Heap::TopOffset(space)));
     __ LoadImmediate(T4, instance_size);
@@ -1153,7 +1153,7 @@
     // Successfully allocated the object(s), now update top to point to
     // next object start and initialize the object.
     __ sw(T3, Address(T5, Heap::TopOffset(space)));
-    __ UpdateAllocationStats(cls.id(), T5, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), T5, space));
 
     // T2: new object start.
     // T3: next object start.
diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc
index d66c125..45edaf4 100644
--- a/runtime/vm/stub_code_x64.cc
+++ b/runtime/vm/stub_code_x64.cc
@@ -339,8 +339,8 @@
   __ jmp(&loop_condition, kJumpLength);
   __ Bind(&loop);
   __ movq(RDI, Address(R12, 0));
-  // No generational barrier needed, since array is in new space.
-  __ InitializeFieldNoBarrier(RAX, Address(RBX, 0), RDI);
+  // Generational barrier is needed, array is not necessarily in new space.
+  __ StoreIntoObject(RAX, Address(RBX, 0), RDI);
   __ addq(RBX, Immediate(kWordSize));
   __ subq(R12, Immediate(kWordSize));
   __ Bind(&loop_condition);
@@ -588,9 +588,9 @@
   __ j(GREATER, &slow_case);
 
   // Check for allocation tracing.
-  __ MaybeTraceAllocation(kArrayCid,
-                          &slow_case,
-                          Assembler::kFarJump);
+  NOT_IN_PRODUCT(__ MaybeTraceAllocation(kArrayCid,
+                                         &slow_case,
+                                         Assembler::kFarJump));
 
   const intptr_t fixed_size = sizeof(RawArray) + kObjectAlignment - 1;
   __ leaq(RDI, Address(RDI, TIMES_4, fixed_size));  // RDI is a Smi.
@@ -598,7 +598,7 @@
   __ andq(RDI, Immediate(-kObjectAlignment));
 
   const intptr_t cid = kArrayCid;
-  Heap::Space space = Heap::SpaceForAllocation(cid);
+  Heap::Space space = Heap::kNew;
   __ movq(R13, Address(THR, Thread::heap_offset()));
   __ movq(RAX, Address(R13, Heap::TopOffset(space)));
 
@@ -619,7 +619,7 @@
   // next object start and initialize the object.
   __ movq(Address(R13, Heap::TopOffset(space)), RCX);
   __ addq(RAX, Immediate(kHeapObjectTag));
-  __ UpdateAllocationStatsWithSize(cid, RDI, space);
+  NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, RDI, space));
   // Initialize the tags.
   // RAX: new object start as a tagged pointer.
   // RDI: allocation size.
@@ -641,12 +641,13 @@
 
   // RAX: new object start as a tagged pointer.
   // Store the type argument field.
-  __ InitializeFieldNoBarrier(RAX,
+  // No generational barrier needed, since we store into a new object.
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::type_arguments_offset()),
                               RBX);
 
   // Set the length field.
-  __ InitializeFieldNoBarrier(RAX,
+  __ StoreIntoObjectNoBarrier(RAX,
                               FieldAddress(RAX, Array::length_offset()),
                               R10);
 
@@ -668,7 +669,7 @@
 #endif  // DEBUG
   __ j(ABOVE_EQUAL, &done, kJumpLength);
   // No generational barrier needed, since we are storing null.
-  __ InitializeFieldNoBarrier(RAX, Address(RDI, 0), R12);
+  __ StoreIntoObjectNoBarrier(RAX, Address(RDI, 0), R12);
   __ addq(RDI, Immediate(kWordSize));
   __ jmp(&init_loop, kJumpLength);
   __ Bind(&done);
@@ -843,14 +844,14 @@
     __ andq(R13, Immediate(-kObjectAlignment));
 
     // Check for allocation tracing.
-    __ MaybeTraceAllocation(kContextCid,
-                            &slow_case,
-                            Assembler::kFarJump);
+    NOT_IN_PRODUCT(__ MaybeTraceAllocation(kContextCid,
+                                           &slow_case,
+                                           Assembler::kFarJump));
 
     // Now allocate the object.
     // R10: number of context variables.
     const intptr_t cid = kContextCid;
-    Heap::Space space = Heap::SpaceForAllocation(cid);
+    Heap::Space space = Heap::kNew;
     __ movq(RCX, Address(THR, Thread::heap_offset()));
     __ movq(RAX, Address(RCX, Heap::TopOffset(space)));
     __ addq(R13, RAX);
@@ -877,7 +878,7 @@
     __ subq(R13, RAX);
     __ addq(RAX, Immediate(kHeapObjectTag));
     // Generate isolate-independent code to allow sharing between isolates.
-    __ UpdateAllocationStatsWithSize(cid, R13, space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStatsWithSize(cid, R13, space));
 
     // Calculate the size tag.
     // RAX: new object.
@@ -913,7 +914,7 @@
     // RAX: new object.
     // R10: number of context variables.
     // No generational barrier needed, since we are storing null.
-    __ InitializeFieldNoBarrier(RAX,
+    __ StoreIntoObjectNoBarrier(RAX,
                                 FieldAddress(RAX, Context::parent_offset()),
                                 R9);
 
@@ -932,7 +933,7 @@
       __ Bind(&loop);
       __ decq(R10);
       // No generational barrier needed, since we are storing null.
-      __ InitializeFieldNoBarrier(RAX,
+      __ StoreIntoObjectNoBarrier(RAX,
                                   Address(R13, R10, TIMES_8, 0),
                                   R9);
       __ Bind(&entry);
@@ -1053,7 +1054,7 @@
     // Allocate the object and update top to point to
     // next object start and initialize the allocated object.
     // RDX: instantiated type arguments (if is_cls_parameterized).
-    Heap::Space space = Heap::SpaceForAllocation(cls.id());
+    Heap::Space space = Heap::kNew;
     __ movq(RCX, Address(THR, Thread::heap_offset()));
     __ movq(RAX, Address(RCX, Heap::TopOffset(space)));
     __ leaq(RBX, Address(RAX, instance_size));
@@ -1068,7 +1069,7 @@
       __ j(ABOVE_EQUAL, &slow_case);
     }
     __ movq(Address(RCX, Heap::TopOffset(space)), RBX);
-    __ UpdateAllocationStats(cls.id(), space);
+    NOT_IN_PRODUCT(__ UpdateAllocationStats(cls.id(), space));
 
     // RAX: new object start (untagged).
     // RBX: next object start.
@@ -1093,7 +1094,7 @@
       for (intptr_t current_offset = Instance::NextFieldOffset();
            current_offset < instance_size;
            current_offset += kWordSize) {
-        __ InitializeFieldNoBarrier(RAX,
+        __ StoreIntoObjectNoBarrier(RAX,
                                     FieldAddress(RAX, current_offset),
                                     R9);
       }
@@ -1114,16 +1115,17 @@
       static const bool kJumpLength = Assembler::kNearJump;
 #endif  // DEBUG
       __ j(ABOVE_EQUAL, &done, kJumpLength);
-      __ InitializeFieldNoBarrier(RAX, Address(RCX, 0), R9);
+      __ StoreIntoObjectNoBarrier(RAX, Address(RCX, 0), R9);
       __ addq(RCX, Immediate(kWordSize));
       __ jmp(&init_loop, Assembler::kNearJump);
       __ Bind(&done);
     }
     if (is_cls_parameterized) {
+      // RAX: new object (tagged).
       // RDX: new object type arguments.
       // Set the type arguments in the new object.
       intptr_t offset = cls.type_arguments_field_offset();
-      __ InitializeFieldNoBarrier(RAX, FieldAddress(RAX, offset), RDX);
+      __ StoreIntoObjectNoBarrier(RAX, FieldAddress(RAX, offset), RDX);
     }
     // Done allocating and initializing the instance.
     // RAX: new object (tagged).
diff --git a/runtime/vm/symbols.h b/runtime/vm/symbols.h
index 7057da2..5e6a165 100644
--- a/runtime/vm/symbols.h
+++ b/runtime/vm/symbols.h
@@ -71,6 +71,7 @@
   V(CyclicInitializationError, "CyclicInitializationError")                    \
   V(ThrowNew, "_throwNew")                                                     \
   V(ThrowNewIfNotLoaded, "_throwNewIfNotLoaded")                               \
+  V(CheckAssertion, "_checkAssertion")                                         \
   V(Symbol, "Symbol")                                                          \
   V(SymbolCtor, "Symbol.")                                                     \
   V(List, "List")                                                              \
@@ -274,6 +275,8 @@
   V(ByteBufferDot_New, "ByteBuffer._New")                                      \
   V(_WeakProperty, "_WeakProperty")                                            \
   V(_MirrorReference, "_MirrorReference")                                      \
+  V(FreeListElement, "FreeListElement")                                        \
+  V(ForwardingCorpse, "ForwardingCorpse")                                      \
   V(InvocationMirror, "_InvocationMirror")                                     \
   V(AllocateInvocationMirror, "_allocateInvocationMirror")                     \
   V(toString, "toString")                                                      \
@@ -673,6 +676,8 @@
   friend class String;
   friend class SnapshotReader;
   friend class SnapshotWriter;
+  friend class Serializer;
+  friend class Deserializer;
   friend class ApiMessageReader;
 
   DISALLOW_COPY_AND_ASSIGN(Symbols);
diff --git a/runtime/vm/thread.h b/runtime/vm/thread.h
index ea5396a..1ce083a 100644
--- a/runtime/vm/thread.h
+++ b/runtime/vm/thread.h
@@ -145,6 +145,7 @@
     kCompilerTask = 0x2,
     kSweeperTask = 0x4,
     kMarkerTask = 0x8,
+    kFinalizerTask = 0x10,
   };
   ~Thread();
 
diff --git a/runtime/vm/thread_interrupter_fuchsia.cc b/runtime/vm/thread_interrupter_fuchsia.cc
new file mode 100644
index 0000000..d089211
--- /dev/null
+++ b/runtime/vm/thread_interrupter_fuchsia.cc
@@ -0,0 +1,30 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "platform/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/thread_interrupter.h"
+
+#include "platform/assert.h"
+
+namespace dart {
+
+void ThreadInterrupter::InterruptThread(OSThread* thread) {
+  UNIMPLEMENTED();
+}
+
+
+void ThreadInterrupter::InstallSignalHandler() {
+  UNIMPLEMENTED();
+}
+
+
+void ThreadInterrupter::RemoveSignalHandler() {
+  UNIMPLEMENTED();
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/timeline.cc b/runtime/vm/timeline.cc
index dbe9ef0..f4c62a8 100644
--- a/runtime/vm/timeline.cc
+++ b/runtime/vm/timeline.cc
@@ -2,6 +2,8 @@
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
 
+#ifndef PRODUCT
+
 #include <cstdlib>
 
 #include "vm/atomic.h"
@@ -16,8 +18,6 @@
 
 namespace dart {
 
-#ifndef PRODUCT
-
 DEFINE_FLAG(bool, complete_timeline, false, "Record the complete timeline");
 DEFINE_FLAG(bool, startup_timeline, false, "Record the startup timeline");
 DEFINE_FLAG(bool, trace_timeline, false,
@@ -1692,6 +1692,6 @@
   return r;
 }
 
-#endif  // !PRODUCT
-
 }  // namespace dart
+
+#endif  // !PRODUCT
diff --git a/runtime/vm/unicode.h b/runtime/vm/unicode.h
index e06f1f9..7ea10e9 100644
--- a/runtime/vm/unicode.h
+++ b/runtime/vm/unicode.h
@@ -117,17 +117,17 @@
   }
 
   // Returns true if ch is a lead or trail surrogate.
-  static bool IsSurrogate(int32_t ch) {
+  static bool IsSurrogate(uint32_t ch) {
     return (ch & 0xFFFFF800) == 0xD800;
   }
 
   // Returns true if ch is a lead surrogate.
-  static bool IsLeadSurrogate(int32_t ch) {
+  static bool IsLeadSurrogate(uint32_t ch) {
     return (ch & 0xFFFFFC00) == 0xD800;
   }
 
   // Returns true if ch is a low surrogate.
-  static bool IsTrailSurrogate(int32_t ch) {
+  static bool IsTrailSurrogate(uint32_t ch) {
     return (ch & 0xFFFFFC00) == 0xDC00;
   }
 
@@ -147,8 +147,8 @@
   }
 
   // Decodes a surrogate pair into a supplementary code point.
-  static int32_t Decode(int32_t lead, int32_t trail) {
-    return 0x10000 + ((lead & 0x3FF) << 10) + (trail & 0x3FF);
+  static int32_t Decode(uint16_t lead, uint16_t trail) {
+    return 0x10000 + ((lead & 0x000003FF) << 10) + (trail & 0x3FF);
   }
 
   // Encodes a single code point.
diff --git a/runtime/vm/unit_test.cc b/runtime/vm/unit_test.cc
index c6b6bdb..b0c773e 100644
--- a/runtime/vm/unit_test.cc
+++ b/runtime/vm/unit_test.cc
@@ -75,7 +75,7 @@
 
 
 static bool IsImportableTestLib(const char* url_name) {
-  const char* kImportTestLibUri = "importable_test_lib";
+  const char* kImportTestLibUri = "test:importable_lib";
   static const intptr_t kImportTestLibUriLen = strlen(kImportTestLibUri);
   return (strncmp(url_name, kImportTestLibUri, kImportTestLibUriLen) == 0);
 }
@@ -92,8 +92,9 @@
 }
 
 
+#ifndef PRODUCT
 static bool IsIsolateReloadTestLib(const char* url_name) {
-  const char* kIsolateReloadTestLibUri = "isolate_reload_test_helper";
+  const char* kIsolateReloadTestLibUri = "test:isolate_reload_helper";
   static const intptr_t kIsolateReloadTestLibUriLen =
       strlen(kIsolateReloadTestLibUri);
   return (strncmp(url_name,
@@ -102,7 +103,6 @@
 }
 
 
-#ifndef PRODUCT
 static Dart_Handle IsolateReloadTestLibSource() {
   // Special library with one function.
   return DartUtils::NewString("void reloadTest() native 'Reload_Test';\n");
@@ -139,6 +139,13 @@
 static Dart_Handle LibraryTagHandler(Dart_LibraryTag tag,
                                      Dart_Handle library,
                                      Dart_Handle url) {
+  if (tag == Dart_kCanonicalizeUrl) {
+    Dart_Handle library_url = Dart_LibraryUrl(library);
+    if (Dart_IsError(library_url)) {
+      return library_url;
+    }
+    return Dart_DefaultCanonicalizeUrl(library_url, url);
+  }
   if (tag == Dart_kScriptTag) {
     // Reload request.
     ASSERT(script_reload_key != kUnsetThreadLocalKey);
@@ -146,7 +153,7 @@
        reinterpret_cast<const char*>(
            OSThread::GetThreadLocal(script_reload_key));
     ASSERT(script_source != NULL);
-    OSThread::SetThreadLocal(script_reload_key, NULL);
+    OSThread::SetThreadLocal(script_reload_key, 0);
     return Dart_LoadScript(url,
                            NewString(script_source),
                            0,
@@ -172,23 +179,6 @@
 
   bool is_dart_scheme_url = DartUtils::IsDartSchemeURL(url_chars);
   bool is_io_library = DartUtils::IsDartIOLibURL(library_url_string);
-  if (tag == Dart_kCanonicalizeUrl) {
-    // Already canonicalized.
-    if (IsImportableTestLib(url_chars) || IsIsolateReloadTestLib(url_chars)) {
-      return url;
-    }
-    // If this is a Dart Scheme URL then it is not modified as it will be
-    // handled by the VM internally.
-    if (is_dart_scheme_url || is_io_library) {
-      return url;
-    }
-
-    Dart_Handle library_url = Dart_LibraryUrl(library);
-    if (Dart_IsError(library_url)) {
-      return library_url;
-    }
-    return DartUtils::ResolveUri(library_url, url);
-  }
   if (is_dart_scheme_url) {
     ASSERT(tag == Dart_kImportTag);
     // Handle imports of other built-in libraries present in the SDK.
diff --git a/runtime/vm/uri.cc b/runtime/vm/uri.cc
new file mode 100644
index 0000000..7d9ef76
--- /dev/null
+++ b/runtime/vm/uri.cc
@@ -0,0 +1,541 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/uri.h"
+
+#include "vm/zone.h"
+
+namespace dart {
+
+static bool IsUnreservedChar(intptr_t value) {
+  return ((value >= 'a' && value <= 'z') ||
+          (value >= 'A' && value <= 'Z') ||
+          (value >= '0' && value <= '9') ||
+          value == '-' ||
+          value == '.' ||
+          value == '_' ||
+          value == '~');
+}
+
+
+static bool IsDelimiter(intptr_t value) {
+  switch (value) {
+    case ':': case '/': case '?': case '#':
+    case '[': case ']': case '@': case '!':
+    case '$': case '&': case '\'': case '(':
+    case ')': case '*': case '+': case ',':
+    case ';': case '=':
+      return true;
+    default:
+      return false;
+  }
+}
+
+
+static bool IsHexDigit(char value) {
+  return ((value >= '0' && value <= '9') ||
+          (value >= 'A' && value <= 'F') ||
+          (value >= 'a' && value <= 'f'));
+}
+
+
+static int HexValue(char digit) {
+  if ((digit >= '0' && digit <= '9')) {
+    return digit - '0';
+  }
+  if ((digit >= 'A' && digit <= 'F')) {
+    return digit - 'A' + 10;
+  }
+  if ((digit >= 'a' && digit <= 'f')) {
+    return digit - 'a' + 10;
+  }
+  UNREACHABLE();
+  return 0;
+}
+
+
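+// Decodes the percent-escape sequence ("%XY") starting at |pos| and returns
+// its value, or -1 if the sequence is truncated or malformed.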
+static int GetEscapedValue(const char* str, intptr_t pos, intptr_t len) {
+  if (pos + 2 >= len) {
+    // Not enough room for a valid escape sequence.
+    return -1;
+  }
+  if (str[pos] != '%') {
+    // Escape sequences start with '%'.
+    return -1;
+  }
+
+  char digit1 = str[pos + 1];
+  char digit2 = str[pos + 2];
+  if (!IsHexDigit(digit1) || !IsHexDigit(digit2)) {
+    // Invalid escape sequence.  Ignore it.
+    return -1;
+  }
+  return HexValue(digit1) * 16 + HexValue(digit2);
+}
+
+
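+// Returns a zone-allocated copy of str[0, len) with escaping normalized:
+// escaped unreserved characters are unescaped, delimiters and unreserved
+// characters pass through unchanged, and everything else is percent-escaped.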
+static char* NormalizeEscapes(const char* str, intptr_t len) {
+  // Allocate the buffer.
+  Zone* zone = Thread::Current()->zone();
+  // We multiply len by three because a percent-escape sequence is
+// three characters long (e.g. ' ' -> '%20').  +1 for '\0'.  We could
+  // take two passes through the string and avoid the excess
+  // allocation, but it's zone-memory so it doesn't seem necessary.
+  char* buffer = zone->Alloc<char>(len * 3 + 1);
+
+  // Copy the string, normalizing as we go.
+  intptr_t buffer_pos = 0;
+  intptr_t pos = 0;
+  while (pos < len) {
+    int escaped_value = GetEscapedValue(str, pos, len);
+    if (escaped_value >= 0) {
+      // If one of the special "unreserved" characters has been
+      // escaped, revert the escaping.  Otherwise preserve the
+      // escaping.
+      if (IsUnreservedChar(escaped_value)) {
+        buffer[buffer_pos] = escaped_value;
+        buffer_pos++;
+      } else {
+        OS::SNPrint(buffer + buffer_pos, 4, "%%%02X", escaped_value);
+        buffer_pos += 3;
+      }
+      pos += 3;
+    } else {
+      char c = str[pos];
+      // If a delimiter or unreserved character is currently not
+      // escaped, preserve that.  If there is a busted %-sequence in
+      // the input, preserve that too.
+      if (c == '%' || IsDelimiter(c) || IsUnreservedChar(c)) {
+        buffer[buffer_pos] = c;
+        buffer_pos++;
+      } else {
+        // Escape funky characters.
+        OS::SNPrint(buffer + buffer_pos, 4, "%%%02X", c);
+        buffer_pos += 3;
+      }
+      pos++;
+    }
+  }
+  buffer[buffer_pos] = '\0';
+  return buffer;
+}
+
+
+// Lower-case a string in place.
+static void StringLower(char* str) {
+  const intptr_t len = strlen(str);
+  intptr_t i = 0;
+  while (i < len) {
+    int escaped_value = GetEscapedValue(str, i, len);
+    if (escaped_value >= 0) {
+      // Don't lowercase escape sequences.
+      i += 3;
+    } else {
+      // I don't use tolower() because I don't want the locale
+      // transforming any non-ascii characters.
+      char c = str[i];
+      if (c >= 'A' && c <= 'Z') {
+        str[i] = c + ('a' - 'A');
+      }
+      i++;
+    }
+  }
+}
+
+
+static void ClearParsedUri(ParsedUri* parsed_uri) {
+  parsed_uri->scheme = NULL;
+  parsed_uri->userinfo = NULL;
+  parsed_uri->host = NULL;
+  parsed_uri->port = NULL;
+  parsed_uri->path = NULL;
+  parsed_uri->query = NULL;
+  parsed_uri->fragment = NULL;
+}
+
+
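+// Parses the authority component ([userinfo@]host[:port]) into |parsed_uri|
+// and returns the number of characters consumed.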
+static intptr_t ParseAuthority(const char* authority, ParsedUri* parsed_uri) {
+  Zone* zone = Thread::Current()->zone();
+  const char* current = authority;
+  intptr_t len = 0;
+
+  size_t userinfo_len = strcspn(current, "@/");
+  if (current[userinfo_len] == '@') {
+    // The '@' character follows the optional userinfo string.
+    parsed_uri->userinfo = NormalizeEscapes(current, userinfo_len);
+    current += userinfo_len + 1;
+    len += userinfo_len + 1;
+  } else {
+    parsed_uri->userinfo = NULL;
+  }
+
+  size_t host_len = strcspn(current, ":/");
+  char* host = NormalizeEscapes(current, host_len);
+  StringLower(host);
+  parsed_uri->host = host;
+  len += host_len;
+
+  if (current[host_len] == ':') {
+    // The ':' character precedes the optional port string.
+    const char* port_start = current + host_len + 1;  // +1 for ':'
+    size_t port_len = strcspn(port_start, "/");
+    parsed_uri->port = zone->MakeCopyOfStringN(port_start, port_len);
+    len += 1 + port_len;  // +1 for ':'
+  } else {
+    parsed_uri->port = NULL;
+  }
+  return len;
+}
+
+
+// Performs a simple parse of a uri into its components.
+// See RFC 3986 Section 3: Syntax.
+bool ParseUri(const char* uri, ParsedUri* parsed_uri) {
+  Zone* zone = Thread::Current()->zone();
+
+  // The first ':' separates the scheme from the rest of the uri.  If
+  // a ':' occurs after the first '/' it doesn't count.
+  size_t scheme_len = strcspn(uri, ":/");
+  const char* rest = uri;
+  if (uri[scheme_len] == ':') {
+    char* scheme = zone->MakeCopyOfStringN(uri, scheme_len);
+    StringLower(scheme);
+    parsed_uri->scheme = scheme;
+    rest = uri + scheme_len + 1;
+  } else {
+    parsed_uri->scheme = NULL;
+  }
+
+  // The first '#' separates the optional fragment
+  const char* hash_pos = rest + strcspn(rest, "#");
+  if (*hash_pos == '#') {
+    // There is a fragment part.
+    const char* fragment_start = hash_pos + 1;
+    parsed_uri->fragment =
+        NormalizeEscapes(fragment_start, strlen(fragment_start));
+  } else {
+    parsed_uri->fragment = NULL;
+  }
+
+  // The first '?' or '#' separates the hierarchical part from the
+  // optional query.
+  const char* question_pos = rest + strcspn(rest, "?#");
+  if (*question_pos == '?') {
+    // There is a query part.
+    const char* query_start = question_pos + 1;
+    parsed_uri->query =
+        NormalizeEscapes(query_start, (hash_pos - query_start));
+  } else {
+    parsed_uri->query = NULL;
+  }
+
+  const char* path_start = rest;
+  if (rest[0] == '/' && rest[1] == '/') {
+    // There is an authority part.
+    const char* authority_start = rest + 2;  // 2 for '//'.
+
+    intptr_t authority_len =
+        ParseAuthority(authority_start, parsed_uri);
+    if (authority_len < 0) {
+      ClearParsedUri(parsed_uri);
+      return false;
+    }
+    path_start = authority_start + authority_len;
+  } else {
+    parsed_uri->userinfo = NULL;
+    parsed_uri->host = NULL;
+    parsed_uri->port = NULL;
+  }
+
+  // The path is the substring between the authority and the query.
+  parsed_uri->path = NormalizeEscapes(path_start, (question_pos - path_start));
+  return true;
+}
+
+
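+// Backs |current| up to the start of the last segment written to the output
+// buffer, never moving before |base|.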
+static char* RemoveLastSegment(char* current, char* base) {
+  if (current == base) {
+    return current;
+  }
+  ASSERT(current > base);
+  for (current--; current > base; current--) {
+    if (*current == '/') {
+      // We have found the beginning of the last segment.
+      return current;
+    }
+  }
+  ASSERT(current == base);
+  return current;
+}
+
+
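+// Returns the length of the leading path segment of |input|, including its
+// initial '/' (if any) but excluding any trailing '/'.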
+static intptr_t SegmentLength(const char* input) {
+  const char* cp = input;
+
+  // Include initial slash in the segment, if any.
+  if (*cp == '/') {
+    cp++;
+  }
+
+  // Don't include trailing slash in the segment.
+  cp += strcspn(cp, "/");
+  return cp - input;
+}
+
+
+// See RFC 3986 Section 5.2.4: Remove Dot Segments.
+static const char* RemoveDotSegments(const char* path) {
+  const char* input = path;
+
+  // The output path will always be less than or equal to the size of
+  // the input path.
+  Zone* zone = Thread::Current()->zone();
+  char* buffer = zone->Alloc<char>(strlen(path) + 1);  // +1 for '\0'
+  char* output = buffer;
+
+  while (*input != '\0') {
+    if (strncmp("../", input, 3) == 0) {
+      // Discard initial "../" from the input.  It's junk.
+      input += 3;
+
+    } else if (strncmp("./", input, 3) == 0) {
+      // Discard initial "./" from the input.  It's junk.
+      input += 2;
+
+    } else if (strncmp("/./", input, 3) == 0) {
+      // Advance past the "/." part of the input.
+      input += 2;
+
+    } else if (strcmp("/.", input) == 0) {
+      // Pretend the input just contains a "/".
+      input = "/";
+
+    } else if (strncmp("/../", input, 4) == 0) {
+      // Advance past the "/.." part of the input and remove one
+      // segment from the output.
+      input += 3;
+      output = RemoveLastSegment(output, buffer);
+
+    } else if (strcmp("/..", input) == 0) {
+      // Pretend the input contains a "/" and remove one segment from
+      // the output.
+      input = "/";
+      output = RemoveLastSegment(output, buffer);
+
+    } else if (strcmp("..", input) == 0) {
+      // The input has been reduced to nothing useful.
+      input += 2;
+
+    } else if (strcmp(".", input) == 0) {
+      // The input has been reduced to nothing useful.
+      input += 1;
+
+    } else {
+      intptr_t segment_len = SegmentLength(input);
+      if (input[0] != '/' && output != buffer) {
+        *output = '/';
+        output++;
+      }
+      strncpy(output, input, segment_len);
+      output += segment_len;
+      input += segment_len;
+    }
+  }
+  *output = '\0';
+  return buffer;
+}
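+
+// Worked example (cf. ResolveUri_MoreDotSegmentTests in uri_test.cc):
+//
+//   RemoveDotSegments("/a/b/c/./../../g")  ==>  "/a/g"
+//
+// The "/./" collapses away and each "/../" pops the previously emitted
+// segment, so "c" and then "b" are removed before "/g" is appended.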
+
+
+// See RFC 3986 Section 5.2.3: Merge Paths.
+static const char* MergePaths(const char* base_path, const char* ref_path) {
+  Zone* zone = Thread::Current()->zone();
+  if (base_path[0] == '\0') {
+    // If the base_path is empty, we prepend '/'.
+    return zone->PrintToString("/%s", ref_path);
+  }
+
+  // We need to find the last '/' in base_path.
+  const char* last_slash = strrchr(base_path, '/');
+  if (last_slash == NULL) {
+    // There is no slash in the base_path.  Return the ref_path unchanged.
+    return ref_path;
+  }
+
+  // We found a '/' in the base_path.  Cut off everything after it and
+  // add the ref_path.
+  intptr_t truncated_base_len = last_slash - base_path;
+  intptr_t ref_path_len = strlen(ref_path);
+  intptr_t len = truncated_base_len + ref_path_len + 1;  // +1 for '/'
+  char* buffer = zone->Alloc<char>(len + 1);  // +1 for '\0'
+
+  // Copy truncated base.
+  strncpy(buffer, base_path, truncated_base_len);
+
+  // Add a slash.
+  buffer[truncated_base_len] = '/';
+
+  // Copy the ref_path.
+  strncpy((buffer + truncated_base_len + 1), ref_path, ref_path_len);
+
+  // Add the trailing '\0'.
+  buffer[len] = '\0';
+
+  return buffer;
+}
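+
+// Illustrative examples:
+//
+//   MergePaths("/a/b/c", "d")  ==>  "/a/b/d"   (last segment replaced)
+//   MergePaths("", "d")        ==>  "/d"       (empty base gets a leading '/')
+//   MergePaths("a", "d")       ==>  "d"        (no '/' in the base path)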
+
+
+static char* BuildUri(const ParsedUri& uri) {
+  Zone* zone = Thread::Current()->zone();
+  ASSERT(uri.path != NULL);
+
+  const char* fragment = uri.fragment == NULL ? "" : uri.fragment;
+  const char* fragment_separator = uri.fragment == NULL ? "" : "#";
+  const char* query = uri.query == NULL ? "" : uri.query;
+  const char* query_separator = uri.query == NULL ? "" : "?";
+
+  // If there is no scheme for this uri, just build a relative uri of
+  // the form: "path[?query][#fragment]".  This occurs when we resolve
+  // relative uris inside a "dart:" library.
+  if (uri.scheme == NULL) {
+    ASSERT(uri.userinfo == NULL && uri.host == NULL && uri.port == NULL);
+    return zone->PrintToString("%s%s%s%s%s",
+                               uri.path, query_separator, query,
+                               fragment_separator, fragment);
+  }
+
+  // Uri with no authority: "scheme:path[?query][#fragment]"
+  if (uri.host == NULL) {
+    ASSERT(uri.userinfo == NULL && uri.port == NULL);
+    return zone->PrintToString("%s:%s%s%s%s%s",
+                               uri.scheme, uri.path, query_separator, query,
+                               fragment_separator, fragment);
+  }
+
+  const char* user = uri.userinfo == NULL ? "" : uri.userinfo;
+  const char* user_separator = uri.userinfo == NULL ? "" : "@";
+  const char* port = uri.port == NULL ? "" : uri.port;
+  const char* port_separator = uri.port == NULL ? "" : ":";
+
+  // If the path doesn't start with a '/', add one.  We need it to
+  // separate the path from the authority.
+  const char* path_separator = ((uri.path[0] == '\0' || uri.path[0] == '/')
+                                ? "" : "/");
+
+  // Uri with authority:
+  //   "scheme://[userinfo@]host[:port][/]path[?query][#fragment]"
+  return zone->PrintToString(
+      "%s://%s%s%s%s%s%s%s%s%s%s%s",  // There is *nothing* wrong with this.
+      uri.scheme, user, user_separator, uri.host, port_separator, port,
+      path_separator, uri.path, query_separator, query,
+      fragment_separator, fragment);
+}
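+
+// Recomposition sketch (illustrative): scheme "foo", host "example.com",
+// port "8042", path "/over/there" and query "name=ferret" produce
+// "foo://example.com:8042/over/there?name=ferret"; with only a path and a
+// fragment the result degenerates to "path#fragment".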
+
+
+// See RFC 3986 Section 5: Reference Resolution
+bool ResolveUri(const char* ref_uri,
+                const char* base_uri,
+                const char** target_uri) {
+  // Parse the reference uri.
+  ParsedUri ref;
+  if (!ParseUri(ref_uri, &ref)) {
+    *target_uri = NULL;
+    return false;
+  }
+
+  ParsedUri target;
+  if (ref.scheme != NULL) {
+    if (strcmp(ref.scheme, "dart") == 0) {
+      Zone* zone = Thread::Current()->zone();
+      *target_uri = zone->MakeCopyOfString(ref_uri);
+      return true;
+    }
+
+    // When the ref_uri specifies a scheme, the base_uri is ignored.
+    target.scheme = ref.scheme;
+    target.userinfo = ref.userinfo;
+    target.host = ref.host;
+    target.port = ref.port;
+    target.path = RemoveDotSegments(ref.path);
+    target.query = ref.query;
+    target.fragment = ref.fragment;
+    *target_uri = BuildUri(target);
+    return true;
+  }
+
+  // Parse the base uri.
+  ParsedUri base;
+  if (!ParseUri(base_uri, &base)) {
+    *target_uri = NULL;
+    return false;
+  }
+
+  if ((base.scheme != NULL) && strcmp(base.scheme, "dart") == 0) {
+    Zone* zone = Thread::Current()->zone();
+    *target_uri = zone->MakeCopyOfString(ref_uri);
+    return true;
+  }
+
+  if (ref.host != NULL) {
+    // When the ref_uri specifies an authority, we only use the base scheme.
+    target.scheme = base.scheme;
+    target.userinfo = ref.userinfo;
+    target.host = ref.host;
+    target.port = ref.port;
+    target.path = RemoveDotSegments(ref.path);
+    target.query = ref.query;
+    target.fragment = ref.fragment;
+    *target_uri = BuildUri(target);
+    return true;
+  }
+
+  if (ref.path[0] == '\0') {
+    // Empty path.  Use most parts of base_uri.
+    target.scheme = base.scheme;
+    target.userinfo = base.userinfo;
+    target.host = base.host;
+    target.port = base.port;
+    target.path = base.path;
+    target.query = ((ref.query == NULL) ? base.query : ref.query);
+    target.fragment = ref.fragment;
+    *target_uri = BuildUri(target);
+    return true;
+
+  } else if (ref.path[0] == '/') {
+    // Absolute path.  ref_path wins.
+    target.scheme = base.scheme;
+    target.userinfo = base.userinfo;
+    target.host = base.host;
+    target.port = base.port;
+    target.path = RemoveDotSegments(ref.path);
+    target.query = ref.query;
+    target.fragment = ref.fragment;
+    *target_uri = BuildUri(target);
+    return true;
+
+  } else {
+    // Relative path.  We need to merge the base path and the ref path.
+
+    if (base.scheme == NULL && base.host == NULL && base.path[0] != '/') {
+      // The dart:core Uri class handles resolving a relative uri
+      // against a second relative uri specially, in a way not
+      // described in the RFC.  We do not need to support this for
+      // library resolution.  If we need to implement this later, we
+      // can.
+      *target_uri = NULL;
+      return false;
+    }
+
+    target.scheme = base.scheme;
+    target.userinfo = base.userinfo;
+    target.host = base.host;
+    target.port = base.port;
+    target.path = RemoveDotSegments(MergePaths(base.path, ref.path));
+    target.query = ref.query;
+    target.fragment = ref.fragment;
+    *target_uri = BuildUri(target);
+    return true;
+  }
+}
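+
+// End-to-end sketch (matches ResolveUri_TestUriPerRFCs in uri_test.cc):
+//
+//   const char* target = NULL;
+//   if (ResolveUri("../g", "http://a/b/c/d;p?q", &target)) {
+//     // target == "http://a/b/g", allocated in the current thread's zone.
+//   }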
+
+}  // namespace dart
diff --git a/runtime/vm/uri.h b/runtime/vm/uri.h
new file mode 100644
index 0000000..453b371
--- /dev/null
+++ b/runtime/vm/uri.h
@@ -0,0 +1,33 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#ifndef VM_URI_H_
+#define VM_URI_H_
+
+#include "platform/utils.h"
+#include "vm/globals.h"
+
+namespace dart {
+
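+// The parts of a uri as produced by ParseUri.  Components that are absent
+// from the uri are NULL; present components are zone-allocated strings and
+// must not outlive the zone in which the uri was parsed.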
+struct ParsedUri {
+  const char* scheme;
+  const char* userinfo;
+  const char* host;
+  const char* port;
+  const char* path;
+  const char* query;
+  const char* fragment;
+};
+
+// Parses a uri into its parts.  Returns false if the parse fails.
+bool ParseUri(const char* uri, ParsedUri* parsed_uri);
+
+// Resolves a reference uri against a base uri, writing the zone-allocated
+// result to *target_uri.  Returns false if resolution fails.
+bool ResolveUri(const char* ref_uri,
+                const char* base_uri,
+                const char** target_uri);
+
+}  // namespace dart
+
+#endif  // VM_URI_H_
diff --git a/runtime/vm/uri_test.cc b/runtime/vm/uri_test.cc
new file mode 100644
index 0000000..34631a5
--- /dev/null
+++ b/runtime/vm/uri_test.cc
@@ -0,0 +1,666 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/uri.h"
+#include "vm/unit_test.h"
+
+namespace dart {
+
+TEST_CASE(ParseUri_WithScheme_NoQueryNoUser) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://example.com:8042/over/there", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_WithQuery) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://example.com:8042/over/there?name=ferret", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT_STREQ("name=ferret", uri.query);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_WithFragment) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://example.com:8042/over/there#fragment", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT_STREQ("fragment", uri.fragment);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_WithQueryWithFragment) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://example.com:8042/over/there?name=ferret#fragment",
+                  &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT_STREQ("name=ferret", uri.query);
+  EXPECT_STREQ("fragment", uri.fragment);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_WithUser) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://user@example.com:8042/over/there", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT_STREQ("user", uri.userinfo);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_ShortPath) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://example.com:8042/", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_EmptyPath) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo://example.com:8042", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_Rootless1) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo:here", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("here", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_WithScheme_Rootless2) {
+  ParsedUri uri;
+  EXPECT(ParseUri("foo:or/here", &uri));
+  EXPECT_STREQ("foo", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("or/here", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_AbsPath_WithAuthority) {
+  ParsedUri uri;
+  EXPECT(ParseUri("//example.com:8042/over/there", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("example.com", uri.host);
+  EXPECT_STREQ("8042", uri.port);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_AbsPath_NoAuthority) {
+  ParsedUri uri;
+  EXPECT(ParseUri("/over/there", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("/over/there", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+// Colons are permitted in path segments, except in the first segment of a
+// relative-path reference, where they would be mistaken for a scheme.
+TEST_CASE(ParseUri_NoScheme_AbsPath_StrayColon) {
+  ParsedUri uri;
+  EXPECT(ParseUri("/ov:er/there", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("/ov:er/there", uri.path);
+  EXPECT(uri.query == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_Rootless1) {
+  ParsedUri uri;
+  EXPECT(ParseUri("here", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("here", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_Rootless2) {
+  ParsedUri uri;
+  EXPECT(ParseUri("or/here", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("or/here", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_Empty) {
+  ParsedUri uri;
+  EXPECT(ParseUri("", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_QueryOnly) {
+  ParsedUri uri;
+  EXPECT(ParseUri("?name=ferret", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("", uri.path);
+  EXPECT_STREQ("name=ferret", uri.query);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NoScheme_FragmentOnly) {
+  ParsedUri uri;
+  EXPECT(ParseUri("#fragment", &uri));
+  EXPECT(uri.scheme == NULL);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT_STREQ("fragment", uri.fragment);
+}
+
+
+TEST_CASE(ParseUri_LowerCaseScheme) {
+  ParsedUri uri;
+  EXPECT(ParseUri("ScHeMe:path", &uri));
+  EXPECT_STREQ("scheme", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("path", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NormalizeEscapes_PathQueryFragment) {
+  ParsedUri uri;
+  EXPECT(ParseUri(
+      "scheme:/This%09Is A P%61th?This%09Is A Qu%65ry#A Fr%61gment", &uri));
+  EXPECT_STREQ("scheme", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("/This%09Is%20A%20Path", uri.path);
+  EXPECT_STREQ("This%09Is%20A%20Query", uri.query);
+  EXPECT_STREQ("A%20Fragment", uri.fragment);
+}
+
+
+TEST_CASE(ParseUri_NormalizeEscapes_UppercaseEscapesPreferred) {
+  ParsedUri uri;
+  EXPECT(ParseUri(
+      "scheme:/%1b%1B", &uri));
+  EXPECT_STREQ("scheme", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("/%1B%1B", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NormalizeEscapes_Authority) {
+  ParsedUri uri;
+  EXPECT(ParseUri(
+      "scheme://UsEr N%61%4de@h%4FsT.c%6fm:80/", &uri));
+  EXPECT_STREQ("scheme", uri.scheme);
+  EXPECT_STREQ("UsEr%20NaMe", uri.userinfo);  // Normalized, case preserved.
+  EXPECT_STREQ("host.com", uri.host);         // Normalized, lower-cased.
+  EXPECT_STREQ("80", uri.port);
+  EXPECT_STREQ("/", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_NormalizeEscapes_UppercaseEscapeInHost) {
+  ParsedUri uri;
+  EXPECT(ParseUri("scheme://tEst%1b/", &uri));
+  EXPECT_STREQ("scheme", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT_STREQ("test%1B", uri.host);  // Notice that %1B is upper-cased.
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("/", uri.path);
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ParseUri_BrokenEscapeSequence) {
+  ParsedUri uri;
+  EXPECT(ParseUri(
+      "scheme:/%1g", &uri));
+  EXPECT_STREQ("scheme", uri.scheme);
+  EXPECT(uri.userinfo == NULL);
+  EXPECT(uri.host == NULL);
+  EXPECT(uri.port == NULL);
+  EXPECT_STREQ("/%1g", uri.path);  // Broken sequence is unchanged.
+  EXPECT(uri.query == NULL);
+  EXPECT(uri.fragment == NULL);
+}
+
+
+TEST_CASE(ResolveUri_WithScheme_NoAuthorityNoQuery) {
+  const char* target_uri;
+  EXPECT(ResolveUri("rscheme:/ref/path",
+                    "bscheme://buser@bhost:11/base/path?baseQuery",
+                    &target_uri));
+  EXPECT_STREQ("rscheme:/ref/path", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_WithScheme_WithAuthorityWithQuery) {
+  const char* target_uri;
+  EXPECT(ResolveUri("rscheme://ruser@rhost:22/ref/path?refQuery",
+                    "bscheme://buser@bhost:11/base/path?baseQuery",
+                    &target_uri));
+  EXPECT_STREQ("rscheme://ruser@rhost:22/ref/path?refQuery", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoScheme_WithAuthority) {
+  const char* target_uri;
+  EXPECT(ResolveUri("//ruser@rhost:22/ref/path",
+                    "bscheme://buser@bhost:11/base/path?baseQuery",
+                    &target_uri));
+  EXPECT_STREQ("bscheme://ruser@rhost:22/ref/path", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_AbsolutePath) {
+  const char* target_uri;
+  EXPECT(ResolveUri("/ref/path",
+                    "bscheme://buser@bhost:11/base/path?baseQuery",
+                    &target_uri));
+  EXPECT_STREQ("bscheme://buser@bhost:11/ref/path", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_RelativePath) {
+  const char* target_uri;
+  EXPECT(ResolveUri("ref/path",
+                    "bscheme://buser@bhost:11/base/path?baseQuery",
+                    &target_uri));
+  EXPECT_STREQ("bscheme://buser@bhost:11/base/ref/path", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_RelativePathEmptyBasePath) {
+  const char* target_uri;
+  EXPECT(ResolveUri("ref/path",
+                    "bscheme://buser@bhost:11",
+                    &target_uri));
+  EXPECT_STREQ("bscheme://buser@bhost:11/ref/path", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_RelativePathWeirdBasePath) {
+  const char* target_uri;
+  EXPECT(ResolveUri("ref/path",
+                    "bscheme:base",
+                    &target_uri));
+  EXPECT_STREQ("bscheme:ref/path", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_EmptyPath) {
+  const char* target_uri;
+  EXPECT(ResolveUri("",
+                    "bscheme://buser@bhost:11/base/path?baseQuery#bfragment",
+                    &target_uri));
+  // Note that we drop the base fragment from the resolved uri.
+  EXPECT_STREQ("bscheme://buser@bhost:11/base/path?baseQuery", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_EmptyPathWithQuery) {
+  const char* target_uri;
+  EXPECT(ResolveUri("?refQuery",
+                    "bscheme://buser@bhost:11/base/path?baseQuery#bfragment",
+                    &target_uri));
+  EXPECT_STREQ("bscheme://buser@bhost:11/base/path?refQuery", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NoSchemeNoAuthority_EmptyPathWithFragment) {
+  const char* target_uri;
+  EXPECT(ResolveUri("#rfragment",
+                    "bscheme://buser@bhost:11/base/path?baseQuery#bfragment",
+                    &target_uri));
+  EXPECT_STREQ("bscheme://buser@bhost:11/base/path?baseQuery#rfragment",
+               target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveOneDotSegment) {
+  const char* target_uri;
+  EXPECT(ResolveUri("./refpath",
+                    "scheme://auth/a/b/c/d",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/a/b/c/refpath", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveTwoDotSegments) {
+  const char* target_uri;
+  EXPECT(ResolveUri("././refpath",
+                    "scheme://auth/a/b/c/d",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/a/b/c/refpath", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveOneDotDotSegment) {
+  const char* target_uri;
+  EXPECT(ResolveUri("../refpath",
+                    "scheme://auth/a/b/c/d",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/a/b/refpath", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveTwoDotDotSegments) {
+  const char* target_uri;
+  EXPECT(ResolveUri("../../refpath",
+                    "scheme://auth/a/b/c/d",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/a/refpath", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveTooManyDotDotSegments) {
+  const char* target_uri;
+  EXPECT(ResolveUri("../../../../../../../../../refpath",
+                    "scheme://auth/a/b/c/d",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/refpath", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveDotSegmentsNothingLeft1) {
+  const char* target_uri;
+  EXPECT(ResolveUri("../../../../..",
+                    "scheme://auth/a/b/c/d",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveDotSegmentsNothingLeft2) {
+  const char* target_uri;
+  EXPECT(ResolveUri(".",
+                    "scheme://auth/",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveDotSegmentsInitialPrefix) {
+  const char* target_uri;
+  EXPECT(ResolveUri("../../../../refpath",
+                    "scheme://auth",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/refpath", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_RemoveDots_RemoveDotSegmentsMixed) {
+  const char* target_uri;
+  EXPECT(ResolveUri("../../1/./2/../3/4/../5/././6/../7",
+                    "scheme://auth/a/b/c/d/e",
+                    &target_uri));
+  EXPECT_STREQ("scheme://auth/a/b/1/3/5/7", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NormalizeEscapes_PathQueryFragment) {
+  const char* target_uri;
+  EXPECT(ResolveUri("#A Fr%61gment",
+                    "scheme:/This%09Is A P%61th?This%09Is A Qu%65ry",
+                    &target_uri));
+  EXPECT_STREQ(
+      "scheme:/This%09Is%20A%20Path?This%09Is%20A%20Query#A%20Fragment",
+      target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NormalizeEscapes_UppercaseHexPreferred) {
+  const char* target_uri;
+  EXPECT(ResolveUri("",
+                    "scheme:/%1b%1B",
+                    &target_uri));
+  EXPECT_STREQ("scheme:/%1B%1B",
+               target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NormalizeEscapes_Authority) {
+  const char* target_uri;
+  EXPECT(ResolveUri("",
+                    "scheme://UsEr N%61%4de@h%4FsT.c%6fm:80/",
+                    &target_uri));
+  // The userinfo is normalized with its case preserved; the host is
+  // normalized and lower-cased.
+  EXPECT_STREQ("scheme://UsEr%20NaMe@host.com:80/", target_uri);
+}
+
+
+TEST_CASE(ResolveUri_NormalizeEscapes_BrokenEscapeSequence) {
+  const char* target_uri;
+  EXPECT(ResolveUri("",
+                    "scheme:/%1g",
+                    &target_uri));
+  // We don't change broken escape sequences.
+  EXPECT_STREQ("scheme:/%1g",
+               target_uri);
+}
+
+
+TEST_CASE(ResolveUri_DataUri) {
+  const char* data_uri =
+      "data:application/dart;charset=utf-8,%20%20%20%20%20%20%20%20import%20%22dart:isolate%22;%0A%0A%20%20%20%20%20%20%20%20import%20%22package:stream_channel/stream_channel.dart%22;%0A%0A%20%20%20%20%20%20%20%20import%20%22package:test/src/runner/plugin/remote_platform_helpers.dart%22;%0A%20%20%20%20%20%20%20%20import%20%22package:test/src/runner/vm/catch_isolate_errors.dart%22;%0A%0A%20%20%20%20%20%20%20%20import%20%22file:///home/sra/xxxx/dev_compiler/test/all_tests.dart%22%20as%20test;%0A%0A%20%20%20%20%20%20%20%20void%20main(_,%20SendPort%20message)%20%7B%0A%20%20%20%20%20%20%20%20%20%20var%20channel%20=%20serializeSuite(()%20%7B%0A%20%20%20%20%20%20%20%20%20%20%20%20catchIsolateErrors();%0A%20%20%20%20%20%20%20%20%20%20%20%20return%20test.main;%0A%20%20%20%20%20%20%20%20%20%20%7D);%0A%20%20%20%20%20%20%20%20%20%20new%20IsolateChannel.connectSend(message).pipe(channel);%0A%20%20%20%20%20%20%20%20%7D%0A%20%20%20%20%20%20";  // NOLINT
+
+  const char* target_uri;
+  EXPECT(ResolveUri(data_uri,
+                    "bscheme://buser@bhost:11/base/path?baseQuery#bfragment",
+                    &target_uri));
+  EXPECT_STREQ(data_uri, target_uri);
+}
+
+
+// dart:core Uri allows the base uri to be relative (no scheme, no
+// authority, relative path), but this behavior is not in RFC 3986.  We
+// do not implement it.
+TEST_CASE(ResolveUri_RelativeBase_NotImplemented) {
+  const char* target_uri;
+  EXPECT(!ResolveUri("../r1", "b1/b2", &target_uri));
+  EXPECT(target_uri == NULL);
+
+  EXPECT(!ResolveUri("..", "b1/b2", &target_uri));
+  EXPECT(target_uri == NULL);
+
+  EXPECT(!ResolveUri("../..", "b1/b2", &target_uri));
+  EXPECT(target_uri == NULL);
+
+  EXPECT(!ResolveUri("../../..", "b1/b2", &target_uri));
+  EXPECT(target_uri == NULL);
+
+  EXPECT(!ResolveUri("../../../r1", "b1/b2", &target_uri));
+  EXPECT(target_uri == NULL);
+
+  EXPECT(!ResolveUri("../r1", "../../b1/b2/b3", &target_uri));
+  EXPECT(target_uri == NULL);
+
+  EXPECT(!ResolveUri("../../../r1", "../../b1/b2/b3", &target_uri));
+  EXPECT(target_uri == NULL);
+}
+
+
+static const char* TestResolve(const char* base_uri, const char* uri) {
+  const char* target_uri;
+  EXPECT(ResolveUri(uri, base_uri, &target_uri));
+  return target_uri;
+}
+
+
+// This test is ported from sdk/tests/corelib/uri_test.dart (testUriPerRFCs).
+TEST_CASE(ResolveUri_TestUriPerRFCs) {
+  const char* base = "http://a/b/c/d;p?q";
+
+  // From RFC 3986
+  EXPECT_STREQ("g:h",                   TestResolve(base, "g:h"));
+  EXPECT_STREQ("http://a/b/c/g",        TestResolve(base, "g"));
+  EXPECT_STREQ("http://a/b/c/g",        TestResolve(base, "./g"));
+  EXPECT_STREQ("http://a/b/c/g/",       TestResolve(base, "g/"));
+  EXPECT_STREQ("http://a/g",            TestResolve(base, "/g"));
+  EXPECT_STREQ("http://g",              TestResolve(base, "//g"));
+  EXPECT_STREQ("http://a/b/c/d;p?y",    TestResolve(base, "?y"));
+  EXPECT_STREQ("http://a/b/c/g?y",      TestResolve(base, "g?y"));
+  EXPECT_STREQ("http://a/b/c/d;p?q#s",  TestResolve(base, "#s"));
+  EXPECT_STREQ("http://a/b/c/g#s",      TestResolve(base, "g#s"));
+  EXPECT_STREQ("http://a/b/c/g?y#s",    TestResolve(base, "g?y#s"));
+  EXPECT_STREQ("http://a/b/c/;x",       TestResolve(base, ";x"));
+  EXPECT_STREQ("http://a/b/c/g;x",      TestResolve(base, "g;x"));
+  EXPECT_STREQ("http://a/b/c/g;x?y#s",  TestResolve(base, "g;x?y#s"));
+  EXPECT_STREQ("http://a/b/c/d;p?q",    TestResolve(base, ""));
+  EXPECT_STREQ("http://a/b/c/",         TestResolve(base, "."));
+  EXPECT_STREQ("http://a/b/c/",         TestResolve(base, "./"));
+  EXPECT_STREQ("http://a/b/",           TestResolve(base, ".."));
+  EXPECT_STREQ("http://a/b/",           TestResolve(base, "../"));
+  EXPECT_STREQ("http://a/b/g",          TestResolve(base, "../g"));
+  EXPECT_STREQ("http://a/",             TestResolve(base, "../.."));
+  EXPECT_STREQ("http://a/",             TestResolve(base, "../../"));
+  EXPECT_STREQ("http://a/g",            TestResolve(base, "../../g"));
+  EXPECT_STREQ("http://a/g",            TestResolve(base, "../../../g"));
+  EXPECT_STREQ("http://a/g",            TestResolve(base, "../../../../g"));
+  EXPECT_STREQ("http://a/g",            TestResolve(base, "/./g"));
+  EXPECT_STREQ("http://a/g",            TestResolve(base, "/../g"));
+  EXPECT_STREQ("http://a/b/c/g.",       TestResolve(base, "g."));
+  EXPECT_STREQ("http://a/b/c/.g",       TestResolve(base, ".g"));
+  EXPECT_STREQ("http://a/b/c/g..",      TestResolve(base, "g.."));
+  EXPECT_STREQ("http://a/b/c/..g",      TestResolve(base, "..g"));
+  EXPECT_STREQ("http://a/b/g",          TestResolve(base, "./../g"));
+  EXPECT_STREQ("http://a/b/c/g/",       TestResolve(base, "./g/."));
+  EXPECT_STREQ("http://a/b/c/g/h",      TestResolve(base, "g/./h"));
+  EXPECT_STREQ("http://a/b/c/h",        TestResolve(base, "g/../h"));
+  EXPECT_STREQ("http://a/b/c/g;x=1/y",  TestResolve(base, "g;x=1/./y"));
+  EXPECT_STREQ("http://a/b/c/y",        TestResolve(base, "g;x=1/../y"));
+  EXPECT_STREQ("http://a/b/c/g?y/./x",  TestResolve(base, "g?y/./x"));
+  EXPECT_STREQ("http://a/b/c/g?y/../x", TestResolve(base, "g?y/../x"));
+  EXPECT_STREQ("http://a/b/c/g#s/./x",  TestResolve(base, "g#s/./x"));
+  EXPECT_STREQ("http://a/b/c/g#s/../x", TestResolve(base, "g#s/../x"));
+  EXPECT_STREQ("http:g",                TestResolve(base, "http:g"));
+
+  // Additional tests (not from RFC 3986).
+  EXPECT_STREQ("http://a/b/g;p/h;s",    TestResolve(base, "../g;p/h;s"));
+
+  base = "s:a/b";
+  EXPECT_STREQ("s:/c", TestResolve(base, "../c"));
+}
+
+
+// This test is ported from sdk/tests/corelib/uri_test.dart (testResolvePath).
+TEST_CASE(ResolveUri_MoreDotSegmentTests) {
+  const char* base = "/";
+  EXPECT_STREQ("/a/g",    TestResolve(base, "/a/b/c/./../../g"));
+  EXPECT_STREQ("/a/g",    TestResolve(base, "/a/b/c/./../../g"));
+  EXPECT_STREQ("/mid/6",  TestResolve(base, "mid/content=5/../6"));
+  EXPECT_STREQ("/a/b/e",  TestResolve(base, "a/b/c/d/../../e"));
+  EXPECT_STREQ("/a/b/e",  TestResolve(base, "../a/b/c/d/../../e"));
+  EXPECT_STREQ("/a/b/e",  TestResolve(base, "./a/b/c/d/../../e"));
+  EXPECT_STREQ("/a/b/e",  TestResolve(base, "../a/b/./c/d/../../e"));
+  EXPECT_STREQ("/a/b/e",  TestResolve(base, "./a/b/./c/d/../../e"));
+  EXPECT_STREQ("/a/b/e/", TestResolve(base, "./a/b/./c/d/../../e/."));
+  EXPECT_STREQ("/a/b/e/", TestResolve(base, "./a/b/./c/d/../../e/./."));
+  EXPECT_STREQ("/a/b/e/", TestResolve(base, "./a/b/./c/d/../../e/././."));
+
+  #define LH "http://localhost"
+  base = LH;
+  EXPECT_STREQ(LH "/a/g",    TestResolve(base, "/a/b/c/./../../g"));
+  EXPECT_STREQ(LH "/a/g",    TestResolve(base, "/a/b/c/./../../g"));
+  EXPECT_STREQ(LH "/mid/6",  TestResolve(base, "mid/content=5/../6"));
+  EXPECT_STREQ(LH "/a/b/e",  TestResolve(base, "a/b/c/d/../../e"));
+  EXPECT_STREQ(LH "/a/b/e",  TestResolve(base, "../a/b/c/d/../../e"));
+  EXPECT_STREQ(LH "/a/b/e",  TestResolve(base, "./a/b/c/d/../../e"));
+  EXPECT_STREQ(LH "/a/b/e",  TestResolve(base, "../a/b/./c/d/../../e"));
+  EXPECT_STREQ(LH "/a/b/e",  TestResolve(base, "./a/b/./c/d/../../e"));
+  EXPECT_STREQ(LH "/a/b/e/", TestResolve(base, "./a/b/./c/d/../../e/."));
+  EXPECT_STREQ(LH "/a/b/e/", TestResolve(base, "./a/b/./c/d/../../e/./."));
+  EXPECT_STREQ(LH "/a/b/e/", TestResolve(base, "./a/b/./c/d/../../e/././."));
+  #undef LH
+}
+
+}  // namespace dart
diff --git a/runtime/vm/verified_memory.cc b/runtime/vm/verified_memory.cc
deleted file mode 100644
index a3a3d9e..0000000
--- a/runtime/vm/verified_memory.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-#include "vm/verified_memory.h"
-
-namespace dart {
-
-#if defined(DEBUG)
-
-DEFINE_FLAG(bool, verified_mem, false,
-            "Enable write-barrier verification mode (slow, DEBUG only).");
-DEFINE_FLAG(int, verified_mem_max_reserve_mb, (kWordSize <= 4) ? 16 : 32,
-            "When verified_mem is true, largest supported reservation (MB).");
-
-
-VirtualMemory* VerifiedMemory::ReserveInternal(intptr_t size) {
-  if (size > offset()) {
-    FATAL1("Requested reservation of %" Pd " bytes exceeds the limit. "
-           "Use --verified_mem_max_reserve_mb to increase it.", size);
-  }
-  VirtualMemory* result = VirtualMemory::Reserve(size + offset());
-  if (result != NULL) {
-    // Commit the offset part of the reservation (writable, not executable).
-    result->Commit(result->start() + offset(), size, /* executable = */ false);
-    // Truncate without unmapping, so that the returned object looks like
-    // a normal 'size' bytes reservation (but VirtualMemory will correctly
-    // unmap the entire original reservation on destruction).
-    result->Truncate(size, /* try_unmap = */ false);
-  }
-  return result;
-}
-
-#endif  // DEBUG
-
-}  // namespace dart
diff --git a/runtime/vm/verified_memory.h b/runtime/vm/verified_memory.h
deleted file mode 100644
index db292cc..0000000
--- a/runtime/vm/verified_memory.h
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-#ifndef VM_VERIFIED_MEMORY_H_
-#define VM_VERIFIED_MEMORY_H_
-
-#include "vm/allocation.h"
-#include "vm/flags.h"
-#include "vm/virtual_memory.h"
-
-namespace dart {
-
-#if defined(DEBUG)
-DECLARE_FLAG(bool, verified_mem);
-DECLARE_FLAG(int, verified_mem_max_reserve_mb);
-#endif
-
-
-// A wrapper around VirtualMemory for verifying that a particular class of
-// memory writes are only performed through a particular interface.
-//
-// The main use case is verifying that storing pointers into objects is only
-// performed by code aware of the GC write barrier.
-//
-// NOTE: Verification is enabled only if 'verified_mem' is true, and this flag
-// only exists in DEBUG builds.
-class VerifiedMemory : public AllStatic {
- public:
-  // Reserves a block of memory for which all methods in this class may
-  // be called. Returns NULL if out of memory.
-  static VirtualMemory* Reserve(intptr_t size) {
-    return enabled() ? ReserveInternal(size) : VirtualMemory::Reserve(size);
-  }
-
-  // Verifies that [start, start + size) has only been mutated through
-  // methods in this class (or explicitly accepted by calling Accept).
-  static void Verify(uword start, intptr_t size) {
-    if (!enabled()) return;
-    ASSERT(size <= offset());
-    ASSERT(memcmp(reinterpret_cast<void*>(start + offset()),
-                  reinterpret_cast<void*>(start),
-                  size) == 0);
-  }
-
-  // Assigns value to *ptr after verifying previous content at that location.
-  template<typename T>
-  static void Write(T* ptr, const T& value) {
-    if (enabled()) {
-      uword addr = reinterpret_cast<uword>(ptr);
-      Verify(addr, sizeof(T));
-      T* offset_ptr = reinterpret_cast<T*>(addr + offset());
-      *offset_ptr = value;
-    }
-    *ptr = value;
-  }
-
-  // Accepts the current state of [start, start + size), even if it has been
-  // mutated by other means.
-  static void Accept(uword start, intptr_t size) {
-    if (!enabled()) return;
-    ASSERT(size <= offset());
-    memmove(reinterpret_cast<void*>(start + offset()),
-            reinterpret_cast<void*>(start),
-            size);
-  }
-
- private:
-#if defined(DEBUG)
-  static bool enabled() { return FLAG_verified_mem; }
-  static intptr_t offset() { return FLAG_verified_mem_max_reserve_mb * MB; }
-  static VirtualMemory* ReserveInternal(intptr_t size);
-#else
-  // In release mode, most code in this class is optimized away.
-  static bool enabled() { return false; }
-  static intptr_t offset() { UNREACHABLE(); return -1; }
-  static VirtualMemory* ReserveInternal(intptr_t size) {
-    UNREACHABLE();
-    return NULL;
-  }
-#endif
-
-  friend class Assembler;  // To use enabled/offset when generating code.
-  friend class FlowGraphCompiler;  // To compute edge counter code size.
-  friend class Intrinsifier;  // To know whether a jump is near or far.
-};
-
-}  // namespace dart
-
-#endif  // VM_VERIFIED_MEMORY_H_
diff --git a/runtime/vm/verifier.cc b/runtime/vm/verifier.cc
index 78b4697..18a7874 100644
--- a/runtime/vm/verifier.cc
+++ b/runtime/vm/verifier.cc
@@ -20,7 +20,7 @@
 void VerifyObjectVisitor::VisitObject(RawObject* raw_obj) {
   if (raw_obj->IsHeapObject()) {
     uword raw_addr = RawObject::ToAddr(raw_obj);
-    if (raw_obj->IsFreeListElement()) {
+    if (raw_obj->IsFreeListElement() || raw_obj->IsForwardingCorpse()) {
       if (raw_obj->IsMarked()) {
         FATAL1("Marked free list element encountered %#" Px "\n", raw_addr);
       }
@@ -48,7 +48,6 @@
 
 void VerifyPointersVisitor::VisitPointers(RawObject** first, RawObject** last) {
   for (RawObject** current = first; current <= last; current++) {
-    VerifiedMemory::Verify(reinterpret_cast<uword>(current), kWordSize);
     RawObject* raw_obj = *current;
     if (raw_obj->IsHeapObject()) {
       if (!allocated_set_->Contains(raw_obj)) {
diff --git a/runtime/vm/virtual_memory_fuchsia.cc b/runtime/vm/virtual_memory_fuchsia.cc
new file mode 100644
index 0000000..da32045
--- /dev/null
+++ b/runtime/vm/virtual_memory_fuchsia.cc
@@ -0,0 +1,55 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+#include "vm/globals.h"
+#if defined(TARGET_OS_FUCHSIA)
+
+#include "vm/virtual_memory.h"
+
+#include <unistd.h>  // NOLINT
+
+#include "platform/assert.h"
+#include "vm/os.h"
+
+namespace dart {
+
+uword VirtualMemory::page_size_ = 0;
+
+
+void VirtualMemory::InitOnce() {
+  page_size_ = getpagesize();
+}
+
+
+VirtualMemory* VirtualMemory::ReserveInternal(intptr_t size) {
+  UNIMPLEMENTED();
+  return NULL;
+}
+
+
+VirtualMemory::~VirtualMemory() {
+  UNIMPLEMENTED();
+}
+
+
+bool VirtualMemory::FreeSubSegment(void* address, intptr_t size) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+bool VirtualMemory::Commit(uword addr, intptr_t size, bool executable) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+
+bool VirtualMemory::Protect(void* address, intptr_t size, Protection mode) {
+  UNIMPLEMENTED();
+  return false;
+}
+
+}  // namespace dart
+
+#endif  // defined(TARGET_OS_FUCHSIA)
diff --git a/runtime/vm/vm_sources.gypi b/runtime/vm/vm_sources.gypi
index edb97e4..156b120 100644
--- a/runtime/vm/vm_sources.gypi
+++ b/runtime/vm/vm_sources.gypi
@@ -43,6 +43,7 @@
     'ast_transformer.h',
     'atomic.h',
     'atomic_android.h',
+    'atomic_fuchsia.h',
     'atomic_linux.h',
     'atomic_macos.h',
     'atomic_simulator.h',
@@ -80,6 +81,8 @@
     'class_finalizer_test.cc',
     'class_table.cc',
     'class_table.h',
+    'clustered_snapshot.cc',
+    'clustered_snapshot.h',
     'code_descriptors.cc',
     'code_descriptors.h',
     'code_descriptors_test.cc',
@@ -125,6 +128,7 @@
     'cpuid.cc',
     'cpuinfo.h',
     'cpuinfo_android.cc',
+    'cpuinfo_fuchsia.cc',
     'cpuinfo_linux.cc',
     'cpuinfo_macos.cc',
     'cpuinfo_test.cc',
@@ -136,6 +140,7 @@
     'dart_api_impl_test.cc',
     'dart_api_message.cc',
     'dart_api_message.h',
+    'dart_api_state.cc',
     'dart_api_state.h',
     'dart_entry.cc',
     'dart_entry.h',
@@ -302,6 +307,7 @@
     'native_message_handler.h',
     'native_symbol.h',
     'native_symbol_android.cc',
+    'native_symbol_fuchsia.cc',
     'native_symbol_linux.cc',
     'native_symbol_macos.cc',
     'native_symbol_win.cc',
@@ -328,6 +334,7 @@
     'object_x64_test.cc',
     'os.h',
     'os_android.cc',
+    'os_fuchsia.cc',
     'os_linux.cc',
     'os_macos.cc',
     'os_test.cc',
@@ -335,6 +342,8 @@
     'os_thread.h',
     'os_thread_android.cc',
     'os_thread_android.h',
+    'os_thread_fuchsia.cc',
+    'os_thread_fuchsia.h',
     'os_thread_linux.cc',
     'os_thread_linux.h',
     'os_thread_macos.cc',
@@ -421,6 +430,7 @@
     'service_isolate.h',
     'service_test.cc',
     'signal_handler_android.cc',
+    'signal_handler_fuchsia.cc',
     'signal_handler_linux.cc',
     'signal_handler_macos.cc',
     'signal_handler_win.cc',
@@ -476,6 +486,7 @@
     'thread_interrupter.cc',
     'thread_interrupter.h',
     'thread_interrupter_android.cc',
+    'thread_interrupter_fuchsia.cc',
     'thread_interrupter_linux.cc',
     'thread_interrupter_macos.cc',
     'thread_interrupter_win.cc',
@@ -506,15 +517,16 @@
     'unicode_test.cc',
     'unit_test.cc',
     'unit_test.h',
+    'uri.cc',
+    'uri.h',
+    'uri_test.cc',
     'utils_test.cc',
-    'verified_memory.cc',
-    'verified_memory.h',
-    'verified_memory_test.cc',
     'verifier.cc',
     'verifier.h',
     'virtual_memory.cc',
     'virtual_memory.h',
     'virtual_memory_android.cc',
+    'virtual_memory_fuchsia.cc',
     'virtual_memory_linux.cc',
     'virtual_memory_macos.cc',
     'virtual_memory_test.cc',
diff --git a/runtime/vm/zone.cc b/runtime/vm/zone.cc
index dc17044..0f6754f 100644
--- a/runtime/vm/zone.cc
+++ b/runtime/vm/zone.cc
@@ -169,6 +169,21 @@
 }
 
 
+char* Zone::MakeCopyOfStringN(const char* str, intptr_t len) {
+  ASSERT(len >= 0);
+  for (intptr_t i = 0; i < len; i++) {
+    if (str[i] == '\0') {
+      len = i;
+      break;
+    }
+  }
+  char* copy = Alloc<char>(len + 1);  // +1 for '\0'
+  strncpy(copy, str, len);
+  copy[len] = '\0';
+  return copy;
+}
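+
+// For example, MakeCopyOfStringN("hello", 3) yields a zone-allocated "hel",
+// and MakeCopyOfStringN("a\0b", 3) yields "a" because copying stops at the
+// first '\0'.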
+
+
 char* Zone::ConcatStrings(const char* a, const char* b, char join) {
   intptr_t a_len = (a == NULL) ? 0 : strlen(a);
   const intptr_t b_len = strlen(b) + 1;  // '\0'-terminated.
diff --git a/runtime/vm/zone.h b/runtime/vm/zone.h
index dc7b25c..29d2765 100644
--- a/runtime/vm/zone.h
+++ b/runtime/vm/zone.h
@@ -45,6 +45,10 @@
   // Make a copy of the string in the zone allocated area.
   char* MakeCopyOfString(const char* str);
 
+  // Make a copy of the first n characters of a string in the zone
+  // allocated area.
+  char* MakeCopyOfStringN(const char* str, intptr_t len);
+
   // Concatenate strings |a| and |b|. |a| may be NULL. If |a| is not NULL,
   // |join| will be inserted between |a| and |b|.
   char* ConcatStrings(const char* a, const char* b, char join = ',');
@@ -172,6 +176,8 @@
   friend class ApiZone;
   template<typename T, typename B, typename Allocator>
   friend class BaseGrowableArray;
+  template<typename T, typename B, typename Allocator>
+  friend class BaseDirectChainedHashMap;
   DISALLOW_COPY_AND_ASSIGN(Zone);
 };
 
diff --git a/sdk/bin/dart.bat b/sdk/bin/dart.bat
index 9458999..a6a24de 100644
--- a/sdk/bin/dart.bat
+++ b/sdk/bin/dart.bat
@@ -13,4 +13,4 @@
 
 set arguments=%*
 
-"%SCRIPTPATH%\..\..\build\%DART_CONFIGURATION%\dart.exe" %arguments%
+"%SCRIPTPATH%\..\..\out\%DART_CONFIGURATION%\dart.exe" %arguments%
diff --git a/sdk/bin/dart2js.bat b/sdk/bin/dart2js.bat
index d1a4095..2869680 100644
--- a/sdk/bin/dart2js.bat
+++ b/sdk/bin/dart2js.bat
@@ -42,7 +42,7 @@
 rem DART_CONFIGURATION defaults to ReleaseX64
 if "%DART_CONFIGURATION%"=="" set DART_CONFIGURATION=ReleaseX64
 
-set BUILD_DIR=%DART_ROOT%\build\%DART_CONFIGURATION%
+set BUILD_DIR=%DART_ROOT%\out\%DART_CONFIGURATION%
 
 set PACKAGE_ROOT=%BUILD_DIR%\packages
 
diff --git a/sdk/bin/dartanalyzer.bat b/sdk/bin/dartanalyzer.bat
index 0f33752..8130d98 100644
--- a/sdk/bin/dartanalyzer.bat
+++ b/sdk/bin/dartanalyzer.bat
@@ -43,7 +43,7 @@
 rem DART_CONFIGURATION defaults to ReleaseX64
 if "%DART_CONFIGURATION%"=="" set DART_CONFIGURATION=ReleaseX64
 
-set BUILD_DIR=%DART_ROOT%\build\%DART_CONFIGURATION%
+set BUILD_DIR=%DART_ROOT%\out\%DART_CONFIGURATION%
 
 set PACKAGE_ROOT=%BUILD_DIR%\packages
 
@@ -57,7 +57,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/dartanalyzer_sdk.bat b/sdk/bin/dartanalyzer_sdk.bat
index bca4afb..5fe4ae5 100644
--- a/sdk/bin/dartanalyzer_sdk.bat
+++ b/sdk/bin/dartanalyzer_sdk.bat
@@ -32,7 +32,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/dartdevc.bat b/sdk/bin/dartdevc.bat
index 4e02005..f42c660 100644
--- a/sdk/bin/dartdevc.bat
+++ b/sdk/bin/dartdevc.bat
@@ -39,7 +39,7 @@
 rem DART_CONFIGURATION defaults to ReleaseX64
 if "%DART_CONFIGURATION%"=="" set DART_CONFIGURATION=ReleaseX64
 
-set BUILD_DIR=%DART_ROOT%\build\%DART_CONFIGURATION%
+set BUILD_DIR=%DART_ROOT%\out\%DART_CONFIGURATION%
 
 set PACKAGE_ROOT=%BUILD_DIR%\packages
 
@@ -53,7 +53,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/dartdevc_sdk.bat b/sdk/bin/dartdevc_sdk.bat
index 72c2069..5dbf707 100644
--- a/sdk/bin/dartdevc_sdk.bat
+++ b/sdk/bin/dartdevc_sdk.bat
@@ -32,7 +32,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/dartdoc.bat b/sdk/bin/dartdoc.bat
index 880ae55..b519129 100644
--- a/sdk/bin/dartdoc.bat
+++ b/sdk/bin/dartdoc.bat
@@ -24,7 +24,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/dartfmt.bat b/sdk/bin/dartfmt.bat
index f063221..c4ac9aa 100644
--- a/sdk/bin/dartfmt.bat
+++ b/sdk/bin/dartfmt.bat
@@ -30,7 +30,7 @@
 rem DART_CONFIGURATION defaults to ReleaseX64
 if "%DART_CONFIGURATION%"=="" set DART_CONFIGURATION=ReleaseX64
 
-set BUILD_DIR=%DART_ROOT%\build\%DART_CONFIGURATION%
+set BUILD_DIR=%DART_ROOT%\out\%DART_CONFIGURATION%
 
 set PACKAGE_ROOT=%BUILD_DIR%\packages
 
@@ -44,7 +44,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/dartfmt_sdk.bat b/sdk/bin/dartfmt_sdk.bat
index 7de72c0..295b977 100644
--- a/sdk/bin/dartfmt_sdk.bat
+++ b/sdk/bin/dartfmt_sdk.bat
@@ -24,7 +24,7 @@
 rem canonical path. Output with a link looks something like this
 rem
 rem 01/03/2013  10:11 PM    <JUNCTION>     abc def
-rem [c:\dart_bleeding\dart-repo.9\dart\build\ReleaseIA32\dart-sdk]
+rem [c:\dart_bleeding\dart-repo.9\dart\out\ReleaseIA32\dart-sdk]
 rem
 rem So in the output of 'dir /a:l "targetdir"' we are looking for a filename
 rem surrounded by right angle bracket and left square bracket. Once we get
diff --git a/sdk/bin/pub.bat b/sdk/bin/pub.bat
index 2b8f94b..55ff636 100644
--- a/sdk/bin/pub.bat
+++ b/sdk/bin/pub.bat
@@ -30,7 +30,7 @@
 
 rem Use the Dart binary in the built SDK so pub can find the version file next
 rem to it.
-set BUILD_DIR=%SDK_DIR%\..\build\ReleaseX64
+set BUILD_DIR=%SDK_DIR%\..\out\ReleaseX64
 set PACKAGES_DIR=%BUILD_DIR%\packages
 set DART=%BUILD_DIR%\dart-sdk\bin\dart
 
diff --git a/sdk/lib/_internal/js_runtime/lib/internal_patch.dart b/sdk/lib/_internal/js_runtime/lib/internal_patch.dart
index cb8820c..a0c519f 100644
--- a/sdk/lib/_internal/js_runtime/lib/internal_patch.dart
+++ b/sdk/lib/_internal/js_runtime/lib/internal_patch.dart
@@ -5,12 +5,23 @@
 import 'dart:_js_primitives' show printString;
 import 'dart:_js_helper' show patch;
 import 'dart:_interceptors' show JSArray;
+import 'dart:_foreign_helper' show JS;
 
 @patch
 class Symbol implements core.Symbol {
   @patch
   const Symbol(String name)
       : this._name = name;
+
+  @patch
+  int get hashCode {
+    int hash = JS('int|Null', '#._hashCode', this);
+    if (hash != null) return hash;
+    const arbitraryPrime = 664597;
+    hash = 0x1fffffff & (arbitraryPrime * _name.hashCode);
+    JS('', '#._hashCode = #', this, hash);
+    return hash;
+  }
 }
 
 @patch
diff --git a/sdk/lib/_internal/js_runtime/lib/isolate_helper.dart b/sdk/lib/_internal/js_runtime/lib/isolate_helper.dart
index a74fb12..c181c6d 100644
--- a/sdk/lib/_internal/js_runtime/lib/isolate_helper.dart
+++ b/sdk/lib/_internal/js_runtime/lib/isolate_helper.dart
@@ -1206,7 +1206,7 @@
       if (!_receivePort._isClosed) {
         _receivePort._add(msg);
       }
-    }, 'receive $message');
+    }, 'receive');
   }
 
   bool operator ==(var other) => (other is _NativeJsSendPort) &&
diff --git a/sdk/lib/_internal/js_runtime/lib/js_helper.dart b/sdk/lib/_internal/js_runtime/lib/js_helper.dart
index 6cfcda8..7354ca2 100644
--- a/sdk/lib/_internal/js_runtime/lib/js_helper.dart
+++ b/sdk/lib/_internal/js_runtime/lib/js_helper.dart
@@ -1369,6 +1369,11 @@
     }
 
     if (!acceptsOptionalArguments) {
+      if (namedArguments != null && namedArguments.isNotEmpty) {
+        // Tried to invoke a function that takes a fixed number of arguments
+        // with named (optional) arguments.
+        return functionNoSuchMethod(function, arguments, namedArguments);
+      }
       if (argumentCount == requiredParameterCount) {
         return JS('var', r'#.apply(#, #)', jsFunction, function, arguments);
       }
@@ -2456,8 +2461,9 @@
    *
    * V8 will share the underlying function code objects when the same string is
    * passed to "new Function".  Shared function code objects can lead to
-   * sub-optimal performance due to polymorhism, and can be prevented by
-   * ensuring the strings are different.
+   * sub-optimal performance due to polymorphism, and can be prevented by
+   * ensuring the strings are different, for example, by generating a local
+   * variable with a name dependent on [functionCounter].
    */
   static int functionCounter = 0;
 
@@ -2559,8 +2565,9 @@
         : isCsp
             ? JS('', 'function(a,b,c,d) {this.\$initialize(a,b,c,d)}')
             : JS('',
-                 'new Function("a,b,c,d", "this.\$initialize(a,b,c,d);" + #)',
-                 functionCounter++);
+                 'new Function("a,b,c,d" + #,'
+                 ' "this.\$initialize(a,b,c,d" + # + ")")',
+                 functionCounter, functionCounter++);
 
     // It is necessary to set the constructor property, otherwise it will be
     // "Object".
@@ -2722,12 +2729,14 @@
     }
 
     if (arity == 0) {
+      // Incorporate functionCounter into the generated local's name so that
+      // each stub gets a distinct source string.
+      String selfName = 'self${functionCounter++}';
       return JS(
           '',
           '(new Function(#))()',
           'return function(){'
-            'return this.${BoundClosure.selfFieldName()}.$stubName();'
-            '${functionCounter++}'
+            'var $selfName = this.${BoundClosure.selfFieldName()};'
+            'return $selfName.$stubName();'
           '}');
     }
     assert (1 <= arity && arity < 27);
@@ -2735,12 +2744,12 @@
         'String',
         '"abcdefghijklmnopqrstuvwxyz".split("").splice(0,#).join(",")',
         arity);
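+    // Append the counter to the last parameter name so that each generated
+    // stub gets a distinct source string (see functionCounter above).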
+    arguments += '${functionCounter++}';
     return JS(
         '',
         '(new Function(#))()',
         'return function($arguments){'
           'return this.${BoundClosure.selfFieldName()}.$stubName($arguments);'
-          '${functionCounter++}'
         '}');
   }
 
diff --git a/sdk/lib/async/stream.dart b/sdk/lib/async/stream.dart
index eca6c34..7661d9f 100644
--- a/sdk/lib/async/stream.dart
+++ b/sdk/lib/async/stream.dart
@@ -316,26 +316,37 @@
   /**
    * Adds a subscription to this stream.
    *
-   * On each data event from this stream, the subscriber's [onData] handler
-   * is called. If [onData] is null, nothing happens.
+   * Returns a [StreamSubscription] which handles events from the stream using
+   * the provided [onData], [onError] and [onDone] handlers.
+   * The handlers can be changed on the subscription, but they start out
+   * as the provided functions.
    *
-   * On errors from this stream, the [onError] handler is given a
-   * object describing the error.
+   * On each data event from this stream, the subscriber's [onData] handler
+   * is called. If [onData] is `null`, nothing happens.
+   *
+   * On errors from this stream, the [onError] handler is called with the
+   * error object and possibly a stack trace.
    *
    * The [onError] callback must be of type `void onError(error)` or
    * `void onError(error, StackTrace stackTrace)`. If [onError] accepts
-   * two arguments it is called with the stack trace (which could be `null` if
-   * the stream itself received an error without stack trace).
+   * two arguments it is called with the error object and the stack trace
+   * (which could be `null` if the stream itself received an error without
+   * stack trace).
    * Otherwise it is called with just the error object.
    * If [onError] is omitted, any errors on the stream are considered unhandled,
    * and will be passed to the current [Zone]'s error handler.
    * By default unhandled async errors are treated
    * as if they were uncaught top-level errors.
    *
-   * If this stream closes, the [onDone] handler is called.
+   * If this stream closes and sends a done event, the [onDone] handler is
+   * called. If [onDone] is `null`, nothing happens.
    *
-   * If [cancelOnError] is true, the subscription is ended when
-   * the first error is reported. The default is false.
+   * If [cancelOnError] is true, the subscription is automatically cancelled
+   * when the first error event is delivered. The default is `false`.
+   *
+   * While a subscription is paused, or when it has been cancelled,
+   * the subscription doesn't receive events and none of the
+   * event handler functions are called.
    */
   StreamSubscription<T> listen(void onData(T event),
                                { Function onError,
diff --git a/sdk/lib/collection/linked_list.dart b/sdk/lib/collection/linked_list.dart
index 481f04d..f1b55bb 100644
--- a/sdk/lib/collection/linked_list.dart
+++ b/sdk/lib/collection/linked_list.dart
@@ -258,13 +258,13 @@
   }
 
   /**
-   * Return the succeessor of this element in its linked list.
+   * Return the successor of this element in its linked list.
    *
    * Returns `null` if there is no successor in the linked list, or if this
    * entry is not currently in any list.
    */
   E get next {
-    if (identical(this, _next)) return null;
+    if (_list == null || identical(_list.first, _next)) return null;
     return _next;
   }
 
@@ -275,7 +275,7 @@
    * entry is not currently in any list.
    */
   E get previous {
-    if (identical(this, _previous)) return null;
+    if (_list == null || identical(this, _list.first)) return null;
     return _previous;
   }
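A small sketch (not part of the patch) demonstrating the corrected `next`/`previous` behavior; the `Node` class is illustrative:

```
// Sketch (not part of this patch): with the fix above, next/previous return
// null at the ends of the list and for entries not linked into any list.
import 'dart:collection';

class Node extends LinkedListEntry<Node> {
  final int value;
  Node(this.value);
}

void main() {
  var list = new LinkedList<Node>();
  list.addAll([new Node(1), new Node(2)]);
  print(list.first.previous); // null
  print(list.last.next);      // null
  print(new Node(3).next);    // null, not in any list
}
```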
 
diff --git a/sdk/lib/core/date_time.dart b/sdk/lib/core/date_time.dart
index 35e93e0..b5e9c3c 100644
--- a/sdk/lib/core/date_time.dart
+++ b/sdk/lib/core/date_time.dart
@@ -664,8 +664,10 @@
   external int get microsecondsSinceEpoch;
 
   /**
-   * The abbreviated time zone name&mdash;for example,
-   * [:"CET":] or [:"CEST":].
+   * The time zone name provided by the platform.
+   *
+   * On Unix-like systems this will probably be an abbreviation. On Windows
+   * this will probably be the full name, e.g. "Pacific Standard Time".
    */
   external String get timeZoneName;
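Since the exact string is now documented as platform dependent, a tiny sketch (not part of the patch) of what callers see:

```
// Sketch (not part of this patch): expect an abbreviation such as "CET" on
// Unix-like systems and a full name such as "Pacific Standard Time" on
// Windows.
void main() {
  var now = new DateTime.now();
  print('${now.timeZoneName}, offset ${now.timeZoneOffset}');
}
```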
 
diff --git a/sdk/lib/core/list.dart b/sdk/lib/core/list.dart
index 9d25927..d887e67 100644
--- a/sdk/lib/core/list.dart
+++ b/sdk/lib/core/list.dart
@@ -88,6 +88,24 @@
    * entries with [fill]. After being created and filled, the list is
    * no different from any other growable or fixed-length list
    * created using [List].
+   *
+   * All entries in the returned list point to the same provided [fill] value.
+   * That all items in the list are the same object is
+   * observable when the given value is a mutable object.
+   *
+   * ```
+   * var shared = new List.filled(3, []);
+   * shared[0].add(499);
+   * print(shared);  // => [[499], [499], [499]]
+   * ```
+   *
+   * You may use [List.generate] to create a new object for each position in
+   * the list.
+   * ```
+   * var unique = new List.generate(3, (_) => []);
+   * unique[0].add(499);
+   * print(unique); // => [[499], [], []]
+   * ```
    */
   external factory List.filled(int length, E fill, {bool growable: false});
 
diff --git a/sdk/lib/html/dart2js/html_dart2js.dart b/sdk/lib/html/dart2js/html_dart2js.dart
index 0bc3749..b4f835b 100644
--- a/sdk/lib/html/dart2js/html_dart2js.dart
+++ b/sdk/lib/html/dart2js/html_dart2js.dart
@@ -22889,7 +22889,7 @@
 
   @DomName('MediaStream.getAudioTracks')
   @DocsEditable()
-  @Creates('JSExtendableArray')
+  @Creates('JSExtendableArray|MediaStreamTrack')
   @Returns('JSExtendableArray')
   List<MediaStreamTrack> getAudioTracks() native;
 
@@ -22904,7 +22904,7 @@
 
   @DomName('MediaStream.getVideoTracks')
   @DocsEditable()
-  @Creates('JSExtendableArray')
+  @Creates('JSExtendableArray|MediaStreamTrack')
   @Returns('JSExtendableArray')
   List<MediaStreamTrack> getVideoTracks() native;
 
diff --git a/sdk/lib/html/dartium/html_dartium.dart b/sdk/lib/html/dartium/html_dartium.dart
index bc92d6d..68ec664 100644
--- a/sdk/lib/html/dartium/html_dartium.dart
+++ b/sdk/lib/html/dartium/html_dartium.dart
@@ -37662,10 +37662,10 @@
     if ((blob_OR_source_OR_stream is Blob || blob_OR_source_OR_stream == null)) {
       return _blink.BlinkURL.instance.createObjectURL_Callback_1_(blob_OR_source_OR_stream);
     }
-    if ((blob_OR_source_OR_stream is MediaStream)) {
+    if ((blob_OR_source_OR_stream is MediaSource)) {
       return _blink.BlinkURL.instance.createObjectURL_Callback_1_(blob_OR_source_OR_stream);
     }
-    if ((blob_OR_source_OR_stream is MediaSource)) {
+    if ((blob_OR_source_OR_stream is MediaStream)) {
       return _blink.BlinkURL.instance.createObjectURL_Callback_1_(blob_OR_source_OR_stream);
     }
     throw new ArgumentError("Incorrect number or type of arguments");
diff --git a/sdk/lib/internal/symbol.dart b/sdk/lib/internal/symbol.dart
index 21d976b..64fcdb3 100644
--- a/sdk/lib/internal/symbol.dart
+++ b/sdk/lib/internal/symbol.dart
@@ -112,10 +112,7 @@
 
   bool operator ==(other) => other is Symbol && _name == other._name;
 
-  int get hashCode {
-    const arbitraryPrime = 664597;
-    return 0x1fffffff & (arbitraryPrime * _name.hashCode);
-  }
+  external int get hashCode;
 
   toString() => 'Symbol("$_name")';
 
diff --git a/sdk/lib/io/bytes_builder.dart b/sdk/lib/io/bytes_builder.dart
index 0c97748..2dca39d 100644
--- a/sdk/lib/io/bytes_builder.dart
+++ b/sdk/lib/io/bytes_builder.dart
@@ -160,7 +160,7 @@
     if (bytes is Uint8List) {
       typedBytes = bytes;
     } else {
-      bytes = new Uint8List.fromList(bytes);
+      typedBytes = new Uint8List.fromList(bytes);
     }
     _chunks.add(typedBytes);
     _length += typedBytes.length;
diff --git a/sdk/lib/io/file.dart b/sdk/lib/io/file.dart
index 4ac4486..92d144f 100644
--- a/sdk/lib/io/file.dart
+++ b/sdk/lib/io/file.dart
@@ -52,7 +52,11 @@
   /// Shared file lock.
   SHARED,
   /// Exclusive file lock.
-  EXCLUSIVE
+  EXCLUSIVE,
+  /// Blocking shared file lock.
+  BLOCKING_SHARED,
+  /// Blocking exclusive file lock.
+  BLOCKING_EXCLUSIVE,
 }
 
 /**
@@ -735,6 +739,11 @@
    *
    * To obtain an exclusive lock on a file it must be opened for writing.
    *
+   * If [mode] is [FileLock.EXCLUSIVE] or [FileLock.SHARED], an error is
+   * signaled if the lock cannot be obtained. If [mode] is
+   * [FileLock.BLOCKING_EXCLUSIVE] or [FileLock.BLOCKING_SHARED], the
+   * returned [Future] is resolved only when the lock has been obtained.
+   *
    * *NOTE* file locking does have slight differences in behavior across
    * platforms:
    *
@@ -768,6 +777,11 @@
    *
    * To obtain an exclusive lock on a file it must be opened for writing.
    *
+   * If [mode] is [FileLock.EXCLUSIVE] or [FileLock.SHARED], an exception is
+   * thrown if the lock cannot be obtained. If [mode] is
+   * [FileLock.BLOCKING_EXCLUSIVE] or [FileLock.BLOCKING_SHARED], the
+   * call returns only after the lock has been obtained.
+   *
    * *NOTE* file locking does have slight differences in behavior across
    * platforms:
    *
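A short sketch (not part of the patch) of the new blocking modes with `lockSync`; the file name is arbitrary, and the file is opened for writing because exclusive locks require it:

```
// Sketch (not part of this patch): a blocking exclusive lock waits until the
// lock is available instead of throwing when another process holds it.
import 'dart:io';

void main() {
  var raf = new File('my.lock').openSync(mode: FileMode.WRITE);
  raf.lockSync(FileLock.BLOCKING_EXCLUSIVE);
  try {
    // ... critical section protected by the file lock ...
  } finally {
    raf.unlockSync();
    raf.closeSync();
  }
}
```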
diff --git a/sdk/lib/io/file_impl.dart b/sdk/lib/io/file_impl.dart
index bb00477..5b61fb3 100644
--- a/sdk/lib/io/file_impl.dart
+++ b/sdk/lib/io/file_impl.dart
@@ -919,6 +919,18 @@
   static final int LOCK_UNLOCK = 0;
   static final int LOCK_SHARED = 1;
   static final int LOCK_EXCLUSIVE = 2;
+  static final int LOCK_BLOCKING_SHARED = 3;
+  static final int LOCK_BLOCKING_EXCLUSIVE = 4;
+
+  int _fileLockValue(FileLock fl) {
+    switch (fl) {
+      case FileLock.SHARED: return LOCK_SHARED;
+      case FileLock.EXCLUSIVE: return LOCK_EXCLUSIVE;
+      case FileLock.BLOCKING_SHARED: return LOCK_BLOCKING_SHARED;
+      case FileLock.BLOCKING_EXCLUSIVE: return LOCK_BLOCKING_EXCLUSIVE;
+      default: return -1;
+    }
+  }
 
   Future<RandomAccessFile> lock(
       [FileLock mode = FileLock.EXCLUSIVE, int start = 0, int end = -1]) {
@@ -928,7 +940,7 @@
     if ((start < 0) || (end < -1) || ((end != -1) && (start >= end))) {
       throw new ArgumentError();
     }
-    int lock = (mode == FileLock.EXCLUSIVE) ? LOCK_EXCLUSIVE : LOCK_SHARED;
+    int lock = _fileLockValue(mode);
     return _dispatch(_FILE_LOCK, [null, lock, start, end])
         .then((response) {
           if (_isErrorResponse(response)) {
@@ -963,7 +975,7 @@
     if ((start < 0) || (end < -1) || ((end != -1) && (start >= end))) {
       throw new ArgumentError();
     }
-    int lock = (mode == FileLock.EXCLUSIVE) ? LOCK_EXCLUSIVE : LOCK_SHARED;
+    int lock = _fileLockValue(mode);
     var result = _ops.lock(lock, start, end);
     if (result is OSError) {
       throw new FileSystemException('lock failed', path, result);
diff --git a/sdk/lib/io/http.dart b/sdk/lib/io/http.dart
index 4e1d8e3..79bfaa5 100644
--- a/sdk/lib/io/http.dart
+++ b/sdk/lib/io/http.dart
@@ -969,7 +969,7 @@
   /**
    * The requested URI for the request.
    *
-   * The returend URI is reconstructed by using http-header fields, to access
+   * The returned URI is reconstructed by using http-header fields, to access
    * otherwise lost information, e.g. host and scheme.
    *
    * To reconstruct the scheme, first 'X-Forwarded-Proto' is checked, and then
diff --git a/sdk/lib/io/process.dart b/sdk/lib/io/process.dart
index 93aa63e..6159113 100644
--- a/sdk/lib/io/process.dart
+++ b/sdk/lib/io/process.dart
@@ -445,7 +445,7 @@
   /**
    * Standard output from the process. The value used for the
    * `stdoutEncoding` argument to `Process.run` determines the type. If
-   * `null` was used this value is of type `List<int> otherwise it is
+   * `null` was used this value is of type `List<int>` otherwise it is
    * of type `String`.
    */
   final stdout;
@@ -453,7 +453,7 @@
   /**
    * Standard error from the process. The value used for the
    * `stderrEncoding` argument to `Process.run` determines the type. If
-   * `null` was used this value is of type `List<int>
+   * `null` was used this value is of type `List<int>`
    * otherwise it is of type `String`.
    */
   final stderr;
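A sketch (not part of the patch) of the type behavior these doc fixes describe; `echo` is just an example command:

```
// Sketch (not part of this patch): a null stdoutEncoding yields List<int>,
// the default SYSTEM_ENCODING yields a String.
import 'dart:io';

main() async {
  var raw = await Process.run('echo', ['hi'], stdoutEncoding: null);
  print(raw.stdout is List<int>); // true
  var text = await Process.run('echo', ['hi']);
  print(text.stdout is String);   // true
}
```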
diff --git a/sdk/lib/js/dart2js/js_dart2js.dart b/sdk/lib/js/dart2js/js_dart2js.dart
index f98b29d..4b3e4a6 100644
--- a/sdk/lib/js/dart2js/js_dart2js.dart
+++ b/sdk/lib/js/dart2js/js_dart2js.dart
@@ -707,7 +707,7 @@
   return Function.apply(callback, [self]..addAll(arguments));
 }
 
-Function allowInterop(Function f) {
+Function /*=F*/ allowInterop/*<F extends Function>*/(Function /*=F*/ f) {
   if (JS('bool', 'typeof(#) == "function"', f)) {
     // Already supports interop, just use the existing function.
     return f;
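For context, a usage sketch (not part of the patch) of `allowInterop`; the generic-method comment syntax added above only sharpens the static return type, the runtime behavior is unchanged:

```
// Sketch (not part of this patch): Dart callbacks must be wrapped with
// allowInterop before being handed to JavaScript.
import 'dart:js';

void main() {
  context['logFromDart'] = allowInterop((String msg) => print('JS got: $msg'));
  context.callMethod('logFromDart', ['hello']);
}
```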
diff --git a/sdk/lib/js/dartium/js_dartium.dart b/sdk/lib/js/dartium/js_dartium.dart
index 8d0b248..4d1d682 100644
--- a/sdk/lib/js/dartium/js_dartium.dart
+++ b/sdk/lib/js/dartium/js_dartium.dart
@@ -1528,7 +1528,7 @@
 /// JavaScript. We may remove the need to call this method completely in the
 /// future if Dart2Js is refactored so that its function calling conventions
 /// are more compatible with JavaScript.
-JSFunction allowInterop(Function f) {
+Function /*=F*/ allowInterop/*<F extends Function>*/(Function /*=F*/ f) {
   if (f is JSFunction) {
     // The function is already a JSFunction... no need to do anything.
     return f;
diff --git a/sdk/lib/vmservice/devfs.dart b/sdk/lib/vmservice/devfs.dart
new file mode 100644
index 0000000..35af748
--- /dev/null
+++ b/sdk/lib/vmservice/devfs.dart
@@ -0,0 +1,319 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+part of dart._vmservice;
+
+String _encodeDevFSDisabledError(Message message) {
+  return encodeRpcError(
+      message, kFeatureDisabled,
+      details: "DevFS is not supported by this Dart implementation");
+}
+
+String _encodeFileSystemAlreadyExistsError(Message message, String fsName) {
+  return encodeRpcError(
+      message, kFileSystemAlreadyExists,
+      details: "${message.method}: file system '${fsName}' already exists");
+}
+
+String _encodeFileSystemDoesNotExistError(Message message, String fsName) {
+  return encodeRpcError(
+      message, kFileSystemDoesNotExist,
+      details: "${message.method}: file system '${fsName}' does not exist");
+}
+
+class _FileSystem {
+  _FileSystem(this.name, this.uri);
+
+  final String name;
+  final Uri uri;
+
+  Uri resolvePath(String path) {
+    if (path.startsWith('/')) {
+      path = path.substring(1);
+    }
+    if (path.isEmpty) {
+      return null;
+    }
+    Uri pathUri;
+    try {
+      pathUri = Uri.parse(path);
+    } on FormatException catch(e) {
+      return null;
+    }
+    Uri resolvedUri = uri.resolveUri(pathUri);
+    if (!resolvedUri.toString().startsWith(uri.toString())) {
+      // Resolved uri must be within the filesystem's base uri.
+      return null;
+    }
+    return resolvedUri;
+  }
+
+  Map toMap() {
+    return {
+      'type': 'FileSystem',
+      'name': name,
+      'uri': uri.toString(),
+    };
+  }
+}
+
+class DevFS {
+  DevFS();
+
+  Map<String, _FileSystem> _fsMap = {};
+
+  final Set _rpcNames = new Set.from([
+      '_listDevFS',
+      '_createDevFS',
+      '_deleteDevFS',
+      '_readDevFSFile',
+      '_writeDevFSFile',
+      '_writeDevFSFiles',
+      '_listDevFSFiles',
+  ]);
+
+  void cleanup() {
+    var deleteDir = VMServiceEmbedderHooks.deleteDir;
+    if (deleteDir == null) {
+      return;
+    }
+    var deletions = [];
+    for (var fs in _fsMap.values) {
+      deletions.add(deleteDir(fs.uri));
+    }
+    Future.wait(deletions);
+    _fsMap.clear();
+  }
+
+  bool shouldHandleMessage(Message message) {
+    return _rpcNames.contains(message.method);
+  }
+
+  Future<String> handleMessage(Message message) async {
+    switch (message.method) {
+      case '_listDevFS':
+        return _listDevFS(message);
+      case '_createDevFS':
+        return _createDevFS(message);
+      case '_deleteDevFS':
+        return _deleteDevFS(message);
+      case '_readDevFSFile':
+        return _readDevFSFile(message);
+      case '_writeDevFSFile':
+        return _writeDevFSFile(message);
+      case '_writeDevFSFiles':
+        return _writeDevFSFiles(message);
+      case '_listDevFSFiles':
+        return _listDevFSFiles(message);
+      default:
+        return encodeRpcError(
+            message, kInternalError,
+            details: 'Unexpected rpc ${message.method}');
+    }
+  }
+
+  Future<String> _listDevFS(Message message) async {
+    var result = {};
+    result['type'] = 'FileSystemList';
+    result['fsNames'] =  _fsMap.keys.toList();
+    return encodeResult(message, result);
+  }
+
+  Future<String> _createDevFS(Message message) async {
+    var createTempDir = VMServiceEmbedderHooks.createTempDir;
+    if (createTempDir == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs != null) {
+      return _encodeFileSystemAlreadyExistsError(message, fsName);
+    }
+    var tempDir = await createTempDir(fsName);
+    fs = new _FileSystem(fsName, tempDir);
+    _fsMap[fsName] = fs;
+    return encodeResult(message, fs.toMap());
+  }
+
+  Future<String> _deleteDevFS(Message message) async {
+    var deleteDir = VMServiceEmbedderHooks.deleteDir;
+    if (deleteDir == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap.remove(fsName);
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    await deleteDir(fs.uri);
+    return encodeSuccess(message);
+  }
+
+  Future<String> _readDevFSFile(Message message) async {
+    var readFile = VMServiceEmbedderHooks.readFile;
+    if (readFile == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var path = message.params['path'];
+    if (path == null) {
+      return encodeMissingParamError(message, 'path');
+    }
+    if (path is! String) {
+      return encodeInvalidParamError(message, 'path');
+    }
+    Uri uri = fs.resolvePath(path);
+    if (uri == null) {
+      return encodeInvalidParamError(message, 'path');
+    }
+
+    try {
+      List<int> bytes = await readFile(uri);
+      var result = {
+        'type': 'FSFile',
+        'fileContents': BASE64.encode(bytes)
+      };
+      return encodeResult(message, result);
+    } catch (e) {
+      return encodeRpcError(
+          message, kFileDoesNotExist,
+          details: "_readDevFSFile: $e");
+    }
+  }
+
+  Future<String> _writeDevFSFile(Message message) async {
+    var writeFile = VMServiceEmbedderHooks.writeFile;
+    if (writeFile == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var path = message.params['path'];
+    if (path == null) {
+      return encodeMissingParamError(message, 'path');
+    }
+    if (path is! String) {
+      return encodeInvalidParamError(message, 'path');
+    }
+    Uri uri = fs.resolvePath(path);
+    if (uri == null) {
+      return encodeInvalidParamError(message, 'path');
+    }
+    var fileContents = message.params['fileContents'];
+    if (fileContents == null) {
+      return encodeMissingParamError(message, 'fileContents');
+    }
+    if (fileContents is! String) {
+      return encodeInvalidParamError(message, 'fileContents');
+    }
+    List<int> decodedFileContents = BASE64.decode(fileContents);
+
+    await writeFile(uri, decodedFileContents);
+    return encodeSuccess(message);
+  }
+
+  Future<String> _writeDevFSFiles(Message message) async {
+    var writeFile = VMServiceEmbedderHooks.writeFile;
+    if (writeFile == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var files = message.params['files'];
+    if (files == null) {
+      return encodeMissingParamError(message, 'files');
+    }
+    if (files is! List) {
+      return encodeInvalidParamError(message, 'files');
+    }
+    var uris = [];
+    for (int i = 0; i < files.length; i++) {
+      var fileInfo = files[i];
+      if (fileInfo is! List ||
+          fileInfo.length != 2 ||
+          fileInfo[0] is! String || fileInfo[1] is! String) {
+        return encodeRpcError(
+            message, kInvalidParams,
+            details: "${message.method}: invalid 'files' parameter "
+                     "at index ${i}: ${fileInfo}");
+      }
+      var uri = fs.resolvePath(fileInfo[0]);
+      if (uri == null) {
+        return encodeRpcError(
+            message, kInvalidParams,
+            details: "${message.method}: invalid 'files' parameter "
+                     "at index ${i}: ${fileInfo}");
+      }
+      uris.add(uri);
+    }
+    var pendingWrites = [];
+    for (int i = 0; i < uris.length; i++) {
+      List<int> decodedFileContents = BASE64.decode(files[i][1]);
+      pendingWrites.add(writeFile(uris[i], decodedFileContents));
+    }
+    await Future.wait(pendingWrites);
+    return encodeSuccess(message);
+  }
+
+  Future<String> _listDevFSFiles(Message message) async {
+    var listFiles = VMServiceEmbedderHooks.listFiles;
+    if (listFiles == null) {
+      return _encodeDevFSDisabledError(message);
+    }
+    var fsName = message.params['fsName'];
+    if (fsName == null) {
+      return encodeMissingParamError(message, 'fsName');
+    }
+    if (fsName is! String) {
+      return encodeInvalidParamError(message, 'fsName');
+    }
+    var fs = _fsMap[fsName];
+    if (fs == null) {
+      return _encodeFileSystemDoesNotExistError(message, fsName);
+    }
+    var fileList = await listFiles(fs.uri);
+    var result = { 'type': 'FSFileList', 'files': fileList };
+    return encodeResult(message, result);
+  }
+}
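For readers of the new DevFS service above, a sketch (not part of the patch, not SDK code) of the parameter shapes the private RPCs read from `message.params`; the `fsName`, `path`, `fileContents` and `files` keys are the ones the handlers actually check:

```
// Sketch (not part of this patch): JSON-RPC parameters accepted by the DevFS
// handlers above. File contents travel base64-encoded.
import 'dart:convert';

void main() {
  var createDevFS = {
    'method': '_createDevFS',
    'params': {'fsName': 'my_fs'},
  };
  var writeDevFSFile = {
    'method': '_writeDevFSFile',
    'params': {
      'fsName': 'my_fs',
      'path': '/lib/main.dart',
      'fileContents': BASE64.encode(UTF8.encode('void main() {}')),
    },
  };
  var writeDevFSFiles = {
    'method': '_writeDevFSFiles',
    'params': {
      'fsName': 'my_fs',
      // Each entry is a [path, base64-encoded contents] pair.
      'files': [['/pubspec.yaml', BASE64.encode(UTF8.encode('name: demo'))]],
    },
  };
  print(JSON.encode([createDevFS, writeDevFSFile, writeDevFSFiles]));
}
```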
diff --git a/sdk/lib/vmservice/message.dart b/sdk/lib/vmservice/message.dart
index 481206e..08bb5b1 100644
--- a/sdk/lib/vmservice/message.dart
+++ b/sdk/lib/vmservice/message.dart
@@ -116,23 +116,58 @@
     return _completer.future;
   }
 
+  // We currently support two ways of passing parameters from Dart code to C
+  // code. The original way always converts the parameters to strings before
+  // passing them over. Our goal is to convert all C handlers to take the
+// parameters as Dart objects, but until the conversion is complete, we
+  // maintain the list of supported methods below.
+  bool _methodNeedsObjectParameters(String method) {
+    switch (method) {
+      case '_listDevFS':
+      case '_listDevFSFiles':
+      case '_createDevFS':
+      case '_deleteDevFS':
+      case '_writeDevFSFile':
+      case '_writeDevFSFiles':
+      case '_readDevFSFile':
+        return true;
+      default:
+        return false;
+    }
+  }
+
   Future<String> sendToVM() {
     final receivePort = new RawReceivePort();
     receivePort.handler = (value) {
       receivePort.close();
       _completer.complete(value);
     };
-    var keys = _makeAllString(params.keys.toList(growable:false));
-    var values = _makeAllString(params.values.toList(growable:false));
-    var request = new List(6)
-        ..[0] = 0  // Make room for OOB message type.
-        ..[1] = receivePort.sendPort
-        ..[2] = serial
-        ..[3] = method
-        ..[4] = keys
-        ..[5] = values;
-    sendRootServiceMessage(request);
-    return _completer.future;
+    if (_methodNeedsObjectParameters(method)) {
+      // We use a different method invocation path here.
+      var keys = params.keys.toList(growable:false);
+      var values = params.values.toList(growable:false);
+      var request = new List(6)
+          ..[0] = 0  // Make room for OOB message type.
+          ..[1] = receivePort.sendPort
+          ..[2] = serial
+          ..[3] = method
+          ..[4] = keys
+          ..[5] = values;
+      sendObjectRootServiceMessage(request);
+      return _completer.future;
+    } else {
+      var keys = _makeAllString(params.keys.toList(growable:false));
+      var values = _makeAllString(params.values.toList(growable:false));
+      var request = new List(6)
+          ..[0] = 0  // Make room for OOB message type.
+          ..[1] = receivePort.sendPort
+          ..[2] = serial
+          ..[3] = method
+          ..[4] = keys
+          ..[5] = values;
+      sendRootServiceMessage(request);
+      return _completer.future;
+    }
   }
 
   void setResponse(String response) {
@@ -147,3 +182,4 @@
 
 external bool sendIsolateServiceMessage(SendPort sp, List m);
 external void sendRootServiceMessage(List m);
+external void sendObjectRootServiceMessage(List m);
\ No newline at end of file
diff --git a/sdk/lib/vmservice/vmservice.dart b/sdk/lib/vmservice/vmservice.dart
index 445f5d5..4101023 100644
--- a/sdk/lib/vmservice/vmservice.dart
+++ b/sdk/lib/vmservice/vmservice.dart
@@ -12,6 +12,7 @@
 
 part 'asset.dart';
 part 'client.dart';
+part 'devfs.dart';
 part 'constants.dart';
 part 'running_isolate.dart';
 part 'running_isolates.dart';
@@ -21,17 +22,35 @@
 final RawReceivePort isolateLifecyclePort = new RawReceivePort();
 final RawReceivePort scriptLoadPort = new RawReceivePort();
 
+abstract class IsolateEmbedderData {
+  void cleanup();
+}
+
+// This is for use by the embedder. It is a map from the isolateId to
+// anything implementing IsolateEmbedderData. When an isolate goes away,
+// the cleanup method will be invoked after being removed from the map.
+final Map<int, IsolateEmbedderData> isolateEmbedderData =
+    new Map<int, IsolateEmbedderData>();
+
 // These must be kept in sync with the declarations in vm/json_stream.h.
-const kInvalidParams = -32602;
-const kInternalError = -32603;
-const kStreamAlreadySubscribed = 103;
-const kStreamNotSubscribed = 104;
+const kInvalidParams             = -32602;
+const kInternalError             = -32603;
+const kFeatureDisabled           = 100;
+const kStreamAlreadySubscribed   = 103;
+const kStreamNotSubscribed       = 104;
+const kFileSystemAlreadyExists   = 1001;
+const kFileSystemDoesNotExist    = 1002;
+const kFileDoesNotExist          = 1003;
 
 var _errorMessages = {
   kInvalidParams: 'Invalid params',
   kInternalError: 'Internal error',
+  kFeatureDisabled: 'Feature is disabled',
   kStreamAlreadySubscribed: 'Stream already subscribed',
   kStreamNotSubscribed: 'Stream not subscribed',
+  kFileSystemAlreadyExists: 'File system already exists',
+  kFileSystemDoesNotExist: 'File system does not exist',
+  kFileDoesNotExist: 'File does not exist',
 };
 
 String encodeRpcError(Message message, int code, {String details}) {
@@ -51,6 +70,19 @@
   return JSON.encode(response);
 }
 
+String encodeMissingParamError(Message message, String param) {
+  return encodeRpcError(
+      message, kInvalidParams,
+      details: "${message.method} expects the '${param}' parameter");
+}
+
+String encodeInvalidParamError(Message message, String param) {
+  var value = message.params[param];
+  return encodeRpcError(
+      message, kInvalidParams,
+      details: "${message.method}: invalid '${param}' parameter: ${value}");
+}
+
 String encodeResult(Message message, Map result) {
   var response = {
     'jsonrpc': '2.0',
@@ -60,6 +92,10 @@
   return JSON.encode(response);
 }
 
+String encodeSuccess(Message message) {
+  return encodeResult(message, { 'type': 'Success' });
+}
+
 const shortDelay = const Duration(milliseconds: 10);
 
 /// Called when the server should be started.
@@ -71,11 +107,31 @@
 /// Called when the service is exiting.
 typedef Future CleanupCallback();
 
+/// Called to create a temporary directory.
+typedef Future<Uri> CreateTempDirCallback(String base);
+
+/// Called to delete a directory.
+typedef Future DeleteDirCallback(Uri path);
+
+/// Called to write a file.
+typedef Future WriteFileCallback(Uri path, List<int> bytes);
+
+/// Called to read a file.
+typedef Future<List<int>> ReadFileCallback(Uri path);
+
+/// Called to list all files under some path.
+typedef Future<List<Map<String,String>>> ListFilesCallback(Uri path);
+
 /// Hooks that are setup by the embedder.
 class VMServiceEmbedderHooks {
   static ServerStartCallback serverStart;
   static ServerStopCallback serverStop;
   static CleanupCallback cleanup;
+  static CreateTempDirCallback createTempDir;
+  static DeleteDirCallback deleteDir;
+  static WriteFileCallback writeFile;
+  static ReadFileCallback readFile;
+  static ListFilesCallback listFiles;
 }
 
 class VMService extends MessageRouter {
@@ -90,6 +146,8 @@
   /// A port used to receive events from the VM.
   final RawReceivePort eventPort;
 
+  final _devfs = new DevFS();
+
   void _addClient(Client client) {
     assert(client.streams.isEmpty);
     clients.add(client);
@@ -124,6 +182,10 @@
       break;
       case Constants.ISOLATE_SHUTDOWN_MESSAGE_ID:
         runningIsolates.isolateShutdown(portId, sp);
+        IsolateEmbedderData ied = isolateEmbedderData.remove(portId);
+        if (ied != null) {
+          ied.cleanup();
+        }
       break;
     }
   }
@@ -144,6 +206,7 @@
     for (var client in clientsList) {
       client.disconnect();
     }
+    _devfs.cleanup();
     if (VMServiceEmbedderHooks.cleanup != null) {
       await VMServiceEmbedderHooks.cleanup();
     }
@@ -214,8 +277,7 @@
     }
     client.streams.add(streamId);
 
-    var result = { 'type' : 'Success' };
-    return encodeResult(message, result);
+    return encodeSuccess(message);
   }
 
   Future<String> _streamCancel(Message message) async {
@@ -230,8 +292,7 @@
       _vmCancelStream(streamId);
     }
 
-    var result = { 'type' : 'Success' };
-    return encodeResult(message, result);
+    return encodeSuccess(message);
   }
 
   // TODO(johnmccutchan): Turn this into a command line tool that uses the
@@ -306,6 +367,9 @@
     if (message.method == 'streamCancel') {
       return _streamCancel(message);
     }
+    if (_devfs.shouldHandleMessage(message)) {
+      return _devfs.handleMessage(message);
+    }
     if (message.params['isolateId'] != null) {
       return runningIsolates.route(message);
     }
diff --git a/sdk/lib/vmservice/vmservice_sources.gypi b/sdk/lib/vmservice/vmservice_sources.gypi
index 869e304..16eb2c8 100644
--- a/sdk/lib/vmservice/vmservice_sources.gypi
+++ b/sdk/lib/vmservice/vmservice_sources.gypi
@@ -11,6 +11,7 @@
     'asset.dart',
     'client.dart',
     'constants.dart',
+    'devfs.dart',
     'running_isolate.dart',
     'running_isolates.dart',
     'message.dart',
diff --git a/sdk/lib/web_audio/dart2js/web_audio_dart2js.dart b/sdk/lib/web_audio/dart2js/web_audio_dart2js.dart
index 2a7e664..b178851 100644
--- a/sdk/lib/web_audio/dart2js/web_audio_dart2js.dart
+++ b/sdk/lib/web_audio/dart2js/web_audio_dart2js.dart
@@ -483,12 +483,14 @@
   void disconnect([destination_OR_output, int output, int input]) native;
 
   @DomName('AudioNode.connect')
-  void connectNode(AudioNode destination, [int output = 0, int input = 0]) =>
-      _connect(destination, output, input);
+  void connectNode(AudioNode destination, [int output = 0, int input = 0]) {
+    _connect(destination, output, input);
+  }
 
   @DomName('AudioNode.connect')
-  void connectParam(AudioParam destination, [int output = 0]) =>
-      _connect(destination, output);
+  void connectParam(AudioParam destination, [int output = 0]) {
+    _connect(destination, output);
+  }
 }
 // Copyright (c) 2012, the Dart project authors.  Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
diff --git a/sdk/lib/web_audio/dartium/web_audio_dartium.dart b/sdk/lib/web_audio/dartium/web_audio_dartium.dart
index c73b31b..6fd5356 100644
--- a/sdk/lib/web_audio/dartium/web_audio_dartium.dart
+++ b/sdk/lib/web_audio/dartium/web_audio_dartium.dart
@@ -689,12 +689,14 @@
   }
 
   @DomName('AudioNode.connect')
-  void connectNode(AudioNode destination, [int output = 0, int input = 0]) =>
-      _connect(destination, output, input);
+  void connectNode(AudioNode destination, [int output = 0, int input = 0]) {
+    _connect(destination, output, input);
+  }
 
   @DomName('AudioNode.connect')
-  void connectParam(AudioParam destination, [int output = 0]) =>
-      _connect(destination, output);
+  void connectParam(AudioParam destination, [int output = 0]) {
+    _connect(destination, output);
+  }
 }
 // Copyright (c) 2012, the Dart project authors.  Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
diff --git a/site/try/poi/poi.dart b/site/try/poi/poi.dart
index f40705a..cb04b8d 100644
--- a/site/try/poi/poi.dart
+++ b/site/try/poi/poi.dart
@@ -455,7 +455,7 @@
     int position) {
   bool isFullCompile = cachedCompiler != newCompiler;
   cachedCompiler = newCompiler;
-  if (poiTask == null) {
+  if (poiTask == null || poiTask.compiler != cachedCompiler) {
     poiTask = new PoiTask(cachedCompiler);
     cachedCompiler.tasks.add(poiTask);
   }
@@ -565,7 +565,8 @@
 }
 
 class PoiTask extends CompilerTask {
-  PoiTask(Compiler compiler) : super(compiler.measurer);
+  final Compiler compiler;
+  PoiTask(Compiler compiler) : compiler = compiler, super(compiler.measurer);
 
   String get name => 'POI';
 }
diff --git a/tests/co19/co19-analyzer2.status b/tests/co19/co19-analyzer2.status
index aa0daa9..1845277 100644
--- a/tests/co19/co19-analyzer2.status
+++ b/tests/co19/co19-analyzer2.status
@@ -21,18 +21,6 @@
 Language/Expressions/Constants/exception_t01: fail, OK
 Language/Expressions/Constants/exception_t02: fail, OK
 
-# co19 issue #543: invocation of a non-function
-Language/Expressions/Function_Invocation/Function_Expression_Invocation/static_type_t02: fail, OK
-
-# co19 issue #564: URI can be any number adjacent string literals
-Language/Libraries_and_Scripts/URIs/syntax_t14: fail, OK
-Language/Libraries_and_Scripts/URIs/syntax_t15: fail, OK
-
-# co19 issue #615: Expect import missing
-LibTest/collection/LinkedList/LinkedList_A01_t01: Fail, OK
-
-Language/Generics/syntax_t04: StaticWarning # co19 issue #56
-
 LibTest/isolate/IsolateStream/any_A01_t01: Fail # co19-roll r706: Please triage this failure.
 LibTest/isolate/IsolateStream/asBroadcastStream_A01_t01: Fail # co19-roll r706: Please triage this failure.
 LibTest/isolate/IsolateStream/contains_A01_t01: Fail # co19-roll r706: Please triage this failure.
@@ -68,8 +56,6 @@
 LibTest/collection/ListQueue/ListQueue_class_A01_t01: Fail, OK
 LibTest/collection/Queue/Queue_class_A01_t01: Fail, OK
 
-
-Language/Expressions/Method_Invocation/Cascaded_Invocations/syntax_t19: MissingStaticWarning
 Language/Statements/Switch/last_statement_t03: MissingStaticWarning
 Language/Statements/Assert/type_t04: MissingStaticWarning
 
@@ -83,7 +69,6 @@
 Language/Expressions/Function_Invocation/Unqualified_Invocation/invocation_t17: MissingCompileTimeError # co19-roll r651: Please triage this failure
 Language/Expressions/Function_Invocation/Unqualified_Invocation/invocation_t18: MissingCompileTimeError # co19-roll r651: Please triage this failure
 
-Language/Classes/Superinterfaces/no_member_t05: StaticWarning # co19-roll r667: Please triage this failure
 LibTest/convert/JsonEncoder/JsonEncoder_A01_t01: StaticWarning # co19-roll r667: Please triage this failure
 
 # co19 issue 656
@@ -100,9 +85,6 @@
 Language/Expressions/Method_Invocation/Super_Invocation/accessible_instance_member_t03: StaticWarning # co19-roll r706: Please triage this failure.
 Language/Expressions/Method_Invocation/Super_Invocation/accessible_instance_member_t04: StaticWarning # co19-roll r706: Please triage this failure.
 Language/Expressions/Method_Invocation/Super_Invocation/accessible_instance_member_t05: StaticWarning # co19-roll r706: Please triage this failure.
-Language/Libraries_and_Scripts/Parts/compilation_t04: CompileTimeError # co19-roll r706: Please triage this failure.
-Language/Libraries_and_Scripts/Parts/static_warning_t01: CompileTimeError # co19-roll r706: Please triage this failure.
-Language/Libraries_and_Scripts/Scripts/syntax_t11: CompileTimeError # co19-roll r706: Please triage this failure.
 LayoutTests/fast/dom/DOMImplementation/createDocument-namespace-err_t01: StaticWarning # co19-roll r706: Please triage this failure.
 LayoutTests/fast/dom/DOMImplementation/createDocumentType-err_t01: StaticWarning # co19-roll r706: Please triage this failure.
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/caretRangeFromPoint-in-zoom-and-scroll_t01: StaticWarning # co19-roll r706: Please triage this failure.
@@ -190,7 +172,6 @@
 # co19-roll r738
 Language/Classes/Classes/method_definition_t06: MissingStaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/StyleSheet/detached-parent-rule-without-wrapper_t01: StaticWarning # co19-roll r738: Please triage this failure.
-LayoutTests/fast/dom/StyleSheet/detached-stylesheet-without-wrapper_t01: StaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/StyleSheet/removed-media-rule-deleted-parent-crash_t01: StaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/StyleSheet/removed-stylesheet-rule-deleted-parent-crash_t01: StaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/TreeWalker/TreeWalker-basic_t01: StaticWarning # co19-roll r738: Please triage this failure.
@@ -215,8 +196,6 @@
 LayoutTests/fast/dom/remove-named-attribute-crash_t01: StaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/shadow/content-pseudo-element-css-text_t01: StaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/shadow/content-pseudo-element-relative-selector-css-text_t01: StaticWarning # co19-roll r738: Please triage this failure.
-LayoutTests/fast/dom/shadow/host-context-pseudo-class-css-text_t01: StaticWarning # co19-roll r738: Please triage this failure.
-LayoutTests/fast/dom/shadow/host-pseudo-class-css-text_t01: StaticWarning # co19-roll r738: Please triage this failure.
 LayoutTests/fast/dom/shadow/shadow-root-js-api_t01: StaticWarning # co19-roll r738: Please triage this failure.
 WebPlatformTest/DOMEvents/approved/Event.bubbles.false_t01: StaticWarning # co19-roll r738: Please triage this failure.
 WebPlatformTest/DOMEvents/approved/Propagation.path.target.removed_t01: StaticWarning # co19-roll r738: Please triage this failure.
@@ -284,12 +263,6 @@
 LayoutTests/fast/xpath/4XPath/Core/test_core_functions_t01: StaticWarning # co19 issue 703
 WebPlatformTest/dom/Node-replaceChild_t01: CompileTimeError # co19-roll r761: Please triage this failure.
 WebPlatformTest/html/semantics/forms/the-input-element/email_t02: StaticWarning # co19 issue 701
-Language/Expressions/Instance_Creation/Const/abstract_class_t01: MissingCompileTimeError # Issue 22010
-Language/Expressions/Instance_Creation/Const/abstract_class_t03: MissingCompileTimeError # Issue 22010
-Language/Types/Interface_Types/subtype_t19: StaticWarning # co19 issue 745
-Language/Types/Interface_Types/subtype_t22: StaticWarning # co19 issue 745
-Language/Types/Interface_Types/subtype_t24: StaticWarning # co19 issue 745
-Language/Statements/Assert/type_t07: StaticWarning # Issue 23663
 
 # isProtocolHandlerRegistered and unregisterProtocolHandler don't exist
 LayoutTests/fast/dom/navigatorcontentutils/is-protocol-handler-registered_t01: Skip # Please triage this failure.
@@ -302,22 +275,16 @@
 LayoutTests/fast/dom/navigatorcontentutils/unregister-protocol-handler_t01: StaticWarning # Please triage this failure.
 
 # co19 roll to Sep 29 2015 (3ed795ea02e022ef19c77cf1b6095b7c8f5584d0)
-Language/Classes/Abstract_Instance_Members/invocation_t03: MissingStaticWarning # Please triage this failure.
-Language/Classes/Abstract_Instance_Members/invocation_t04: MissingStaticWarning # Please triage this failure.
 Language/Classes/Getters/static_t01: StaticWarning # Please triage this failure.
 Language/Classes/Getters/type_object_t01: StaticWarning # Please triage this failure.
 Language/Classes/Getters/type_object_t02: StaticWarning # Please triage this failure.
 Language/Classes/Instance_Variables/definition_t03: StaticWarning # Please triage this failure.
-Language/Classes/Setters/syntax_t04: StaticWarning # Please triage this failure.
 Language/Classes/Setters/type_object_t01: StaticWarning # Please triage this failure.
 Language/Classes/Setters/type_object_t02: StaticWarning # Please triage this failure.
 Language/Classes/Static_Methods/same_name_method_and_setter_t01: MissingStaticWarning # Please triage this failure.
 Language/Classes/Static_Methods/type_object_t01: StaticWarning # Please triage this failure.
 Language/Classes/Static_Methods/type_object_t02: StaticWarning # Please triage this failure.
 Language/Classes/method_definition_t06: MissingStaticWarning # Please triage this failure.
-Language/Enums/declaration_equivalent_t03: StaticWarning # Please triage this failure.
-Language/Enums/declaration_equivalent_t05: StaticWarning # Please triage this failure.
-Language/Enums/declaration_equivalent_t08: StaticWarning # Please triage this failure.
 Language/Expressions/Lookup/Method_Lookup/superclass_t07: StaticWarning # Please triage this failure.
 Language/Expressions/Lookup/Method_Lookup/superclass_t08: StaticWarning # Please triage this failure.
 Language/Expressions/Property_Extraction/General_Closurization/expression_evaluation_t01: CompileTimeError # Please triage this failure.
@@ -368,21 +335,118 @@
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t01: CompileTimeError # Please triage this failure.
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t02: CompileTimeError # Please triage this failure.
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t03: CompileTimeError # Please triage this failure.
-Language/Expressions/Property_Extraction/Super_Getter_Access_and_Method_Closurization/no_such_method_t01: StaticWarning # Please triage this failure.
-Language/Expressions/Property_Extraction/Super_Getter_Access_and_Method_Closurization/static_type_t01: StaticWarning # Please triage this failure.
-Language/Expressions/Property_Extraction/Super_Getter_Access_and_Method_Closurization/static_type_t02: StaticWarning # Please triage this failure.
 Language/Interfaces/Superinterfaces/Inheritance_and_Overriding/inheritance_t03: StaticWarning # Please triage this failure.
-Language/Interfaces/Superinterfaces/Inheritance_and_Overriding/same_name_getters_type_t07: StaticWarning # Please triage this failure.
 Language/Interfaces/Superinterfaces/Inheritance_and_Overriding/same_name_method_and_getter_t01: CompileTimeError # Please triage this failure.
 Language/Interfaces/Superinterfaces/Inheritance_and_Overriding/same_name_method_and_getter_t02: CompileTimeError # Please triage this failure.
-Language/Libraries_and_Scripts/Imports/namespace_changes_t10: CompileTimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t04: CompileTimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t05: CompileTimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t09: CompileTimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t10: CompileTimeError # Please triage this failure.
-Language/Mixins/Mixin_Application/abstract_t01: StaticWarning # Please triage this failure.
-Language/Mixins/Mixin_Application/abstract_t02: StaticWarning # Please triage this failure.
 Language/Mixins/Mixin_Application/error_t01: MissingCompileTimeError # Please triage this failure.
 Language/Mixins/Mixin_Application/error_t02: MissingCompileTimeError # Please triage this failure.
 Language/Mixins/Mixin_Application/warning_t01: MissingStaticWarning # Please triage this failure.
 Language/Mixins/Mixin_Application/warning_t02: MissingStaticWarning # Please triage this failure.
+
+Language/Classes/Getters/static_getter_t02: CompileTimeError # Issue 24534
+Language/Classes/Setters/name_t08: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t09: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t10: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t11: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t12: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t13: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t14: CompileTimeError # Issue 23777
+Language/Classes/Setters/name_t15: CompileTimeError # Issue 23777
+Language/Enums/syntax_t08: MissingCompileTimeError # Please triage this failure.
+Language/Enums/syntax_t09: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Assignment/super_assignment_static_warning_t03: StaticWarning # Issue 15467
+Language/Expressions/Function_Invocation/Unqualified_Invocation/instance_context_invocation_t03: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Function_Invocation/Unqualified_Invocation/instance_context_invocation_t04: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_identifier_t53: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t54: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t55: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t56: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t57: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t58: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t59: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t60: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t61: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t62: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t63: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t64: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t65: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t66: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t67: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t68: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t17: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t18: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t19: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t21: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t22: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t23: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t24: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t25: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t26: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t27: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t28: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t29: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t30: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t31: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t32: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Identifier_Reference/evaluation_type_parameter_t02: MissingCompileTimeError # Please triage this failure.
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t01: MissingCompileTimeError # Issue 25496
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t02: MissingCompileTimeError # Issue 25496
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Super_Property_Extraction: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Super_Closurization: CompileTimeError # Issue 23777
+Language/Libraries_and_Scripts/Imports/invalid_uri_deferred_t02: CompileTimeError # Please triage this failure.
+Language/Mixins/Mixin_Application/static_warning_t01: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/static_warning_t02: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/superinterfaces_t06: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/superinterfaces_t07: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t11: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t12: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t13: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t14: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t20: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t21: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t22: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t23: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t24: CompileTimeError # Issue 26409
+Language/Mixins/Mixin_Application/syntax_t25: CompileTimeError # Issue 26409
+Language/Mixins/declaring_constructor_t05: MissingCompileTimeError # Issue 24767
+Language/Mixins/declaring_constructor_t06: MissingCompileTimeError # Issue 24767
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t01: MissingCompileTimeError # Issue 25495
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t03: MissingCompileTimeError # Issue 25495
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t05: MissingCompileTimeError # Issue 25495
+
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t01: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t02: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t03: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/positional_parameters_t01: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/named_parameters_t01: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/identical_t01: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/identical_t02: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/positional_parameters_t01: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/named_parameters_t01: CompileTimeError # Issue 23777
+Language/Expressions/Identifier_Reference/evaluation_library_or_getter_t05: StaticWarning # Misspelled "@static-waning"
+LibTest/core/Set/IterableBase_A01_t01: StaticWarning # Imports libraries with static warnings
+LibTest/collection/LinkedHashSet/LinkedHashSet_class_A01_t01: StaticWarning # Imports libraries with static warnings
+LibTest/collection/IterableBase/IterableBase_class_A01_t02: StaticWarning # Imports libraries with static warnings
+LibTest/collection/HashSet/HashSet_class_A01_t01: StaticWarning # Imports libraries with static warnings
+Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t05: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t06: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t07: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t05: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t06: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t07: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t08: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t09: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t05: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t06: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t07: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t08: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t09: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/static_type_t06: CompileTimeError # Issue 23777
+Language/Expressions/Property_Extraction/General_Closurization/static_type_t07: CompileTimeError # Issue 23777
+Language/Libraries_and_Scripts/Scripts/syntax_t11: Pass, CompileTimeError # Issue 26592
+Language/Libraries_and_Scripts/Parts/compilation_t04: Pass, CompileTimeError # Issue 26592
+Language/Libraries_and_Scripts/Parts/compilation_t02: Pass, MissingCompileTimeError # Issue 26692
+Language/Libraries_and_Scripts/Parts/compilation_t01: Pass, MissingCompileTimeError # Issue 26692
diff --git a/tests/co19/co19-co19.status b/tests/co19/co19-co19.status
index f6e0f6d..80a6317 100644
--- a/tests/co19/co19-co19.status
+++ b/tests/co19/co19-co19.status
@@ -14,50 +14,29 @@
 [ $runtime == vm || $runtime != vm ]
 # Tests that fail everywhere, including the analyzer.
 
-Language/Classes/Constructors/Constant_Constructors/initializer_not_a_constant_t01: Pass, Fail, OK # co19 issue 18
-Language/Classes/Constructors/Constant_Constructors/initializer_not_a_constant_t02: Pass, Fail, OK # co19 issue 18
-Language/Classes/Constructors/Constant_Constructors/initializer_not_a_constant_t03: Pass, Fail, OK # co19 issue 18
-Language/Classes/Constructors/Constant_Constructors/not_a_constant_in_superclass_t01: Pass, Fail, OK # co19 issue 18
-Language/Classes/Constructors/Constant_Constructors/not_a_constant_in_superclass_t02: Pass, Fail, OK # co19 issue 18
-
-# Super is now allowed in mixins and mixins may now extend a subclass of Object.
-Language/09_Mixins/09_Mixins_A01_t01: Skip # co19 issue 9.
-Language/09_Mixins/09_Mixins_A03_t01: Skip # co19 issue 9.
-
-# No longer correct, y#$ now has a meaning. github.com/dart-lang/co19/issues/2
-Language/12_Expressions/30_Identifier_Reference_A01_t03: Skip
-
 LibTest/typed_data/ByteData/buffer_A01_t01: Fail # co19 r736 bug - sent comment.
 
 LibTest/core/RegExp/firstMatch_A01_t01: Fail # co19 issue 742
 
-# These tests are obsolete and need updating.
-WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-attributes/test-002_t01: Skip # Issue 19019
-WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-attributes/test-003_t01: Skip # Issue 19019
-
-# These tests are broken in both Javascript and Dart (co19 folks contacted to fix).
-WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-methods/test-004_t01: Skip # Issue 21115
-
 [ $compiler != dart2analyzer ]
 # Tests that fail on every runtime, but not on the analyzer.
-
-LibTest/async/Future/Future.error_A01_t01: RuntimeError # co19 issue 712
-LibTest/async/Completer/completeError_A02_t01: RuntimeError # co19 issue 712
 LibTest/isolate/ReceivePort/asBroadcastStream_A02_t01: Fail # co19 issue 687
 LibTest/async/Stream/asBroadcastStream_A02_t01: Fail # co19 issue 687
 
 LibTest/core/Symbol/Symbol_A01_t04: RuntimeError # Issue 25804
 
-Language/Classes/same_name_type_variable_t01: Pass, MissingCompileTimeError, Fail # Issue 14513
-Language/Classes/same_name_type_variable_t04: Pass, MissingCompileTimeError, Fail # Issue 14513
-Language/Classes/same_name_type_variable_t07: Pass, MissingCompileTimeError, Fail # Issue 14513
+Language/Classes/same_name_type_variable_t04: Pass, MissingCompileTimeError, Fail # Issue 14513,25525
+Language/Classes/same_name_type_variable_t07: Pass, MissingCompileTimeError, Fail # Issue 14513,25525
 
-LibTest/math/acos_A01_t01: PASS, FAIL, OK # co19 issue 44
-LibTest/math/asin_A01_t01: PASS, FAIL, OK # co19 issue 44
-LibTest/math/atan_A01_t01: PASS, FAIL, OK # co19 issue 44
+Language/Expressions/Instance_Creation/Const/abstract_class_t01: Pass, Fail # co19 issue 66
+Language/Expressions/Instance_Creation/Const/abstract_class_t03: Pass, Fail # co19 issue 66
 
-LibTest/math/cos_A01_t01: PASS, FAIL, OK # co19 issue 44
-LibTest/math/tan_A01_t01: PASS, FAIL, OK # co19 issue 44
+LibTest/math/acos_A01_t01: PASS, FAIL, OK  # Issue 26261
+LibTest/math/asin_A01_t01: PASS, FAIL, OK  # Issue 26261
+LibTest/math/atan_A01_t01: PASS, FAIL, OK  # Issue 26261
+
+LibTest/math/cos_A01_t01: PASS, FAIL, OK  # Issue 26261
+LibTest/math/tan_A01_t01: PASS, FAIL, OK  # Issue 26261
 
 LibTest/core/Expando/Expando_A03_t01: RuntimeError # Issue 17735
 LibTest/core/Expando/Expando_A03_t03: RuntimeError # Issue 17735
@@ -97,9 +76,13 @@
 [ $runtime == dartium || $compiler == dart2js ]
 LibTest/async/Future/Future.delayed_A01_t02: Pass, Fail # Issue 15524
 
-### CHECKED MODE FAILURES ###
-
-[ $compiler != dart2analyzer && $checked ]
-LibTest/collection/DoubleLinkedQueue/removeFirst_A01_t01: RuntimeError # co19 issue 22
-LibTest/collection/LinkedList/LinkedList_A01_t01: RuntimeError # co19 issue 23
-LibTest/collection/LinkedList/lastWhere_A02_t01: RuntimeError # co19 issue 737
+[ ($compiler == none || $compiler == precompiler) && ($runtime == vm || $runtime == drt || $runtime == dartium || $runtime == dart_precompiled) ]
+# Optional trailing commas for argument and parameter lists were added to the language; a short sketch follows this file's diff.
+# https://github.com/dart-lang/co19/issues/68
+Language/Expressions/Function_Invocation/Actual_Argument_List_Evaluation/syntax_t05: Fail, OK
+Language/Expressions/Method_Invocation/Ordinary_Invocation/syntax_t05: Fail, OK
+Language/Expressions/Method_Invocation/Ordinary_Invocation/syntax_t10: Fail, OK
+Language/Expressions/Method_Invocation/Super_Invocation/syntax_t05: Fail, OK
+Language/Functions/Formal_Parameters/syntax_t04: Fail, OK
+Language/Functions/Formal_Parameters/syntax_t05: Fail, OK
+Language/Functions/Formal_Parameters/syntax_t12: Fail, OK
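These entries are accepted failures ("Fail, OK") presumably because the co19 tests predate the change: Dart now allows an optional trailing comma after the last parameter or argument. A minimal illustrative sketch of the now-legal syntax (hypothetical names, not taken from the test suite):

    // Trailing comma in a parameter list.
    int add(int a, int b,) => a + b;

    void main() {
      // Trailing comma in an argument list.
      print(add(
        1,
        2,
      ));
    }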
diff --git a/tests/co19/co19-dart2js.status b/tests/co19/co19-dart2js.status
index d12cbfc..2740bdf 100644
--- a/tests/co19/co19-dart2js.status
+++ b/tests/co19/co19-dart2js.status
@@ -7,6 +7,7 @@
 Language/Classes/Constructors/Generative_Constructors/execution_of_a_superinitializer_t01: RuntimeError, OK # co19 issue 258
 Language/Classes/Constructors/Generative_Constructors/execution_of_an_initializer_t02: fail # Issue 13363
 Language/Classes/Constructors/Generative_Constructors/initializing_formals_execution_t02: fail # Issue 13363
+Language/Classes/Getters/static_getter_t02: CompileTimeError # Issue 24534
 Language/Classes/Getters/type_object_t01: RuntimeError # Please triage this failure
 Language/Classes/Getters/type_object_t02: RuntimeError # Please triage this failure
 Language/Classes/Instance_Methods/same_name_setter_t01: fail # Issue 21201
@@ -14,40 +15,98 @@
 Language/Classes/Setters/name_t02: CompileTimeError # Issue 5023
 Language/Classes/Setters/name_t03: RuntimeError # Issue 5023
 Language/Classes/Setters/name_t07: CompileTimeError # Issue 5023
-Language/Classes/Setters/syntax_t04: RuntimeError # Please triage this failure
+Language/Classes/Setters/name_t08: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t09: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t10: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t11: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t12: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t13: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t14: CompileTimeError # Please triage this failure
+Language/Classes/Setters/name_t15: CompileTimeError # Please triage this failure
+Language/Classes/Setters/static_setter_t06: RuntimeError  # Please triage this failure
 Language/Classes/Setters/type_object_t01: RuntimeError # Please triage this failure
 Language/Classes/Setters/type_object_t02: RuntimeError # Please triage this failure
 Language/Classes/Static_Methods/same_name_method_and_setter_t01: CompileTimeError # Please triage this failure
 Language/Classes/Static_Methods/type_object_t01: RuntimeError # Please triage this failure
 Language/Classes/Static_Methods/type_object_t02: RuntimeError # Please triage this failure
-Language/Classes/Superclasses/wrong_superclass_t04: MissingCompileTimeError # Please triage this failure
-Language/Classes/Superclasses/wrong_superclass_t07: MissingCompileTimeError # Please triage this failure
-Language/Classes/Superclasses/wrong_superclass_t08: MissingCompileTimeError # Please triage this failure
-Language/Classes/Superinterfaces/wrong_type_t04: MissingCompileTimeError # Please triage this failure
-Language/Classes/Superinterfaces/wrong_type_t05: MissingCompileTimeError # Please triage this failure
+Language/Classes/definition_t23: CompileTimeError # Please triage this failure
+Language/Classes/same_name_type_variable_t01: Fail # Missing CT error on class with the same name as a type parameter
 Language/Classes/same_name_type_variable_t02: Fail # Missing CT error on member with the same name as a type parameter
 Language/Classes/same_name_type_variable_t03: Fail # Missing CT error on member with the same name as a type parameter
 Language/Classes/same_name_type_variable_t05: Fail # Missing CT error on member with the same name as a type parameter
 Language/Classes/same_name_type_variable_t06: Fail # Missing CT error on member with the same name as a type parameter
 Language/Classes/same_name_type_variable_t08: Fail # Missing CT error on member with the same name as a type parameter
 Language/Classes/same_name_type_variable_t09: Fail # Missing CT error on member with the same name as a type parameter
-Language/Enums/restrictions_t01: MissingCompileTimeError # Please triage this failure
-Language/Enums/restrictions_t02: MissingCompileTimeError # Please triage this failure
-Language/Enums/restrictions_t05: MissingCompileTimeError # Please triage this failure
-Language/Enums/restrictions_t06: MissingCompileTimeError # Please triage this failure
-Language/Enums/restrictions_t07: MissingCompileTimeError # Please triage this failure
-Language/Enums/syntax_t02: MissingCompileTimeError # Please triage this failure
 Language/Errors_and_Warnings/compile_error_t06: MissingCompileTimeError # Please triage this failure
 Language/Errors_and_Warnings/compile_error_t07: MissingCompileTimeError # Please triage this failure
+Language/Expressions/Assignment/super_assignment_failed_t05: RuntimeError # Issue 25671
+Language/Expressions/Assignment/super_assignment_value_t02: RuntimeError # Please triage this failure
+Language/Expressions/Await_Expressions/evaluation_throws_t03: RuntimeError # Please triage this failure
+Language/Expressions/Await_Expressions/evaluation_throws_t04: RuntimeError # Please triage this failure
+Language/Expressions/Await_Expressions/evaluation_throws_t06: RuntimeError # Please triage this failure
+Language/Expressions/Await_Expressions/evaluation_throws_t07: RuntimeError # Please triage this failure
+Language/Expressions/Constants/identifier_denotes_a_constant_t06: MissingCompileTimeError # Issue 26580
+Language/Expressions/Constants/identifier_denotes_a_constant_t07: MissingCompileTimeError # Issue 26580
+Language/Expressions/Function_Invocation/async_generator_invokation_t08: Timeout, Skip # Issue 25967
+Language/Expressions/Function_Invocation/async_generator_invokation_t10: Timeout, Skip # Issue 25967
+Language/Expressions/Identifier_Reference/built_in_identifier_t35: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t36: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t37: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t53: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t54: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t55: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t56: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t57: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t58: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t59: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t60: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t61: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t62: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t63: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t64: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t65: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t66: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t67: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t68: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t14: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t15: MissingCompileTimeError # Issue 26581
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t19: MissingCompileTimeError # Issue 25732
 Language/Expressions/Identifier_Reference/syntax_built_in_t01: fail # Issue 21154
+Language/Expressions/Instance_Creation/New/evaluation_t19: RuntimeError # Please triage this failure
+Language/Expressions/Instance_Creation/New/evaluation_t20: RuntimeError # Please triage this failure
+Language/Expressions/Maps/key_value_equals_operator_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Maps/static_type_dynamic_t01: CompileTimeError # Maybe ok. Issue 17207
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t01: MissingCompileTimeError # Issue 25496
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t02: MissingCompileTimeError # Issue 25496
 Language/Expressions/Numbers/syntax_t06: fail # Issue 21098
 Language/Expressions/Numbers/syntax_t09: fail # Issue 21098
 Language/Expressions/Object_Identity/Object_Identity/constant_objects_t01: fail # Issue 11551, also related to issue 563, 18738
 Language/Expressions/Object_Identity/Object_Identity/double_t02: fail # Issue 11551, also related to issue 563, 18738
+Language/Expressions/Object_Identity/double_t03: RuntimeError # Please triage this failure
 Language/Expressions/Object_Identity/constant_objects_t01: RuntimeError # Please triage this failure
 Language/Expressions/Object_Identity/double_t02: RuntimeError # Please triage this failure
 Language/Expressions/Object_Identity/object_t02: RuntimeError # Issue 1533 (int/double related)
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t01: CompileTimeError # Issue 24607
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t02: CompileTimeError # Issue 24607
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/named_parameters_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/positional_parameters_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/closurization_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/closurization_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/closurization_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/deferred_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/malbounded_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/malbounded_type_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/malformed_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/malformed_type_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/no_such_method_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/no_such_method_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/not_class_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/not_class_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/not_class_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/static_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/static_type_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Extraction/static_type_t03: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/expression_evaluation_t01: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/expression_evaluation_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/expression_evaluation_t03: CompileTimeError # Please triage this failure
@@ -55,10 +114,25 @@
 Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t03: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t04: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t05: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t07: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t08: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/getter_lookup_t09: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t01: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t03: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t04: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t05: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/method_lookup_t07: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t05: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t07: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t08: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/setter_lookup_t09: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/static_type_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Closurization/static_type_t07: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/no_accessible_member_t01: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/no_accessible_member_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/General_Closurization/no_accessible_member_t03: CompileTimeError # Please triage this failure
@@ -86,24 +160,83 @@
 Language/Expressions/Property_Extraction/Getter_Access_and_Method_Extraction/class_object_member_t08: MissingCompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Getter_Access_and_Method_Extraction/expression_evaluation_t07: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Getter_Access_and_Method_Extraction/no_such_method_t01: RuntimeError # Please triage this failure
+
+Language/Expressions/Property_Extraction/General_Super_Property_Extraction: CompileTimeError # Issue 26287
+
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/identical_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/identical_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/closurization_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/closurization_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/deferred_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/malbounded_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/malbounded_type_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/malformed_type_t01: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/malformed_type_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/named_parameters_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/no_such_method_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/no_such_method_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t01: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t02: CompileTimeError # Please triage this failure
 Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/static_type_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/static_type_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/static_type_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Extraction/static_type_t04: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/positional_parameters_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t04: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t05: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t07: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/getter_closurization_t08: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_closurization_named_params_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_closurization_named_params_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_closurization_positional_params_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_closurization_positional_params_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_identical_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_identical_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/method_identical_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_list_access_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_list_assignment_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t04: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t05: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t07: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t08: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t09: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t10: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t11: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t12: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t13: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t14: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t15: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t16: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_t17: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/operator_closurization_unary_bitwise_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t01: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t03: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t04: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t05: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t06: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t07: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Ordinary_Member_Closurization/setter_closurization_t08: CompileTimeError # Please triage this failure
+
+Language/Expressions/Property_Extraction/Super_Closurization: CompileTimeError # Issue 26287
+
 Language/Expressions/Property_Extraction/Super_Getter_Access_and_Method_Closurization/no_such_method_t01: RuntimeError # Please triage this failure
 Language/Functions/External_Functions/not_connected_to_a_body_t01: CompileTimeError, OK # Issue 5021
 Language/Generics/syntax_t17: fail # Issue 21203
 Language/Interfaces/Superinterfaces/Inheritance_and_Overriding/same_name_method_and_getter_t01: CompileTimeError # Please triage this failure
 Language/Interfaces/Superinterfaces/Inheritance_and_Overriding/same_name_method_and_getter_t02: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/Imports/namespace_changes_t10: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/URIs/syntax_t04: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/URIs/syntax_t05: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/URIs/syntax_t09: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/URIs/syntax_t10: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/URIs/syntax_t14: CompileTimeError # Please triage this failure
-Language/Libraries_and_Scripts/URIs/syntax_t15: CompileTimeError # Please triage this failure
+Language/Libraries_and_Scripts/Imports/invalid_uri_deferred_t01: CompileTimeError # Please triage this failure
+Language/Libraries_and_Scripts/Imports/invalid_uri_deferred_t02: CompileTimeError # Please triage this failure
+Language/Libraries_and_Scripts/Scripts/top_level_main_t05: RuntimeError # Please triage this failure
 Language/Metadata/before_export_t01: RuntimeError # Please triage this failure
 Language/Metadata/before_import_t01: RuntimeError # Please triage this failure
 Language/Metadata/before_library_t01: RuntimeError # Please triage this failure
@@ -111,22 +244,40 @@
 Language/Metadata/before_type_param_t01: CompileTimeError # Please triage this failure
 Language/Metadata/before_typedef_t01: RuntimeError # Please triage this failure
 Language/Metadata/before_variable_t01: RuntimeError # Please triage this failure
-Language/Mixins/Mixin_Application/error_t01: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/error_t02: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/wrong_mixin_type_t01: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/wrong_mixin_type_t02: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/wrong_mixin_type_t03: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/wrong_mixin_type_t04: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/wrong_type_t01: MissingCompileTimeError # Please triage this failure
-Language/Mixins/Mixin_Application/wrong_type_t02: MissingCompileTimeError # Please triage this failure
-Language/Mixins/declaring_constructor_t01: MissingCompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/static_warning_t01: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/static_warning_t02: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/superinterfaces_t06: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/superinterfaces_t07: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t11: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t12: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t13: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t14: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t20: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t21: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t22: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t23: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t24: CompileTimeError # Please triage this failure
+Language/Mixins/Mixin_Application/syntax_t25: CompileTimeError # Please triage this failure
+Language/Mixins/declaring_constructor_t05: MissingCompileTimeError # Issue 24767
+Language/Mixins/declaring_constructor_t06: MissingCompileTimeError # Issue 24767
 Language/Mixins/deferred_t01: MissingCompileTimeError # Please triage this failure
 Language/Mixins/not_object_superclass_t01: MissingCompileTimeError # Please triage this failure
 Language/Mixins/reference_to_super_t01: MissingCompileTimeError # Please triage this failure
 Language/Reference/Lexical_Rules/Reserved_Words/whitespace_t04: MissingCompileTimeError # Checks that other Unicode whitespace characters are not allowed; checks NO-BREAK SPACE (U+00A0).
 Language/Reference/Lexical_Rules/whitespace_t06: MissingCompileTimeError # Checks that Unicode whitespace characters other than WHITESPACE are not permitted in source code; checks U+00A0.
+Language/Reference/Operator_Precedence/precedence_12_Shift_t02: RuntimeError # Issue 26573
+Language/Reference/Operator_Precedence/precedence_15_unary_prefix_t04: RuntimeError # Issue 26573
+Language/Reference/Operator_Precedence/precedence_15_unary_prefix_t08: RuntimeError # Please triage this failure
+Language/Statements/Continue/async_loops_t09: Crash # Please triage this failure
 Language/Statements/Local_Function_Declaration/reference_before_declaration_t01: MissingCompileTimeError # Issue 21050
 Language/Statements/Local_Function_Declaration/reference_before_declaration_t03: MissingCompileTimeError # Issue 21050
+Language/Statements/Try/catch_scope_t01: RuntimeError # Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t02: RuntimeError # Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_sync_t05: RuntimeError # Issue 25662, 25634
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t01: MissingCompileTimeError # Issue 25495
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t03: MissingCompileTimeError # Issue 25495
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t05: MissingCompileTimeError # Issue 25495
+Language/Types/Dynamic_Type_System/deferred_type_error_t01: RuntimeError # Please triage this failure
 Language/Types/Interface_Types/subtype_t27: Skip # Times out or crashes. Issue 21174
 Language/Types/Interface_Types/subtype_t30: fail # Issue 14654
 Language/Types/Interface_Types/subtype_t28: Pass, Fail, Crash # Stack overflow. Issue 25282
@@ -143,7 +294,13 @@
 LibTest/convert/JsonDecoder/fuse_A01_t01: RuntimeError # co19-roll r667: Please triage this failure
 LibTest/convert/JsonEncoder/convert_A01_t01: RuntimeError # co19-roll r667: Please triage this failure
 LibTest/core/DateTime/DateTime_A01_t03: fail # co19-roll r546: Please triage this failure
-LibTest/core/DateTime/parse_A03_t01: fail # co19-roll r546: Please triage this failure
+LibTest/core/DateTime/DateTime.fromMicrosecondsSinceEpoch_A01_t01: RuntimeError # Please triage this failure
+LibTest/core/DateTime/microsecond_A01_t01: RuntimeError # Please triage this failure
+LibTest/core/DateTime/microsecondsSinceEpoch_A01_t01: RuntimeError # Please triage this failure
+LibTest/core/DateTime/parse_A01_t03: RuntimeError # Please triage this failure
+LibTest/core/DateTime/to8601String_A01_t01: RuntimeError # Please triage this failure
+LibTest/core/DateTime/to8601String_A01_t02: RuntimeError # Please triage this failure
+LibTest/core/DateTime/to8601String_A01_t03: RuntimeError # Please triage this failure
 LibTest/core/Duration/operator_div_A01_t01: fail # co19-roll r546: Please triage this failure
 LibTest/core/List/List_class_A01_t01: RuntimeError # co19-roll r623: Please triage this failure
 LibTest/core/List/getRange_A03_t01: RuntimeError, OK # Tests that fail because they use the legacy try-catch syntax. co19 issue 184.
@@ -286,30 +443,54 @@
 LibTest/typed_data/Uint8List/Uint8List.view_A05_t02: RuntimeError # co19-roll r559: Please triage this failure
 LibTest/typed_data/Uint8List/Uint8List.view_A05_t03: RuntimeError # co19-roll r559: Please triage this failure
 LibTest/typed_data/Uint8List/Uint8List_A02_t01: fail # co19-roll r576: Please triage this failure
-Utils/tests/Expect/identical_A01_t01: fail # co19-roll r546: Please triage this
-WebPlatformTest/DOMEvents/approved/domnodeinserted_t01: Skip # Issue 51
+Utils/tests/Expect/identical_A01_t01: fail # co19-roll r546: Please triage this failure
 WebPlatformTest/Utils/test/testFail_t01: RuntimeError # co19-roll r722: Please triage this failure.
 WebPlatformTest/dom/nodes/DOMImplementation-createHTMLDocument_t01: CompileTimeError # co19-roll r722: Please triage this failure.
 WebPlatformTest/dom/nodes/Document-createElement_t01: CompileTimeError # co19-roll r722: Please triage this failure.
 WebPlatformTest/dom/nodes/Element-childElementCount-nochild_t01: CompileTimeError # co19-roll r722: Please triage this failure.
 WebPlatformTest/webstorage/storage_session_setitem_quotaexceedederr_t01: Pass, Slow
+Language/Expressions/Function_Invocation/async_invokation_t04: RuntimeError, Pass # co19 issue 57
 
 [ $compiler == dart2js && $checked ]
-Language/Errors_and_Warnings/static_warning_t01: RuntimeError # Please triage this failure
 Language/Errors_and_Warnings/static_warning_t02: RuntimeError # Please triage this failure
 Language/Errors_and_Warnings/static_warning_t03: RuntimeError # Please triage this failure
 Language/Errors_and_Warnings/static_warning_t04: RuntimeError # Please triage this failure
 Language/Errors_and_Warnings/static_warning_t05: RuntimeError # Please triage this failure
 Language/Errors_and_Warnings/static_warning_t06: RuntimeError # Please triage this failure
+Language/Expressions/Assignment/super_assignment_dynamic_error_t01: RuntimeError # Please triage this failure
+Language/Expressions/Function_Expressions/static_type_dynamic_async_t03: RuntimeError # Please triage this failure
+Language/Expressions/Function_Expressions/static_type_dynamic_asyncs_t03: RuntimeError # Please triage this failure
+Language/Expressions/Function_Expressions/static_type_dynamic_syncs_t03: RuntimeError # Please triage this failure
+Language/Expressions/Function_Expressions/static_type_form_3_async_t03: RuntimeError # Please triage this failure
+Language/Expressions/Function_Expressions/static_type_form_3_asyncs_t03: RuntimeError # Please triage this failure
+Language/Expressions/Function_Expressions/static_type_form_3_syncs_t03: RuntimeError # Please triage this failure
+Language/Expressions/If_null_Expressions/static_type_t01: RuntimeError # Please triage this failure
+Language/Expressions/If_null_Expressions/static_type_t02: RuntimeError # Please triage this failure
+Language/Expressions/Property_Extraction/General_Super_Property_Extraction/getter_lookup_t02: CompileTimeError # Please triage this failure
+Language/Expressions/Property_Extraction/Super_Closurization/setter_closurization_t09: Timeout, Skip  # Please triage this failure
+Language/Functions/async_return_type_t01: RuntimeError # Please triage this failure
+Language/Functions/generator_return_type_t01: RuntimeError # Please triage this failure
+Language/Functions/generator_return_type_t02: RuntimeError # Please triage this failure
+Language/Libraries_and_Scripts/Imports/deferred_import_t02: RuntimeError # co19 issue 60
 Language/Statements/Switch/execution_t01: Fail # Missing type check in switch expression
 Language/Statements/Switch/type_t01: RuntimeError # Issue 16089
+Language/Statements/Return/runtime_type_t04: RuntimeError # Issue 26584
 Language/Types/Static_Types/malformed_type_t01: RuntimeError # Issue 21089
 Language/Types/Dynamic_Type_System/malbounded_type_error_t01: RuntimeError # Issue 21088
 Language/Types/Parameterized_Types/malbounded_t06: RuntimeError # Issue 21088
+#LibTest/async/Future/whenComplete_A01_t01: Timeout, Skip # Please triage this failure
+#LibTest/async/Stream/Stream.eventTransformed_A01_t01: Timeout, Skip # Please triage this failure
+#LibTest/async/Stream/join_A02_t01: Timeout, Skip # Please triage this failure
+#LibTest/async/Stream/last_A02_t01: Timeout, Skip # Please triage this failure
+#LibTest/async/Stream/listen_A03_t01: Timeout, Skip # Please triage this failure
 LibTest/core/Map/Map_class_A01_t04: Slow, Pass
 LibTest/core/Uri/Uri_A06_t03: Slow, Pass
 LibTest/math/Point/operator_mult_A02_t01: RuntimeError # Issue 1533
 
+[ $compiler == dart2js && $checked != true ]
+Language/Expressions/Property_Extraction/General_Super_Property_Extraction/getter_lookup_t02: Timeout, Skip # Please triage this failure
+Language/Expressions/Property_Extraction/Super_Closurization/setter_closurization_t09: CompileTimeError # Please triage this failure
+
 [ $compiler == dart2js && $minified ]
 LibTest/typed_data/Float32List/runtimeType_A01_t01: fail # co19-roll r559: Please triage this failure
 LibTest/typed_data/Float32x4List/runtimeType_A01_t01: fail # co19-roll r559: Please triage this failure
@@ -328,6 +509,24 @@
 LibTest/core/List/List_class_A01_t01: Pass, Timeout
 
 [ $compiler == dart2js && $runtime == jsshell ]
+Language/Expressions/Await_Expressions/execution_t01: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Expressions/Await_Expressions/execution_t02: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Expressions/Await_Expressions/execution_t03: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Expressions/Await_Expressions/execution_t04: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Expressions/Await_Expressions/execution_t05: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Expressions/Await_Expressions/execution_t06: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/For/Asynchronous_For_in/execution_t04: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t02: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t03: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t04: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t05: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t06: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t07: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t08: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t09: RuntimeError # Issue 7728, timer not supported in jsshell
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t10: RuntimeError # Issue 7728, timer not supported in jsshell
 LibTest/async/Future/Future.delayed_A01_t01: RuntimeError # Issue 7728, timer not supported in jsshell
 LibTest/async/Future/Future.delayed_A03_t01: fail # Issue 7728, timer not supported in jsshell
 LibTest/async/Future/wait_A01_t07: RuntimeError # Issue 7728, timer not supported in jsshell
@@ -362,7 +561,10 @@
 LibTest/core/Stopwatch/start_A01_t03: RuntimeError # Issue 7728, timer not supported in jsshell
 LibTest/core/Stopwatch/stop_A01_t01: RuntimeError # Issue 7728, timer not supported in jsshell
 LibTest/core/Uri/Uri_A06_t03: Pass, Slow
+LibTest/isolate/RawReceivePort/close_A01_t01: RuntimeError # Issue 7728, timer not supported in jsshell
+LibTest/isolate/ReceivePort/asBroadcastStream_A03_t01: RuntimeError # Issue 7728, timer not supported in jsshell
 LibTest/isolate/ReceivePort/asBroadcastStream_A04_t03: RuntimeError # Issue 7728, timer not supported in jsshell
+LibTest/isolate/ReceivePort/close_A01_t01: RuntimeError # Issue 7728, timer not supported in jsshell
 LibTest/typed_data/Float32List/Float32List.view_A06_t01: fail # co19-roll r587: Please triage this failure
 LibTest/typed_data/Float32List/toList_A01_t01: Skip # co19-roll r559: Please triage this failure
 LibTest/typed_data/Float32x4List/Float32x4List.view_A06_t01: fail # co19-roll r587: Please triage this failure
@@ -428,6 +630,7 @@
 LayoutTests/fast/dom/cssTarget-crash_t01: Skip # Test reloads itself. Issue 18558.
 LayoutTests/fast/dom/empty-hash-and-search_t01: Skip # Test reloads itself. Issue 18558.
 LayoutTests/fast/dom/shadow/form-in-shadow_t01: Skip # Test reloads itself. Issue 18558.
+LayoutTests/fast/forms/date/date-interactive-validation-required_t01: Skip # Test reloads itself. Issue 18558.
 LayoutTests/fast/forms/datetimelocal/datetimelocal-interactive-validation-required_t01: Skip # Test reloads itself. Issue 18558.
 LayoutTests/fast/forms/form-submission-create-crash_t01: Skip # Test reloads itself. Issue 18558.
 LayoutTests/fast/forms/formmethod-attribute-button-html_t01: Skip # Test reloads itself. Issue 18558.
@@ -445,16 +648,9 @@
 # Everything below this point is associated with co19 Issue 747
 #
 LayoutTests/fast/dynamic/insertAdjacentHTML_t01: Pass, RuntimeError
-LayoutTests/fast/layers/zindex-hit-test_t01: RuntimeError
-LayoutTests/fast/layers/normal-flow-hit-test_t01: RuntimeError
 LayoutTests/fast/multicol/balance-unbreakable_t01: Pass, RuntimeError
-LayoutTests/fast/multicol/fixed-column-percent-logical-height-orthogonal-writing-mode_t01: RuntimeError
-LayoutTests/fast/multicol/image-inside-nested-blocks-with-border_t01: RuntimeError
-LayoutTests/fast/multicol/inherit-column-values_t01: RuntimeError
-LayoutTests/fast/multicol/inline-getclientrects_t01: RuntimeError
 LayoutTests/fast/multicol/newmulticol/balance_t01: Pass, RuntimeError
 LayoutTests/fast/multicol/newmulticol/balance-maxheight_t01: Pass, RuntimeError
-LayoutTests/fast/multicol/newmulticol/balance-maxheight_t02: RuntimeError
 LayoutTests/fast/multicol/newmulticol/balance_t02: Pass, RuntimeError
 LayoutTests/fast/multicol/newmulticol/balance_t04: Pass, RuntimeError
 LayoutTests/fast/multicol/newmulticol/balance_t05: Pass, RuntimeError
@@ -466,96 +662,48 @@
 LayoutTests/fast/multicol/newmulticol/balance-images_t01: Pass, RuntimeError
 LayoutTests/fast/multicol/column-width-zero_t01: Pass, RuntimeError
 LayoutTests/fast/multicol/widows_t01: Pass, RuntimeError
-LayoutTests/fast/multicol/vertical-lr/break-properties_t01: RuntimeError
 LayoutTests/fast/multicol/orphans-relayout_t01: Pass, RuntimeError
-LayoutTests/fast/multicol/zeroColumnCount_t01: RuntimeError
 LayoutTests/fast/media/mq-parsing_t01: Pass, RuntimeError # False passes on Firefox, but trying to keep these grouped with the issue.
 LayoutTests/fast/mediastream/RTCPeerConnection-AddRemoveStream_t01: Skip # Passes on Safari, Issue 23475
 LayoutTests/fast/overflow/replaced-child-100percent-height-inside-fixed-container-with-overflow-auto_t01: Pass, RuntimeError # False pass on Safari
-LayoutTests/fast/lists/marker-preferred-margins_t01: RuntimeError
-LayoutTests/fast/lists/item-not-in-list-line-wrapping_t01: RuntimeError
 LayoutTests/fast/lists/list-style-position-inside_t01: Pass, RuntimeError
-LayoutTests/fast/innerHTML/innerHTML-special-elements_t01: RuntimeError
-LayoutTests/fast/inline/fixed-pos-moves-with-abspos-inline-parent_t01: RuntimeError
-LayoutTests/fast/inline/fixed-pos-moves-with-abspos-parent-relative-ancestor_t01: RuntimeError
-LayoutTests/fast/inline/inline-relative-offset-boundingbox_t01: RuntimeError
-LayoutTests/fast/inline/fixed-pos-moves-with-abspos-parent_t01: RuntimeError
-LayoutTests/fast/inline/fixed-pos-with-transform-container-moves-with-abspos-parent_t01: RuntimeError
-LayoutTests/fast/inline/empty-inline-before-collapsed-space_t01: RuntimeError
-LayoutTests/fast/inline/reattach-inlines-in-anonymous-blocks-with-out-of-flow-siblings_t01: RuntimeError
 LayoutTests/fast/overflow/overflow-rtl-vertical-origin_t01: Pass, RuntimeError # False passes on Firefox, but trying to keep these grouped with the issue.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-and-fixed-ancestor_t01: Pass # False pass
 LayoutTests/fast/table/col-width-span-expand_t01: Skip
-LayoutTests/fast/text/container-align-with-inlines_t01: RuntimeError
 LayoutTests/fast/text/font-fallback-synthetic-italics_t01: Pass, RuntimeError
-LayoutTests/fast/text/international/listbox-width-rtl_t01: RuntimeError
 LayoutTests/fast/text/glyph-reordering_t01: Pass, RuntimeError # This is a false pass. The font gets sanitized, so whether it works or not probably depends on default sizes.
 LayoutTests/fast/text/international/rtl-text-wrapping_t01: Pass # This is a false pass. All the content gets sanitized, so there's nothing to assert fail on. If the code did anything it would fail.
 LayoutTests/fast/text/line-break-after-empty-inline-hebrew_t01: Pass, RuntimeError
-LayoutTests/fast/text/line-break-after-inline-latin1_t01: RuntimeError
-LayoutTests/fast/text/line-breaks-after-closing-punctuations_t01: RuntimeError
-LayoutTests/fast/text/line-breaks-after-hyphen-before-number_t01: RuntimeError
-LayoutTests/fast/text/line-breaks-after-ideographic-comma-or-full-stop_t01: RuntimeError
 LayoutTests/fast/text/regional-indicator-symobls_t01: Pass, Fail
-LayoutTests/fast/text/container-align-with-inlines_t01: RuntimeError
 LayoutTests/fast/text/font-fallback-synthetic-italics_t01: RuntimeError
 LayoutTests/fast/text/font-ligatures-linebreak_t01: Skip
 LayoutTests/fast/text/font-ligatures-linebreak-word_t01: Skip
 LayoutTests/fast/text/ipa-tone-letters_t01: Pass, RuntimeError
-LayoutTests/fast/text/pre-wrap-trailing-tab_t01: RuntimeError
-LayoutTests/fast/url/trivial_t01: RuntimeError
-LayoutTests/fast/url/trivial-segments_t01: RuntimeError
-LayoutTests/fast/url/scheme_t01: RuntimeError
-LayoutTests/fast/url/host-lowercase-per-scheme_t01: RuntimeError
-LayoutTests/fast/url/safari-extension_t01: RuntimeError
-LayoutTests/fast/url/port_t01: RuntimeError
-LayoutTests/fast/url/mailto_t01: RuntimeError
-LayoutTests/fast/url/path-url_t01: RuntimeError
-LayoutTests/fast/url/anchor_t01: RuntimeError
-LayoutTests/fast/writing-mode/auto-margins-across-boundaries_t01: RuntimeError
-LayoutTests/fast/writing-mode/display-mutation_t01: RuntimeError
-LayoutTests/fast/writing-mode/percentage-padding_t01: RuntimeError
-LayoutTests/fast/writing-mode/relative-positioning-percentages_t01: RuntimeError
-LayoutTests/fast/writing-mode/block-formatting-context_t01: RuntimeError
 LayoutTests/fast/writing-mode/percentage-margins-absolute-replaced_t01: Pass, RuntimeError
 LayoutTests/fast/writing-mode/positionForPoint_t01: Pass, RuntimeError
 LayoutTests/fast/text/font-ligatures-linebreak-word_t01: Skip
 LayoutTests/fast/html/adjacent-html-context-element_t01: RuntimeError
 LayoutTests/fast/dom/HTMLElement/insertAdjacentHTML-errors_t01: RuntimeError
-LayoutTests/fast/transforms/transform-inside-overflow-scroll_t01: RuntimeError
 LayoutTests/fast/transforms/transform-hit-test-flipped_t01: Pass, RuntimeError # Passes on Firefox, but is clearly not testing what it's trying to test.
 LayoutTests/fast/transforms/scrollIntoView-transformed_t01: Pass, RuntimeError # False passes on Firefox.
 LayoutTests/fast/transforms/bounding-rect-zoom_t01: RuntimeError, Pass # Erratic, but only passes because divs have been entirely removed.
-LayoutTests/fast/transforms/topmost-becomes-bottomost-for-scrolling_t01: RuntimeError
 LayoutTests/fast/table/anonymous-table-section-removed_t01: Skip
 LayoutTests/fast/table/hittest-tablecell-bottom-edge_t01: Skip
 LayoutTests/fast/table/hittest-tablecell-with-borders-bottom-edge_t01: Skip
 LayoutTests/fast/table/table-width-exceeding-max-width_t01: Pass, RuntimeError
-LayoutTests/fast/table/table-sections-border-spacing_t01: RuntimeError
-LayoutTests/fast/table/switch-table-layout-multiple-section_t01: RuntimeError
-LayoutTests/fast/table/resize-table-row_t01: RuntimeError
 LayoutTests/fast/table/min-max-width-preferred-size_t01: Pass, RuntimeError
 LayoutTests/fast/table/margins-flipped-text-direction_t01: Pass, RuntimeError
 LayoutTests/fast/table/html-table-width-max-width-constrained_t01: Pass, RuntimeError
 LayoutTests/fast/table/fixed-table-layout-width-change_t01: Pass, RuntimeError # False passes on Firefox
-LayoutTests/fast/table/absolute-table-percent-lengths_t01: RuntimeError
-LayoutTests/fast/sub-pixel/float-containing-block-with-margin_t01: RuntimeError
 LayoutTests/fast/sub-pixel/replaced-element-baseline_t01: Pass, RuntimeError # Fails on Safari, false pass on others
-LayoutTests/fast/selectors/style-sharing-last-child_t01: RuntimeError
-LayoutTests/fast/selectors/specificity-overflow_t01: RuntimeError
 LayoutTests/fast/ruby/parse-rp_t01: Pass, RuntimeError
-LayoutTests/fast/replaced/table-replaced-element_t01: RuntimeError
-LayoutTests/fast/replaced/table-percent-height-text-controls_t01: RuntimeError
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-table-cell-ignore-height_t01: RuntimeError, Pass # Spurious intermittent pass
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: RuntimeError, Pass # Spurious intermittent pass.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor_t01: RuntimeError, Pass # Spurious intermittent pass
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor-vertical-lr_t01: RuntimeError, Pass # Spurious intermittent pass
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-and-fixed-ancestor_t01: Pass, RuntimeError
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-and-fixed-ancestor-vertical-lr_t01: Pass, RuntimeError
-LayoutTests/fast/parser/stray-param_t01: RuntimeError
-LayoutTests/fast/replaced/available-height-for-content_t01: RuntimeError
 LayoutTests/fast/parser/parse-wbr_t01: Pass, RuntimeError
-LayoutTests/fast/overflow/child-100percent-height-inside-fixed-container-with-overflow-auto_t01: RuntimeError
 WebPlatformTest/html-imports/link-import-null_t01: RuntimeError
 WebPlatformTest/html/syntax/parsing/Document.getElementsByTagName-foreign_t01: RuntimeError
 WebPlatformTest/html/syntax/parsing/math-parse_t03: RuntimeError
@@ -581,6 +729,7 @@
 #
 
 [ $compiler == dart2js && $runtime == chromeOnAndroid ]
+LayoutTests/fast/multicol/newmulticol/balance-maxheight_t02: RuntimeError
 Language/Expressions/Strings/escape_backspace_t01: Pass, Slow # Please triage this failure.
 LibTest/core/RegExp/Pattern_semantics/firstMatch_NonEmptyClassRanges_A01_t01: Fail # Issue 22200.
 LibTest/core/RegExp/Pattern_semantics/firstMatch_NonEmptyClassRanges_A01_t05: Fail # Issue 22200.
@@ -610,6 +759,8 @@
 LibTest/typed_data/Uint8ClampedList/map_A02_t01: Pass, Slow # Please triage this failure.
 
 [ $compiler == dart2js && $runtime == chrome ]
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Issue 26615
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Issue 26615
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-align-self_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-justify-self_t01: RuntimeError # Please triage this failure
@@ -619,8 +770,8 @@
 LayoutTests/fast/animation/request-animation-frame-timestamps-advance_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/animation/request-animation-frame-timestamps_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/animation/request-animation-frame-within-callback_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/backgrounds/background-shorthand-with-backgroundSize-style_t01: RuntimeError # co19 issue 14
-LayoutTests/fast/backgrounds/multiple-backgrounds-computed-style_t01: RuntimeError # co19 issue 14
+LayoutTests/fast/backgrounds/background-position-parsing-2_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/backgrounds/background-shorthand-with-backgroundSize-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/backgrounds/repeat/parsing-background-repeat_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/borders/border-width-percent_t01: RuntimeError # Issue 25155
 LayoutTests/fast/canvas/2d.fillText.gradient_t01: RuntimeError # Please triage this failure
@@ -631,9 +782,7 @@
 LayoutTests/fast/canvas/alpha_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-arc-negative-radius_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-as-image-incremental-repaint_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/canvas/canvas-as-image_t01: RuntimeError # co19 issue 19
 LayoutTests/fast/canvas/canvas-blending-text_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/canvas-css-crazy_t01: RuntimeError # co19 issue 19
 LayoutTests/fast/canvas/canvas-currentTransform_t01: RuntimeError # Feature is behind a flag.
 LayoutTests/fast/canvas/canvas-empty-image-pattern_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-getImageData-invalid_t01: RuntimeError # Please triage this failure
@@ -648,16 +797,12 @@
 LayoutTests/fast/canvas/canvas-putImageData_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-resize-after-paint_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/canvas-scale-drawImage-shadow_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/crash-set-font_t01: RuntimeError # co19 issue 19
 LayoutTests/fast/canvas/draw-custom-focus-ring_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/getPutImageDataPairTest_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/setWidthResetAfterForcedRender_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/canvas/webgl/bad-arguments-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/buffer-data-array-buffer_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-attributes-alpha-depth-stencil-antialias-t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-lost-restored_t01: Pass, Timeout # Please triage this failure
-LayoutTests/fast/canvas/webgl/css-webkit-canvas-repaint_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/css-webkit-canvas_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgb565_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba4444_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba5551_t01: RuntimeError # Please triage this failure
@@ -669,7 +814,6 @@
 LayoutTests/fast/canvas/webgl/webgl-depth-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-large-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-layer-update_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/css/background-clip-text_t01: RuntimeError # Issue 54
 LayoutTests/fast/css-generated-content/hit-test-generated-content_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css-generated-content/malformed-url_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-animation-before-onload_t01: Pass, RuntimeError # Please triage this failure
@@ -721,35 +865,19 @@
 LayoutTests/fast/css-grid-layout/place-cell-by-index_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/multicol_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/MarqueeLayoutTest_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/css/add-remove-stylesheets-at-once-minimal-recalc-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-inheritance_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-parsing-tests_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/auto-min-size_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/background-position-serialize_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/background-serialize_t01: RuntimeError # https://github.com/dart-lang/co19/issues/14
 LayoutTests/fast/css/checked-pseudo-selector_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/computed-offset-with-zoom_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-case-insensitivity_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/content-language-dynamically-added_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/content-language-dynamically-removed_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/content-language-mapped-to-webkit-locale_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/content-language-multiple_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/content-language-no-content_t01: RuntimeError # Issue 23506
 LayoutTests/fast/css/content/content-none_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-normal_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-quotes-01_t01: RuntimeError # Issue https://github.com/dart-lang/co19/issues/46
 LayoutTests/fast/css/content/content-quotes-05_t01: RuntimeError # Issue https://github.com/dart-lang/co19/issues/46
 LayoutTests/fast/css/counters/complex-before_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/counters/counter-cssText_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css-escaped-identifier_t01: RuntimeError # co19 issue 14
 LayoutTests/fast/css/css-properties-case-insensitive_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/css-selector-text_t01: RuntimeError # co19 Issue 15
 LayoutTests/fast/css/css3-nth-tokens-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cssText-shorthand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/csstext-of-content-string_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/css/cursor-parsing-image-set_t01: RuntimeError # co19 issue 14
-LayoutTests/fast/css/cursor-parsing-quirks_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cursor-parsing_t01: RuntimeError # co19 issue 14
 LayoutTests/fast/css/deprecated-flexbox-auto-min-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/ex-unit-with-no-x-height_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/first-child-display-change-inverse_t01: RuntimeError # Please triage this failure
@@ -760,20 +888,16 @@
 LayoutTests/fast/css/font-face-unicode-range-load_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-unicode-range-monospace_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-unicode-range-overlap-load_t01: Pass, RuntimeError # Please triage this failure
+LayoutTests/fast/css/font-family-trailing-bracket-gunk_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-shorthand-from-longhands_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/fontfaceset-events_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/fontfaceset-loadingdone_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-border-image_t01: RuntimeError # co19 issue 14
 LayoutTests/fast/css/getComputedStyle/computed-style-cross-fade_t01: RuntimeError # co19 issue 14
-LayoutTests/fast/css/getComputedStyle/computed-style-font_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/getComputedStyle/computed-style-properties_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/getComputedStyle/counterIncrement-without-counter_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/getPropertyValue-columns_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/getComputedStyle/font-family-fallback-reset_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/html-attr-case-sensitivity_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/id-or-class-before-stylesheet_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/image-set-setting_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/implicit-attach-marking_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/css/important-js-override_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherit-initial-shorthand-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/invalid-predefined-color_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/link-alternate-stylesheet-1_t01: RuntimeError # Please triage this failure
@@ -783,16 +907,11 @@
 LayoutTests/fast/css/link-alternate-stylesheet-5_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/media-query-recovery_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/modify-ua-rules-from-javascript_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/nested-at-rules_t01: RuntimeError # Issue 23506
-LayoutTests/fast/css/parse-color-int-or-percent-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-at-rule-recovery_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-css-allowed-string-characters_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-css-nonascii_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/parsing-css-nth-child_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/parsing-object-position_t01: RuntimeError # https://github.com/dart-lang/co19/issues/47
 LayoutTests/fast/css/parsing-page-rule_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-selector-error-recovery_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/parsing-text-rendering_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/pseudo-any_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/pseudo-target-indirect-sibling-001_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css/pseudo-target-indirect-sibling-002_t01: Skip # Times out. Please triage this failure
@@ -805,7 +924,6 @@
 LayoutTests/fast/css/selector-text-escape_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/shadow-current-color_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css/sticky/parsing-position-sticky_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/string-quote-binary_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-element-process-crash_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-nested_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-with-dom-operation_t01: RuntimeError # Please triage this failure
@@ -825,8 +943,8 @@
 LayoutTests/fast/css3-text/css3-text-indent/getComputedStyle/getComputedStyle-text-indent_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css3-text/css3-text-justify/getComputedStyle/getComputedStyle-text-justify_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/52776_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/DOMException/dispatch-event-exception_t01: RuntimeError # https://github.com/dart-lang/sdk/issues/25928
 LayoutTests/fast/dom/DOMException/XPathException_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/DOMException/dispatch-event-exception_t01: RuntimeError # https://github.com/dart-lang/sdk/issues/25928
 LayoutTests/fast/dom/DOMImplementation/createDocument-namespace-err_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/basic_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/caretRangeFromPoint-in-strict-mode-wtih-checkbox_t01: Pass, RuntimeError # Please triage this failure
@@ -835,7 +953,6 @@
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/replace-element_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/createElementNS-namespace-err_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Element/attribute-uppercase_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/Element/getBoundingClientRect-getClientRects-relative-to-viewport_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Element/getClientRects_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Element/setAttributeNS-namespace-err_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLAnchorElement/remove-href-from-focused-anchor_t01: Skip # Times out. Please triage this failure
@@ -868,6 +985,7 @@
 LayoutTests/fast/dom/HTMLObjectElement/set-type-to-null-crash_t01: RuntimeError # Issue 25155
 LayoutTests/fast/dom/HTMLOptionElement/collection-setter-getter_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLOutputElement/dom-settable-token-list_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/HTMLOutputElement/htmloutputelement_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLScriptElement/async-false-inside-async-false-load_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLScriptElement/async-inline-script_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLScriptElement/async-onbeforeload_t01: RuntimeError # Please triage this failure
@@ -876,6 +994,7 @@
 LayoutTests/fast/dom/HTMLScriptElement/remove-in-beforeload_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLScriptElement/remove-source_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLScriptElement/script-set-src_t01: Pass, RuntimeError # Please triage this failure
+LayoutTests/fast/dom/HTMLSelectElement/change-multiple-preserve-selection_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLTemplateElement/custom-element-wrapper-gc_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLTemplateElement/innerHTML_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLTemplateElement/ownerDocumentXHTML_t01: RuntimeError # Please triage this failure
@@ -897,7 +1016,6 @@
 LayoutTests/fast/dom/SelectorAPI/dumpNodeList_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/css-medialist-item_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-parent-rule-without-wrapper_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/StyleSheet/detached-stylesheet-without-wrapper_t01: RuntimeError # Issue 55
 LayoutTests/fast/dom/TreeWalker/TreeWalker-basic_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Window/getMatchedCSSRules-nested-rules_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Window/getMatchedCSSRules-with-pseudo-elements-complex_t01: RuntimeError # Please triage this failure
@@ -906,12 +1024,10 @@
 LayoutTests/fast/dom/Window/window-scroll-arguments_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/anchor-without-content_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/attribute-namespaces-get-set_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/background-shorthand-csstext_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/blur-contenteditable_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/dom/characterdata-api-arguments_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/click-method-on-html-element_t01: RuntimeError # Issue 25155
 LayoutTests/fast/dom/client-width-height-quirks_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/css-selectorText_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/custom/document-register-basic_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/dom/custom/document-register-svg-extends_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/custom/element-names_t01: Pass, RuntimeError # Please triage this failure
@@ -938,11 +1054,15 @@
 LayoutTests/fast/dom/option-properties_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/partial-layout-overlay-scrollbars_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/set-innerHTML_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/shadow/content-reprojection-fallback-crash_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/shadow/event-path_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/content-pseudo-element-css-text_t01: RuntimeError # https://github.com/dart-lang/co19/issues/49
+LayoutTests/fast/dom/shadow/event-path_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/host-context-pseudo-class-css-text_t01: RuntimeError # https://github.com/dart-lang/co19/issues/49
 LayoutTests/fast/dom/shadow/host-pseudo-class-css-text_t01: RuntimeError # https://github.com/dart-lang/co19/issues/49
+LayoutTests/fast/dom/shadow/olderShadowRoot_t01: RuntimeError # Issue 26729
+LayoutTests/fast/dom/shadow/shadowdom-for-input-spellcheck_t01: RuntimeError # Issue 26729
+LayoutTests/fast/dom/shadow/shadowdom-for-input-type-change_t01: RuntimeError # Issue 26729
+LayoutTests/fast/dom/shadow/shadowroot-clonenode_t01: RuntimeError # Issue 26729
+LayoutTests/fast/dom/shadow/shadowroot-host_t01: RuntimeError # Issue 26729
 LayoutTests/fast/dom/shadow/no-renderers-for-light-children_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/pseudoclass-update-checked-option_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/pseudoclass-update-disabled-optgroup_t01: RuntimeError # Please triage this failure
@@ -953,7 +1073,6 @@
 LayoutTests/fast/dom/shadow/shadow-disable_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/shadow-element-inactive_t01: RuntimeError # Issue 25155
 LayoutTests/fast/dom/shadow/shadow-removechild-and-blur-event_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/dom/shadow/shadow-root-js-api_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/shadowhost-keyframes_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/shadowroot-keyframes_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/dynamic/crash-generated-counter_t01: RuntimeError # Please triage this failure
@@ -1002,7 +1121,8 @@
 LayoutTests/fast/filesystem/directory-entry-to-uri_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/filesystem/file-entry-to-uri_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/filesystem/filesystem-reference_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/forms/ValidityState-customError_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/forms/ValidityState-tooLong-input_t01: RuntimeError # https://github.com/dart-lang/co19/issues/48
+LayoutTests/fast/forms/ValidityState-tooLong-textarea_t01: RuntimeError # https://github.com/dart-lang/co19/issues/48
 LayoutTests/fast/forms/autofocus-focus-only-once_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/forms/autofocus-input-css-style-change_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/forms/autofocus-opera-007_t01: Skip # Times out. Please triage this failure
@@ -1022,6 +1142,7 @@
 LayoutTests/fast/forms/input-appearance-elementFromPoint_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/input-hit-test-border_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/forms/input-inputmode_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/forms/input-maxlength_t01: RuntimeError # co19 issue 62
 LayoutTests/fast/forms/input-width-height-attributes-without-renderer-loaded-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/listbox-selection-2_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/menulist-disabled-selected-option_t01: RuntimeError # Please triage this failure
@@ -1045,8 +1166,6 @@
 LayoutTests/fast/forms/textfield-focus-out_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/forms/validationMessage_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/validity-property_t01: RuntimeError # Issue 25155
-LayoutTests/fast/forms/ValidityState-tooLong-textarea_t01: RuntimeError # https://github.com/dart-lang/co19/issues/48
-LayoutTests/fast/forms/ValidityState-tooLong-input_t01: RuntimeError # https://github.com/dart-lang/co19/issues/48
 LayoutTests/fast/forms/willvalidate_t01: RuntimeError # Issue 25155
 LayoutTests/fast/html/hidden-attr_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/html/imports/import-element-removed-flag_t01: RuntimeError # Please triage this failure
@@ -1060,8 +1179,6 @@
 LayoutTests/fast/inline/positioned-element-padding-contributes-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/innerHTML/innerHTML-uri-resolution_t01: RuntimeError # co19 issue 14
 LayoutTests/fast/innerHTML/innerHTML-uri-resolution_t01: RuntimeError # co19 issue 14
-LayoutTests/fast/innerHTML/javascript-url_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/layers/normal-flow-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/layers/zindex-hit-test_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/loader/about-blank-hash-change_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/loader/about-blank-hash-kept_t01: Skip # Times out. Please triage this failure
@@ -1070,16 +1187,13 @@
 LayoutTests/fast/loader/scroll-position-restored-on-back_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/loader/scroll-position-restored-on-reload-at-load-event_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/loader/stateobjects/replacestate-in-onunload_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/masking/parsing-clip-path-iri_t01: RuntimeError # co19 issue 14
 LayoutTests/fast/masking/parsing-clip-path-shape_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/masking/parsing-mask-source-type_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/masking/parsing-mask_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/media/media-query-list-syntax_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/mediastream/RTCPeerConnection-AddRemoveStream_t01: Skip # Issue 23475
-LayoutTests/fast/multicol/balance-trailing-border_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-after-always-bottom-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-properties_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/columns-shorthand-parsing_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/cssom-view_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/flipped-blocks-hit-test_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/float-truncation_t01: RuntimeError # Please triage this failure
@@ -1089,22 +1203,19 @@
 LayoutTests/fast/multicol/hit-test-float_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-gap-between-pages_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance-images_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/newmulticol/balance-maxheight_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance_t07: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance_t08: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance_t09: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance_t10: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-lr/float-truncation_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/vertical-rl/break-properties_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/multicol/vertical-rl/break-properties_t01: RuntimeError # co19 issue 63
 LayoutTests/fast/multicol/vertical-rl/float-truncation_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/vertical-rl/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/widows_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/overflow/replaced-child-100percent-height-inside-fixed-container-with-overflow-auto_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/overflow/scrollbar-restored_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/foster-parent-adopted_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/fragment-parser-doctype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/innerhtml-with-prefixed-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-table-cell-ignore-height_t01: RuntimeError # Please triage this failure
@@ -1112,7 +1223,6 @@
 LayoutTests/fast/replaced/table-percent-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/ruby/ruby-line-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/shapes/parsing/parsing-shape-lengths_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/shapes/parsing/parsing-shape-outside_t01: RuntimeError # co19 issue 14
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-big-box-border-radius_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-floats-diamond-margin-polygon_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-floats-ellipse-margin-left_t01: RuntimeError # Please triage this failure
@@ -1130,14 +1240,12 @@
 LayoutTests/fast/storage/disallowed-storage_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/storage/storage-disallowed-in-data-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/sub-pixel/cssom-subpixel-precision_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/sub-pixel/float-containing-block-with-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/sub-pixel/replaced-element-baseline_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/table/anonymous-table-section-removed_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/caption-orthogonal-writing-mode-sizing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/col-width-span-expand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/css-table-max-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/css-table-max-width_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/table/css-table-width-with-border-padding_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/hittest-tablecell-bottom-edge_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/hittest-tablecell-right-edge_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/hittest-tablecell-with-borders-bottom-edge_t01: RuntimeError # Please triage this failure
@@ -1190,7 +1298,6 @@
 LayoutTests/fast/url/segments-from-data-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/url/segments_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/url/standard-url_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/writing-mode/auto-sizing-orthogonal-flows_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/flipped-blocks-hit-test-overflow-scroll_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/flipped-blocks-hit-test-overflow_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/positionForPoint_t01: RuntimeError # Please triage this failure
@@ -1199,7 +1306,6 @@
 LayoutTests/fast/writing-mode/vertical-inline-block-hittest_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-text-responsetype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responsetype-arraybuffer_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xmlhttprequest/xmlhttprequest-get_t01: RuntimeError # Issue 25928
 LayoutTests/fast/xpath/4XPath/Borrowed/cz_20030217_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xpath/4XPath/Borrowed/namespace-nodes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xpath/4XPath/Core/test_core_functions_t01: RuntimeError # Please triage this failure
@@ -1215,6 +1321,7 @@
 LayoutTests/fast/xpath/node-name-case-sensitivity_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xpath/node-name-case-sensitivity_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/xpath/position_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/xpath/py-dom-xpath/abbreviations_t01: RuntimeError # Issue 24398
 LayoutTests/fast/xpath/py-dom-xpath/axes_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/xpath/py-dom-xpath/data_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xpath/py-dom-xpath/expressions_t01: RuntimeError # Please triage this failure
@@ -1412,7 +1519,6 @@
 WebPlatformTest/html/semantics/scripting-1/the-script-element/async_t11: RuntimeError # Please triage this failure
 WebPlatformTest/html/semantics/scripting-1/the-script-element/script-text_t02: RuntimeError # Please triage this failure
 WebPlatformTest/html/semantics/selectors/pseudo-classes/checked_t01: RuntimeError # Please triage this failure
-WebPlatformTest/html/semantics/selectors/pseudo-classes/default_t01: RuntimeError # Please triage this failure
 WebPlatformTest/html/semantics/selectors/pseudo-classes/dir_t01: RuntimeError # Please triage this failure
 WebPlatformTest/html/semantics/selectors/pseudo-classes/disabled_t01: RuntimeError # Please triage this failure
 WebPlatformTest/html/semantics/selectors/pseudo-classes/enabled_t01: RuntimeError # Please triage this failure
@@ -1436,7 +1542,6 @@
 WebPlatformTest/shadow-dom/elements-and-dom-objects/extensions-to-element-interface/attributes/test-004_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/elements-and-dom-objects/extensions-to-element-interface/attributes/test-004_t02: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/elements-and-dom-objects/extensions-to-element-interface/methods/elements-001_t01: RuntimeError # Please triage this failure
-WebPlatformTest/shadow-dom/elements-and-dom-objects/extensions-to-event-interface/event-path-001_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-methods/test-002_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-methods/test-005_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/elements-and-dom-objects/the-content-html-element/test-004_t01: RuntimeError # Please triage this failure
@@ -1451,7 +1556,7 @@
 WebPlatformTest/shadow-dom/events/event-retargeting/test-001_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/events/event-retargeting/test-002_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/events/event-retargeting/test-004_t01: RuntimeError # Please triage this failure
-WebPlatformTest/shadow-dom/events/events-that-are-always-stopped/test-001_t01: Pass, RuntimeError # Issue 52 
+WebPlatformTest/shadow-dom/events/events-that-are-always-stopped/test-001_t01: Pass, RuntimeError # Issue 52
 WebPlatformTest/shadow-dom/events/events-that-are-always-stopped/test-002_t01: Pass, RuntimeError # Issue 52
 WebPlatformTest/shadow-dom/events/events-that-are-always-stopped/test-003_t01: Pass, RuntimeError # Issue 52
 WebPlatformTest/shadow-dom/events/events-that-are-always-stopped/test-004_t01: Pass, RuntimeError # Issue 52
@@ -1464,17 +1569,17 @@
 WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-001_t02: Skip # Times out. Please triage this failure
 WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-001_t05: Skip # Times out. Please triage this failure
 WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-001_t06: Skip # Times out. Please triage this failure
-WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-002_t01: Pass, RuntimeError # Please triage this failure
-WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-003_t01: Pass, RuntimeError # Please triage this failure
+WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-002_t01: Pass, RuntimeError, Timeout # Please triage this failure
+WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-003_t01: Skip # Times out. Please triage this failure
 WebPlatformTest/shadow-dom/events/retargeting-relatedtarget/test-003_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/html-elements-and-their-shadow-trees/test-001_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/html-elements-and-their-shadow-trees/test-002_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/html-elements-and-their-shadow-trees/test-003_t01: RuntimeError # Please triage this failure
+WebPlatformTest/shadow-dom/html-elements-and-their-shadow-trees/test-004_t01: RuntimeError # Issue 26729
 WebPlatformTest/shadow-dom/shadow-trees/custom-pseudo-elements/test-001_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/shadow-trees/distributed-pseudo-element/test-001_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/shadow-trees/distributed-pseudo-element/test-002_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/shadow-trees/lower-boundary-encapsulation/test-004_t01: RuntimeError # Please triage this failure
-WebPlatformTest/shadow-dom/shadow-trees/upper-boundary-encapsulation/dom-tree-accessors-002_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/shadow-trees/upper-boundary-encapsulation/ownerdocument-002_t01: RuntimeError # Please triage this failure
 WebPlatformTest/shadow-dom/shadow-trees/upper-boundary-encapsulation/test-005_t01: RuntimeError # Issue 25155
 WebPlatformTest/shadow-dom/shadow-trees/upper-boundary-encapsulation/test-007_t01: RuntimeError # Issue 25155
@@ -1517,7 +1622,6 @@
 LayoutTests/fast/flexbox/intrinsic-min-width-applies-with-fixed-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/html/article-element_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/html/aside-element_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/inline/empty-inline-before-collapsed-space_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/lists/list-style-position-inside_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/lists/marker-preferred-margins_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/fixed-column-percent-logical-height-orthogonal-writing-mode_t01: RuntimeError # Please triage this failure
@@ -1546,6 +1650,7 @@
 LayoutTests/fast/xpath/py-dom-xpath/axes_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/xpath/xpath-result-eventlistener-crash_t01: RuntimeError # Please triage this failure
 LibTest/html/Node/ownerDocument_A01_t01: RuntimeError # Issue 18251
+LibTest/html/Element/Element.tag_A01_t01: RuntimeError # Please triage this failure
 WebPlatformTest/DOMEvents/approved/Propagation.path.target.removed_t01: RuntimeError # Please triage this failure
 WebPlatformTest/custom-elements/instantiating/createElementNS_A05_t01: RuntimeError # Please triage this failure
 WebPlatformTest/custom-elements/instantiating/createElement_A05_t01: RuntimeError # Please triage this failure
@@ -1556,10 +1661,15 @@
 WebPlatformTest/shadow-dom/shadow-trees/upper-boundary-encapsulation/ownerdocument-001_t01: RuntimeError # Please triage this failure
 
 [ $compiler == dart2js && $runtime == chrome && $system == macos ]
+Language/Expressions/Function_Invocation/async_invokation_t04: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/canvas-test_t01: Skip # Times out. Please triage this failure
+LayoutTests/fast/canvas/webgl/context-lost-restored_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-webgl-to-canvas-2d_t01: Pass, RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/oes-vertex-array-object_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/HTMLOutputElement/htmloutputelement_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/MutationObserver/observe-attributes_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance_t04: RuntimeError # Please triage this failure
+LayoutTests/fast/multicol/vertical-rl/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-floats-inset-rounded-different-writing-modes-left_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-rounded-boxes_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/text/glyph-reordering_t01: RuntimeError # Please triage this failure
@@ -1572,6 +1682,7 @@
 LayoutTests/fast/canvas/webgl/WebGLContextEvent_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/array-bounds-clamping_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/attrib-location-length-limits_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/bad-arguments-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/buffer-bind-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/canvas-2d-webgl-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/canvas-resize-crash_t01: RuntimeError # Please triage this failure
@@ -1579,9 +1690,11 @@
 LayoutTests/fast/canvas/webgl/canvas-zero-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/compressed-tex-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-destroyed-crash_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/context-lost_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-lost-restored_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/context-lost_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/copy-tex-image-and-sub-image-2d_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/css-webkit-canvas-repaint_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/css-webkit-canvas_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-arrays-out-of-bounds_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-elements-out-of-bounds_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-webgl-to-canvas-2d_t01: RuntimeError # Please triage this failure
@@ -1598,8 +1711,6 @@
 LayoutTests/fast/canvas/webgl/gl-get-calls_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/gl-getshadersource_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/gl-getstring_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/invalid-passed-params_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/object-deletion-behaviour_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/gl-object-get-calls_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/gl-pixelstorei_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/gl-teximage_t01: RuntimeError # Please triage this failure
@@ -1615,9 +1726,11 @@
 LayoutTests/fast/canvas/webgl/index-validation-with-resized-buffer_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/index-validation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/invalid-UTF-16_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/invalid-passed-params_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/is-object_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/null-object-behaviour_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/null-uniform-location_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/object-deletion-behaviour_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/oes-element-index-uint_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/oes-vertex-array-object_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/point-size_t01: RuntimeError # Please triage this failure
@@ -1665,6 +1778,7 @@
 LayoutTests/fast/canvas/webgl/webgl-unprefixed-context-id_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-viewport-parameters-preserved_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance_t04: RuntimeError # Please triage this failure
+LayoutTests/fast/multicol/vertical-rl/image-inside-nested-blocks-with-border_t01: RuntimeError # co19 issue 65
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-floats-inset-rounded-different-writing-modes-left_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-rounded-boxes_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/text/line-break-after-question-mark_t01: RuntimeError # Please triage this failure
@@ -1675,16 +1789,13 @@
 Language/Expressions/Bitwise_Expressions/method_invocation_super_t01: Pass, Slow # Issue 25940
 Language/Classes/Constructors/Generative_Constructors/execution_of_an_initializer_t04: Pass, Slow # Issue 25940
 
-[ $compiler == dart2js && $runtime == chrome && $system != linux ]
-LayoutTests/fast/multicol/hit-test-gap-between-pages-flipped_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xpath/py-dom-xpath/abbreviations_t01: RuntimeError # Issue 24398
-
 [ $compiler == dart2js && $runtime == chrome && $system == linux]
 LayoutTests/fast/text/international/combining-marks-position_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xpath/py-dom-xpath/abbreviations_t01: RuntimeError # Dartium JSInterop failure
 
 [ $compiler == dart2js && $runtime == ff ]
 Language/Expressions/Postfix_Expressions/property_decrement_t02: Skip # Times out. Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Issue 26615
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Issue 26615
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-align-self_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-justify-self_t01: RuntimeError # Please triage this failure
@@ -1710,13 +1821,10 @@
 LayoutTests/fast/canvas/DrawImageSinglePixelStretch_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/alpha_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-arc-negative-radius_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/canvas-as-image-incremental-repaint_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/canvas-as-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blend-solid_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blending-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-composite-canvas_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-composite-image_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/canvas-css-crazy_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-currentColor_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-currentTransform_t01: RuntimeError # Feature is not implemented.
 LayoutTests/fast/canvas/canvas-drawImage-incomplete_t01: RuntimeError # Please triage this failure
@@ -1753,105 +1861,31 @@
 LayoutTests/fast/canvas/canvas-strokeRect-zeroSizeGradient_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-strokeText-invalid-maxWidth_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-strokeText-zeroSizeGradient_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/test-setting-canvas-color_t01: Pass, RuntimeError # co19 issue 64 (depends on the FF version)
 LayoutTests/fast/canvas/canvas-toDataURL-crash_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/crash-set-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/draw-custom-focus-ring_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/drawImage-with-valid-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/getPutImageDataPairTest_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/rgba-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/WebGLContextEvent_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/array-bounds-clamping_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/attrib-location-length-limits_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/bad-arguments-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/buffer-bind-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/canvas-resize-crash_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/canvas-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/canvas-zero-size_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/compressed-tex-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-attributes-alpha-depth-stencil-antialias-t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-destroyed-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-lost-restored_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-lost_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/css-webkit-canvas-repaint_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/css-webkit-canvas_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/draw-arrays-out-of-bounds_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/draw-elements-out-of-bounds_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/draw-webgl-to-canvas-2d_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/drawingbuffer-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/error-reporting_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/framebuffer-bindings-unaffected-on-resize_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/get-active-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-bind-attrib-location-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-enable-enum-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-enum-tests_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-get-calls_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-getshadersource_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-object-get-calls_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-teximage_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-uniformmatrix4fv_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-vertex-attrib-zero-issues_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/gl-vertex-attrib_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/glsl-conformance_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/incorrect-context-object-behaviour_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/index-validation-copies-indices_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/index-validation-crash-with-buffer-sub-data_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/index-validation-verifies-too-many-indices_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/index-validation-with-resized-buffer_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/index-validation_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/is-object_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/null-object-behaviour_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/object-deletion-behaviour_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/oes-element-index-uint_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/oes-vertex-array-object_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/point-size_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/premultiplyalpha-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/program-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/read-pixels-pack-alignment_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/renderbuffer-initialization_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/renderer-and-vendor-strings_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/shader-precision-format_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-array-buffer-view_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas-rgb565_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas-rgba4444_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas-rgba5551_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgb565_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgba4444_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgba5551_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-array-buffer-view_t01: Skip # Times out.
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgb565_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba4444_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba5551_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video-rgb565_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video-rgba4444_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video-rgba5551_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-uniform-binding-bugs_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-webgl_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-input-validation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-sub-image-2d-bad-args_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-sub-image-2d_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-sub-image-cube-maps_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/texImage2DImageDataTest_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/texImageTest_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/texture-bindings-uneffected-on-resize_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/texture-color-profile_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/texture-complete_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/texture-npot_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/texture-transparent-pixels-initialized_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/triangle_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/uniform-location-length-limits_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/uniform-location_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/uninitialized-test_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/viewport-unchanged-upon-resize_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-composite-modes-repaint_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-composite-modes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-depth-texture_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-exceptions_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-large-texture_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-specific_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-texture-binding-preserved_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-viewport-parameters-preserved_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-generated-content/malformed-url_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-animation_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-animation-before-onload_t01: Pass, RuntimeError # Please triage this failure
@@ -1909,14 +1943,11 @@
 LayoutTests/fast/css-intrinsic-dimensions/tables_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/width-property-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/width-shrinks-avoid-floats_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/MarqueeLayoutTest_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/add-remove-stylesheets-at-once-minimal-recalc-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-inheritance_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-parsing-tests_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/auto-min-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/background-clip-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/background-parser-crash_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/background-position-serialize_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/background-serialize_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/border-image-null-image-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/border-image-style-length_t01: RuntimeError # Please triage this failure
@@ -1927,15 +1958,9 @@
 LayoutTests/fast/css/box-sizing-backwards-compat-prefix_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/checked-pseudo-selector_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/child-selector-implicit-tbody_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/computed-offset-with-zoom_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-case-insensitivity_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-dynamically-added_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-dynamically-removed_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-mapped-to-webkit-locale_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-multiple_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-no-content_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-none_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-normal_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/content/content-quotes-01_t01: RuntimeError # co19 issue 61
 LayoutTests/fast/css/content/content-quotes-06_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/counters/complex-before_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/counters/counter-cssText_t01: RuntimeError # Please triage this failure
@@ -1944,19 +1969,15 @@
 LayoutTests/fast/css/css-keyframe-style-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css-keyframe-unexpected-end_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css-properties-case-insensitive_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/css-selector-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css3-nth-tokens-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cssText-shorthand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/csstext-of-content-string_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cursor-parsing-image-set_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cursor-parsing-quirks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cursor-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flex-box-zero-width-intrinsic-max-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flexbox-auto-min-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/draggable-region-parser_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-backdrop-pseudo_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-pseudo-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/ex-unit-with-no-x-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/first-child-display-change-inverse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/focus-display-block-inline_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-insert-link_t01: Pass, RuntimeError # Please triage this failure
@@ -1970,7 +1991,6 @@
 LayoutTests/fast/css/getComputedStyle/computed-style-cross-fade_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-properties_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/getComputedStyle/counterIncrement-without-counter_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/font-family-fallback-reset_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/getComputedStyle-border-image-slice_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/getComputedStyle-border-radius-shorthand_t01: RuntimeError # Please triage this failure
@@ -1983,13 +2003,11 @@
 LayoutTests/fast/css/getPropertyValue-border_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getPropertyValue-clip_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getPropertyValue-column-rule_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/getPropertyValue-columns_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getPropertyValue-webkit-margin-collapse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getPropertyValue-webkit-text-stroke_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/html-attr-case-sensitivity_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/id-or-class-before-stylesheet_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/image-set-setting_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/important-js-override_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherit-initial-shorthand-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherited-properties-rare-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/insertRule-font-face_t01: RuntimeError # Please triage this failure
@@ -2022,13 +2040,11 @@
 LayoutTests/fast/css/nested-at-rules_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/overflow-property_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/padding-start-end_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/parse-color-int-or-percent-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-at-rule-recovery_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-css-allowed-string-characters_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-css-escapes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-css-nonascii_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-css-nth-child_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/parsing-object-position_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-page-rule_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-selector-error-recovery_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-unexpected-eof_t01: RuntimeError # Please triage this failure
@@ -2056,7 +2072,6 @@
 LayoutTests/fast/css/selector-text-escape_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/shorthand-setProperty-important_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/sibling-selectors_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/string-quote-binary_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-element-process-crash_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-in-shadow_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-nested_t01: RuntimeError # Please triage this failure
@@ -2065,7 +2080,6 @@
 LayoutTests/fast/css/style-scoped/style-scoped-with-important-rule_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/text-align-webkit-match-parent-parse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/text-align-webkit-match-parent_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/transform-origin-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/unicode-bidi-computed-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/url-with-multi-byte-unicode-escape_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/vertical-align-length-copy-bug_t01: RuntimeError # Please triage this failure
@@ -2223,7 +2237,6 @@
 LayoutTests/fast/dom/StyleSheet/css-medialist-item_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-parent-rule-without-wrapper_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-shadow-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/StyleSheet/detached-stylesheet-without-wrapper_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/empty-shadow-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/removed-media-rule-deleted-parent-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/removed-stylesheet-rule-deleted-parent-crash_t01: RuntimeError # Please triage this failure
@@ -2288,6 +2301,8 @@
 LayoutTests/fast/dom/horizontal-scrollbar-in-rtl_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/horizontal-scrollbar-when-dir-change_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDialogElement/dialog-return-value_t01: RuntimeError # Dartium JSInterop failure
+LayoutTests/fast/dom/HTMLOutputElement/htmloutputelement_t01: RuntimeError # Issue 26714
+LayoutTests/fast/dom/icon-size-property_t01: RuntimeError # Issue 26714
 LayoutTests/fast/dom/importNode-unsupported-node-type_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/insert-span-into-long-text-bug-28245_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/insertBefore-refChild-crash_t01: RuntimeError # Please triage this failure
@@ -2505,7 +2520,6 @@
 LayoutTests/fast/forms/date/ValidityState-stepMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/ValidityState-typeMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-input-type_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/forms/date/date-interactive-validation-required_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-pseudo-classes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-date-validation-message_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-valueasdate-date_t01: RuntimeError # Please triage this failure
@@ -2638,7 +2652,6 @@
 LayoutTests/fast/multicol/break-after-always-bottom-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-properties_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/column-width-zero_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/columns-shorthand-parsing_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/cssom-view_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/fixed-column-percent-logical-height-orthogonal-writing-mode_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/flipped-blocks-hit-test_t01: RuntimeError # Please triage this failure
@@ -2650,7 +2663,7 @@
 LayoutTests/fast/multicol/hit-test-float_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-gap-between-pages-flipped_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-gap-between-pages_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/multicol/image-inside-nested-blocks-with-border_t01: RuntimeError # Uses -webkit-* style properties
 LayoutTests/fast/multicol/inherit-column-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/initial-column-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/inline-getclientrects_t01: RuntimeError # Please triage this failure
@@ -2670,7 +2683,7 @@
 LayoutTests/fast/multicol/vertical-lr/float-truncation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-lr/gap-non-negative_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-lr/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/vertical-rl/break-properties_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/multicol/vertical-rl/break-properties_t01: RuntimeError # co19 issue 63
 LayoutTests/fast/multicol/vertical-rl/float-truncation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-rl/gap-non-negative_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-rl/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
@@ -2684,7 +2697,6 @@
 LayoutTests/fast/parser/foster-parent-adopted_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/fragment-parser-doctype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/innerhtml-with-prefixed-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/stray-param_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/strict-img-in-map_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # Please triage this failure
@@ -2789,7 +2801,6 @@
 LayoutTests/fast/writing-mode/positionForPoint_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/table-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/vertical-inline-block-hittest_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xmlhttprequest/xmlhttprequest-get_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-invalid-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-invalid-xml_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-text-responsetype_t01: RuntimeError # Please triage this failure
@@ -3168,6 +3179,8 @@
 WebPlatformTest/webstorage/storage_session_setitem_quotaexceedederr_t01: Skip # Times out. Please triage this failure
 
 [ $compiler == dart2js && $runtime == ff && $system == windows ]
+LayoutTests/fast/canvas/webgl/drawingbuffer-test_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/gl-get-calls_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-multiple-ranges-for-unicode-range_t01: RuntimeError # Please triage this failure
 WebPlatformTest/html/syntax/parsing/math-parse_t03: RuntimeError # Issue 22564
 Language/Classes/Getters/type_object_t02: RuntimeError, Slow # Issue 25940
@@ -3181,10 +3194,16 @@
 LayoutTests/fast/xpath/py-dom-xpath/abbreviations_t01: RuntimeError # Dartium JSInterop failure
 
 [ $compiler == dart2js && $runtime == ff && $system == linux]
+LayoutTests/fast/canvas/webgl/*: Timeout, Pass # Issue 26725
 LayoutTests/fast/canvas/canvas-composite-text-alpha_t01: RuntimeError # co19 issue 16
 LayoutTests/fast/text/whitespace/nowrap-line-break-after-white-space_t01: Pass, RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/oes-element-index-uint_t01: Skip # Always times out
+LayoutTests/fast/canvas/webgl/texture-complete_t01: Skip # Sometimes times out
+LayoutTests/fast/canvas/webgl/texture-npot_t01: Skip # Sometimes times out
 
 [ $compiler == dart2js && $runtime == safari ]
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Issue 26615
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Issue 26615
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-align-self_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-justify-self_t01: RuntimeError # Please triage this failure
@@ -3196,7 +3215,6 @@
 LayoutTests/fast/backgrounds/background-repeat-computed-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/backgrounds/repeat/parsing-background-repeat_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/borders/border-image-width-numbers-computed-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/borders/border-radius-child_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/2d.fillText.gradient_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/2d.text.draw.fill.maxWidth.gradient_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/2d.text.draw.fill.maxWidth.negative_t01: RuntimeError # Please triage this failure
@@ -3254,7 +3272,6 @@
 LayoutTests/fast/canvas/drawImage-with-valid-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/getPutImageDataPairTest_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/setWidthResetAfterForcedRender_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/canvas/webgl/bad-arguments-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-attributes-alpha-depth-stencil-antialias-t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgb565_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba4444_t01: RuntimeError # Please triage this failure
@@ -3269,8 +3286,8 @@
 LayoutTests/fast/canvas/webgl/webgl-depth-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-large-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-generated-content/malformed-url_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css-generated-content/pseudo-animation_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-animation-before-onload_t01: Pass, RuntimeError # Please triage this failure
+LayoutTests/fast/css-generated-content/pseudo-animation_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-element-events_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-transition-event_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css-generated-content/pseudo-transition_t01: RuntimeError # Please triage this failure
@@ -3315,34 +3332,33 @@
 LayoutTests/fast/css-grid-layout/percent-padding-margin-resolution-grid-item_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-grid-layout/percent-resolution-grid-item_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-grid-layout/place-cell-by-index_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/add-remove-stylesheets-at-once-minimal-recalc-style_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/MarqueeLayoutTest_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-inheritance_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-parsing-tests_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/background-position-serialize_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/background-serialize_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/border-image-style-length_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/checked-pseudo-selector_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/collapsed-whitespace-reattach-in-style-recalc_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/computed-offset-with-zoom_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-none_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-normal_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-quotes-01_t01: Pass, RuntimeError # Fails on 7.1. Please triage this failure
 LayoutTests/fast/css/content/content-quotes-05_t01: Pass, RuntimeError # Fails on 7.1. Please triage this failure
+LayoutTests/fast/css/counters/counter-cssText_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css-escaped-identifier_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css-properties-case-insensitive_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css3-nth-tokens-style_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/cssText-shorthand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/csstext-of-content-string_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cursor-parsing-quirks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flexbox-auto-min-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/draggable-region-parser_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-backdrop-pseudo_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/ex-unit-with-no-x-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/first-child-display-change-inverse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/focus-display-block-inline_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-cache-bug_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-unicode-range-load_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-unicode-range-monospace_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-unicode-range-overlap-load_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/font-property-priority_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-shorthand-from-longhands_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/fontface-properties_t01: RuntimeError # Uses FontFace class, not defined for this browser.
 LayoutTests/fast/css/fontfaceset-download-error_t01: RuntimeError # Please triage this failure
@@ -3350,10 +3366,15 @@
 LayoutTests/fast/css/fontfaceset-loadingdone_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-with-zoom_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/getComputedStyle/counterIncrement-without-counter_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getPropertyValue-clip_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/getPropertyValue-columns_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/html-attr-case-sensitivity_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/id-or-class-before-stylesheet_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/important-js-override_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherit-initial-shorthand-values_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/inherited-properties-rare-text_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/invalid-not-with-simple-selector-sequence_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/invalid-predefined-color_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/invalidation/detach-reattach-shadow_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/invalidation/shadow-host-toggle_t01: RuntimeError # Please triage this failure
@@ -3370,6 +3391,8 @@
 LayoutTests/fast/css/modify-ua-rules-from-javascript_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parse-color-int-or-percent-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-at-rule-recovery_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/parsing-css-nth-child_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/parsing-font-variant-ligatures_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-object-position_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-page-rule_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-selector-error-recovery_t01: RuntimeError # Please triage this failure
@@ -3384,6 +3407,7 @@
 LayoutTests/fast/css/readonly-pseudoclass-opera-005_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/shorthand-setProperty-important_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/sticky/parsing-position-sticky_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/string-quote-binary_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-in-shadow_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-nested_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/style-scoped/style-scoped-scoping-nodes-different-order_t01: RuntimeError # Please triage this failure
@@ -3392,6 +3416,7 @@
 LayoutTests/fast/css/style-scoped/style-scoped-with-important-rule_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/stylesheet-enable-first-alternate-on-load-sheet_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/stylesheet-enable-second-alternate-link_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/transform-origin-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/webkit-keyframes-errors_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/word-break-user-modify-allowed-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css3-text/css3-text-align-last/getComputedStyle/getComputedStyle-text-align-last-inherited_t01: RuntimeError # Please triage this failure
@@ -3426,6 +3451,7 @@
 LayoutTests/fast/dom/HTMLDialogElement/dialog-close-event_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDialogElement/dialog-enabled_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDialogElement/dialog-open_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/HTMLDialogElement/dialog-return-value_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/dom/HTMLDialogElement/dialog-scrolled-viewport_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDialogElement/dialog-show-modal_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDialogElement/inert-does-not-match-disabled-selector_t01: RuntimeError # Please triage this failure
@@ -3439,7 +3465,6 @@
 LayoutTests/fast/dom/HTMLDialogElement/top-layer-position-relative_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDialogElement/top-layer-position-static_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLDocument/active-element-gets-unforcusable_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/dom/HTMLDocument/set-focus-on-valid-element_t01: RuntimeError, Timeout # Please triage this failure
 LayoutTests/fast/dom/HTMLElement/insertAdjacentHTML-errors_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLElement/set-inner-outer-optimization_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/HTMLElement/spellcheck_t01: RuntimeError # Please triage this failure
@@ -3485,6 +3510,7 @@
 LayoutTests/fast/dom/Range/range-insertNode-splittext_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Range/range-isPointInRange_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Range/range-on-detached-node_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/Range/remove-twice-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Range/surroundContents-for-detached-node_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/SelectorAPI/dumpNodeList-2_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/SelectorAPI/dumpNodeList_t01: RuntimeError # Please triage this failure
@@ -3502,6 +3528,7 @@
 LayoutTests/fast/dom/XMLSerializer-double-xmlns_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/anchor-without-content_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/attribute-namespaces-get-set_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/background-shorthand-csstext_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/characterdata-api-arguments_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/client-width-height-quirks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/css-selectorText_t01: RuntimeError # Please triage this failure
@@ -3525,12 +3552,12 @@
 LayoutTests/fast/dom/document-set-title-mutations_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/document-set-title-no-reuse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/domparser-parsefromstring-mimetype-support_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/fragment-activation-focuses-target_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/getElementsByClassName/dumpNodeList_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/horizontal-scrollbar-in-rtl-doesnt-fire-onscroll_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/horizontal-scrollbar-in-rtl_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/horizontal-scrollbar-when-dir-change_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/html-collections-named-getter-mandatory-arg_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/HTMLDialogElement/dialog-return-value_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/dom/implementation-api-args_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/importNode-unsupported-node-type_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/location-hash_t01: Skip # Times out. Please triage this failure
@@ -3639,6 +3666,7 @@
 LayoutTests/fast/events/mutation-during-replace-child-2_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/events/mutation-during-replace-child_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/events/overflowchanged-event-raf-timing_t01: Skip # Times out. Please triage this failure
+LayoutTests/fast/events/scoped/editing-commands_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/events/scroll-event-does-not-bubble_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/events/tabindex-removal-from-focused-element_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/exclusions/parsing/parsing-wrap-flow_t01: RuntimeError # Please triage this failure
@@ -3690,6 +3718,8 @@
 LayoutTests/fast/filesystem/simple-required-arguments-getfile_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/filesystem/simple-temporary_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/filesystem/snapshot-file-with-gc_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/flexbox/repaint-scrollbar_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/forms/ValidityState-customError_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/ValidityState-typeMismatch-email_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/autocomplete_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/autofocus-input-css-style-change_t01: RuntimeError # Please triage this failure
@@ -3701,7 +3731,6 @@
 LayoutTests/fast/forms/datalist/datalist-child-validation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/datalist/datalist_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/datalist/input-list_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/forms/datetimelocal/input-valueasnumber-datetimelocal_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/forms/date-multiple-fields/date-multiple-fields-change-layout-by-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date-multiple-fields/date-multiple-fields-onblur-setvalue-onfocusremoved_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/ValidityState-rangeOverflow-date_t01: RuntimeError # Please triage this failure
@@ -3709,7 +3738,6 @@
 LayoutTests/fast/forms/date/ValidityState-stepMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/ValidityState-typeMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-input-type_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/forms/date/date-interactive-validation-required_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-pseudo-classes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-setrangetext_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-date-validation-message_t01: RuntimeError # Please triage this failure
@@ -3724,6 +3752,7 @@
 LayoutTests/fast/forms/datetimelocal/datetimelocal-pseudo-classes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/datetimelocal/datetimelocal-setrangetext_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/datetimelocal/input-valueasdate-datetimelocal_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/forms/datetimelocal/input-valueasnumber-datetimelocal_t01: RuntimeError # Dartium JSInterop failure
 LayoutTests/fast/forms/file/file-input-capture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/form-attribute_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/input-appearance-elementFromPoint_t01: RuntimeError # Please triage this failure
@@ -3734,7 +3763,6 @@
 LayoutTests/fast/forms/interactive-validation-assertion-by-validate-twice_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/forms/interactive-validation-attach-assertion_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/forms/interactive-validation-select-crash_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/forms/listbox-selection-2_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/menulist-disabled-selected-option_t01: Pass, RuntimeError # Fails on 7.1. Please triage this failure
 LayoutTests/fast/forms/menulist-selection-reset_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/menulist-submit-without-selection_t01: Pass, RuntimeError # Fails on 7.1. Please triage this failure
@@ -3744,9 +3772,9 @@
 LayoutTests/fast/forms/parser-associated-form-removal_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/plaintext-mode-1_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/search-popup-crasher_t01: Pass, RuntimeError # Fails on 7.1. Please triage this failure
-LayoutTests/fast/forms/selection-wrongtype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/select-set-length-with-mutation-remove_t01: RuntimeError
 LayoutTests/fast/forms/select-set-length-with-mutation-reparent_t01: RuntimeError
+LayoutTests/fast/forms/selection-wrongtype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/setrangetext_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/textarea-maxlength_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/textarea-paste-newline_t01: RuntimeError # Please triage this failure
@@ -3763,7 +3791,7 @@
 LayoutTests/fast/inline/parent-inline-element-padding-contributes-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/inline/positioned-element-padding-contributes-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/innerHTML/innerHTML-svg-write_t01: RuntimeError # Issue 25941
-LayoutTests/fast/layers/normal-flow-hit-test_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/innerHTML/javascript-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/loader/about-blank-hash-change_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/loader/about-blank-hash-kept_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/loader/hashchange-event-properties_t01: RuntimeError # Please triage this failure
@@ -3789,26 +3817,25 @@
 LayoutTests/fast/multicol/balance-unbreakable_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-after-always-bottom-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-properties_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/multicol/columns-shorthand-parsing_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/cssom-view_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/float-truncation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-above-or-below_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-end-of-column-with-line-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-end-of-column_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/hit-test-float_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/hit-test-gap-between-pages-flipped_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/newmulticol/balance-images_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/newmulticol/balance-maxheight_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/orphans-relayout_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-lr/float-truncation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-rl/break-properties_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-rl/float-truncation_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/vertical-rl/image-inside-nested-blocks-with-border_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/overflow/child-100percent-height-inside-fixed-container-with-overflow-auto_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/overflow/replaced-child-100percent-height-inside-fixed-container-with-overflow-auto_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/overflow/scrollbar-restored_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/foster-parent-adopted_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/fragment-parser-doctype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/innerhtml-with-prefixed-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/available-height-for-content_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: RuntimeError # Please triage this failure
@@ -3841,9 +3868,7 @@
 LayoutTests/fast/storage/disallowed-storage_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/storage/storage-disallowed-in-data-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/sub-pixel/cssom-subpixel-precision_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/sub-pixel/float-containing-block-with-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/sub-pixel/shadows-computed-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/sub-pixel/table-rows-have-stable-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/svg/tabindex-focus_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/svg/whitespace-angle_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/svg/whitespace-integer_t01: RuntimeError # Please triage this failure
@@ -3855,7 +3880,6 @@
 LayoutTests/fast/table/col-width-span-expand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/css-table-max-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/css-table-max-width_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/table/css-table-width-with-border-padding_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/fixed-table-layout-width-change_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/hittest-tablecell-bottom-edge_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/table/hittest-tablecell-right-edge_t01: RuntimeError # Please triage this failure
@@ -3926,11 +3950,9 @@
 LayoutTests/fast/url/segments-from-data-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/url/segments_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/url/standard-url_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/writing-mode/auto-sizing-orthogonal-flows_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/positionForPoint_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/table-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/vertical-font-vmtx-units-per-em_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xmlhttprequest/xmlhttprequest-get_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-text-responsetype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responsetype-arraybuffer_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responsetype-before-open-sync-request_t01: RuntimeError # Please triage this failure
@@ -3987,6 +4009,8 @@
 LibTest/html/Element/getBoundingClientRect_A01_t02: Pass, RuntimeError # Issue 53
 LibTest/html/Element/getClientRects_A01_t02: RuntimeError # Please triage this failure
 LibTest/html/Element/getNamespacedAttributes_A01_t01: RuntimeError # Please triage this failure
+LibTest/html/Element/isContentEditable_A01_t01: RuntimeError # Please triage this failure
+LibTest/html/Element/isContentEditable_A02_t01: RuntimeError # Please triage this failure
 LibTest/html/Element/isTagSupported_A01_t01: RuntimeError # Please triage this failure
 LibTest/html/Element/isTagSupported_A01_t02: RuntimeError # Please triage this failure
 LibTest/html/Element/leftView_A01_t01: RuntimeError # Please triage this failure
@@ -4027,6 +4051,7 @@
 LibTest/html/IFrameElement/getClientRects_A01_t02: RuntimeError # Please triage this failure
 LibTest/html/IFrameElement/getNamespacedAttributes_A01_t01: RuntimeError # Please triage this failure
 LibTest/html/IFrameElement/innerHtml_A01_t01: RuntimeError # Please triage this failure
+LibTest/html/IFrameElement/isContentEditable_A01_t01: RuntimeError # Please triage this failure
 LibTest/html/IFrameElement/leftView_A01_t01: RuntimeError # Please triage this failure
 LibTest/html/IFrameElement/marginEdge_A01_t01: RuntimeError # Please triage this failure
 LibTest/html/IFrameElement/offsetTo_A01_t01: RuntimeError # Please triage this failure
@@ -4311,8 +4336,38 @@
 WebPlatformTest/webstorage/event_session_key_t01: RuntimeError # Please triage this failure
 WebPlatformTest/webstorage/event_session_storagearea_t01: Pass, RuntimeError # Fails on 7.1. Please triage this failure
 WebPlatformTest/webstorage/event_session_url_t01: Skip # Times out. Please triage this failure
+WebPlatformTest/webstorage/storage_local_setitem_quotaexceedederr_t01: Skip # Makes the following test time out.
 WebPlatformTest/webstorage/storage_session_setitem_quotaexceedederr_t01: Skip # Times out. Please triage this failure
 
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video_t01: Skip
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video-rgb565_t01: Skip
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-video-rgba5551_t01: Skip
+LayoutTests/fast/dom/HTMLDocument/set-focus-on-valid-element_t01: Skip
+
+LayoutTests/fast/canvas/webgl/context-lost_t01: Skip
+
+LayoutTests/fast/css/font-property-priority_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/css/inherited-properties-rare-text_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/css/invalid-not-with-simple-selector-sequence_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/css/parsing-font-variant-ligatures_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/dom/HTMLLinkElement/resolve-url-on-insertion_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/dom/css-selectorText_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/forms/datalist/datalist-child-validation_t01: RuntimeError # Fails 10 out of 10.
+LayoutTests/fast/overflow/scrollbar-restored_t01: RuntimeError # Fails 10 out of 10.
+LibTest/html/Element/isContentEditable_A01_t01: RuntimeError # Fails 10 out of 10.
+LibTest/html/Element/isContentEditable_A02_t01: RuntimeError # Fails 10 out of 10.
+LibTest/html/IFrameElement/isContentEditable_A01_t01: Pass, RuntimeError # Fails 19 out of 20.
+
+LayoutTests/fast/flexbox/repaint-scrollbar_t01: Pass, RuntimeError # Fails 2 out of 10.
+LibTest/html/Element/getBoundingClientRect_A01_t02: Pass, RuntimeError # Fails 3 out of 10.
+LayoutTests/fast/css/focus-display-block-inline_t01: Pass, RuntimeError # Fails 5 out of 10.
+LayoutTests/fast/css/pseudo-any_t01: Pass, RuntimeError # Fails 4 out of 10.
+LayoutTests/fast/dom/partial-layout-overlay-scrollbars_t01: Pass, RuntimeError # Fails 8 out of 10.
+LayoutTests/fast/forms/autofocus-input-css-style-change_t01: Pass, RuntimeError # Fails 7 out of 10.
+LayoutTests/fast/forms/date-multiple-fields/date-multiple-fields-onblur-setvalue-onfocusremoved_t01: Pass, RuntimeError # Fails 6 out of 10.
+
+LayoutTests/fast/canvas/webgl/glsl-conformance_t01: Skip # Times out 1 out of 20.
+
 [ $compiler == dart2js && $runtime == safarimobilesim ]
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-align-self_t01: RuntimeError # Please triage this failure
@@ -4984,7 +5039,6 @@
 LayoutTests/fast/forms/datalist/input-list_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date-multiple-fields/date-multiple-fields-change-layout-by-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date-multiple-fields/date-multiple-fields-onblur-setvalue-onfocusremoved_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/forms/date/date-interactive-validation-required_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-date-validation-message_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-valueasdate-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-valueasnumber-date_t01: RuntimeError # Please triage this failure
@@ -5637,6 +5691,8 @@
 
 [ $compiler == dart2js && $runtime == ie10 ]
 Language/Expressions/Top_level_Getter_Invocation/17_Getter_Invocation_A03_t02: Skip # Times out. Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Issue 26615
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Issue 26615
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-align-self_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/alignment/parse-justify-self_t01: RuntimeError # Please triage this failure
@@ -5666,7 +5722,6 @@
 LayoutTests/fast/canvas/alpha_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-arc-negative-radius_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-as-image-incremental-repaint_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/canvas/canvas-as-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blend-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blend-solid_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blending-clipping_t01: RuntimeError # Please triage this failure
@@ -5693,7 +5748,6 @@
 LayoutTests/fast/canvas/canvas-blending-transforms_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-clip-rule_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-composite-canvas_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/canvas-css-crazy_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-currentColor_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-currentTransform_t01: RuntimeError # Feature is not implemented
 LayoutTests/fast/canvas/canvas-ellipse-360-winding_t01: RuntimeError # Please triage this failure
@@ -5731,7 +5785,6 @@
 LayoutTests/fast/canvas/canvas-setTransform_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-to-canvas_t01: Pass, RuntimeError # Issue 22216
 LayoutTests/fast/canvas/canvas-transforms-fillRect-shadow_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/crash-set-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/draw-custom-focus-ring_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/drawImage-with-bad-canvas_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/drawImage-with-negative-source-destination_t01: Pass, RuntimeError # Issue 22216
@@ -5913,7 +5966,6 @@
 LayoutTests/fast/css-intrinsic-dimensions/tables_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/width-property-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/width-shrinks-avoid-floats_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/add-remove-stylesheets-at-once-minimal-recalc-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-inheritance_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-parsing-tests_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/auto-min-size_t01: RuntimeError # Please triage this failure
@@ -5931,14 +5983,9 @@
 LayoutTests/fast/css/child-selector-implicit-tbody_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/comment-before-charset-external_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/computed-offset-with-zoom_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-case-insensitivity_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-dynamically-added_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-dynamically-removed_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-mapped-to-webkit-locale_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-multiple_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-no-content_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-none_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-normal_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/content/content-quotes-01_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-quotes-03_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-quotes-06_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/counters/complex-before_t01: RuntimeError # Please triage this failure
@@ -5949,18 +5996,15 @@
 LayoutTests/fast/css/css-properties-case-insensitive_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css-selector-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css3-nth-tokens-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cssText-shorthand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cssom-remove-shorthand-property_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/csstext-of-content-string_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cursor-parsing-image-set_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cursor-parsing-quirks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cursor-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flex-box-zero-width-intrinsic-max-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flexbox-auto-min-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/draggable-region-parser_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-backdrop-pseudo_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-pseudo-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/ex-unit-with-no-x-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/first-child-display-change-inverse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-cache-bug_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-insert-link_t01: Pass, RuntimeError # Please triage this failure
@@ -5980,7 +6024,6 @@
 LayoutTests/fast/css/getComputedStyle/computed-style-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-properties_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-select-overflow_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/getComputedStyle/counterIncrement-without-counter_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/font-family-fallback-reset_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/getComputedStyle-border-image-slice_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/getComputedStyle-border-radius-shorthand_t01: RuntimeError # Please triage this failure
@@ -6002,6 +6045,7 @@
 LayoutTests/fast/css/id-or-class-before-stylesheet_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/image-set-setting_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/implicit-attach-marking_t01: Skip # Times out. Please triage this failure
+LayoutTests/fast/css/important-js-override_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherit-initial-shorthand-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherited-properties-rare-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/insertRule-font-face_t01: RuntimeError # Please triage this failure
@@ -6027,7 +6071,7 @@
 LayoutTests/fast/css/link-alternate-stylesheet-3_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/link-alternate-stylesheet-4_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/link-alternate-stylesheet-5_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/link-disabled-attr-parser_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/link-disabled-attr-parser_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/list-item-text-align_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/margin-start-end_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/max-device-aspect-ratio_t01: Pass, RuntimeError # Please triage this failure
@@ -6100,7 +6144,6 @@
 LayoutTests/fast/css/text-align-initial_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/text-align-webkit-match-parent-parse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/text-align-webkit-match-parent_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/transform-origin-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/unicode-bidi-computed-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/url-with-multi-byte-unicode-escape_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/webkit-color-adjust_t01: RuntimeError # Please triage this failure
@@ -6336,7 +6379,6 @@
 LayoutTests/fast/dom/StyleSheet/detached-shadow-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-style-2_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/StyleSheet/detached-stylesheet-without-wrapper_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/empty-shadow-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/removed-media-rule-deleted-parent-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/removed-stylesheet-rule-deleted-parent-crash_t01: RuntimeError # Please triage this failure
@@ -6407,7 +6449,6 @@
 LayoutTests/fast/dom/dom-parse-serialize_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/domparser-parsefromstring-mimetype-support_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/focus-contenteditable_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/fragment-activation-focuses-target_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/gc-image-element-2_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/dom/gc-image-element_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/dom/getElementById-consistency2_t01: RuntimeError # Please triage this failure
@@ -6664,7 +6705,6 @@
 LayoutTests/fast/forms/date/ValidityState-stepMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/ValidityState-typeMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-input-type_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/forms/date/date-interactive-validation-required_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-pseudo-classes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-date-validation-message_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-valueasdate-date_t01: RuntimeError # Please triage this failure
@@ -6794,7 +6834,6 @@
 LayoutTests/fast/innerHTML/innerHTML-svg-read_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/innerHTML/innerHTML-svg-write_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/innerHTML/innerHTML-uri-resolution_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/innerHTML/javascript-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/layers/zindex-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/lists/list-style-position-inside_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/loader/about-blank-hash-change_t01: RuntimeError # Please triage this failure
@@ -6822,7 +6861,6 @@
 LayoutTests/fast/multicol/balance-trailing-border_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-after-always-bottom-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-properties_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/columns-shorthand-parsing_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/cssom-view_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/flipped-blocks-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/float-truncation_t01: RuntimeError # Please triage this failure
@@ -6860,7 +6898,6 @@
 LayoutTests/fast/parser/foster-parent-adopted_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/fragment-parser-doctype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/innerhtml-with-prefixed-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/residual-style-close-across-n-blocks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: RuntimeError # Please triage this failure
@@ -7007,7 +7044,6 @@
 LayoutTests/fast/writing-mode/table-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/vertical-font-vmtx-units-per-em_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/vertical-inline-block-hittest_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xmlhttprequest/xmlhttprequest-get_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-invalid-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-document-responsetype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-text-responsetype_t01: RuntimeError # Please triage this failure
@@ -7662,6 +7698,8 @@
 Language/Expressions/Conditional/type_t04: Skip # Times out. Please triage this failure
 Language/Expressions/Identifier_Reference/evaluation_function_t02: Skip # Times out. Please triage this failure
 Language/Statements/Local_Variable_Declaration/syntax_t18: Skip # Times out. Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Issue 26615
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Issue 26615
 Language/Types/Interface_Types/subtype_t37: Skip # Times out. Please triage this failure
 Language/Types/Function_Types/subtype_no_args_t03: Skip # Times out. Please triage this failure
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # Please triage this failure
@@ -7690,7 +7728,6 @@
 LayoutTests/fast/canvas/alpha_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-arc-negative-radius_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-as-image-incremental-repaint_t01: Skip # Times out. Please triage this failure
-LayoutTests/fast/canvas/canvas-as-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blend-image_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blend-solid_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-blending-clipping_t01: RuntimeError # Please triage this failure
@@ -7717,7 +7754,6 @@
 LayoutTests/fast/canvas/canvas-blending-transforms_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-composite-alpha_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-composite-text-alpha_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/canvas-css-crazy_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-currentColor_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-currentTransform_t01: RuntimeError # Feature is not implemented
 LayoutTests/fast/canvas/canvas-ellipse-360-winding_t01: RuntimeError # Please triage this failure
@@ -7756,7 +7792,6 @@
 LayoutTests/fast/canvas/canvas-to-canvas_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-toDataURL-crash_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/canvas-transforms-fillRect-shadow_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/crash-set-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/draw-custom-focus-ring_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/drawImage-with-bad-canvas_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/drawImage-with-negative-source-destination_t01: Pass, RuntimeError # Please triage this failure
@@ -7779,16 +7814,11 @@
 LayoutTests/fast/canvas/webgl/context-destroyed-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-lost-restored_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/context-lost_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/copy-tex-image-and-sub-image-2d_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/css-webkit-canvas-repaint_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/css-webkit-canvas_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-arrays-out-of-bounds_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-elements-out-of-bounds_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/draw-webgl-to-canvas-2d_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/drawingbuffer-test_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/error-reporting_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/framebuffer-bindings-unaffected-on-resize_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/framebuffer-object-attachment_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/functions-returning-strings_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/get-active-test_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/gl-bind-attrib-location-test_t01: Pass, RuntimeError # Please triage this failure
@@ -7807,7 +7837,6 @@
 LayoutTests/fast/canvas/webgl/invalid-passed-params_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/null-object-behaviour_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/null-uniform-location_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/object-deletion-behaviour_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/oes-element-index-uint_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/point-size_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/premultiplyalpha-test_t01: RuntimeError # Please triage this failure
@@ -7815,15 +7844,10 @@
 LayoutTests/fast/canvas/webgl/read-pixels-pack-alignment_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/read-pixels-test_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/renderer-and-vendor-strings_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-array-buffer-view_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas-rgb565_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas-rgba4444_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas-rgba5551_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-canvas_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgb565_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgba4444_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgba5551_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgb565_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba4444_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-rgba5551_t01: Skip # Times out. Please triage this failure
@@ -7835,8 +7859,6 @@
 LayoutTests/fast/canvas/webgl/tex-image-and-uniform-binding-bugs_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-image-webgl_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/tex-sub-image-2d-bad-args_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-sub-image-2d_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/tex-sub-image-cube-maps_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/texImage2DImageDataTest_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/texImageTest_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/texture-active-bind_t01: Pass, RuntimeError # Please triage this failure
@@ -7851,7 +7873,6 @@
 LayoutTests/fast/canvas/webgl/viewport-unchanged-upon-resize_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-composite-modes-repaint_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-composite-modes_t01: Pass, RuntimeError # Please triage this failure
-LayoutTests/fast/canvas/webgl/webgl-depth-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-large-texture_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-layer-update_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/canvas/webgl/webgl-specific_t01: RuntimeError # Please triage this failure
@@ -7918,7 +7939,6 @@
 LayoutTests/fast/css-intrinsic-dimensions/tables_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/width-property-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css-intrinsic-dimensions/width-shrinks-avoid-floats_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/add-remove-stylesheets-at-once-minimal-recalc-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-inheritance_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/aspect-ratio-parsing-tests_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/auto-min-size_t01: RuntimeError # Please triage this failure
@@ -7936,14 +7956,9 @@
 LayoutTests/fast/css/child-selector-implicit-tbody_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/comment-before-charset-external_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/computed-offset-with-zoom_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-case-insensitivity_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-dynamically-added_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-dynamically-removed_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-mapped-to-webkit-locale_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-multiple_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/content-language-no-content_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-none_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-normal_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/content/content-quotes-01_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-quotes-03_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/content/content-quotes-06_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/counters/complex-before_t01: RuntimeError # Please triage this failure
@@ -7955,20 +7970,17 @@
 LayoutTests/fast/css/css-selector-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/css3-nth-tokens-script_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/css/css3-nth-tokens-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cssText-shorthand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cssom-remove-shorthand-property_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/csstext-of-content-string_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cursor-parsing-image-set_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/cursor-parsing-quirks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/cursor-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flex-box-zero-width-intrinsic-max-width_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/deprecated-flexbox-auto-min-size_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/draggable-region-parser_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-backdrop-pseudo_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/dynamic-class-pseudo-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/ex-unit-with-no-x-height_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/first-child-display-change-inverse_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/font-face-cache-bug_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/css/font-face-cache-bug_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-insert-link_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-multiple-ranges-for-unicode-range_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/font-face-unicode-range-load_t01: RuntimeError # Please triage this failure
@@ -7986,7 +7998,6 @@
 LayoutTests/fast/css/getComputedStyle/computed-style-font_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-properties_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/computed-style-select-overflow_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/getComputedStyle/counterIncrement-without-counter_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/font-family-fallback-reset_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/getComputedStyle-border-radius-shorthand_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/getComputedStyle/getComputedStyle-border-style-shorthand_t01: RuntimeError # Please triage this failure
@@ -8007,6 +8018,7 @@
 LayoutTests/fast/css/id-or-class-before-stylesheet_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/image-set-setting_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/implicit-attach-marking_t01: Skip # Times out. Please triage this failure
+LayoutTests/fast/css/important-js-override_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherit-initial-shorthand-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/inherited-properties-rare-text_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/insertRule-font-face_t01: RuntimeError # Please triage this failure
@@ -8105,7 +8117,6 @@
 LayoutTests/fast/css/text-align-initial_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/text-align-webkit-match-parent-parse_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/text-align-webkit-match-parent_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/css/transform-origin-parsing_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/unicode-bidi-computed-value_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/url-with-multi-byte-unicode-escape_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/webkit-color-adjust_t01: RuntimeError # Please triage this failure
@@ -8132,6 +8143,7 @@
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/caretRangeFromPoint-in-zoom-and-scroll_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/caretRangeFromPoint-with-first-letter-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/CaretRangeFromPoint/hittest-relative-to-viewport_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/Document/CaretRangeFromPoint/replace-element_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/createElement-valid-names_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Document/createElementNS-namespace-err_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/Element/attribute-uppercase_t01: RuntimeError # Please triage this failure
@@ -8321,7 +8333,6 @@
 LayoutTests/fast/dom/StyleSheet/detached-shadow-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-style-2_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/detached-style_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/StyleSheet/detached-stylesheet-without-wrapper_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/empty-shadow-style_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/removed-media-rule-deleted-parent-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/StyleSheet/removed-stylesheet-rule-deleted-parent-crash_t01: RuntimeError # Please triage this failure
@@ -8390,7 +8401,6 @@
 LayoutTests/fast/dom/dom-parse-serialize_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/domparser-parsefromstring-mimetype-support_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/focus-contenteditable_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/fragment-activation-focuses-target_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/gc-image-element-2_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/dom/gc-image-element_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/dom/getElementById-consistency2_t01: RuntimeError # Please triage this failure
@@ -8476,7 +8486,7 @@
 LayoutTests/fast/dom/shadow/remove-styles-in-shadow-crash_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/shadow-aware-shadow-root_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/shadow-content-crash_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/dom/shadow/shadow-disable_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/dom/shadow/shadow-disable_t01: Skip # Issue 26737. Makes next test time out.
 LayoutTests/fast/dom/shadow/shadow-element-inactive_t01: Skip # Times out. Please triage this failure
 LayoutTests/fast/dom/shadow/shadow-element_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/dom/shadow/shadow-hierarchy-exception_t01: RuntimeError # Please triage this failure
@@ -8645,7 +8655,6 @@
 LayoutTests/fast/forms/date/ValidityState-stepMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/ValidityState-typeMismatch-date_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-input-type_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/forms/date/date-interactive-validation-required_t01: Pass, RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/date-pseudo-classes_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-date-validation-message_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/forms/date/input-valueasdate-date_t01: RuntimeError # Please triage this failure
@@ -8771,7 +8780,6 @@
 LayoutTests/fast/innerHTML/innerHTML-svg-read_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/innerHTML/innerHTML-svg-write_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/innerHTML/innerHTML-uri-resolution_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/innerHTML/javascript-url_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/layers/zindex-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/lists/list-style-position-inside_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/loader/about-blank-hash-change_t01: RuntimeError # Please triage this failure
@@ -8799,7 +8807,6 @@
 LayoutTests/fast/multicol/balance-trailing-border_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-after-always-bottom-margin_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/break-properties_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/multicol/columns-shorthand-parsing_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/cssom-view_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/flipped-blocks-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/multicol/float-truncation_t01: RuntimeError # Please triage this failure
@@ -8837,7 +8844,6 @@
 LayoutTests/fast/parser/foster-parent-adopted_t02: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/fragment-parser-doctype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/innerhtml-with-prefixed-elements_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/parser/residual-style-close-across-n-blocks_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: Pass, RuntimeError # Please triage this failure
@@ -8986,7 +8992,6 @@
 LayoutTests/fast/writing-mode/table-hit-test_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/vertical-font-vmtx-units-per-em_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/writing-mode/vertical-inline-block-hittest_t01: RuntimeError # Please triage this failure
-LayoutTests/fast/xmlhttprequest/xmlhttprequest-get_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-invalid-values_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-document-responsetype_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responseXML-xml-text-responsetype_t01: RuntimeError # Please triage this failure
@@ -9512,6 +9517,21 @@
 WebPlatformTest/webstorage/storage_builtins_t01: RuntimeError # Please triage this failure
 WebPlatformTest/webstorage/storage_local_setitem_quotaexceedederr_t01: Skip # Times out. Please triage this failure
 
+[ $compiler == dart2js && $runtime == ie11 && $builder_tag == win7 ]
+LayoutTests/fast/canvas/webgl/copy-tex-image-and-sub-image-2d_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/css-webkit-canvas-repaint_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/css-webkit-canvas_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/framebuffer-object-attachment_t01: RuntimeError # Fixed by co19 pull request whesse-patch-2
+LayoutTests/fast/canvas/webgl/object-deletion-behaviour_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-array-buffer-view_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgb565_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgba4444_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data-rgba5551_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-image-and-sub-image-2d-with-image-data_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-sub-image-2d_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/tex-sub-image-cube-maps_t01: RuntimeError # Please triage this failure
+LayoutTests/fast/canvas/webgl/webgl-depth-texture_t01: RuntimeError # Please triage this failure
+
 [ $compiler == dart2js && $cps_ir ]
 Language/Types/Interface_Types/subtype_t09: Crash # Pending static: JSArray
 LibTest/collection/ListBase/ListBase_class_A01_t02: Pass, Timeout
diff --git a/tests/co19/co19-dartium.status b/tests/co19/co19-dartium.status
index 545123a..927945b 100644
--- a/tests/co19/co19-dartium.status
+++ b/tests/co19/co19-dartium.status
@@ -12,13 +12,12 @@
 LayoutTests/fast/writing-mode/flipped-blocks-hit-test-overflow_t01: Pass, RuntimeError # Issue 21605
 LayoutTests/fast/writing-mode/vertical-inline-block-hittest_t01: Pass, RuntimeError # Issue 21605
 
-[ $compiler == none && $runtime == dartium && $checked  && $system == macos ]
-LayoutTests/fast/xpath/invalid-resolver_t01: RuntimeError # 45 roll
-
 [ $compiler == none && $runtime == dartium && $system == windows ]
 LayoutTests/fast/writing-mode/vertical-inline-block-hittest_t01: Pass, RuntimeError # Issue 21605
-WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-002_t01: RuntimeError # Please triage this failure.
 WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-001_t01: Skip # Timesout Issue 26134
+WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-002_t01: Skip # Times out sporadically
+WebPlatformTest/shadow-dom/events/retargeting-focus-events/test-003_t01: Skip # Times out sporadically
+LayoutTests/fast/css/MarqueeLayoutTest_t01: Pass, RuntimeError # Please triage this failure
 
 [ $compiler == none && $runtime == dartium && $system != windows ]
 LayoutTests/fast/css/font-face-unicode-range-monospace_t01: RuntimeError # co19-roll r761: Please triage this failure.
@@ -58,7 +57,8 @@
 WebPlatformTest/html/semantics/forms/the-datalist-element/datalistoptions_t01: Skip # Issue 20540.
 
 [ $compiler == none && $runtime == dartium && $checked ]
-Language/Errors_and_Warnings/static_warning_t01: RuntimeError # Please triage this failure.
+LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # co19-roll r801: Please triage this failure.
+LayoutTests/fast/xpath/ambiguous-operators_t01: RuntimeError # co19-roll r761: Please triage this failure.
 Language/Errors_and_Warnings/static_warning_t02: RuntimeError # Please triage this failure.
 Language/Errors_and_Warnings/static_warning_t03: RuntimeError # Please triage this failure.
 Language/Errors_and_Warnings/static_warning_t04: RuntimeError # Please triage this failure.
@@ -115,9 +115,9 @@
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/shapes/parsing/parsing-shape-image-threshold_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/shapes/parsing/parsing-shape-margin_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/shapes/parsing/parsing-shape-outside_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/table/absolute-table-percent-lengths_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/css-table-width_t01: RuntimeError # co19-roll r786: Please triage this failure.
+LayoutTests/fast/table/css-table-width-with-border-padding_t01: RuntimeError # Please triage this failure.
 LayoutTests/fast/table/fixed-table-layout-width-change_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/html-table-width-max-width-constrained_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/margins-flipped-text-direction_t01: RuntimeError # co19-roll r786: Please triage this failure.
@@ -132,7 +132,7 @@
 LayoutTests/fast/text/line-break-after-inline-latin1_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/trivial-segments_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/trivial_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/xpath/invalid-resolver_t01: RuntimeError # Dartium 45 roll
+LayoutTests/fast/writing-mode/auto-sizing-orthogonal-flows_t01: RuntimeError # Please triage this failure.
 LayoutTests/fast/xpath/xpath-result-eventlistener-crash_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LibTest/html/Node/ownerDocument_A01_t01: RuntimeError # co19-roll r722: Issue 18251
 WebPlatformTest/DOMEvents/approved/Propagation.path.target.removed_t01: RuntimeError # co19-roll r738: Please triage this failure.
@@ -150,7 +150,6 @@
 Language/Classes/Constructors/Generative_Constructors/final_variables_t01: Pass, Fail #: Please triage this failure.
 Language/Classes/Getters/type_object_t01: RuntimeError # Please triage this failure.
 Language/Classes/Getters/type_object_t02: RuntimeError # Please triage this failure.
-Language/Classes/Setters/syntax_t04: RuntimeError # Please triage this failure.
 Language/Classes/Setters/type_object_t01: RuntimeError # Please triage this failure.
 Language/Classes/Setters/type_object_t02: RuntimeError # Please triage this failure.
 Language/Classes/Static_Methods/type_object_t01: RuntimeError # Please triage this failure.
@@ -184,37 +183,72 @@
 Language/Classes/deсlarations_t32: Skip # Times out. Please triage this failure.
 Language/Classes/deсlarations_t33: Skip # Times out. Please triage this failure.
 Language/Classes/deсlarations_t34: Skip # Times out. Please triage this failure.
-Language/Expressions/Instance_Creation/Const/abstract_class_t01: Fail # Issue 22007
-Language/Expressions/Instance_Creation/Const/abstract_class_t03: Fail # Issue 22007
+Language/Expressions/Assignment/super_assignment_failed_t05: RuntimeError # Issue 25671
+Language/Expressions/Function_Invocation/async_generator_invokation_t08: Skip # Times out. Issue 25967
+Language/Expressions/Function_Invocation/async_generator_invokation_t10: Skip # Times out. Issue 25967
+Language/Expressions/Identifier_Reference/built_in_identifier_t35: Fail # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t36: Fail # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t37: Fail # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t53: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t54: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t55: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t56: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t57: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t58: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t59: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t60: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t61: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t62: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t63: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t64: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t65: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t66: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t67: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t68: Fail # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t14: Fail # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t19: Fail # Issue 25772
 Language/Expressions/Instance_Creation/New/execution_t04: Fail, OK
 Language/Expressions/Instance_Creation/New/execution_t06: Fail, OK
-Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t01: RuntimeError # Please triage this failure.
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t01: Fail # Issue 25496
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t02: Fail # Issue 25496
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t01: RuntimeError # Issue 24607
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t02: RuntimeError # Issue 24607
+
+Language/Expressions/Property_Extraction/General_Super_Property_Extraction: RuntimeError # Issue 26287
+
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/identical_t01: RuntimeError # Issue 24607
+
+Language/Expressions/Property_Extraction/Super_Closurization: RuntimeError # Issue 26287
+
 Language/Expressions/Spawning_an_Isolate/new_isolate_t01: RuntimeError, OK  # Uses Isolate.spawn.
 Language/Libraries_and_Scripts/Exports/reexport_t01: fail # Dart issue 12916
 Language/Libraries_and_Scripts/Exports/reexport_t02: fail # Dart issue 12916
-Language/Libraries_and_Scripts/Imports/namespace_changes_t10: RuntimeError # Please triage this failure.
 Language/Libraries_and_Scripts/Parts/compilation_t02: Skip # Please triage this failure.
 Language/Libraries_and_Scripts/Parts/syntax_t05: Skip # Times out flakily. Issue 20881
 Language/Libraries_and_Scripts/Scripts/top_level_main_t03: Pass # Issue 14478: This should break.
 Language/Libraries_and_Scripts/Scripts/top_level_main_t03: RuntimeError # co19-roll r786: Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t04: RuntimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t05: RuntimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t09: RuntimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t10: RuntimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t14: RuntimeError # Please triage this failure.
-Language/Libraries_and_Scripts/URIs/syntax_t15: RuntimeError # Please triage this failure.
 Language/Metadata/before_variable_t01: RuntimeError # Please triage this failure.
-Language/Mixins/Mixin_Application/error_t01: Fail # co19 issue 43
-Language/Mixins/Mixin_Application/error_t02: Fail # co19 issue 43
-Language/Mixins/declaring_constructor_t01: Fail # co19 issue 43
+Language/Mixins/Mixin_Application/syntax_t16: RuntimeError # Please triage this failure.
+Language/Mixins/declaring_constructor_t05: Fail # Issue 24767
+Language/Mixins/declaring_constructor_t06: Fail # Issue 24767
 Language/Mixins/not_object_superclass_t01: Fail # Please triage this failure.
 Language/Mixins/reference_to_super_t01: Fail # Please triage this failure.
+Language/Reference/Operator_Precedence/precedence_12_Shift_t02: RuntimeError # Please triage this failure
+Language/Reference/Operator_Precedence/precedence_15_unary_prefix_t04: RuntimeError # Please triage this failure
 Language/Statements/Assert/execution_t02: skip # co19 issue 734
 Language/Statements/Assert/execution_t03: skip # co19 issue 734
 Language/Statements/Assert/type_t02: skip # co19 issue 734
 Language/Statements/Assert/type_t05: skip # co19 issue 734
+Language/Statements/Continue/async_loops_t10: Timeout, Skip # Issue 25748
 Language/Statements/Labels/syntax_t03: fail # Dart issue 2238
 Language/Statements/Switch/syntax_t02: fail # Dart issue 12908
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t01: RuntimeError # Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t02: RuntimeError # Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield/execution_async_t04: RuntimeError # Please triage this failure
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t08: RuntimeError # Issue 25748
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t09: RuntimeError # Issue 25748
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t10: RuntimeError # Issue 25748
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_sync_t05: RuntimeError # Issue 25662,25634
 LayoutTests/fast/alignment/parse-align-items_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/alignment/parse-align-self_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/alignment/parse-justify-self_t01: RuntimeError # co19-roll r761: Please triage this failure.
@@ -403,17 +437,16 @@
 LayoutTests/fast/css-grid-layout/percent-padding-margin-resolution-grid-item_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/css-grid-layout/percent-resolution-grid-item_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/css-grid-layout/place-cell-by-index_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/css/add-remove-stylesheets-at-once-minimal-recalc-style_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/checked-pseudo-selector_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/collapsed-whitespace-reattach-in-style-recalc_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/collapsed-whitespace-reattach-in-style-recalc_t01: Skip # co19 issue 732.
 LayoutTests/fast/css/computed-offset-with-zoom_t01: Skip # co19 issue 732.
 LayoutTests/fast/css/content/content-none_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/content/content-normal_t01: RuntimeError # co19-roll r761: Please triage this failure.
+LayoutTests/fast/css/content/content-quotes-01_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/counters/complex-before_t01: RuntimeError, Pass # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/css-properties-case-insensitive_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/css3-nth-tokens-style_t01: RuntimeError # co19-roll r761: Please triage this failure.
-LayoutTests/fast/css/cursor-parsing-quirks_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/deprecated-flexbox-auto-min-size_t01: Pass, RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/first-child-display-change-inverse_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/focus-display-block-inline_t01: RuntimeError, Pass # co19-roll r761: Please triage this failure.
@@ -435,6 +468,7 @@
 LayoutTests/fast/css/link-alternate-stylesheet-5_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/media-query-recovery_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/parsing-at-rule-recovery_t01: RuntimeError # co19-roll r761: Please triage this failure.
+LayoutTests/fast/css/parsing-object-position_t01: RuntimeError # Please triage this failure
 LayoutTests/fast/css/parsing-page-rule_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/parsing-selector-error-recovery_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/css/pseudo-any_t01: RuntimeError, Pass # co19-roll r761: Please triage this failure.
@@ -568,7 +602,7 @@
 LayoutTests/fast/dynamic/insertAdjacentElement_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
 LayoutTests/fast/dynamic/insertAdjacentHTML_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/dynamic/recursive-layout_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/events/change-overflow-on-overflow-change_t01: Timeout # Dartium 45 roll. Issue 25754
+LayoutTests/fast/events/change-overflow-on-overflow-change_t01: Skip # Timeout. Please triage this failure.
 LayoutTests/fast/events/clipboard-clearData_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
 LayoutTests/fast/events/clipboard-dataTransferItemList_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
 LayoutTests/fast/events/div-focus_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
@@ -626,7 +660,6 @@
 LayoutTests/fast/forms/formmethod-attribute-input-2_t01: Skip # Test reloads itself. Issue 18558.
 LayoutTests/fast/forms/formmethod-attribute-input-html_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/forms/formmethod-attribute-input-html_t01: Skip # Test reloads itself. Issue 18558.
-LayoutTests/fast/forms/input-appearance-elementFromPoint_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/forms/input-hit-test-border_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/forms/input-inputmode_t01: RuntimeError # Experimental feature not exposed in Chrome yet
 LayoutTests/fast/forms/input-width-height-attributes-without-renderer-loaded-image_t01: RuntimeError # co19-roll r801: Please triage this failure.
@@ -661,32 +694,20 @@
 LayoutTests/fast/html/imports/import-events_t01: RuntimeError # co19-roll r706.  Please triage this failure.
 LayoutTests/fast/html/select-dropdown-consistent-background-color_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/inline/boundingBox-with-continuation_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/inline/empty-inline-before-collapsed-space_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/inline/fixed-pos-moves-with-abspos-inline-parent_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/inline/fixed-pos-moves-with-abspos-parent-relative-ancestor_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/inline/fixed-pos-moves-with-abspos-parent_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/inline/fixed-pos-with-transform-container-moves-with-abspos-parent_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/inline/inline-position-top-align_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/inline/inline-relative-offset-boundingbox_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/inline/inline-with-empty-inline-children_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/inline/out-of-flow-objects-and-whitespace-after-empty-inline_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/inline/parent-inline-element-padding-contributes-width_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/inline/positioned-element-padding-contributes-width_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/inline/reattach-inlines-in-anonymous-blocks-with-out-of-flow-siblings_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/innerHTML/innerHTML-special-elements_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/layers/normal-flow-hit-test_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/layers/normal-flow-hit-test_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/layers/zindex-hit-test_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/layers/zindex-hit-test_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/lists/item-not-in-list-line-wrapping_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/lists/list-style-position-inside_t01: Pass, RuntimeError # co19 issue 11.
-LayoutTests/fast/lists/marker-preferred-margins_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/loader/about-blank-hash-change_t01: Skip # Times out. co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/about-blank-hash-kept_t01: Skip # Times out. co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/hashchange-event-properties_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/loadInProgress_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/onhashchange-attribute-listeners_t01: Skip # Times out. co19-roll r801: Please triage this failure.
-LayoutTests/fast/loader/onload-policy-ignore-for-frame_t01: Timeout # Dartium 45 roll
+LayoutTests/fast/loader/onload-policy-ignore-for-frame_t01: Skip # Times out. Dartium 45 roll: Please triage this failure.
 LayoutTests/fast/loader/scroll-position-restored-on-back_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/scroll-position-restored-on-reload-at-load-event_t01: Skip # Times out. co19-roll r801: Please triage this failure.
 LayoutTests/fast/loader/stateobjects/replacestate-in-onunload_t01: RuntimeError # co19-roll r801: Please triage this failure.
@@ -698,27 +719,20 @@
 LayoutTests/fast/mediastream/RTCPeerConnection-AddRemoveStream_t01: Skip # Issue 22111
 LayoutTests/fast/mediastream/RTCPeerConnection-AddRemoveStream_t01: Skip # Passes on Safari, Issue 23475 # co19 issue 11.
 LayoutTests/fast/mediastream/getusermedia_t01: Skip # co19 issue 738
-LayoutTests/fast/multicol/balance-trailing-border_t02: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/balance-unbreakable_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/break-after-always-bottom-margin_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/break-properties_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/column-width-zero_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/cssom-view_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/multicol/fixed-column-percent-logical-height-orthogonal-writing-mode_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/flipped-blocks-hit-test_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/float-truncation_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/hit-test-above-or-below_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/hit-test-end-of-column-with-line-height_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/hit-test-end-of-column_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/hit-test-float_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/multicol/hit-test-gap-between-pages-flipped_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/hit-test-gap-between-pages_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/multicol/image-inside-nested-blocks-with-border_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/multicol/inherit-column-values_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/multicol/inline-getclientrects_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/newmulticol/balance-images_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/newmulticol/balance-maxheight_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/multicol/newmulticol/balance-maxheight_t02: RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/newmulticol/balance_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/newmulticol/balance_t02: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/newmulticol/balance_t04: Pass, RuntimeError # co19 issue 11.
@@ -734,14 +748,12 @@
 LayoutTests/fast/multicol/newmulticol/balance_t10: Pass, RuntimeError # I don't understand how, but sometimes passes. # co19 issue 11.
 LayoutTests/fast/multicol/newmulticol/balance_t10: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/orphans-relayout_t01: Pass, RuntimeError # co19 issue 11.
-LayoutTests/fast/multicol/vertical-lr/break-properties_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/vertical-lr/float-truncation_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/vertical-rl/break-properties_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/vertical-rl/float-truncation_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/vertical-rl/image-inside-nested-blocks-with-border_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/multicol/widows_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/multicol/widows_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/multicol/zeroColumnCount_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/overflow/child-100percent-height-inside-fixed-container-with-overflow-auto_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/overflow/child-100percent-height-inside-fixed-container-with-overflow-auto_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/overflow/overflow-rtl-vertical-origin_t01: Pass, RuntimeError # False passes on Firefox, but trying to keep these grouped with the issue. # co19 issue 11.
@@ -753,26 +765,19 @@
 LayoutTests/fast/parser/fragment-parser-doctype_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/parser/innerhtml-with-prefixed-elements_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/parser/parse-wbr_t01: Pass, RuntimeError # co19 issue 11.
-LayoutTests/fast/parser/pre-first-line-break_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/parser/stray-param_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/replaced/available-height-for-content_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-and-fixed-ancestor-vertical-lr_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-and-fixed-ancestor_t01: Pass # False pass # co19 issue 11.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-and-fixed-ancestor_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor-vertical-lr_t01: RuntimeError, Pass # Spurious intermittent pass # co19 issue 11.
 LayoutTests/fast/replaced/computed-image-width-with-percent-height-inside-table-cell-and-fixed-ancestor_t01: RuntimeError, Pass # Spurious intermittent pass # co19 issue 11.
 LayoutTests/fast/replaced/container-width-zero_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: RuntimeError, Pass, Timeout # Spurious intermittent pass. # co19 issue 11.
-LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-table-cell-ignore-height_t01: RuntimeError, Pass, Timeout # co19-roll r801: Please triage this failure.
+LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-anonymous-table-cell_t01: Skip # Times out: Please triage this failure.
+LayoutTests/fast/replaced/iframe-with-percentage-height-within-table-with-table-cell-ignore-height_t01: Skip # Times out: Please triage this failure.
 LayoutTests/fast/replaced/preferred-widths_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/replaced/table-percent-height-text-controls_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/replaced/table-percent-height_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/replaced/table-percent-width_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/replaced/table-replaced-element_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/ruby/parse-rp_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/ruby/ruby-line-height_t01: RuntimeError # co19-roll r801: Please triage this failure.
-LayoutTests/fast/selectors/specificity-overflow_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/selectors/style-sharing-last-child_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/shapes/parsing/parsing-shape-lengths_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-big-box-border-radius_t01: RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/shapes/shape-outside-floats/shape-outside-floats-diamond-margin-polygon_t01: RuntimeError # co19-roll r801: Please triage this failure.
@@ -797,7 +802,6 @@
 LayoutTests/fast/sub-pixel/size-of-span-with-different-positions_t01: Skip # co19 issue 732.
 LayoutTests/fast/sub-pixel/table-rows-have-stable-height_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/sub-pixel/table-rows-have-stable-height_t01: Skip # co19 issue 732.
-LayoutTests/fast/table/absolute-table-percent-lengths_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/table/anonymous-table-section-removed_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/anonymous-table-section-removed_t01: Skip # co19 issue 11.
 LayoutTests/fast/table/caption-orthogonal-writing-mode-sizing_t01: RuntimeError # co19-roll r786: Please triage this failure.
@@ -807,7 +811,6 @@
 LayoutTests/fast/table/computeLogicalWidth-table-needsSectionRecalc_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/css-table-max-height_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/css-table-max-width_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/table/css-table-width-with-border-padding_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/fixed-table-layout-width-change_t01: Pass, RuntimeError # False passes on Firefox # co19 issue 11.
 LayoutTests/fast/table/hittest-tablecell-bottom-edge_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/hittest-tablecell-bottom-edge_t01: Skip # co19 issue 11.
@@ -825,8 +828,6 @@
 LayoutTests/fast/table/min-width-html-inline-table_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/nested-tables-with-div-offset_t01: Pass, RuntimeError # co19-roll r801: Please triage this failure.
 LayoutTests/fast/table/padding-height-and-override-height_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/table/resize-table-row_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/table/switch-table-layout-multiple-section_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/table/table-all-rowspans-height-distribution-in-rows-except-overlapped_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/table-all-rowspans-height-distribution-in-rows_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/table-cell-offset-width_t01: RuntimeError # co19-roll r786: Please triage this failure.
@@ -834,13 +835,10 @@
 LayoutTests/fast/table/table-rowspan-cell-with-empty-cell_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/table-rowspan-height-distribution-in-rows_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/table/table-rowspan-height-distribution-in-rows_t02: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/table/table-sections-border-spacing_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/table/table-width-exceeding-max-width_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/table/table-with-content-width-exceeding-max-width_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text-autosizing/vertical-writing-mode_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text-autosizing/vertical-writing-mode_t01: Skip # co19 issue 732.
-LayoutTests/fast/text/container-align-with-inlines_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/text/container-align-with-inlines_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/find-soft-hyphen_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text/find-spaces_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text/font-fallback-synthetic-italics_t01: Pass, RuntimeError # co19 issue 11.
@@ -852,7 +850,6 @@
 LayoutTests/fast/text/glyph-reordering_t01: Pass, RuntimeError # This is a false pass. The font gets sanitized, so whether it works or not probably depends on default sizes. # co19 issue 11.
 LayoutTests/fast/text/international/complex-text-rectangle_t01: Skip # co19 issue 732.
 LayoutTests/fast/text/international/iso-8859-8_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/text/international/listbox-width-rtl_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/international/rtl-text-wrapping_t01: Pass # This is a false pass. All the content gets sanitized, so there's nothing to assert fail on. If the code did anything it would fail. # co19 issue 11.
 LayoutTests/fast/text/international/rtl-text-wrapping_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text/international/rtl-text-wrapping_t01: Skip # co19 issue 732.
@@ -860,14 +857,10 @@
 LayoutTests/fast/text/ipa-tone-letters_t01: Pass, RuntimeError # co19 issue 11.
 LayoutTests/fast/text/ipa-tone-letters_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text/line-break-after-empty-inline-hebrew_t01: Pass, RuntimeError # co19 issue 11.
-LayoutTests/fast/text/line-break-after-inline-latin1_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/line-break-after-question-mark_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/text/line-breaks-after-closing-punctuations_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/line-breaks-after-hyphen-before-number_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/line-breaks-after-hyphen-before-number_t01: Skip # co19 issue 732.
-LayoutTests/fast/text/line-breaks-after-ideographic-comma-or-full-stop_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/offsetForPosition-cluster-at-zero_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/text/pre-wrap-trailing-tab_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/text/regional-indicator-symobls_t01: Pass, Fail # co19 issue 11.
 LayoutTests/fast/text/regional-indicator-symobls_t01: Pass, RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/text/remove-zero-length-run_t01: RuntimeError # co19-roll r786: Please triage this failure.
@@ -881,44 +874,26 @@
 LayoutTests/fast/transforms/bounding-rect-zoom_t01: RuntimeError, Pass # Erratic, but only passes because divs have been entirely removed. # co19 issue 11.
 LayoutTests/fast/transforms/hit-test-large-scale_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/transforms/scrollIntoView-transformed_t01: Pass, RuntimeError # False passes on Firefox. # co19 issue 11.
-LayoutTests/fast/transforms/topmost-becomes-bottomost-for-scrolling_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/transforms/transform-hit-test-flipped_t01: Pass, RuntimeError # Passes on Firefox, but is clearly not testing what it's trying to test. # co19 issue 11.
-LayoutTests/fast/transforms/transform-inside-overflow-scroll_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/url/anchor_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/url/file-http-base_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/file_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/url/host-lowercase-per-scheme_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/url/host_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/idna2003_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/idna2008_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/invalid-urls-utf8_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/ipv4_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/ipv6_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/url/mailto_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/url/path-url_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/url/path_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/url/port_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/url/query_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/relative-unix_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/relative-win_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/relative_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/url/safari-extension_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/url/safari-extension_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/url/scheme_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/url/segments-from-data-url_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/segments_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/url/standard-url_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/url/trivial-segments_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/url/trivial_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/writing-mode/auto-margins-across-boundaries_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/writing-mode/auto-sizing-orthogonal-flows_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LayoutTests/fast/writing-mode/block-formatting-context_t01: RuntimeError # co19 issue 11.
-LayoutTests/fast/writing-mode/display-mutation_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/writing-mode/percentage-margins-absolute-replaced_t01: Pass, RuntimeError # co19 issue 11.
-LayoutTests/fast/writing-mode/percentage-padding_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/writing-mode/positionForPoint_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/writing-mode/positionForPoint_t01: Skip # co19 issue 732.
-LayoutTests/fast/writing-mode/relative-positioning-percentages_t01: RuntimeError # co19 issue 11.
 LayoutTests/fast/writing-mode/table-hit-test_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/writing-mode/vertical-font-vmtx-units-per-em_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/xmlhttprequest/xmlhttprequest-responsetype-abort_t01: Skip # Timeout. co19-roll r786: Please triage this failure.
@@ -930,7 +905,6 @@
 LayoutTests/fast/xpath/4XPath/Core/test_node_test_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/xpath/4XPath/Core/test_node_test_t02: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/xpath/4XPath/Core/test_parser_t01: RuntimeError # Dartium JSInterop failure
-LayoutTests/fast/xpath/ambiguous-operators_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/xpath/attr-namespace_t02: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/xpath/ensure-null-namespace_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/xpath/implicit-node-args_t01: RuntimeError # co19-roll r761: Please triage this failure.
@@ -944,14 +918,11 @@
 LayoutTests/fast/xpath/py-dom-xpath/paths_t01: RuntimeError # co19-roll r786: Please triage this failure.
 LayoutTests/fast/xpath/reverse-axes_t01: RuntimeError # co19-roll r761: Please triage this failure.
 LayoutTests/fast/xsl/default-html_t01: RuntimeError # co19-roll r786: Please triage this failure.
-LibTest/async/Completer/completeError_A02_t01: Pass, Fail # Please triage this failure.
 LibTest/async/Stream/Stream.periodic_A01_t01: Pass, RuntimeError # co19-roll r706.  Please triage this failure.
 LibTest/async/Timer/Timer_A01_t01: RuntimeError, Pass # Issue 16475
 LibTest/collection/ListBase/ListBase_class_A01_t01: Skip # co19-roll r722: Please triage this failure.
 LibTest/collection/ListMixin/ListMixin_class_A01_t01: Skip # co19-roll r722: Please triage this failure.
 LibTest/collection/ListMixin/ListMixin_class_A01_t02: Skip # co19-roll r722: Please triage this failure.
-LibTest/core/DateTime/parse_A01_t02: Skip # Times out. Please triage this failure.
-LibTest/core/DateTime/parse_A03_t01: fail # Issue 12514
 LibTest/core/RegExp/Pattern_semantics/firstMatch_NonEmptyClassRanges_A01_t01: Fail # Issue 22200
 LibTest/core/RegExp/Pattern_semantics/firstMatch_NonEmptyClassRanges_A01_t05: Fail # Issue 22200
 LibTest/core/RegExp/Pattern_semantics/firstMatch_NonEmptyClassRanges_A01_t06: Fail # Issue 22200
@@ -1068,7 +1039,7 @@
 LibTest/isolate/ReceivePort/asBroadcastStream_A04_t02: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/asBroadcastStream_A04_t03: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/close_A01_t01: Pass, RuntimeError # Issue 13921, co19 issue for false pass https://github.com/dart-lang/co19/issues/13
-LibTest/isolate/ReceivePort/close_A02_t01: Pass, RuntimeError # Issue 13921, co19 issue for false pass https://github.com/dart-lang/co19/issues/13
+LibTest/isolate/ReceivePort/close_A02_t01: Skip # Times out
 LibTest/isolate/ReceivePort/contains_A01_t01: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/distinct_A01_t01: RuntimeError, OK  # Uses Isolate.spawn.
 LibTest/isolate/ReceivePort/distinct_A01_t02: RuntimeError, OK  # Uses Isolate.spawn.
@@ -1306,54 +1277,14 @@
 LayoutTests/fast/css/aspect-ratio-inheritance_t01: Skip # 45 Roll No longer supported.
 LayoutTests/fast/css/aspect-ratio-parsing-tests_t01: Skip # 45 Roll No longer supported.
 LayoutTests/fast/css/auto-min-size_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/background-position-serialize_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/content-language-case-insensitivity_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/content-language-dynamically-added_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/content-language-dynamically-removed_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/content-language-mapped-to-webkit-locale_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/content-language-multiple_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/content-language-no-content_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/counters/counter-cssText_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
 LayoutTests/fast/css-intrinsic-dimensions/intrinsic-sized-replaced-absolutes_t01: Skip # 45 Roll failure.
-LayoutTests/fast/css/cssText-shorthand_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/csstext-of-content-string_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/ex-unit-with-no-x-height_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/font-shorthand-from-longhands_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/getComputedStyle/computed-style-font_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/getComputedStyle/computed-style-properties_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/getComputedStyle/counterIncrement-without-counter_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/getPropertyValue-columns_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/image-set-setting_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/important-js-override_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/MarqueeLayoutTest_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/nested-at-rules_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/parse-color-int-or-percent-crash_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
 LayoutTests/fast/css/parsing-css-allowed-string-characters_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/parsing-css-nth-child_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/parsing-text-rendering_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/pseudo-valid-unapplied_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/string-quote-binary_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/css/transform-origin-parsing_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
 LayoutTests/fast/dom/custom/element-names_t01: RuntimeError # 45 Roll issue dart-lang/co19/issues/25
-LayoutTests/fast/dom/background-shorthand-csstext_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/dom/css-selectorText_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/dom/fragment-activation-focuses-target_t01: RuntimeError #  45 Roll co19 issue https://github.com/dart-lang/co19/issues/36
-LayoutTests/fast/dom/shadow/content-reprojection-fallback-crash_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-LayoutTests/fast/dom/shadow/event-path_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/26
-LayoutTests/fast/dom/shadow/shadow-disable_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/27
-LayoutTests/fast/dom/shadow/shadow-root-js-api_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
 LayoutTests/fast/encoding/css-charset-dom_t01: Skip #  45 Roll No longer supported; see issue https://github.com/dart-lang/co19/issues/35
 LayoutTests/fast/events/overflowchanged-event-raf-timing_t01: Skip #  45 Roll No longer supported.
 LayoutTests/fast/forms/input-value-sanitization_t01: RuntimeError # 45 roll issue
-LayoutTests/fast/forms/validationMessage_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/28
-LayoutTests/fast/forms/ValidityState-customError_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/29
-LayoutTests/fast/innerHTML/javascript-url_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/34
-LayoutTests/fast/multicol/columns-shorthand-parsing_t02: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/30
 LayoutTests/fast/canvas/webgl/texture-transparent-pixels-initialized_t01: RuntimeError # 45 roll: webgl doesn't run on windows/linux but failed on mac bots.
-LayoutTests/fast/xmlhttprequest/xmlhttprequest-get_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/31
 WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-methods/test-005_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-WebPlatformTest/shadow-dom/shadow-trees/upper-boundary-encapsulation/dom-tree-accessors-002_t01: RuntimeError #  45 Roll co19 test rewrite issue 25807
-WebPlatformTest/shadow-dom/elements-and-dom-objects/extensions-to-event-interface/event-path-001_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/32
 WebPlatformTest/shadow-dom/elements-and-dom-objects/shadowroot-object/shadowroot-methods/test-002_t01: RuntimeError # 45 roll issue https://github.com/dart-lang/co19/issues/33
 # Must fix failures below after JsInterop checkin.
 WebPlatformTest/dom/nodes/Node-appendChild_t01: RuntimeError # Issue 26134
@@ -1383,3 +1314,4 @@
 LibTest/html/Element/insertBefore_A01_t01: RuntimeError # Issue 26134
 LibTest/html/Element/insertAllBefore_A01_t01: RuntimeError # Issue 26134
 LibTest/html/CanvasRenderingContext2D/addEventListener_A01_t06: Skip # Issue 26134, timeout
+html/cross_domain_iframe_test: RuntimeError # Issue 26134
diff --git a/tests/co19/co19-runtime.status b/tests/co19/co19-runtime.status
index 1ce9410..941a1d8 100644
--- a/tests/co19/co19-runtime.status
+++ b/tests/co19/co19-runtime.status
@@ -27,9 +27,6 @@
 Language/Statements/Assert/type_t05: skip # co19 issue 734
 
 
-LibTest/core/DateTime/parse_A03_t01: fail # Issue 12514
-LibTest/core/DateTime/parse_A01_t02: Fail # co19 issue 17.
-
 LibTest/core/DateTime/DateTime.now_A01_t02: Pass, Fail # co19 issue 709
 
 LibTest/isolate/Isolate/spawnUri_A01_t02: Skip # Dart issue 15974
@@ -46,8 +43,6 @@
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) ]
 LibTest/typed_data/Float32x4/reciprocalSqrt_A01_t01: Pass, Fail # co19 issue 599
 LibTest/typed_data/Float32x4/reciprocal_A01_t01: Pass, Fail # co19 issue 599
-Language/Expressions/Instance_Creation/Const/abstract_class_t01: MissingCompileTimeError # Issue 22007
-Language/Expressions/Instance_Creation/Const/abstract_class_t03: MissingCompileTimeError # Issue 22007
 # With asynchronous loading, the load errors in these tests are no longer recognized as compile errors:
 Language/Libraries_and_Scripts/Imports/invalid_uri_t02: Fail
 Language/Libraries_and_Scripts/Exports/invalid_uri_t02: Fail
@@ -56,7 +51,10 @@
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $mode == debug ]
 LibTest/core/List/List_class_A01_t02: Pass, Slow
 
-[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && ($arch != x64 && $arch != simarm64 && $arch != arm64) ]
+[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $system == windows ]
+Language/Expressions/Function_Invocation/async_invokation_t04: Pass, RuntimeError # co19 issue 54
+
+[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && ($arch != x64 && $arch != simarm64 && $arch != arm64 && $arch != simdbc && $arch != simdbc64) ]
 LibTest/core/int/operator_left_shift_A01_t02: Fail # co19 issue 129
 
 [ ($compiler == none || $compiler == precompiler) && ($runtime == vm || $runtime == dart_precompiled) && ($arch == mips || $arch == arm64) ]
@@ -66,7 +64,7 @@
 LibTest/collection/ListMixin/ListMixin_class_A01_t02: Skip # co19 issue 673
 LibTest/collection/ListBase/ListBase_class_A01_t02: Skip # co19 issue 673
 
-[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && ($arch == simarm || $arch == simarmv6 || $arch == simarmv5te || $arch == simmips || $arch == simarm64) ]
+[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && ($arch == simarm || $arch == simarmv6 || $arch == simarmv5te || $arch == simmips || $arch == simarm64 || $arch == simdbc || $arch == simdbc64) ]
 LibTest/core/Uri/Uri_A06_t03: Skip  # Timeout
 LibTest/collection/ListMixin/ListMixin_class_A01_t01: Skip  # Timeout
 LibTest/collection/ListBase/ListBase_class_A01_t01: Skip  # Timeout
@@ -86,14 +84,10 @@
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $mode == debug && $builder_tag == asan ]
 Language/Types/Interface_Types/subtype_t27: Skip  # Issue 21174.
 
-[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $arch == arm ]
-LibTest/typed_data/Float32x4/operator_multiplication_A01_t01: Fail # Dart issue 24416
-
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) ]
 # co19 update Sep 29, 2015 (3ed795ea02e022ef19c77cf1b6095b7c8f5584d0)
 Language/Classes/Getters/type_object_t01: RuntimeError # Issue 23721
 Language/Classes/Getters/type_object_t02: RuntimeError # Issue 23721
-Language/Classes/Setters/syntax_t04: RuntimeError # co19 issue 38
 Language/Classes/Setters/type_object_t01: RuntimeError # Issue 23721
 Language/Classes/Setters/type_object_t02: RuntimeError # Issue 23721
 Language/Classes/Static_Methods/type_object_t01: RuntimeError # Issue 23721
@@ -112,34 +106,63 @@
 Language/Expressions/Property_Extraction/Getter_Access_and_Method_Extraction/class_object_member_t06: MissingCompileTimeError # Issue 24472
 Language/Expressions/Property_Extraction/Getter_Access_and_Method_Extraction/class_object_member_t07: MissingCompileTimeError # Issue 24472
 Language/Expressions/Property_Extraction/Getter_Access_and_Method_Extraction/class_object_member_t08: MissingCompileTimeError # Issue 24472
-Language/Expressions/Property_Extraction/Named_Constructor_Extraction/not_class_t01: CompileTimeError # co19 issue 41
-Language/Libraries_and_Scripts/Imports/namespace_changes_t10: RuntimeError # co19 issue 39
-Language/Libraries_and_Scripts/Parts/compilation_t09: MissingCompileTimeError # co19 issue 40
-Language/Libraries_and_Scripts/URIs/syntax_t04: RuntimeError # co19 issue 42
-Language/Libraries_and_Scripts/URIs/syntax_t05: RuntimeError # co19 issue 42
-Language/Libraries_and_Scripts/URIs/syntax_t09: RuntimeError # co19 issue 42
-Language/Libraries_and_Scripts/URIs/syntax_t10: RuntimeError # co19 issue 42
-Language/Libraries_and_Scripts/URIs/syntax_t14: RuntimeError # co19 issue 42
-Language/Libraries_and_Scripts/URIs/syntax_t15: RuntimeError # co19 issue 42
-Language/Mixins/Mixin_Application/error_t01: MissingCompileTimeError # co19 issue 43
-Language/Mixins/Mixin_Application/error_t02: MissingCompileTimeError # co19 issue 43
-Language/Mixins/declaring_constructor_t01: MissingCompileTimeError # co19 issue 43
-Language/Mixins/not_object_superclass_t01: MissingCompileTimeError # co19 issue 43 and 44
-Language/Mixins/reference_to_super_t01: MissingCompileTimeError # co19 issue 43 and 44
+Language/Expressions/Identifier_Reference/built_in_identifier_t35: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t36: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t37: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_identifier_t53: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t54: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t55: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t56: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t57: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t58: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t59: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t60: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t61: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t62: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t63: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t64: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t65: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t66: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t67: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_identifier_t68: MissingCompileTimeError # Issue 25733
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t14: MissingCompileTimeError # Issue 25732
+Language/Expressions/Identifier_Reference/built_in_not_dynamic_t19: MissingCompileTimeError # Issue 25772
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t01: MissingCompileTimeError # Issue 25496
+Language/Expressions/Method_Invocation/Ordinary_Invocation/object_method_invocation_t02: MissingCompileTimeError # Issue 25496
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t01: RuntimeError # Issue 24607
+Language/Expressions/Property_Extraction/Named_Constructor_Closurization/identical_t01: RuntimeError # Issue 24607
+Language/Expressions/Assignment/super_assignment_failed_t05: RuntimeError # Issue 25671
+Language/Expressions/Function_Invocation/async_generator_invokation_t08: Fail # Issue 25967
+Language/Expressions/Function_Invocation/async_generator_invokation_t10: Fail # Issue 25967
+Language/Expressions/Property_Extraction/Anonymous_Constructor_Closurization/identical_t02: RuntimeError # Issue 24607
+Language/Mixins/Mixin_Application/syntax_t16: CompileTimeError # Issue 25765
+Language/Mixins/declaring_constructor_t05: MissingCompileTimeError # Issue 24767
+Language/Mixins/declaring_constructor_t06: MissingCompileTimeError # Issue 24767
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t08: RuntimeError # Issue 25748
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t09: RuntimeError # Issue 25748
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_async_t10: RuntimeError # Issue 25748
+Language/Statements/Yield_and_Yield_Each/Yield_Each/execution_sync_t05: RuntimeError # Issue 25662, 25634
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t01: MissingCompileTimeError # Issue 25495
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t03: MissingCompileTimeError # Issue 25495
+Language/Statements/Yield_and_Yield_Each/Yield_Each/location_t05: MissingCompileTimeError # Issue 25495
 
-[ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $checked ]
-Language/Errors_and_Warnings/static_warning_t01: RuntimeError # co19 issue 45
-Language/Errors_and_Warnings/static_warning_t02: RuntimeError # co19 issue 45
-Language/Errors_and_Warnings/static_warning_t03: RuntimeError # co19 issue 45
-Language/Errors_and_Warnings/static_warning_t04: RuntimeError # co19 issue 45
-Language/Errors_and_Warnings/static_warning_t05: RuntimeError # co19 issue 45
-Language/Errors_and_Warnings/static_warning_t06: RuntimeError # co19 issue 45
+Language/Expressions/Property_Extraction/General_Super_Property_Extraction: CompileTimeError # Issue 26287
+Language/Expressions/Property_Extraction/Super_Closurization: CompileTimeError # Issue 26287
+
+# co19 Roll May 30 2016
+Language/Reference/Operator_Precedence/precedence_12_Shift_t02: RuntimeError # Issue 26573
+Language/Reference/Operator_Precedence/precedence_15_unary_prefix_t04: RuntimeError # Issue 26573
 
 [ $noopt || $compiler == precompiler || $mode == product ]
 Language/Metadata/*: SkipByDesign # Uses dart:mirrors
+Language/Expressions/Null/instance_of_class_null_t01: Skip # Uses dart:mirrors
 
 [ $runtime == dart_precompiled || $runtime == dart_app ]
 LibTest/isolate/Isolate/spawnUri*: Skip # Isolate.spawnUri
+Language/Libraries_and_Scripts/Imports/invalid_uri_deferred_t01: CompileTimeError # spawnUri
+Language/Libraries_and_Scripts/Imports/invalid_uri_deferred_t02: RuntimeError # spawnUri
+Language/Libraries_and_Scripts/Imports/deferred_import_t02: RuntimeError # Unsupported
 
 [ $noopt || $compiler == precompiler ]
 LibTest/collection/ListBase/ListBase_class_A01_t02: Pass, Timeout
@@ -152,6 +175,7 @@
 Language/Mixins/Mixin_Application/error_t02: Pass
 Language/Mixins/declaring_constructor_t01: Pass
 
+
 [ ($arch == simdbc || $arch == simdbc64) && $mode == debug ]
 # TODO(vegorov) These tests are very slow on unoptimized SIMDBC
 LibTest/collection/ListMixin/ListMixin_class_A01_t02: Timeout
@@ -163,3 +187,26 @@
 
 LibTest/math/log_A01_t01: RuntimeError # Precision of Math.log (Issue #18998)
 Language/Expressions/Object_Identity/double_t02: RuntimeError # Issue #26374
+
+[ $compiler == precompiler && $runtime == dart_precompiled && $arch == simarm ]
+LibTest/typed_data/Float32x4/operator_division_A01_t02: RuntimeError # Issue #26675
+
+[ $hot_reload ]
+LibTest/collection/DoubleLinkedQueue/DoubleLinkedQueue_class_A01_t01: Pass, Crash
+LibTest/collection/HashSet/HashSet_class_A01_t01: Crash
+LibTest/collection/IterableBase/IterableBase_class_A01_t02: Crash
+LibTest/collection/LinkedHashSet/LinkedHashSet_class_A01_t01: Crash
+LibTest/collection/LinkedList/iterator_current_A01_t01: Crash
+LibTest/collection/ListBase/ListBase_class_A01_t01: Pass, Timeout, Crash
+LibTest/collection/ListBase/ListBase_class_A01_t02: Pass, Timeout
+LibTest/collection/ListMixin/ListMixin_class_A01_t01: Pass, Timeout, Crash
+LibTest/collection/ListMixin/ListMixin_class_A01_t02: Pass, Timeout
+LibTest/collection/ListQueue/ListQueue_class_A01_t01: Pass, Crash
+LibTest/collection/Queue/Queue_class_A01_t01: Pass, Crash
+LibTest/core/List/List.from_A01_t01: Crash
+LibTest/core/List/List_class_A01_t01: Pass, Crash
+LibTest/core/List/List_class_A01_t02: Pass, Timeout, Crash
+LibTest/core/Map/Map_class_A01_t04: Pass, Timeout
+LibTest/core/Set/IterableBase_A01_t01: Pass, Crash
+LibTest/core/Uri/Uri_A06_t03: Pass, Timeout
+LibTest/core/Uri/encodeQueryComponent_A01_t02: Pass, Timeout
diff --git a/tests/compiler/dart2js/analyze_dart2js_helpers_test.dart b/tests/compiler/dart2js/analyze_dart2js_helpers_test.dart
index 7f531eb..d4d85ef 100644
--- a/tests/compiler/dart2js/analyze_dart2js_helpers_test.dart
+++ b/tests/compiler/dart2js/analyze_dart2js_helpers_test.dart
@@ -48,9 +48,11 @@
         options: options, showDiagnostics: verbose);
     FormattingDiagnosticHandler diagnostics =
         new FormattingDiagnosticHandler(compiler.provider);
-    HelperAnalyzer analyzer = new HelperAnalyzer(diagnostics);
     Directory dir =
         new Directory.fromUri(Uri.base.resolve('pkg/compiler/lib/'));
+    String helpersUriPrefix = dir.uri.resolve('src/helpers/').toString();
+    HelperAnalyzer analyzer = new HelperAnalyzer(diagnostics, helpersUriPrefix);
+    LibraryElement helperLibrary;
     for (FileSystemEntity entity in dir.listSync(recursive: true)) {
       if (entity is File && entity.path.endsWith('.dart')) {
         Uri file = Uri.base.resolve(nativeToUriPath(entity.path));
@@ -59,6 +61,9 @@
         }
         LibraryElement library = await compiler.analyzeUri(file);
         if (library != null) {
+          if (library.libraryName == 'dart2js.helpers') {
+            helperLibrary = library;
+          }
           library.forEachLocalMember((Element element) {
             if (element is ClassElement) {
               element.forEachLocalMember((AstElement member) {
@@ -71,12 +76,16 @@
         }
       }
     }
+    Expect.isNotNull(helperLibrary, 'Helper library not found');
+    Expect.isTrue(analyzer.isHelper(helperLibrary),
+        "Helper library $helperLibrary is not considered a helper.");
     Expect.isTrue(analyzer.errors.isEmpty, "Errors found.");
   });
 }
 
 class HelperAnalyzer extends TraversalVisitor {
   final FormattingDiagnosticHandler diagnostics;
+  final String helpersUriPrefix;
   List<SourceSpan> errors = <SourceSpan>[];
 
   ResolvedAst resolvedAst;
@@ -86,7 +95,7 @@
 
   AnalyzableElement get analyzedElement => resolvedAst.element;
 
-  HelperAnalyzer(this.diagnostics) : super(null);
+  HelperAnalyzer(this.diagnostics, this.helpersUriPrefix) : super(null);
 
   @override
   void apply(Node node, [_]) {
@@ -105,7 +114,7 @@
 
   bool isHelper(Element element) {
     Uri uri = element.library.canonicalUri;
-    return '$uri'.startsWith('package:compiler/src/helpers/');
+    return '$uri'.startsWith(helpersUriPrefix);
   }
 
   void checkAccess(Node node, MemberElement element) {
diff --git a/tests/compiler/dart2js/command_line_test.dart b/tests/compiler/dart2js/command_line_test.dart
new file mode 100644
index 0000000..fe2842e
--- /dev/null
+++ b/tests/compiler/dart2js/command_line_test.dart
@@ -0,0 +1,80 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Test the command line options of dart2js.
+
+import 'dart:async';
+
+import 'package:async_helper/async_helper.dart';
+import 'package:expect/expect.dart';
+
+import 'package:compiler/compiler_new.dart' as api;
+import 'package:compiler/src/commandline_options.dart';
+import 'package:compiler/src/dart2js.dart' as entry;
+import 'package:compiler/src/options.dart' show CompilerOptions;
+
+main() {
+  asyncTest(() async {
+    await test([], exitCode: 1);
+    await test(['foo.dart']);
+    await test([Flags.resolveOnly, 'foo.dart'],
+        resolveOnly: true,
+        resolutionOutput: Uri.base.resolve('out.data'));
+    await test(['--resolution-input=bar.dart', 'foo.dart'],
+        resolutionInputs: [Uri.base.resolve('bar.dart')]);
+    await test([Flags.resolveOnly, '--resolution-input=bar.dart', 'foo.dart'],
+        resolveOnly: true,
+        resolutionOutput: Uri.base.resolve('out.data'),
+        resolutionInputs: [Uri.base.resolve('bar.dart')]);
+    await test([Flags.resolveOnly, '--resolution-input=out.data', 'foo.dart'],
+        exitCode: 1);
+  });
+}
+
+Future test(List<String> arguments,
+    {int exitCode,
+     bool resolveOnly: false,
+     Uri resolutionOutput,
+     List<Uri> resolutionInputs}) async {
+  print('--------------------------------------------------------------------');
+  print('dart2js ${arguments.join(' ')}');
+  print('--------------------------------------------------------------------');
+  entry.CompileFunc oldCompileFunc = entry.compileFunc;
+  entry.ExitFunc oldExitFunc = entry.exitFunc;
+
+  CompilerOptions options;
+  int actualExitCode;
+  entry.compileFunc = (_options, input, diagnostics, output) {
+    options = _options;
+    return new Future<api.CompilationResult>.value(
+        new api.CompilationResult(null));
+  };
+  entry.exitFunc = (_exitCode) {
+    actualExitCode = _exitCode;
+    throw 'exited';
+  };
+  try {
+    await entry.compilerMain(arguments);
+  } catch (e, s) {
+    Expect.equals('exited', e, "Unexpected exception: $e\n$s");
+  }
+  Expect.equals(exitCode, actualExitCode, "Unexpected exit code");
+  if (actualExitCode == null) {
+    Expect.isNotNull(options, "Missing options object");
+    Expect.equals(resolveOnly, options.resolveOnly,
+        "Unexpected resolveOnly value");
+    Expect.equals(resolutionOutput, options.resolutionOutput,
+        "Unexpected resolutionOutput value");
+    if (resolutionInputs == null) {
+      Expect.isNull(options.resolutionInputs,
+          "Unexpected resolutionInputs value");
+    } else {
+      Expect.listEquals(resolutionInputs, options.resolutionInputs,
+          "Unexpected resolutionInputs value");
+    }
+  }
+
+  entry.compileFunc = oldCompileFunc;
+  entry.exitFunc = oldExitFunc;
+}
\ No newline at end of file
diff --git a/tests/compiler/dart2js/constant_expression_evaluate_test.dart b/tests/compiler/dart2js/constant_expression_evaluate_test.dart
index 60563f8..49f3b51 100644
--- a/tests/compiler/dart2js/constant_expression_evaluate_test.dart
+++ b/tests/compiler/dart2js/constant_expression_evaluate_test.dart
@@ -55,6 +55,8 @@
     const ConstantData('"foo"', const { const {} : 'StringConstant("foo")' }),
     const ConstantData('1 + 2', const { const {} : 'IntConstant(3)' }),
     const ConstantData('-(1)', const { const {} : 'IntConstant(-1)' }),
+    const ConstantData('1 == 2', const { const {} : 'BoolConstant(false)' }),
+    const ConstantData('1 != 2', const { const {} : 'BoolConstant(true)' }),
     const ConstantData('"foo".length', const { const {} : 'IntConstant(3)' }),
     const ConstantData('identical(0, 1)',
                        const { const {} : 'BoolConstant(false)' }),
@@ -106,6 +108,10 @@
   const C({field1: 42, this.field2: false}) : super(field1);
   const C.named([field = false]) : this(field1: field, field2: field);
 }
+class D extends C {
+  final field3 = 99;
+  const D(a, b) : super(field2: a, field1: b);
+}
 ''', const [
     const ConstantData('const Object()',
         const { const {} : 'ConstructedConstant(Object())' }),
@@ -138,6 +144,10 @@
         const {'foo': 'false', 'bar': '87'} :
           'ConstructedConstant(C(field1=BoolConstant(false),'
                                 'field2=IntConstant(87)))', }),
+    const ConstantData('const D(42, 87)', const { const {} :
+       'ConstructedConstant(D(field1=IntConstant(87),'
+                             'field2=IntConstant(42),'
+                             'field3=IntConstant(99)))' }),
   ]),
   const TestData('''
 class A<T> implements B {
diff --git a/tests/compiler/dart2js/constant_expression_test.dart b/tests/compiler/dart2js/constant_expression_test.dart
index c24868f..16ec71e 100644
--- a/tests/compiler/dart2js/constant_expression_test.dart
+++ b/tests/compiler/dart2js/constant_expression_test.dart
@@ -51,6 +51,9 @@
     const ConstantData('0.0', ConstantExpressionKind.DOUBLE),
     const ConstantData('"foo"', ConstantExpressionKind.STRING),
     const ConstantData('1 + 2', ConstantExpressionKind.BINARY),
+    const ConstantData('1 == 2', ConstantExpressionKind.BINARY),
+    const ConstantData('1 != 2', ConstantExpressionKind.BINARY),
+    const ConstantData('1 ?? 2', ConstantExpressionKind.BINARY),
     const ConstantData('-(1)', ConstantExpressionKind.UNARY, text: '-1'),
     const ConstantData('"foo".length', ConstantExpressionKind.STRING_LENGTH),
     const ConstantData('identical(0, 1)', ConstantExpressionKind.IDENTICAL),
diff --git a/tests/compiler/dart2js/constant_value_test.dart b/tests/compiler/dart2js/constant_value_test.dart
new file mode 100644
index 0000000..5c5bec6
--- /dev/null
+++ b/tests/compiler/dart2js/constant_value_test.dart
@@ -0,0 +1,40 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.constants.values.test;
+
+import 'package:async_helper/async_helper.dart';
+import 'package:expect/expect.dart';
+import 'package:compiler/src/helpers/helpers.dart';
+import 'package:compiler/src/elements/elements.dart';
+import 'package:compiler/src/constants/values.dart';
+import 'type_test_helper.dart';
+
+void main() {
+  enableDebugMode();
+
+  asyncTest(() async {
+    TypeEnvironment env = await TypeEnvironment.create('''
+    class C {
+      final field1;
+      final field2;
+
+      C(this.field1, this.field2);
+    }
+    ''');
+    ClassElement C = env.getElement('C');
+    FieldElement field1 = C.lookupLocalMember('field1');
+    FieldElement field2 = C.lookupLocalMember('field2');
+    ConstantValue value1 = new ConstructedConstantValue(C.rawType, {
+      field1: new IntConstantValue(0),
+      field2: new IntConstantValue(1),
+    });
+    ConstantValue value2 = new ConstructedConstantValue(C.rawType, {
+      field2: new IntConstantValue(1),
+      field1: new IntConstantValue(0),
+    });
+    Expect.equals(value1.hashCode, value2.hashCode, "Hashcode mismatch.");
+    Expect.equals(value1, value2, "Value mismatch.");
+  });
+}
diff --git a/tests/compiler/dart2js/js_spec_string_test.dart b/tests/compiler/dart2js/js_spec_string_test.dart
index 01c9f64..fdd59da 100644
--- a/tests/compiler/dart2js/js_spec_string_test.dart
+++ b/tests/compiler/dart2js/js_spec_string_test.dart
@@ -20,15 +20,16 @@
     errorMessage = message;
     throw "error";
   }
-  reportError(message, [infos]) {
 
+  reportError(message, [infos = const <DiagnosticMessage>[]]) {
     errorMessage =
         '${message.message.arguments}'; // E.g.  "{text: Duplicate tag 'new'.}"
     throw "error";
   }
 
   @override
-  DiagnosticMessage createMessage(spannable, messageKind, [arguments]) {
+  DiagnosticMessage createMessage(spannable, messageKind,
+      [arguments = const {}]) {
     return new DiagnosticMessage(null, spannable,
         MessageTemplate.TEMPLATES[messageKind].message(arguments));
   }
diff --git a/tests/compiler/dart2js/message_kind_helper.dart b/tests/compiler/dart2js/message_kind_helper.dart
index c413241..34e7efe 100644
--- a/tests/compiler/dart2js/message_kind_helper.dart
+++ b/tests/compiler/dart2js/message_kind_helper.dart
@@ -35,6 +35,7 @@
     MessageKind.CANNOT_MIXIN_MALFORMED,
     MessageKind.CANNOT_INSTANTIATE_ENUM,
     MessageKind.CYCLIC_TYPEDEF_ONE,
+    MessageKind.DUPLICATE_DEFINITION,
     MessageKind.EQUAL_MAP_ENTRY_KEY,
     MessageKind.FINAL_FUNCTION_TYPE_PARAMETER,
     MessageKind.FORMAL_DECLARED_CONST,
diff --git a/tests/compiler/dart2js/mirrors_used_test.dart b/tests/compiler/dart2js/mirrors_used_test.dart
index 71d92ae..fc08b6b 100644
--- a/tests/compiler/dart2js/mirrors_used_test.dart
+++ b/tests/compiler/dart2js/mirrors_used_test.dart
@@ -69,7 +69,7 @@
     // 2. Some code was refactored, and there are more methods.
     // Either situation could be problematic, but in situation 2, it is often
     // acceptable to increase [expectedMethodCount] a little.
-    int expectedMethodCount = 449;
+    int expectedMethodCount = 432;
     Expect.isTrue(
         generatedCode.length <= expectedMethodCount,
         'Too many compiled methods: '
@@ -127,18 +127,22 @@
       });
     }
 
-    // There should at least be one metadata constant:
-    // 1. The constructed constant for 'MirrorsUsed'.
-    Expect.isTrue(backend.metadataConstants.length >= 1);
-
+    int metadataCount = 0;
     Set<ConstantValue> compiledConstants = backend.constants.compiledConstants;
     // Make sure that most of the metadata constants aren't included in the
     // generated code.
-    for (var dependency in backend.metadataConstants) {
-      ConstantValue constant = dependency.constant;
+    backend.processMetadata(
+        compiler.enqueuer.resolution.processedElements, (metadata) {
+      ConstantValue constant =
+          backend.constants.getConstantValueForMetadata(metadata);
       Expect.isFalse(compiledConstants.contains(constant),
                      constant.toStructuredText());
-    }
+      metadataCount++;
+    });
+
+    // There should at least be one metadata constant:
+    // 1. The constructed constant for 'MirrorsUsed'.
+    Expect.isTrue(metadataCount >= 1);
 
     // The type literal 'Foo' is both used as metadata, and as a plain value in
     // the program. Make sure that it isn't duplicated.
diff --git a/tests/compiler/dart2js/mock_libraries.dart b/tests/compiler/dart2js/mock_libraries.dart
index 1141416..24c42e4 100644
--- a/tests/compiler/dart2js/mock_libraries.dart
+++ b/tests/compiler/dart2js/mock_libraries.dart
@@ -258,7 +258,7 @@
     final Type owner;
     final String name;
     final int bound;
-    TypeVariable(this.owner, this.name, this.bound);
+    const TypeVariable(this.owner, this.name, this.bound);
   }''',
   'unwrapException': 'unwrapException(e) {}',
   'voidTypeCheck': 'voidTypeCheck(value) {}',
diff --git a/tests/compiler/dart2js/parser_helper.dart b/tests/compiler/dart2js/parser_helper.dart
index a86aebe..48ab2c4 100644
--- a/tests/compiler/dart2js/parser_helper.dart
+++ b/tests/compiler/dart2js/parser_helper.dart
@@ -70,7 +70,8 @@
     infos.forEach(log);
   }
 
-  void reportInfo(Spannable node, MessageKind errorCode, [Map arguments]) {
+  void reportInfo(Spannable node, MessageKind errorCode,
+      [Map arguments = const {}]) {
     log(new Message(MessageTemplate.TEMPLATES[errorCode], arguments, false));
   }
 
diff --git a/tests/compiler/dart2js/semantic_visitor_test_send_data.dart b/tests/compiler/dart2js/semantic_visitor_test_send_data.dart
index b72e479..5ee48ff 100644
--- a/tests/compiler/dart2js/semantic_visitor_test_send_data.dart
+++ b/tests/compiler/dart2js/semantic_visitor_test_send_data.dart
@@ -2702,8 +2702,7 @@
         const Visit(VisitKind.VISIT_SUPER_FIELD_SETTER_COMPOUND,
             getter: 'field(A#a)', setter: 'setter(B#a)',
             operator: '+=', rhs: '42')),
-    // TODO(johnniwinther): Enable this when dart2js supports shadow setters.
-    /*const Test.clazz(
+    const Test.clazz(
         '''
         class A {
           var a;
@@ -2716,9 +2715,12 @@
           m() => super.a += 42;
         }
         ''',
-        const Visit(VisitKind.VISIT_SUPER_FIELD_FIELD_COMPOUND,
-            getter: 'field(B#a)', setter: 'field(A#a)',
-            operator: '+=', rhs: '42')),*/
+        // TODO(johnniwinther): Change this to
+        // [VISIT_SUPER_FIELD_FIELD_COMPOUND] when dart2js supports shadow
+        // setters.
+        const Visit(VisitKind.VISIT_SUPER_FINAL_FIELD_COMPOUND,
+            element: 'field(B#a)',
+            operator: '+=', rhs: '42')),
     const Test.clazz(
         '''
         class B {
@@ -4437,8 +4439,7 @@
         const Visit(VisitKind.VISIT_SUPER_FIELD_SETTER_SET_IF_NULL,
             getter: 'field(A#a)', setter: 'setter(B#a)',
             rhs: '42')),
-    // TODO(johnniwinther): Enable this when dart2js supports shadow setters.
-    /*const Test.clazz(
+    const Test.clazz(
         '''
         class A {
           var a;
@@ -4451,9 +4452,12 @@
           m() => super.a ??= 42;
         }
         ''',
-        const Visit(VisitKind.VISIT_SUPER_FIELD_FIELD_SET_IF_NULL,
-            getter: 'field(B#a)', setter: 'field(A#a)',
-            rhs: '42')),*/
+        // TODO(johnniwinther): Change this to
+        // [VISIT_SUPER_FIELD_FIELD_SET_IF_NULL] when dart2js supports shadow
+        // setters.
+        const Visit(VisitKind.VISIT_SUPER_FINAL_FIELD_SET_IF_NULL,
+            element: 'field(B#a)',
+            rhs: '42')),
     const Test.clazz(
         '''
         class B {
diff --git a/tests/compiler/dart2js/serialization/analysis1_test.dart b/tests/compiler/dart2js/serialization/analysis1_test.dart
new file mode 100644
index 0000000..1b8532e
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/analysis1_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.analysis1_test;
+
+import 'analysis_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['0', '${TESTS.length ~/ 2}']);
+}
diff --git a/tests/compiler/dart2js/serialization/analysis2_test.dart b/tests/compiler/dart2js/serialization/analysis2_test.dart
new file mode 100644
index 0000000..b13a6a3
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/analysis2_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.analysis2_test;
+
+import 'analysis_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['${TESTS.length ~/ 2}']);
+}
diff --git a/tests/compiler/dart2js/serialization/analysis_test.dart b/tests/compiler/dart2js/serialization/analysis_test_helper.dart
similarity index 100%
rename from tests/compiler/dart2js/serialization/analysis_test.dart
rename to tests/compiler/dart2js/serialization/analysis_test_helper.dart
diff --git a/tests/compiler/dart2js/serialization/compilation0_test.dart b/tests/compiler/dart2js/serialization/compilation0_test.dart
new file mode 100644
index 0000000..9df7f53
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/compilation0_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.compilation0_test;
+
+import 'compilation_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['0', '0']);
+}
diff --git a/tests/compiler/dart2js/serialization/compilation1_test.dart b/tests/compiler/dart2js/serialization/compilation1_test.dart
new file mode 100644
index 0000000..e3f6176
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/compilation1_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.compilation1_test;
+
+import 'compilation_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['1', '${TESTS.length ~/ 4}']);
+}
diff --git a/tests/compiler/dart2js/serialization/compilation2_test.dart b/tests/compiler/dart2js/serialization/compilation2_test.dart
new file mode 100644
index 0000000..6141cbb
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/compilation2_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.compilation2_test;
+
+import 'compilation_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['${TESTS.length ~/ 4}', '${2 * TESTS.length ~/ 4}']);
+}
diff --git a/tests/compiler/dart2js/serialization/compilation3_test.dart b/tests/compiler/dart2js/serialization/compilation3_test.dart
new file mode 100644
index 0000000..c168f22
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/compilation3_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.compilation3_test;
+
+import 'compilation_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['${2 * TESTS.length ~/ 4}', '${3 * TESTS.length ~/ 4}']);
+}
diff --git a/tests/compiler/dart2js/serialization/compilation4_test.dart b/tests/compiler/dart2js/serialization/compilation4_test.dart
new file mode 100644
index 0000000..7e2dc7c
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/compilation4_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.compilation4_test;
+
+import 'compilation_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['${3 * TESTS.length ~/ 4}']);
+}
diff --git a/tests/compiler/dart2js/serialization/compilation_test.dart b/tests/compiler/dart2js/serialization/compilation_test_helper.dart
similarity index 100%
rename from tests/compiler/dart2js/serialization/compilation_test.dart
rename to tests/compiler/dart2js/serialization/compilation_test_helper.dart
diff --git a/tests/compiler/dart2js/serialization/equivalence_test.dart b/tests/compiler/dart2js/serialization/equivalence_test.dart
index 7caae28..4e3d6e2 100644
--- a/tests/compiler/dart2js/serialization/equivalence_test.dart
+++ b/tests/compiler/dart2js/serialization/equivalence_test.dart
@@ -9,11 +9,14 @@
 import 'package:async_helper/async_helper.dart';
 import 'package:compiler/src/commandline_options.dart';
 import 'package:compiler/src/common.dart';
+import 'package:compiler/src/common/resolution.dart';
 import 'package:compiler/src/constants/constructors.dart';
 import 'package:compiler/src/compiler.dart';
 import 'package:compiler/src/diagnostics/invariant.dart';
 import 'package:compiler/src/elements/elements.dart';
 import 'package:compiler/src/elements/visitor.dart';
+import 'package:compiler/src/filenames.dart';
+import 'package:compiler/src/library_loader.dart';
 import 'package:compiler/src/ordered_typeset.dart';
 import 'package:compiler/src/serialization/element_serialization.dart';
 import 'package:compiler/src/serialization/equivalence.dart';
@@ -22,6 +25,14 @@
 import 'package:expect/expect.dart';
 import 'test_helper.dart';
 
+const TEST_SOURCES = const <String, String>{
+  'main.dart': '''
+import 'a.dart' deferred as a;
+''',
+  'a.dart': '''
+''',
+};
+
 main(List<String> arguments) {
   // Ensure that we can print out constant expressions.
   DEBUG_MODE = true;
@@ -42,20 +53,27 @@
       if (entryPoint != null) {
         print("Multiple entrypoints is not supported.");
       }
-      entryPoint = Uri.parse(arg);
+      entryPoint = Uri.base.resolve(nativeToUriPath(arg));
     }
   }
+  Map<String, String> sourceFiles = const <String, String>{};
   if (entryPoint == null) {
-    entryPoint = Uri.parse('dart:core');
+    entryPoint = Uri.parse('memory:main.dart');
+    sourceFiles = TEST_SOURCES;
   }
   asyncTest(() async {
     CompilationResult result = await runCompiler(
-        entryPoint: entryPoint, options: [Flags.analyzeAll]);
+        memorySourceFiles: sourceFiles,
+        entryPoint: entryPoint,
+        options: [Flags.analyzeAll]);
     Compiler compiler = result.compiler;
-    testSerialization(compiler.libraryLoader.libraries,
-                      compiler.reporter,
-                      outPath: outPath,
-                      prettyPrint: prettyPrint);
+    testSerialization(
+        compiler.libraryLoader.libraries,
+        compiler.reporter,
+        compiler.resolution,
+        compiler.libraryLoader,
+        outPath: outPath,
+        prettyPrint: prettyPrint);
     Expect.isFalse(compiler.reporter.hasReportedError,
         "Unexpected errors occured.");
   });
@@ -64,6 +82,8 @@
 void testSerialization(
     Iterable<LibraryElement> libraries1,
     DiagnosticReporter reporter,
+    Resolution resolution,
+    LibraryProvider libraryProvider,
     {String outPath,
      bool prettyPrint}) {
   Serializer serializer = new Serializer();
@@ -82,7 +102,8 @@
   }
 
   Deserializer deserializer = new Deserializer.fromText(
-      new DeserializationContext(reporter), Uri.parse('out1.data'),
+      new DeserializationContext(reporter, resolution, libraryProvider),
+      Uri.parse('out1.data'),
       text, const JsonSerializationDecoder());
   List<LibraryElement> libraries2 = <LibraryElement>[];
   for (LibraryElement library1 in libraries1) {
@@ -102,7 +123,8 @@
   String text2 = serializer2.toText(const JsonSerializationEncoder());
 
   Deserializer deserializer3 = new Deserializer.fromText(
-      new DeserializationContext(reporter), Uri.parse('out2.data'),
+      new DeserializationContext(reporter, resolution, libraryProvider),
+      Uri.parse('out2.data'),
       text2, const JsonSerializationDecoder());
   for (LibraryElement library1 in libraries1) {
     LibraryElement library2 =
@@ -135,7 +157,10 @@
 checkElementProperties(
     Object object1, object2, String property,
     Element element1, Element element2) {
+  currentCheck =
+      new Check(currentCheck, object1, object2, property, element1, element2);
   const ElementPropertyEquivalence().visit(element1, element2);
+  currentCheck = currentCheck.parent;
 }
 
 /// Checks the equivalence of [constructor1] and [constructor2].
@@ -236,6 +261,16 @@
                   list1, list2, checkElementProperties);
 }
 
+/// Checks the equivalence of the metadata annotations [metadata1] and
+/// [metadata2].
+///
+/// Uses [object1], [object2] and [property] to provide context for failures.
+checkMetadata(Object object1, Object object2, String property,
+    MetadataAnnotation metadata1, MetadataAnnotation metadata2) {
+  check(object1, object2, property,
+      metadata1, metadata2, areMetadataAnnotationsEquivalent);
+}
+
 /// Visitor that checks for equivalence of [Element] properties.
 class ElementPropertyEquivalence extends BaseElementVisitor<dynamic, Element> {
   const ElementPropertyEquivalence();
@@ -255,6 +290,28 @@
         element1.isFinal, element2.isFinal);
     check(element1, element2, 'isConst',
         element1.isConst, element2.isConst);
+    check(element1, element2, 'isAbstract',
+        element1.isAbstract, element2.isAbstract);
+    check(element1, element2, 'isStatic',
+        element1.isStatic, element2.isStatic);
+    check(element1, element2, 'isTopLevel',
+        element1.isTopLevel, element2.isTopLevel);
+    check(element1, element2, 'isClassMember',
+        element1.isClassMember, element2.isClassMember);
+    check(element1, element2, 'isInstanceMember',
+        element1.isInstanceMember, element2.isInstanceMember);
+    List<MetadataAnnotation> metadata1 = <MetadataAnnotation>[];
+    metadata1.addAll(element1.metadata);
+    if (element1.isPatched) {
+      metadata1.addAll(element1.implementation.metadata);
+    }
+    List<MetadataAnnotation> metadata2 = <MetadataAnnotation>[];
+    metadata2.addAll(element2.metadata);
+    if (element2.isPatched) {
+      metadata2.addAll(element2.implementation.metadata);
+    }
+    checkListEquivalence(element1, element2, 'metadata',
+        metadata1, metadata2, checkMetadata);
   }
 
   @override
@@ -360,7 +417,10 @@
           throw message;
         }
       }
+      currentCheck = new Check(currentCheck, element1, element2,
+          'member:$name', member1, member2);
       visit(member1, member2);
+      currentCheck = currentCheck.parent;
     }
   }
 
@@ -368,15 +428,35 @@
   void visitClassElement(ClassElement element1, ClassElement element2) {
     checkElementIdentities(null, null, null, element1, element2);
     check(element1, element2, 'name',
-          element1.name, element2.name);
-    check(element1, element2, 'sourcePosition',
+        element1.name, element2.name);
+    if (!element1.isUnnamedMixinApplication) {
+      check(element1, element2, 'sourcePosition',
           element1.sourcePosition, element2.sourcePosition);
+    } else {
+      check(element1, element2, 'sourcePosition.uri',
+          element1.sourcePosition.uri, element2.sourcePosition.uri);
+      MixinApplicationElement mixin1 = element1;
+      MixinApplicationElement mixin2 = element2;
+      checkElementIdentities(mixin1, mixin2, 'subclass',
+          mixin1.subclass, mixin2.subclass);
+      checkTypes(mixin1, mixin2, 'mixinType',
+          mixin1.mixinType, mixin2.mixinType);
+    }
     checkElementIdentities(
         element1, element2, 'library',
         element1.library, element2.library);
     checkElementIdentities(
         element1, element2, 'compilationUnit',
         element1.compilationUnit, element2.compilationUnit);
+    checkTypeLists(
+        element1, element2, 'typeVariables',
+        element1.typeVariables, element2.typeVariables);
+    checkTypes(
+        element1, element2, 'thisType',
+        element1.thisType, element2.thisType);
+    checkTypes(
+        element1, element2, 'rawType',
+        element1.rawType, element2.rawType);
     check(element1, element2, 'isObject',
         element1.isObject, element2.isObject);
     checkTypeLists(element1, element2, 'typeVariables',
@@ -385,33 +465,37 @@
         element1.isAbstract, element2.isAbstract);
     check(element1, element2, 'isUnnamedMixinApplication',
         element1.isUnnamedMixinApplication, element2.isUnnamedMixinApplication);
+    check(element1, element2, 'isProxy',
+        element1.isProxy, element2.isProxy);
+    check(element1, element2, 'isInjected',
+        element1.isInjected, element2.isInjected);
     check(element1, element2, 'isEnumClass',
         element1.isEnumClass, element2.isEnumClass);
     if (element1.isEnumClass) {
       EnumClassElement enum1 = element1;
       EnumClassElement enum2 = element2;
       checkElementLists(enum1, enum2, 'enumValues',
-                        enum1.enumValues, enum2.enumValues);
+          enum1.enumValues, enum2.enumValues);
     }
     if (!element1.isObject) {
       checkTypes(element1, element2, 'supertype',
           element1.supertype, element2.supertype);
     }
     check(element1, element2, 'hierarchyDepth',
-          element1.hierarchyDepth, element2.hierarchyDepth);
+        element1.hierarchyDepth, element2.hierarchyDepth);
     checkTypeLists(
         element1, element2, 'allSupertypes',
         element1.allSupertypes.toList(),
         element2.allSupertypes.toList());
     OrderedTypeSet typeSet1 = element1.allSupertypesAndSelf;
-    OrderedTypeSet typeSet2 = element1.allSupertypesAndSelf;
+    OrderedTypeSet typeSet2 = element2.allSupertypesAndSelf;
     checkListEquivalence(
         element1, element2, 'allSupertypes',
         typeSet1.levelOffsets,
         typeSet2.levelOffsets,
         check);
     check(element1, element2, 'allSupertypesAndSelf.levels',
-          typeSet1.levels, typeSet2.levels);
+        typeSet1.levels, typeSet2.levels);
     checkTypeLists(
         element1, element2, 'supertypes',
         typeSet1.supertypes.toList(),
@@ -435,7 +519,20 @@
         getConstructors(element1),
         getConstructors(element2));
 
+    checkElementIdentities(element1, element2, 'defaultConstructor',
+        element1.lookupDefaultConstructor(),
+        element2.lookupDefaultConstructor());
+
     visitMembers(element1, element2);
+
+    ClassElement superclass1 = element1.superclass;
+    ClassElement superclass2 = element2.superclass;
+    while (superclass1 != null && superclass1.isMixinApplication) {
+      checkElementProperties(element1, element2, 'supermixin',
+          superclass1, superclass2);
+      superclass1 = superclass1.superclass;
+      superclass2 = superclass2.superclass;
+    }
   }
 
   @override
@@ -457,6 +554,8 @@
           element1.isStatic, element2.isStatic);
     check(element1, element2, 'isInstanceMember',
           element1.isInstanceMember, element2.isInstanceMember);
+    check(element1, element2, 'isInjected',
+        element1.isInjected, element2.isInjected);
 
     checkElementIdentities(
         element1, element2, 'library',
@@ -490,6 +589,8 @@
         element1, element2, 'asyncMarker',
         element1.asyncMarker,
         element2.asyncMarker);
+    check(element1, element2, 'isInjected',
+        element1.isInjected, element2.isInjected);
 
     checkElementIdentities(
         element1, element2, 'library',
@@ -550,8 +651,13 @@
     check(
         element1, element2, 'name',
         element1.name, element2.name);
-    check(element1, element2, 'sourcePosition',
+    if (!element1.isSynthesized) {
+      check(element1, element2, 'sourcePosition',
           element1.sourcePosition, element2.sourcePosition);
+    } else {
+      check(element1, element2, 'sourcePosition.uri',
+          element1.sourcePosition.uri, element2.sourcePosition.uri);
+    }
     checkListEquivalence(
         element1, element2, 'parameters',
         element1.parameters, element2.parameters,
@@ -589,6 +695,8 @@
         element2.immediateRedirectionTarget);
     checkElementIdentities(element1, element2, 'redirectionDeferredPrefix',
         element1.redirectionDeferredPrefix, element2.redirectionDeferredPrefix);
+    check(element1, element2, 'isInjected',
+        element1.isInjected, element2.isInjected);
   }
 
   @override
@@ -703,8 +811,12 @@
         element1, element2, 'isDeferred',
         element1.isDeferred, element2.isDeferred);
     checkElementIdentities(
-        element1, element2, 'importedLibrary',
+        element1, element2, 'deferredImport',
         element1.deferredImport, element2.deferredImport);
+    if (element1.isDeferred) {
+      checkElementProperties(element1, element2,
+          'loadLibrary', element1.loadLibrary, element2.loadLibrary);
+    }
     // TODO(johnniwinther): Check members.
   }
 }
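The currentCheck bookkeeping added to checkElementProperties and visitMembers above threads a parent-linked check node through nested comparisons so that a failure can report the full property path that led to it. The Check class and the currentCheck variable are defined in the shared equivalence/test helpers rather than in this hunk; the following minimal sketch, with simplified fields, shows the pattern:

// Minimal sketch of the parent-linked check-context pattern; the real Check
// class also records the objects and elements being compared.
class Check {
  final Check parent;
  final String property;
  Check(this.parent, this.property);

  String toString() => parent == null ? property : '$parent.$property';
}

Check currentCheck;

void withCheck(String property, void body()) {
  currentCheck = new Check(currentCheck, property);
  body();
  currentCheck = currentCheck.parent; // Restore the enclosing context.
}

main() {
  withCheck('ClassElement(C)', () {
    withCheck('member:foo', () {
      print('failure context: $currentCheck');
    });
  });
}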
diff --git a/tests/compiler/dart2js/serialization/helper.dart b/tests/compiler/dart2js/serialization/helper.dart
index a7d185c..e5fab48 100644
--- a/tests/compiler/dart2js/serialization/helper.dart
+++ b/tests/compiler/dart2js/serialization/helper.dart
@@ -88,6 +88,9 @@
       if (test.preserializedSourceFiles != null) {
         sourceFiles.addAll(test.preserializedSourceFiles);
       }
+      if (test.unserializedSourceFiles != null) {
+        sourceFiles.addAll(test.unserializedSourceFiles);
+      }
       List<Uri> resolutionInputs = <Uri>[];
       for (SerializedData data in dataList) {
         data.expandMemorySourceFiles(sourceFiles);
@@ -124,25 +127,45 @@
       print('Loading data from $file');
       serializedData = new SerializedData(uri, file.readAsStringSync());
     }
+  } else {
+    SerializationResult result = await serialize(Uris.dart_core, dataUri: uri);
+    serializedData = result.serializedData;
   }
-  if (serializedData == null) {
-    Compiler compiler = compilerFor(
-        options: [Flags.analyzeAll]);
-    compiler.serialization.supportSerialization = true;
-    await compiler.run(Uris.dart_core);
-    BufferedEventSink sink = new BufferedEventSink();
-    compiler.serialization.serializeToSink(
-        sink, compiler.libraryLoader.libraries);
-    serializedData = new SerializedData(uri, sink.text);
-    if (arguments.saveSerializedData) {
-      File file = new File(arguments.serializedDataFileName);
-      print('Saving data to $file');
-      file.writeAsStringSync(serializedData.data);
-    }
+  if (arguments.saveSerializedData) {
+    File file = new File(arguments.serializedDataFileName);
+    print('Saving data to $file');
+    file.writeAsStringSync(serializedData.data);
   }
   return serializedData;
 }
 
+class SerializationResult {
+  final Compiler compiler;
+  final SerializedData serializedData;
+
+  SerializationResult(this.compiler, this.serializedData);
+}
+
+Future<SerializationResult> serialize(Uri entryPoint,
+    {Map<String, String> memorySourceFiles: const <String, String>{},
+     List<Uri> resolutionInputs: const <Uri>[],
+     Uri dataUri}) async {
+  if (dataUri == null) {
+    dataUri = Uri.parse('memory:${DEFAULT_DATA_FILE_NAME}');
+  }
+  Compiler compiler = compilerFor(
+      options: [Flags.analyzeAll],
+      memorySourceFiles: memorySourceFiles,
+      resolutionInputs: resolutionInputs);
+  compiler.serialization.supportSerialization = true;
+  await compiler.run(entryPoint);
+  BufferedEventSink sink = new BufferedEventSink();
+  compiler.serialization.serializeToSink(
+      sink, compiler.libraryLoader.libraries);
+  SerializedData serializedData = new SerializedData(dataUri, sink.text);
+  return new SerializationResult(compiler, serializedData);
+}
+
 class SerializedData {
   final Uri uri;
   final String data;
@@ -196,9 +219,13 @@
   for (String key in test.preserializedSourceFiles.keys) {
     uriList.add(Uri.parse('memory:$key'));
   }
+  Map<String, String> sourceFiles = serializedData.toMemorySourceFiles();
+  sourceFiles.addAll(test.preserializedSourceFiles);
+  if (test.unserializedSourceFiles != null) {
+    sourceFiles.addAll(test.unserializedSourceFiles);
+  }
   Compiler compiler = compilerFor(
-      memorySourceFiles:
-          serializedData.toMemorySourceFiles(test.preserializedSourceFiles),
+      memorySourceFiles: sourceFiles,
       resolutionInputs: serializedData.toUris(),
       options: [Flags.analyzeOnly, Flags.analyzeMain]);
   compiler.librariesToAnalyzeWhenRun = uriList;
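The serialize() helper extracted above is what the new tests below build on: the serialized output of one compile can be handed back to a second compile as a resolution input. A hedged usage sketch; the helper names and the Uris.dart_core constant are taken from the files in this patch, while the driver function itself is illustrative:

import 'dart:async';
import 'package:compiler/src/common/names.dart';
import 'helper.dart';

// Illustrative driver: serialize dart:core once, then run a second pass
// that consumes the first pass' output as memory sources and resolution
// inputs.
Future roundTrip() async {
  SerializationResult first = await serialize(Uris.dart_core);
  SerializedData data = first.serializedData;
  SerializationResult second = await serialize(Uris.dart_core,
      memorySourceFiles: data.toMemorySourceFiles(),
      resolutionInputs: data.toUris());
  print('Re-serialized ${second.serializedData.data.length} characters.');
}

main() => roundTrip();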
diff --git a/tests/compiler/dart2js/serialization/members_test.dart b/tests/compiler/dart2js/serialization/members_test.dart
new file mode 100644
index 0000000..ef23e87
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/members_test.dart
@@ -0,0 +1,157 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.class_members_test;
+
+import 'dart:async';
+import 'package:async_helper/async_helper.dart';
+import 'package:compiler/src/common/names.dart';
+import 'package:compiler/src/commandline_options.dart';
+import 'package:compiler/src/compiler.dart';
+import 'package:compiler/src/elements/elements.dart';
+import 'package:compiler/src/filenames.dart';
+import 'package:compiler/src/resolution/class_members.dart';
+import 'package:compiler/src/serialization/equivalence.dart';
+import '../memory_compiler.dart';
+import 'helper.dart';
+import 'test_helper.dart';
+
+
+main(List<String> args) {
+  Arguments arguments = new Arguments.from(args);
+  asyncTest(() async {
+    SerializedData serializedData =
+        await serializeDartCore(arguments: arguments);
+    if (arguments.filename != null) {
+      Uri entryPoint = Uri.base.resolve(nativeToUriPath(arguments.filename));
+      await checkClassMembers(
+          serializedData, entryPoint, verbose: arguments.verbose);
+    } else {
+      await checkClassMembers(
+          serializedData, Uris.dart_core, verbose: arguments.verbose);
+    }
+  });
+}
+
+Future checkClassMembers(
+    SerializedData serializedData,
+    Uri entryPoint,
+    {bool verbose: false}) async {
+
+  Compiler compilerNormal = compilerFor(
+      options: [Flags.analyzeAll]);
+  await compilerNormal.run(entryPoint);
+
+  Compiler compilerDeserialized = compilerFor(
+      memorySourceFiles: serializedData.toMemorySourceFiles(),
+      resolutionInputs: serializedData.toUris(),
+      options: [Flags.analyzeAll]);
+  await compilerDeserialized.run(entryPoint);
+
+  checkAllMembers(compilerNormal, compilerDeserialized, verbose: true);
+}
+
+void checkAllMembers(
+    Compiler compiler1,
+    Compiler compiler2,
+    {bool verbose: false}) {
+  checkLoadedLibraryMembers(
+      compiler1,
+      compiler2,
+      (Element member1) => member1 is ClassElement,
+      checkMembers,
+      verbose: verbose);
+}
+
+
+/// Check equivalence of members of [class1] and [class2].
+void checkMembers(
+    Compiler compiler1, ClassMemberMixin class1,
+    Compiler compiler2, ClassMemberMixin class2,
+    {bool verbose: false}) {
+  if (verbose) {
+    print('Checking $class1 vs $class2');
+  }
+  MembersCreator.computeAllClassMembers(compiler1.resolution, class1);
+  MembersCreator.computeAllClassMembers(compiler2.resolution, class2);
+
+  check(class1, class2, 'interfaceMembersAreClassMembers',
+      class1.interfaceMembersAreClassMembers,
+      class2.interfaceMembersAreClassMembers);
+  class1.forEachClassMember((Member member1) {
+    Name name1 = member1.name;
+    Name name2 = convertName(name1, compiler2);
+    checkMember(class1, class2, 'classMember:$name1',
+        member1, class2.lookupClassMember(name2));
+  });
+
+  class1.forEachInterfaceMember((MemberSignature member1) {
+    Name name1 = member1.name;
+    Name name2 = convertName(name1, compiler2);
+    checkMemberSignature(class1, class2, 'interfaceMember:$name1',
+        member1, class2.lookupInterfaceMember(name2));
+  });
+}
+
+Name convertName(Name name, Compiler compiler) {
+  if (name.isPrivate) {
+    LibraryElement library =
+        compiler.libraryLoader.lookupLibrary(name.library.canonicalUri);
+    if (!areElementsEquivalent(name.library, library)) {
+      throw 'Libraries ${name.library} and ${library} are not equivalent';
+    }
+    name = new Name(name.text, library, isSetter: name.isSetter);
+  }
+  return name;
+}
+
+void checkMember(ClassElement class1, ClassElement class2, String property,
+    Member member1, Member member2) {
+  if (member2 == null) {
+    print('$class1 class members:');
+    class1.forEachClassMember((m) => print(' ${m.name} $m'));
+    print('$class2 class members:');
+    class2.forEachClassMember((m) => print(' ${m.name} $m'));
+    throw "No member ${member1.name} in $class2 for $property";
+  }
+  checkMemberSignature(class1, class2, property, member1, member2);
+  checkElementIdentities(class1, class2, '$property.element',
+      member1.element, member2.element);
+  check(class1, class2, '$property.declarer',
+      member1.declarer, member2.declarer, areTypesEquivalent);
+  check(class1, class2, '$property.isStatic',
+      member1.isStatic, member2.isStatic);
+  check(class1, class2, '$property.isDeclaredByField',
+      member1.isDeclaredByField, member2.isDeclaredByField);
+  check(class1, class2, '$property.isAbstract',
+      member1.isAbstract, member2.isAbstract);
+  if (member1.isAbstract && member1.implementation != null) {
+    checkMember(class1, class2, '$property.implementation',
+        member1.implementation, member2.implementation);
+  }
+}
+
+void checkMemberSignature(ClassElement class1, ClassElement class2,
+    String property,
+    MemberSignature member1, MemberSignature member2) {
+  if (member2 == null) {
+    print('$class1 interface members:');
+    class1.forEachInterfaceMember((m) => print(' ${m.name} $m'));
+    print('$class2 interface members:');
+    class2.forEachInterfaceMember((m) => print(' ${m.name} $m'));
+    throw "No member ${member1.name} in $class2 for $property";
+  }
+  check(class1, class2, '$property.name',
+      member1.name, member2.name, areNamesEquivalent);
+  check(class1, class2, '$property.type',
+      member1.type, member2.type, areTypesEquivalent);
+  check(class1, class2, '$property.functionType',
+      member1.functionType, member2.functionType, areTypesEquivalent);
+  check(class1, class2, '$property.isGetter',
+      member1.isGetter, member2.isGetter);
+  check(class1, class2, '$property.isSetter',
+      member1.isSetter, member2.isSetter);
+  check(class1, class2, '$property.isMethod',
+      member1.isMethod, member2.isMethod);
+}
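convertName above exists because private names are library-scoped: a private Name created against compiler1's library object will not match members looked up on compiler2's classes, so it is first rebound to the equivalent library loaded by compiler2. A toy model of the scoping rule (stand-in classes, not the compiler's Name/LibraryElement):

// Stand-in classes illustrating library-scoped private names.
class Library {
  final String uri;
  Library(this.uri);
}

class PrivateName {
  final String text;
  final Library library;
  PrivateName(this.text, this.library);

  bool sameAs(PrivateName other) =>
      text == other.text && identical(library, other.library);
}

main() {
  Library inCompiler1 = new Library('memory:a.dart');
  Library inCompiler2 = new Library('memory:a.dart');
  PrivateName name1 = new PrivateName('_foo', inCompiler1);
  // Same text, but bound to a different library instance: no match.
  print(name1.sameAs(new PrivateName('_foo', inCompiler2))); // false
  // Rebinding against compiler2's library is what makes lookup succeed.
  PrivateName rebound = new PrivateName(name1.text, inCompiler2);
  print(rebound.sameAs(new PrivateName('_foo', inCompiler2))); // true
}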
diff --git a/tests/compiler/dart2js/serialization/model1_test.dart b/tests/compiler/dart2js/serialization/model1_test.dart
new file mode 100644
index 0000000..9582274
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/model1_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.model1_test;
+
+import 'model_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['0', '${TESTS.length ~/ 2}']);
+}
diff --git a/tests/compiler/dart2js/serialization/model2_test.dart b/tests/compiler/dart2js/serialization/model2_test.dart
new file mode 100644
index 0000000..369e4a1
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/model2_test.dart
@@ -0,0 +1,12 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.model2_test;
+
+import 'model_test_helper.dart' as test;
+import 'test_data.dart';
+
+main() {
+  test.main(['${TESTS.length ~/ 2}']);
+}
diff --git a/tests/compiler/dart2js/serialization/model_test.dart b/tests/compiler/dart2js/serialization/model_test.dart
deleted file mode 100644
index dd86acd..0000000
--- a/tests/compiler/dart2js/serialization/model_test.dart
+++ /dev/null
@@ -1,399 +0,0 @@
-// Copyright (c) 2015, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-
-library dart2js.serialization_model_test;
-
-import 'dart:async';
-import 'dart:io';
-import 'package:async_helper/async_helper.dart';
-import 'package:expect/expect.dart';
-import 'package:compiler/src/closure.dart';
-import 'package:compiler/src/commandline_options.dart';
-import 'package:compiler/src/compiler.dart';
-import 'package:compiler/src/elements/elements.dart';
-import 'package:compiler/src/filenames.dart';
-import 'package:compiler/src/serialization/equivalence.dart';
-import 'package:compiler/src/tree/nodes.dart';
-import 'package:compiler/src/universe/class_set.dart';
-import '../memory_compiler.dart';
-import 'helper.dart';
-import 'test_data.dart';
-import 'test_helper.dart';
-
-main(List<String> args) {
-  asyncTest(() async {
-    Arguments arguments = new Arguments.from(args);
-    SerializedData serializedData =
-        await serializeDartCore(arguments: arguments);
-    if (arguments.filename != null) {
-      Uri entryPoint = Uri.base.resolve(nativeToUriPath(arguments.filename));
-      await checkModels(entryPoint,
-          sourceFiles: serializedData.toMemorySourceFiles(),
-          resolutionInputs: serializedData.toUris());
-    } else {
-      Uri entryPoint = Uri.parse('memory:main.dart');
-      arguments.forEachTest(serializedData, TESTS, checkModels);
-    }
-  });
-}
-
-Future checkModels(
-    Uri entryPoint,
-    {Map<String, String> sourceFiles: const <String, String>{},
-     List<Uri> resolutionInputs,
-     int index,
-     Test test,
-     bool verbose: false}) async {
-
-  String testDescription = test != null ? test.name : '${entryPoint}';
-  String id = index != null ? '$index: ' : '';
-  print('------------------------------------------------------------------');
-  print('compile normal ${id}${testDescription}');
-  print('------------------------------------------------------------------');
-  Compiler compilerNormal = compilerFor(
-      memorySourceFiles: sourceFiles,
-      options: [Flags.analyzeOnly]);
-  compilerNormal.resolution.retainCachesForTesting = true;
-  await compilerNormal.run(entryPoint);
-  compilerNormal.phase = Compiler.PHASE_DONE_RESOLVING;
-  compilerNormal.world.populate();
-  compilerNormal.backend.onResolutionComplete();
-
-  print('------------------------------------------------------------------');
-  print('compile deserialized ${id}${testDescription}');
-  print('------------------------------------------------------------------');
-  Compiler compilerDeserialized = compilerFor(
-      memorySourceFiles: sourceFiles,
-      resolutionInputs: resolutionInputs,
-      options: [Flags.analyzeOnly]);
-  compilerDeserialized.resolution.retainCachesForTesting = true;
-  await compilerDeserialized.run(entryPoint);
-  compilerDeserialized.phase = Compiler.PHASE_DONE_RESOLVING;
-  compilerDeserialized.world.populate();
-  compilerDeserialized.backend.onResolutionComplete();
-
-  checkAllImpacts(
-      compilerNormal, compilerDeserialized,
-      verbose: verbose);
-
-  checkSets(
-      compilerNormal.resolverWorld.directlyInstantiatedClasses,
-      compilerDeserialized.resolverWorld.directlyInstantiatedClasses,
-      "Directly instantiated classes mismatch",
-      areElementsEquivalent,
-      verbose: verbose);
-
-  checkSets(
-      compilerNormal.resolverWorld.instantiatedTypes,
-      compilerDeserialized.resolverWorld.instantiatedTypes,
-      "Instantiated types mismatch",
-      areTypesEquivalent,
-      verbose: verbose);
-
-  checkSets(
-      compilerNormal.resolverWorld.isChecks,
-      compilerDeserialized.resolverWorld.isChecks,
-      "Is-check mismatch",
-      areTypesEquivalent,
-      verbose: verbose);
-
-  checkSets(
-      compilerNormal.enqueuer.resolution.processedElements,
-      compilerDeserialized.enqueuer.resolution.processedElements,
-      "Processed element mismatch",
-      areElementsEquivalent,
-      onSameElement: (a, b) {
-        checkElements(
-            compilerNormal, compilerDeserialized, a, b, verbose: verbose);
-      },
-      verbose: verbose);
-
-  checkClassHierarchyNodes(
-      compilerNormal,
-      compilerDeserialized,
-      compilerNormal.world.getClassHierarchyNode(
-          compilerNormal.coreClasses.objectClass),
-      compilerDeserialized.world.getClassHierarchyNode(
-          compilerDeserialized.coreClasses.objectClass),
-      verbose: verbose);
-}
-
-void checkElements(
-    Compiler compiler1, Compiler compiler2,
-    Element element1, Element element2,
-    {bool verbose: false}) {
-  if (element1.isFunction ||
-      element1.isConstructor ||
-      (element1.isField && element1.isInstanceMember)) {
-    AstElement astElement1 = element1;
-    AstElement astElement2 = element2;
-    ClosureClassMap closureData1 =
-    compiler1.closureToClassMapper.computeClosureToClassMapping(
-        astElement1.resolvedAst);
-    ClosureClassMap closureData2 =
-    compiler2.closureToClassMapper.computeClosureToClassMapping(
-        astElement2.resolvedAst);
-
-    checkElementIdentities(closureData1, closureData2,
-        '$element1.closureElement',
-        closureData1.closureElement, closureData2.closureElement);
-    checkElementIdentities(closureData1, closureData2,
-        '$element1.closureClassElement',
-        closureData1.closureClassElement, closureData2.closureClassElement);
-    checkElementIdentities(closureData1, closureData2,
-        '$element1.callElement',
-        closureData1.callElement, closureData2.callElement);
-    check(closureData1, closureData2,
-        '$element1.thisLocal',
-        closureData1.thisLocal, closureData2.thisLocal,
-        areLocalsEquivalent);
-    checkMaps(
-        closureData1.freeVariableMap,
-        closureData2.freeVariableMap,
-        "$element1.freeVariableMap",
-        areLocalsEquivalent,
-        areCapturedVariablesEquivalent,
-        verbose: verbose);
-    checkMaps(
-        closureData1.capturingScopes,
-        closureData2.capturingScopes,
-        "$element1.capturingScopes",
-        areNodesEquivalent,
-        areClosureScopesEquivalent,
-        verbose: verbose,
-        keyToString: nodeToString);
-    checkSets(
-        closureData1.variablesUsedInTryOrGenerator,
-        closureData2.variablesUsedInTryOrGenerator,
-        "$element1.variablesUsedInTryOrGenerator",
-        areLocalsEquivalent,
-        verbose: verbose);
-  }
-}
-
-void checkMixinUses(
-    Compiler compiler1, Compiler compiler2,
-    ClassElement class1, ClassElement class2,
-    {bool verbose: false}) {
-
-  checkSets(
-      compiler1.world.mixinUsesOf(class1),
-      compiler2.world.mixinUsesOf(class2),
-      "Mixin uses of $class1 vs $class2",
-      areElementsEquivalent,
-      verbose: verbose);
-
-}
-
-void checkClassHierarchyNodes(
-    Compiler compiler1,
-    Compiler compiler2,
-    ClassHierarchyNode node1, ClassHierarchyNode node2,
-    {bool verbose: false}) {
-  if (verbose) {
-    print('Checking $node1 vs $node2');
-  }
-  Expect.isTrue(
-      areElementsEquivalent(node1.cls, node2.cls),
-      "Element identity mismatch for ${node1.cls} vs ${node2.cls}.");
-  Expect.equals(
-      node1.isDirectlyInstantiated,
-      node2.isDirectlyInstantiated,
-      "Value mismatch for 'isDirectlyInstantiated' "
-      "for ${node1.cls} vs ${node2.cls}.");
-  Expect.equals(
-      node1.isIndirectlyInstantiated,
-      node2.isIndirectlyInstantiated,
-      "Value mismatch for 'isIndirectlyInstantiated' "
-      "for ${node1.cls} vs ${node2.cls}.");
-  // TODO(johnniwinther): Enforce a canonical and stable order on direct
-  // subclasses.
-  for (ClassHierarchyNode child in node1.directSubclasses) {
-    bool found = false;
-    for (ClassHierarchyNode other in node2.directSubclasses) {
-      if (areElementsEquivalent(child.cls, other.cls)) {
-        checkClassHierarchyNodes(compiler1, compiler2,
-            child, other, verbose: verbose);
-        found = true;
-        break;
-      }
-    }
-    if (!found) {
-      Expect.isFalse(child.isInstantiated,
-          'Missing subclass ${child.cls} of ${node1.cls}');
-    }
-  }
-  checkMixinUses(compiler1, compiler2, node1.cls, node2.cls, verbose: verbose);
-}
-
-void checkSets(
-    Iterable set1,
-    Iterable set2,
-    String messagePrefix,
-    bool sameElement(a, b),
-    {bool failOnUnfound: true,
-     bool verbose: false,
-     void onSameElement(a, b)}) {
-  List common = [];
-  List unfound = [];
-  Set remaining = computeSetDifference(
-      set1, set2, common, unfound,
-      sameElement: sameElement,
-      checkElements: onSameElement);
-  StringBuffer sb = new StringBuffer();
-  sb.write("$messagePrefix:");
-  if (verbose) {
-    sb.write("\n Common:\n  ${common.join('\n  ')}");
-  }
-  if (unfound.isNotEmpty || verbose) {
-    sb.write("\n Unfound:\n  ${unfound.join('\n  ')}");
-  }
-  if (remaining.isNotEmpty || verbose) {
-    sb.write("\n Extra: \n  ${remaining.join('\n  ')}");
-  }
-  String message = sb.toString();
-  if (unfound.isNotEmpty || remaining.isNotEmpty) {
-
-    if (failOnUnfound || remaining.isNotEmpty) {
-      Expect.fail(message);
-    } else {
-      print(message);
-    }
-  } else if (verbose) {
-    print(message);
-  }
-}
-
-String defaultToString(obj) => '$obj';
-
-void checkMaps(
-    Map map1,
-    Map map2,
-    String messagePrefix,
-    bool sameKey(a, b),
-    bool sameValue(a, b),
-    {bool failOnUnfound: true,
-     bool failOnMismatch: true,
-     bool verbose: false,
-     String keyToString(key): defaultToString,
-     String valueToString(key): defaultToString}) {
-  List common = [];
-  List unfound = [];
-  List<List> mismatch = <List>[];
-  Set remaining = computeSetDifference(
-      map1.keys, map2.keys, common, unfound,
-      sameElement: sameKey,
-      checkElements: (k1, k2) {
-        var v1 = map1[k1];
-        var v2 = map2[k2];
-        if (!sameValue(v1, v2)) {
-          mismatch.add([k1, k2]);
-        }
-      });
-  StringBuffer sb = new StringBuffer();
-  sb.write("$messagePrefix:");
-  if (verbose) {
-    sb.write("\n Common: \n");
-    for (List pair in common) {
-      var k1 = pair[0];
-      var k2 = pair[1];
-      var v1 = map1[k1];
-      var v2 = map2[k2];
-      sb.write(" key1   =${keyToString(k1)}\n");
-      sb.write(" key2   =${keyToString(k2)}\n");
-      sb.write("  value1=${valueToString(v1)}\n");
-      sb.write("  value2=${valueToString(v2)}\n");
-    }
-  }
-  if (unfound.isNotEmpty || verbose) {
-    sb.write("\n Unfound: \n");
-    for (var k1 in unfound) {
-      var v1 = map1[k1];
-      sb.write(" key1   =${keyToString(k1)}\n");
-      sb.write("  value1=${valueToString(v1)}\n");
-    }
-  }
-  if (remaining.isNotEmpty || verbose) {
-    sb.write("\n Extra: \n");
-    for (var k2 in remaining) {
-      var v2 = map2[k2];
-      sb.write(" key2   =${keyToString(k2)}\n");
-      sb.write("  value2=${valueToString(v2)}\n");
-    }
-  }
-  if (mismatch.isNotEmpty || verbose) {
-    sb.write("\n Mismatch: \n");
-    for (List pair in mismatch) {
-      var k1 = pair[0];
-      var k2 = pair[1];
-      var v1 = map1[k1];
-      var v2 = map2[k2];
-      sb.write(" key1   =${keyToString(k1)}\n");
-      sb.write(" key2   =${keyToString(k2)}\n");
-      sb.write("  value1=${valueToString(v1)}\n");
-      sb.write("  value2=${valueToString(v2)}\n");
-    }
-  }
-  String message = sb.toString();
-  if (unfound.isNotEmpty || mismatch.isNotEmpty || remaining.isNotEmpty) {
-    if ((unfound.isNotEmpty && failOnUnfound) ||
-        (mismatch.isNotEmpty && failOnMismatch) ||
-        remaining.isNotEmpty) {
-      Expect.fail(message);
-    } else {
-      print(message);
-    }
-  } else if (verbose) {
-    print(message);
-  }
-}
-
-bool areLocalsEquivalent(Local a, Local b) {
-  if (a == b) return true;
-  if (a == null || b == null) return false;
-
-  if (a is Element) {
-    return b is Element && areElementsEquivalent(a as Element, b as Element);
-  } else {
-    return a.runtimeType == b.runtimeType &&
-        areElementsEquivalent(a.executableContext, b.executableContext);
-  }
-}
-
-bool areCapturedVariablesEquivalent(CapturedVariable a, CapturedVariable b) {
-  if (a == b) return true;
-  if (a == null || b == null) return false;
-  if (a is ClosureFieldElement && b is ClosureFieldElement) {
-    return areElementsEquivalent(a.closureClass, b.closureClass) &&
-      areLocalsEquivalent(a.local, b.local);
-  } else if (a is BoxFieldElement && b is BoxFieldElement) {
-    return areElementsEquivalent(a.variableElement, b.variableElement) &&
-        areLocalsEquivalent(a.box, b.box);
-  }
-  return false;
-}
-
-bool areClosureScopesEquivalent(ClosureScope a, ClosureScope b) {
-  if (a == b) return true;
-  if (a == null || b == null) return false;
-  if (!areLocalsEquivalent(a.boxElement, b.boxElement)) {
-    return false;
-  }
-  checkMaps(a.capturedVariables, b.capturedVariables,
-      'ClosureScope.capturedVariables',
-      areLocalsEquivalent,
-      areElementsEquivalent);
-  checkSets(a.boxedLoopVariables, b.boxedLoopVariables,
-      'ClosureScope.boxedLoopVariables',
-      areElementsEquivalent);
-  return true;
-}
-
-String nodeToString(Node node) {
-  String text = '$node';
-  if (text.length > 40) {
-    return '(${node.runtimeType}) ${text.substring(0, 37)}...';
-  }
-  return '(${node.runtimeType}) $text';
-}
\ No newline at end of file
diff --git a/tests/compiler/dart2js/serialization/model_test_helper.dart b/tests/compiler/dart2js/serialization/model_test_helper.dart
new file mode 100644
index 0000000..2c6d861
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/model_test_helper.dart
@@ -0,0 +1,305 @@
+// Copyright (c) 2015, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization_model_test;
+
+import 'dart:async';
+import 'dart:io';
+import 'package:async_helper/async_helper.dart';
+import 'package:expect/expect.dart';
+import 'package:compiler/src/closure.dart';
+import 'package:compiler/src/commandline_options.dart';
+import 'package:compiler/src/compiler.dart';
+import 'package:compiler/src/elements/elements.dart';
+import 'package:compiler/src/filenames.dart';
+import 'package:compiler/src/js_backend/js_backend.dart';
+import 'package:compiler/src/serialization/equivalence.dart';
+import 'package:compiler/src/tree/nodes.dart';
+import 'package:compiler/src/universe/class_set.dart';
+import '../memory_compiler.dart';
+import 'helper.dart';
+import 'test_data.dart';
+import 'test_helper.dart';
+
+main(List<String> args) {
+  asyncTest(() async {
+    Arguments arguments = new Arguments.from(args);
+    SerializedData serializedData =
+        await serializeDartCore(arguments: arguments);
+    if (arguments.filename != null) {
+      Uri entryPoint = Uri.base.resolve(nativeToUriPath(arguments.filename));
+      await checkModels(entryPoint,
+          sourceFiles: serializedData.toMemorySourceFiles(),
+          resolutionInputs: serializedData.toUris());
+    } else {
+      Uri entryPoint = Uri.parse('memory:main.dart');
+      await arguments.forEachTest(serializedData, TESTS, checkModels);
+    }
+  });
+}
+
+Future checkModels(
+    Uri entryPoint,
+    {Map<String, String> sourceFiles: const <String, String>{},
+     List<Uri> resolutionInputs,
+     int index,
+     Test test,
+     bool verbose: false}) async {
+  String testDescription = test != null ? test.name : '${entryPoint}';
+  String id = index != null ? '$index: ' : '';
+  print('------------------------------------------------------------------');
+  print('compile normal ${id}${testDescription}');
+  print('------------------------------------------------------------------');
+  Compiler compilerNormal = compilerFor(
+      memorySourceFiles: sourceFiles,
+      options: [Flags.analyzeOnly]);
+  compilerNormal.resolution.retainCachesForTesting = true;
+  await compilerNormal.run(entryPoint);
+  compilerNormal.phase = Compiler.PHASE_DONE_RESOLVING;
+  compilerNormal.world.populate();
+  compilerNormal.backend.onResolutionComplete();
+
+  print('------------------------------------------------------------------');
+  print('compile deserialized ${id}${testDescription}');
+  print('------------------------------------------------------------------');
+  Compiler compilerDeserialized = compilerFor(
+      memorySourceFiles: sourceFiles,
+      resolutionInputs: resolutionInputs,
+      options: [Flags.analyzeOnly]);
+  compilerDeserialized.resolution.retainCachesForTesting = true;
+  await compilerDeserialized.run(entryPoint);
+  compilerDeserialized.phase = Compiler.PHASE_DONE_RESOLVING;
+  compilerDeserialized.world.populate();
+  compilerDeserialized.backend.onResolutionComplete();
+
+  checkAllImpacts(
+      compilerNormal, compilerDeserialized,
+      verbose: verbose);
+
+  checkSets(
+      compilerNormal.resolverWorld.directlyInstantiatedClasses,
+      compilerDeserialized.resolverWorld.directlyInstantiatedClasses,
+      "Directly instantiated classes mismatch",
+      areElementsEquivalent,
+      verbose: verbose);
+
+  checkSets(
+      compilerNormal.resolverWorld.instantiatedTypes,
+      compilerDeserialized.resolverWorld.instantiatedTypes,
+      "Instantiated types mismatch",
+      areTypesEquivalent,
+      verbose: verbose);
+
+  checkSets(
+      compilerNormal.resolverWorld.isChecks,
+      compilerDeserialized.resolverWorld.isChecks,
+      "Is-check mismatch",
+      areTypesEquivalent,
+      verbose: verbose);
+
+  checkSets(
+      compilerNormal.enqueuer.resolution.processedElements,
+      compilerDeserialized.enqueuer.resolution.processedElements,
+      "Processed element mismatch",
+      areElementsEquivalent,
+      onSameElement: (a, b) {
+        checkElements(
+            compilerNormal, compilerDeserialized, a, b, verbose: verbose);
+      },
+      verbose: verbose);
+
+  checkClassHierarchyNodes(
+      compilerNormal,
+      compilerDeserialized,
+      compilerNormal.world.getClassHierarchyNode(
+          compilerNormal.coreClasses.objectClass),
+      compilerDeserialized.world.getClassHierarchyNode(
+          compilerDeserialized.coreClasses.objectClass),
+      verbose: verbose);
+
+  Expect.equals(compilerNormal.enabledInvokeOn,
+      compilerDeserialized.enabledInvokeOn,
+      "Compiler.enabledInvokeOn mismatch");
+  Expect.equals(compilerNormal.enabledFunctionApply,
+      compilerDeserialized.enabledFunctionApply,
+      "Compiler.enabledFunctionApply mismatch");
+  Expect.equals(compilerNormal.enabledRuntimeType,
+      compilerDeserialized.enabledRuntimeType,
+      "Compiler.enabledRuntimeType mismatch");
+  Expect.equals(compilerNormal.hasIsolateSupport,
+      compilerDeserialized.hasIsolateSupport,
+      "Compiler.hasIsolateSupport mismatch");
+}
+
+void checkElements(
+    Compiler compiler1, Compiler compiler2,
+    Element element1, Element element2,
+    {bool verbose: false}) {
+  if (element1.isFunction ||
+      element1.isConstructor ||
+      (element1.isField && element1.isInstanceMember)) {
+    AstElement astElement1 = element1;
+    AstElement astElement2 = element2;
+    ClosureClassMap closureData1 =
+        compiler1.closureToClassMapper.computeClosureToClassMapping(
+            astElement1.resolvedAst);
+    ClosureClassMap closureData2 =
+        compiler2.closureToClassMapper.computeClosureToClassMapping(
+            astElement2.resolvedAst);
+
+    checkElementIdentities(closureData1, closureData2,
+        '$element1.closureElement',
+        closureData1.closureElement, closureData2.closureElement);
+    checkElementIdentities(closureData1, closureData2,
+        '$element1.closureClassElement',
+        closureData1.closureClassElement, closureData2.closureClassElement);
+    checkElementIdentities(closureData1, closureData2,
+        '$element1.callElement',
+        closureData1.callElement, closureData2.callElement);
+    check(closureData1, closureData2,
+        '$element1.thisLocal',
+        closureData1.thisLocal, closureData2.thisLocal,
+        areLocalsEquivalent);
+    checkMaps(
+        closureData1.freeVariableMap,
+        closureData2.freeVariableMap,
+        "$element1.freeVariableMap",
+        areLocalsEquivalent,
+        areCapturedVariablesEquivalent,
+        verbose: verbose);
+    checkMaps(
+        closureData1.capturingScopes,
+        closureData2.capturingScopes,
+        "$element1.capturingScopes",
+        areNodesEquivalent,
+        areClosureScopesEquivalent,
+        verbose: verbose,
+        keyToString: nodeToString);
+    checkSets(
+        closureData1.variablesUsedInTryOrGenerator,
+        closureData2.variablesUsedInTryOrGenerator,
+        "$element1.variablesUsedInTryOrGenerator",
+        areLocalsEquivalent,
+        verbose: verbose);
+  }
+  JavaScriptBackend backend1 = compiler1.backend;
+  JavaScriptBackend backend2 = compiler2.backend;
+  Expect.equals(
+      backend1.inlineCache.getCurrentCacheDecisionForTesting(element1),
+      backend2.inlineCache.getCurrentCacheDecisionForTesting(element2),
+      "Inline cache decision mismatch for $element1 vs $element2");
+}
+
+void checkMixinUses(
+    Compiler compiler1, Compiler compiler2,
+    ClassElement class1, ClassElement class2,
+    {bool verbose: false}) {
+
+  checkSets(
+      compiler1.world.mixinUsesOf(class1),
+      compiler2.world.mixinUsesOf(class2),
+      "Mixin uses of $class1 vs $class2",
+      areElementsEquivalent,
+      verbose: verbose);
+
+}
+
+void checkClassHierarchyNodes(
+    Compiler compiler1,
+    Compiler compiler2,
+    ClassHierarchyNode node1, ClassHierarchyNode node2,
+    {bool verbose: false}) {
+  if (verbose) {
+    print('Checking $node1 vs $node2');
+  }
+  Expect.isTrue(
+      areElementsEquivalent(node1.cls, node2.cls),
+      "Element identity mismatch for ${node1.cls} vs ${node2.cls}.");
+  Expect.equals(
+      node1.isDirectlyInstantiated,
+      node2.isDirectlyInstantiated,
+      "Value mismatch for 'isDirectlyInstantiated' "
+      "for ${node1.cls} vs ${node2.cls}.");
+  Expect.equals(
+      node1.isIndirectlyInstantiated,
+      node2.isIndirectlyInstantiated,
+      "Value mismatch for 'isIndirectlyInstantiated' "
+      "for ${node1.cls} vs ${node2.cls}.");
+  // TODO(johnniwinther): Enforce a canonical and stable order on direct
+  // subclasses.
+  for (ClassHierarchyNode child in node1.directSubclasses) {
+    bool found = false;
+    for (ClassHierarchyNode other in node2.directSubclasses) {
+      if (areElementsEquivalent(child.cls, other.cls)) {
+        checkClassHierarchyNodes(compiler1, compiler2,
+            child, other, verbose: verbose);
+        found = true;
+        break;
+      }
+    }
+    if (!found) {
+      if (child.isInstantiated) {
+        print('Missing subclass ${child.cls} of ${node1.cls} '
+            'in ${node2.directSubclasses}');
+        print(compiler1.world.dump(
+            verbose ? compiler1.coreClasses.objectClass : node1.cls));
+        print(compiler2.world.dump(
+            verbose ? compiler2.coreClasses.objectClass : node2.cls));
+      }
+      Expect.isFalse(child.isInstantiated,
+          'Missing subclass ${child.cls} of ${node1.cls} in '
+              '${node2.directSubclasses}');
+    }
+  }
+  checkMixinUses(compiler1, compiler2, node1.cls, node2.cls, verbose: verbose);
+}
+
+bool areLocalsEquivalent(Local a, Local b) {
+  if (a == b) return true;
+  if (a == null || b == null) return false;
+
+  if (a is Element) {
+    return b is Element && areElementsEquivalent(a as Element, b as Element);
+  } else {
+    return a.runtimeType == b.runtimeType &&
+        areElementsEquivalent(a.executableContext, b.executableContext);
+  }
+}
+
+bool areCapturedVariablesEquivalent(CapturedVariable a, CapturedVariable b) {
+  if (a == b) return true;
+  if (a == null || b == null) return false;
+  if (a is ClosureFieldElement && b is ClosureFieldElement) {
+    return areElementsEquivalent(a.closureClass, b.closureClass) &&
+      areLocalsEquivalent(a.local, b.local);
+  } else if (a is BoxFieldElement && b is BoxFieldElement) {
+    return areElementsEquivalent(a.variableElement, b.variableElement) &&
+        areLocalsEquivalent(a.box, b.box);
+  }
+  return false;
+}
+
+bool areClosureScopesEquivalent(ClosureScope a, ClosureScope b) {
+  if (a == b) return true;
+  if (a == null || b == null) return false;
+  if (!areLocalsEquivalent(a.boxElement, b.boxElement)) {
+    return false;
+  }
+  checkMaps(a.capturedVariables, b.capturedVariables,
+      'ClosureScope.capturedVariables',
+      areLocalsEquivalent,
+      areElementsEquivalent);
+  checkSets(a.boxedLoopVariables, b.boxedLoopVariables,
+      'ClosureScope.boxedLoopVariables',
+      areElementsEquivalent);
+  return true;
+}
+
+String nodeToString(Node node) {
+  String text = '$node';
+  if (text.length > 40) {
+    return '(${node.runtimeType}) ${text.substring(0, 37)}...';
+  }
+  return '(${node.runtimeType}) $text';
+}
\ No newline at end of file
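checkSets and checkMaps above both delegate to computeSetDifference from test_helper.dart, which this patch does not touch. As a rough, hedged approximation of what that routine provides: pair up elements of the two collections under a caller-supplied equivalence predicate, collect the matched pairs and the elements of the first collection that went unmatched, and return whatever is left over in the second.

// Standalone approximation (not the real test_helper.dart implementation).
Set computeSetDifferenceSketch(
    Iterable set1, Iterable set2, List common, List unfound,
    {bool sameElement(a, b), void checkElements(a, b)}) {
  List remaining = set2.toList();
  for (var a in set1) {
    var match;
    for (var b in remaining) {
      if (sameElement(a, b)) {
        match = b;
        break;
      }
    }
    if (match != null) {
      remaining.remove(match);
      common.add([a, match]);
      if (checkElements != null) checkElements(a, match);
    } else {
      unfound.add(a);
    }
  }
  return remaining.toSet();
}

main() {
  List common = [], unfound = [];
  Set extra = computeSetDifferenceSketch(
      [1, 2, 3], [2, 3, 4], common, unfound,
      sameElement: (a, b) => a == b);
  print('common=$common unfound=$unfound extra=$extra');
}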
diff --git a/tests/compiler/dart2js/serialization/native_data_test.dart b/tests/compiler/dart2js/serialization/native_data_test.dart
new file mode 100644
index 0000000..d2a9173
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/native_data_test.dart
@@ -0,0 +1,100 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.serialization.native_data_test;
+
+import 'dart:async';
+import 'package:async_helper/async_helper.dart';
+import 'package:compiler/src/common/names.dart';
+import 'package:compiler/src/commandline_options.dart';
+import 'package:compiler/src/compiler.dart';
+import 'package:compiler/src/js_backend/js_backend.dart';
+import 'package:compiler/src/js_backend/native_data.dart';
+import 'package:compiler/src/filenames.dart';
+import 'package:compiler/src/serialization/equivalence.dart';
+import '../memory_compiler.dart';
+import 'helper.dart';
+import 'test_helper.dart';
+
+main(List<String> args) {
+  asyncTest(() async {
+    Arguments arguments = new Arguments.from(args);
+    Uri uri = Uris.dart_html;
+    if (arguments.filename != null) {
+      uri = Uri.base.resolve(nativeToUriPath(arguments.filename));
+    }
+    await checkNativeData(uri, verbose: arguments.verbose);
+  });
+}
+
+Future checkNativeData(Uri uri, {bool verbose: false}) async {
+  print('------------------------------------------------------------------');
+  print('analyze normal: $uri');
+  print('------------------------------------------------------------------');
+  SerializationResult result = await serialize(uri);
+  Compiler compiler1 = result.compiler;
+  SerializedData serializedData = result.serializedData;
+
+  print('------------------------------------------------------------------');
+  print('analyze deserialized: $uri');
+  print('------------------------------------------------------------------');
+  Compiler compiler2 = compilerFor(
+      memorySourceFiles: serializedData.toMemorySourceFiles(),
+      resolutionInputs: serializedData.toUris(),
+      options: [Flags.analyzeAll]);
+  await compiler2.run(uri);
+
+  JavaScriptBackend backend1 = compiler1.backend;
+  JavaScriptBackend backend2 = compiler2.backend;
+  NativeData nativeData1 = backend1.nativeData;
+  NativeData nativeData2 = backend2.nativeData;
+
+  checkMaps(
+      nativeData1.jsInteropNames,
+      nativeData2.jsInteropNames,
+      "NativeData.jsInteropNames",
+      areElementsEquivalent,
+      equality,
+      verbose: verbose);
+
+  checkMaps(
+      nativeData1.nativeMemberName,
+      nativeData2.nativeMemberName,
+      "NativeData.nativeMemberName",
+      areElementsEquivalent,
+      equality,
+      verbose: verbose);
+
+  checkMaps(
+      nativeData1.nativeClassTagInfo,
+      nativeData2.nativeClassTagInfo,
+      "NativeData.nativeClassTagInfo",
+      areElementsEquivalent,
+      equality,
+      verbose: verbose);
+
+  checkMaps(
+      nativeData1.nativeMethodBehavior,
+      nativeData2.nativeMethodBehavior,
+      "NativeData.nativeMethodBehavior",
+      areElementsEquivalent,
+      testNativeBehavior,
+      verbose: verbose);
+
+  checkMaps(
+      nativeData1.nativeFieldLoadBehavior,
+      nativeData2.nativeFieldLoadBehavior,
+      "NativeData.nativeFieldLoadBehavior",
+      areElementsEquivalent,
+      testNativeBehavior,
+      verbose: verbose);
+
+  checkMaps(
+      nativeData1.nativeFieldStoreBehavior,
+      nativeData2.nativeFieldStoreBehavior,
+      "NativeData.nativeFieldStoreBehavior",
+      areElementsEquivalent,
+      testNativeBehavior,
+      verbose: verbose);
+}
diff --git a/tests/compiler/dart2js/serialization/reserialization_test.dart b/tests/compiler/dart2js/serialization/reserialization_test.dart
new file mode 100644
index 0000000..691bdab
--- /dev/null
+++ b/tests/compiler/dart2js/serialization/reserialization_test.dart
@@ -0,0 +1,73 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+library dart2js.reserialization_test;
+
+import 'dart:async';
+import 'package:async_helper/async_helper.dart';
+import 'package:compiler/src/compiler.dart';
+import 'package:compiler/src/diagnostics/invariant.dart';
+import 'package:compiler/src/elements/elements.dart';
+import 'package:expect/expect.dart';
+import 'helper.dart';
+import 'test_helper.dart';
+import 'equivalence_test.dart';
+
+main(List<String> args) {
+  // Ensure that we can print out constant expressions.
+  DEBUG_MODE = true;
+
+  Arguments arguments = new Arguments.from(args);
+  Uri entryPoint;
+  if (arguments.filename != null) {
+    entryPoint = Uri.parse(arguments.filename);
+  } else {
+    entryPoint = Uri.parse('dart:core');
+  }
+  asyncTest(() async {
+    await testReserialization(entryPoint);
+  });
+}
+
+Future testReserialization(Uri entryPoint) async {
+  SerializationResult result1 = await serialize(entryPoint);
+  Compiler compiler1 = result1.compiler;
+  SerializedData serializedData1 = result1.serializedData;
+  Iterable<LibraryElement> libraries1 = compiler1.libraryLoader.libraries;
+
+  SerializationResult result2 = await serialize(entryPoint,
+      memorySourceFiles: serializedData1.toMemorySourceFiles(),
+      resolutionInputs: serializedData1.toUris());
+  Compiler compiler2 = result2.compiler;
+  SerializedData serializedData2 = result2.serializedData;
+  Iterable<LibraryElement> libraries2 = compiler2.libraryLoader.libraries;
+
+  SerializationResult result3 = await serialize(entryPoint,
+      memorySourceFiles: serializedData2.toMemorySourceFiles(),
+      resolutionInputs: serializedData2.toUris());
+  Compiler compiler3 = result3.compiler;
+  Iterable<LibraryElement> libraries3 = compiler3.libraryLoader.libraries;
+
+  for (LibraryElement library1 in libraries1) {
+    LibraryElement library2 = libraries2.firstWhere((LibraryElement library2) {
+      return library2.canonicalUri == library1.canonicalUri;
+    });
+    Expect.isNotNull(library2,
+        "No library found for ${library1.canonicalUri}.");
+    checkLibraryContent('library1', 'library2', 'library', library1, library2);
+
+    LibraryElement library3 = libraries3.firstWhere((LibraryElement library3) {
+      return library3.canonicalUri == library1.canonicalUri;
+    }, orElse: () => null);
+    Expect.isNotNull(library3,
+        "No library found for ${library1.canonicalUri}.");
+    checkLibraryContent('library1', 'library3', 'library', library1, library3);
+  }
+
+  checkAllResolvedAsts(compiler1, compiler2);
+  checkAllResolvedAsts(compiler1, compiler3);
+
+  checkAllImpacts(compiler1, compiler2);
+  checkAllImpacts(compiler1, compiler3);
+}
diff --git a/tests/compiler/dart2js/serialization/resolved_ast_test.dart b/tests/compiler/dart2js/serialization/resolved_ast_test.dart
index 4610076..d39f906 100644
--- a/tests/compiler/dart2js/serialization/resolved_ast_test.dart
+++ b/tests/compiler/dart2js/serialization/resolved_ast_test.dart
@@ -55,39 +55,3 @@
 
   checkAllResolvedAsts(compilerNormal, compilerDeserialized, verbose: true);
 }
-
-void checkAllResolvedAsts(
-    Compiler compiler1,
-    Compiler compiler2,
-    {bool verbose: false}) {
-  checkLoadedLibraryMembers(
-      compiler1,
-      compiler2,
-      (Element member1) {
-        return member1 is ExecutableElement &&
-            compiler1.resolution.hasResolvedAst(member1);
-      },
-      checkResolvedAsts,
-      verbose: verbose);
-}
-
-
-/// Check equivalence of [impact1] and [impact2].
-void checkResolvedAsts(Compiler compiler1, Element member1,
-                       Compiler compiler2, Element member2,
-                       {bool verbose: false}) {
-  if (!compiler2.serialization.isDeserialized(member2)) {
-    return;
-  }
-  ResolvedAst resolvedAst1 = compiler1.resolution.getResolvedAst(member1);
-  ResolvedAst resolvedAst2 = compiler2.serialization.getResolvedAst(member2);
-
-  if (resolvedAst1 == null || resolvedAst2 == null) return;
-
-  if (verbose) {
-    print('Checking resolved asts for $member1 vs $member2');
-  }
-
-  testResolvedAstEquivalence(
-      resolvedAst1, resolvedAst2, const CheckStrategy());
-}
diff --git a/tests/compiler/dart2js/serialization/test_data.dart b/tests/compiler/dart2js/serialization/test_data.dart
index 49f3f44..83cf2da 100644
--- a/tests/compiler/dart2js/serialization/test_data.dart
+++ b/tests/compiler/dart2js/serialization/test_data.dart
@@ -5,6 +5,18 @@
 library dart2js.serialization_test_data;
 
 const List<Test> TESTS = const <Test>[
+  // This test is very long-running, so it is placed first and compiled on its
+  // own in compilation0_test.dart.
+  const Test('Disable tree shaking through reflection', const {
+    'main.dart': '''
+import 'dart:mirrors';
+
+main() {
+  reflect(null).invoke(#toString, []).reflectee;
+}
+''',
+  }, expectedWarningCount: 1),
+
   const Test('Empty program', const {
     'main.dart': 'main() {}'
   }),
@@ -308,12 +320,239 @@
 class C = S with M;
 ''',
       }),
+
+  const Test('Import mirrors, thus checking import paths', const {
+    'main.dart': '''
+import 'dart:mirrors';
+main() {}
+''',
+  }, expectedWarningCount: 1),
+
+  const Test('Serialized symbol literal', const {
+    'main.dart': '''
+import 'lib.dart';
+main() => m();
+''',
+  }, preserializedSourceFiles: const {
+    'lib.dart': '''
+m() => print(#main);
+''',
+  }),
+
+  const Test('Indirect unserialized library', const {
+    'main.dart': '''
+import 'a.dart';
+main() => foo();
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'memory:b.dart';
+foo() => bar();
+''',
+  }, unserializedSourceFiles: const {
+    'b.dart': '''
+import 'memory:a.dart';
+bar() => foo();
+''',
+  }),
+
+  const Test('Multiple structurally identical mixins', const {
+    'main.dart': '''
+class S {}
+class M {}
+class C1 extends S with M {}
+class C2 extends S with M {}
+main() {
+  new C1();
+  new C2();
+}
+''',
+  }),
+
+  const Test('Deferred loading', const {
+    'main.dart': '''
+import 'a.dart' deferred as lib;
+main() {
+  lib.foo();
+}
+''',
+    'a.dart': '''
+void foo() {}
+''',
+  }),
+
+  const Test('fromEnvironment constants', const {
+    'main.dart': '''
+main() => const String.fromEnvironment("foo");
+''',
+  }),
+
+  const Test('Unused noSuchMethod', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  new A().m();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class A {
+  noSuchMethod(_) => null;
+  m();
+}
+''',
+  }),
+
+  const Test('Malformed types', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  m();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+Unresolved m() {}
+''',
+  }),
+
+  const Test('Function types for closures', const {
+    'main.dart': '''
+import 'a.dart';
+
+typedef Func();
+
+main(args) {
+  (args.isEmpty ? new B() : new C()) is Func;
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class B {
+  call(a) {}
+}
+class C {
+  call() {}
+}
+''',
+  }),
+
+  const Test('Double literal in constant constructor', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  const A(1.0);
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class A {
+  final field1;
+  const A(a) : this.field1 = a + 1.0;
+}
+''',
+  }),
+
+  const Test('If-null expression in constant constructor', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  const A(1.0);
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class A {
+  final field1;
+  const A(a) : this.field1 = a ?? 1.0;
+}
+''',
+  }),
+
+  const Test('Forwarding constructor defined by forwarding constructor', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() => new C();
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+class A {}
+class B {}
+class C {}
+class D = A with B, C;
+''',
+    'b.dart': '''
+''',
+  }),
+
+  const Test('Deferred prefix loadLibrary', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  test();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'b.dart' deferred as pre;
+test() {
+  pre.loadLibrary();
+}
+''',
+    'b.dart': '''
+''',
+  }),
+
+  const Test('Deferred without prefix', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  test();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'b.dart' deferred;
+test() {}
+''',
+    'b.dart': '''
+''',
+  }, expectedErrorCount: 1),
+
+  const Test('Deferred with duplicate prefix', const {
+    'main.dart': '''
+import 'a.dart';
+
+main() {
+  test();
+}
+''',
+  }, preserializedSourceFiles: const {
+    'a.dart': '''
+import 'b.dart' deferred as pre;
+import 'c.dart' deferred as pre;
+test() {}
+''',
+    'b.dart': '''
+''',
+    'c.dart': '''
+''',
+  }, expectedErrorCount: 1),
 ];
 
 class Test {
   final String name;
   final Map sourceFiles;
   final Map preserializedSourceFiles;
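+  /// Source files that are not serialized up front but loaded from source.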
+  final Map unserializedSourceFiles;
   final int expectedErrorCount;
   final int expectedWarningCount;
   final int expectedHintCount;
@@ -323,6 +562,7 @@
       this.name,
       this.sourceFiles,
       {this.preserializedSourceFiles,
+      this.unserializedSourceFiles,
       this.expectedErrorCount: 0,
       this.expectedWarningCount: 0,
       this.expectedHintCount: 0,
diff --git a/tests/compiler/dart2js/serialization/test_helper.dart b/tests/compiler/dart2js/serialization/test_helper.dart
index 496d0e3..9d3115f 100644
--- a/tests/compiler/dart2js/serialization/test_helper.dart
+++ b/tests/compiler/dart2js/serialization/test_helper.dart
@@ -4,6 +4,7 @@
 
 library dart2js.serialization_test_helper;
 
+import 'dart:collection';
 import 'package:compiler/src/common/resolution.dart';
 import 'package:compiler/src/constants/expressions.dart';
 import 'package:compiler/src/dart_types.dart';
@@ -11,6 +12,41 @@
 import 'package:compiler/src/elements/elements.dart';
 import 'package:compiler/src/serialization/equivalence.dart';
 import 'package:compiler/src/tree/nodes.dart';
+import 'package:expect/expect.dart';
+
+Check currentCheck;
+
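+/// A single link in the chain of nested equivalence checks, used to build a
+/// readable trace of the failing property path when a check throws.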
+class Check {
+  final Check parent;
+  final Object object1;
+  final Object object2;
+  final String property;
+  final Object value1;
+  final Object value2;
+
+  Check(this.parent, this.object1, this.object2, this.property, this.value1,
+      this.value2);
+
+  String printOn(StringBuffer sb, String indent) {
+    if (parent != null) {
+      indent = parent.printOn(sb, indent);
+      sb.write('\n$indent|\n');
+    }
+    sb.write("${indent}property='$property'\n ");
+    sb.write("${indent}object1=$object1 (${object1.runtimeType})\n ");
+    sb.write("${indent}value=${value1 == null ? "null" : "'$value1'"} ");
+    sb.write("(${value1.runtimeType}) vs\n ");
+    sb.write("${indent}object2=$object2 (${object2.runtimeType})\n ");
+    sb.write("${indent}value=${value2 == null ? "null" : "'$value2'"} ");
+    sb.write("(${value2.runtimeType})");
+    return ' $indent';
+  }
+
+  String toString() {
+    StringBuffer sb = new StringBuffer();
+    printOn(sb, '');
+    return sb.toString();
+  }
+}
 
 /// Strategy for checking equivalence.
 ///
@@ -99,15 +135,12 @@
 /// [value2] respectively, are equal and throw otherwise.
 bool check(var object1, var object2, String property, var value1, var value2,
            [bool equivalence(a, b) = equality]) {
+  currentCheck = new Check(
+      currentCheck, object1, object2, property, value1, value2);
   if (!equivalence(value1, value2)) {
-    throw "property='$property'\n "
-          "object1=$object1 (${object1.runtimeType})\n "
-          "value=${value1 == null ? "null" : "'$value1'"} "
-          "(${value1.runtimeType}) <>\n "
-          "object2=$object2 (${object2.runtimeType})\n "
-          "value=${value2 == null ? "null" : "'$value2'"} "
-          "(${value2.runtimeType})";
+    throw currentCheck;
   }
+  currentCheck = currentCheck.parent;
   return true;
 }
 
@@ -119,6 +152,8 @@
     Object object1, Object object2, String property,
     Iterable list1, Iterable list2,
     void checkEquivalence(o1, o2, property, a, b)) {
+  currentCheck =
+      new Check(currentCheck, object1, object2, property, list1, list2);
   for (int i = 0; i < list1.length && i < list2.length; i++) {
     checkEquivalence(
         object1, object2, property,
@@ -138,6 +173,7 @@
         '`${property}` on $object1:\n ${list1.join('\n ')}\n'
         '`${property}` on $object2:\n ${list2.join('\n ')}';
   }
+  currentCheck = currentCheck.parent;
   return true;
 }
 
@@ -150,7 +186,7 @@
 Set computeSetDifference(
     Iterable set1,
     Iterable set2,
-    List common,
+    List<List> common,
     List unfound,
     {bool sameElement(a, b): equality,
      void checkElements(a, b)}) {
@@ -162,19 +198,19 @@
   // set.difference would work)
   Set remaining = set2.toSet();
   for (var element1 in set1) {
-    bool found = false;
+    var correspondingElement;
     for (var element2 in remaining) {
       if (sameElement(element1, element2)) {
         if (checkElements != null) {
           checkElements(element1, element2);
         }
-        found = true;
+        correspondingElement = element2;
         remaining.remove(element2);
         break;
       }
     }
-    if (found) {
-      common.add(element1);
+    if (correspondingElement != null) {
+      common.add([element1, correspondingElement]);
     } else {
       unfound.add(element1);
     }
@@ -194,7 +230,7 @@
     Iterable set2,
     bool sameElement(a, b),
     {void onSameElement(a, b)}) {
-  List common = [];
+  List<List> common = <List>[];
   List unfound = [];
   Set remaining =
       computeSetDifference(set1, set2, common, unfound,
@@ -244,7 +280,8 @@
   if (type1 == null || type2 == null) {
     return check(object1, object2, property, type1, type2);
   } else {
-    return const TypeEquivalence(const CheckStrategy()).visit(type1, type2);
+    return check(object1, object2, property, type1, type2,
+        (a, b) => const TypeEquivalence(const CheckStrategy()).visit(a, b));
   }
 }
 
@@ -268,7 +305,8 @@
   if (exp1 == null || exp2 == null) {
     return check(object1, object2, property, exp1, exp2);
   } else {
-    return const ConstantEquivalence(const CheckStrategy()).visit(exp1, exp2);
+    return check(object1, object2, property, exp1, exp2,
+        (a, b) => const ConstantEquivalence(const CheckStrategy()).visit(a, b));
   }
 }
 
@@ -302,6 +340,15 @@
       ClassElement class2 = member2;
       if (!class1.isResolved) return;
 
+      // If the class element itself satisfies [hasProperty], check its
+      // properties directly (for equivalent class elements) before recursing
+      // into its local members.
+      if (hasProperty(member1)) {
+        if (areElementsEquivalent(member1, member2)) {
+          checkMemberProperties(
+              compiler1, member1,
+              compiler2, member2,
+              verbose: verbose);
+        }
+      }
+
       class1.forEachLocalMember((m1) {
         checkMembers(m1, class2.localLookup(m1.name));
       });
@@ -382,3 +429,163 @@
 
   testResolutionImpactEquivalence(impact1, impact2, const CheckStrategy());
 }
+
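+/// Checks that [set1] and [set2] contain pairwise equivalent elements
+/// according to [sameElement]; elements missing from [set2] ("unfound") or
+/// only present in [set2] ("extra") fail the test unless [failOnUnfound] or
+/// [failOnExtra], respectively, is false.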
+void checkSets(
+    Iterable set1,
+    Iterable set2,
+    String messagePrefix,
+    bool sameElement(a, b),
+    {bool failOnUnfound: true,
+    bool failOnExtra: true,
+    bool verbose: false,
+    void onSameElement(a, b)}) {
+  List<List> common = <List>[];
+  List unfound = [];
+  Set remaining = computeSetDifference(
+      set1, set2, common, unfound,
+      sameElement: sameElement,
+      checkElements: onSameElement);
+  StringBuffer sb = new StringBuffer();
+  sb.write("$messagePrefix:");
+  if (verbose) {
+    sb.write("\n Common:\n  ${common.join('\n  ')}");
+  }
+  if (unfound.isNotEmpty || verbose) {
+    sb.write("\n Unfound:\n  ${unfound.join('\n  ')}");
+  }
+  if (remaining.isNotEmpty || verbose) {
+    sb.write("\n Extra: \n  ${remaining.join('\n  ')}");
+  }
+  String message = sb.toString();
+  if (unfound.isNotEmpty || remaining.isNotEmpty) {
+    if ((failOnUnfound && unfound.isNotEmpty) ||
+        (failOnExtra && remaining.isNotEmpty)) {
+      Expect.fail(message);
+    } else {
+      print(message);
+    }
+  } else if (verbose) {
+    print(message);
+  }
+}
+
+String defaultToString(obj) => '$obj';
+
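+/// Checks that [map1] and [map2] have equivalent key sets (by [sameKey]) and
+/// that the values of corresponding keys are equivalent (by [sameValue]);
+/// reports unfound keys, extra keys and value mismatches.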
+void checkMaps(
+    Map map1,
+    Map map2,
+    String messagePrefix,
+    bool sameKey(a, b),
+    bool sameValue(a, b),
+    {bool failOnUnfound: true,
+    bool failOnMismatch: true,
+    bool verbose: false,
+    String keyToString(key): defaultToString,
+    String valueToString(value): defaultToString}) {
+  List<List> common = <List>[];
+  List unfound = [];
+  List<List> mismatch = <List>[];
+  Set remaining = computeSetDifference(
+      map1.keys, map2.keys, common, unfound,
+      sameElement: sameKey,
+      checkElements: (k1, k2) {
+        var v1 = map1[k1];
+        var v2 = map2[k2];
+        if (!sameValue(v1, v2)) {
+          mismatch.add([k1, k2]);
+        }
+      });
+  StringBuffer sb = new StringBuffer();
+  sb.write("$messagePrefix:");
+  if (verbose) {
+    sb.write("\n Common: \n");
+    for (List pair in common) {
+      var k1 = pair[0];
+      var k2 = pair[1];
+      var v1 = map1[k1];
+      var v2 = map2[k2];
+      sb.write(" key1   =${keyToString(k1)}\n");
+      sb.write(" key2   =${keyToString(k2)}\n");
+      sb.write("  value1=${valueToString(v1)}\n");
+      sb.write("  value2=${valueToString(v2)}\n");
+    }
+  }
+  if (unfound.isNotEmpty || verbose) {
+    sb.write("\n Unfound: \n");
+    for (var k1 in unfound) {
+      var v1 = map1[k1];
+      sb.write(" key1   =${keyToString(k1)}\n");
+      sb.write("  value1=${valueToString(v1)}\n");
+    }
+  }
+  if (remaining.isNotEmpty || verbose) {
+    sb.write("\n Extra: \n");
+    for (var k2 in remaining) {
+      var v2 = map2[k2];
+      sb.write(" key2   =${keyToString(k2)}\n");
+      sb.write("  value2=${valueToString(v2)}\n");
+    }
+  }
+  if (mismatch.isNotEmpty || verbose) {
+    sb.write("\n Mismatch: \n");
+    for (List pair in mismatch) {
+      var k1 = pair[0];
+      var k2 = pair[1];
+      var v1 = map1[k1];
+      var v2 = map2[k2];
+      sb.write(" key1   =${keyToString(k1)}\n");
+      sb.write(" key2   =${keyToString(k2)}\n");
+      sb.write("  value1=${valueToString(v1)}\n");
+      sb.write("  value2=${valueToString(v2)}\n");
+    }
+  }
+  String message = sb.toString();
+  if (unfound.isNotEmpty || mismatch.isNotEmpty || remaining.isNotEmpty) {
+    if ((unfound.isNotEmpty && failOnUnfound) ||
+        (mismatch.isNotEmpty && failOnMismatch) ||
+        remaining.isNotEmpty) {
+      Expect.fail(message);
+    } else {
+      print(message);
+    }
+  } else if (verbose) {
+    print(message);
+  }
+}
+
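+/// Checks the resolved ASTs of all executable members that [compiler1] has
+/// resolved against their deserialized counterparts in [compiler2].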
+void checkAllResolvedAsts(
+    Compiler compiler1,
+    Compiler compiler2,
+    {bool verbose: false}) {
+  checkLoadedLibraryMembers(
+      compiler1,
+      compiler2,
+      (Element member1) {
+        return member1 is ExecutableElement &&
+            compiler1.resolution.hasResolvedAst(member1);
+      },
+      checkResolvedAsts,
+      verbose: verbose);
+}
+
+
+/// Check equivalence of the resolved ASTs of [member1] in [compiler1] and
+/// [member2] in [compiler2].
+void checkResolvedAsts(Compiler compiler1, Element member1,
+    Compiler compiler2, Element member2,
+    {bool verbose: false}) {
+  if (!compiler2.serialization.isDeserialized(member2)) {
+    return;
+  }
+  ResolvedAst resolvedAst1 = compiler1.resolution.getResolvedAst(member1);
+  ResolvedAst resolvedAst2 = compiler2.serialization.getResolvedAst(member2);
+
+  if (resolvedAst1 == null || resolvedAst2 == null) return;
+
+  if (verbose) {
+    print('Checking resolved asts for $member1 vs $member2');
+  }
+
+  testResolvedAstEquivalence(
+      resolvedAst1, resolvedAst2, const CheckStrategy());
+}
diff --git a/tests/compiler/dart2js_extra/dart2js_extra.status b/tests/compiler/dart2js_extra/dart2js_extra.status
index 897d5b3..6fa9843 100644
--- a/tests/compiler/dart2js_extra/dart2js_extra.status
+++ b/tests/compiler/dart2js_extra/dart2js_extra.status
@@ -10,9 +10,6 @@
 constant_javascript_semantics4_test: Fail, OK
 mirrors_used_closure_test: Fail # Issue 17939
 
-[ $compiler == dart2js && $runtime == jsshell ]
-mirror_printer_test: Pass, Slow # Issue 16473
-
 [ $compiler == dart2js && $checked ]
 variable_type_test/03: Fail, OK
 variable_type_test/01: Fail, OK
@@ -84,3 +81,6 @@
 
 [ $compiler == dart2js && $runtime == ff && $system == windows ]
 consistent_index_error_string_test: Pass, Slow # Issue 25940
+
+[ $compiler == dart2js ]
+mirror_printer_test: Pass, Slow # Issue 25940, 16473
diff --git a/tests/corelib/corelib.status b/tests/corelib/corelib.status
index 60be0ba..53ec300 100644
--- a/tests/corelib/corelib.status
+++ b/tests/corelib/corelib.status
@@ -209,3 +209,15 @@
 
 [ $arch == simdbc || $arch == simdbc64 ]
 regexp/stack-overflow_test: RuntimeError, OK # Smaller limit with irregex interpreter
+
+[ $hot_reload ]
+big_integer_huge_mul_vm_test: Pass, Timeout
+big_integer_parsed_mul_div_vm_test: Pass, Timeout
+collection_length_test: Fail, Crash
+hash_map2_test: Pass, Crash
+queue_test: Pass, Crash
+regexp/regexp_test: Pass, Fail, Crash
+uri_parse_test: Pass, Timeout
+uri_test: Pass, RuntimeError
+data_uri_test: Pass, RuntimeError
+int_parse_radix_test: Pass, Timeout
diff --git a/tests/corelib/date_time_test.dart b/tests/corelib/date_time_test.dart
index efb2206..27758f8 100644
--- a/tests/corelib/date_time_test.dart
+++ b/tests/corelib/date_time_test.dart
@@ -1234,7 +1234,7 @@
       String source1 = "$date ${time}Z";
       String source2 = "${date}T${time}Z";
       var utcTime1 = DateTime.parse(source1);
-      var utcTime2 = DateTime.parse(source1);
+      var utcTime2 = DateTime.parse(source2);
       Expect.isTrue(utcTime1.isUtc);
       Expect.equals(utcTime1, utcTime2);
       Expect.equals(source1, utcTime1.toString());
@@ -1244,7 +1244,7 @@
       String source1 = "$date $time";
       String source2 = "${date}T$time";
       var utcTime1 = DateTime.parse(source1);
-      var utcTime2 = DateTime.parse(source1);
+      var utcTime2 = DateTime.parse(source2);
       Expect.isFalse(utcTime1.isUtc);
       Expect.equals(utcTime1, utcTime2);
       Expect.equals(source1, utcTime1.toString());
diff --git a/tests/corelib/uri_test.dart b/tests/corelib/uri_test.dart
index f1dce7a..ef7a9a3 100644
--- a/tests/corelib/uri_test.dart
+++ b/tests/corelib/uri_test.dart
@@ -26,6 +26,11 @@
     Expect.equals(uri,
                   Uri.parse(uriText + "#fragment").removeFragment());
   }
+
+  // Test uri.replace on uri with fragment
+  uri = Uri.parse('http://hello.com/fake#fragment');
+  uri = uri.replace(path: "D/E/E");
+  Expect.stringEquals('http://hello.com/D/E/E#fragment', uri.toString());
 }
 
 testEncodeDecode(String orig, String encoded) {
@@ -130,9 +135,13 @@
   // Test non-URI base (no scheme, no authority, relative path).
   base = Uri.parse("a/b/c?_#_");
   testResolve("a/b/g?q#f", "g?q#f");
+  testResolve("./", "../..");
   testResolve("../", "../../..");
   testResolve("a/b/", ".");
   testResolve("c", "../../c");
+  base = Uri.parse("../../a/b/c?_#_");  // Initial ".." in base url.
+  testResolve("../../a/d", "../d");
+  testResolve("../../../d", "../../../d");
 
   base = Uri.parse("s:a/b");
   testResolve("s:/c", "../c");
diff --git a/tests/html/html.status b/tests/html/html.status
index d39e5b9..4d57f42 100644
--- a/tests/html/html.status
+++ b/tests/html/html.status
@@ -31,7 +31,7 @@
 [ $compiler == dart2js && $checked ]
 js_function_getter_trust_types_test: Skip # --trust-type-annotations incompatible with --checked
 
-[ $compiler == dart2js && $checked && $browser ]
+[ $compiler == dart2js && $checked && $browser && $runtime != drt ]
 js_typed_interop_test/method: Fail # Issue 24822
 
 [ $compiler == dart2js && $csp && $browser ]
@@ -104,12 +104,14 @@
 element_types_test/supported_object: RuntimeError # Issue 25155
 element_types_test/supported_embed: RuntimeError # Issue 25155
 svgelement_test/PathElement: RuntimeError # Issue 25665
+element_animate_test/timing_dict: RuntimeError # Issue 26730
 
 [ $runtime == chrome && $system == macos ]
 canvasrenderingcontext2d_test/drawImage_video_element: Skip # Times out. Please triage this failure.
 canvasrenderingcontext2d_test/drawImage_video_element_dataUrl: Skip # Times out. Please triage this failure.
 transition_event_test/functional: Skip # Times out. Issue 22167
 request_animation_frame_test: Skip # Times out. Issue 22167
+custom/*: Pass, Timeout # Issue 26789
 
 [$runtime == drt || $runtime == dartium || $runtime == chrome || $runtime == chromeOnAndroid ]
 webgl_1_test: Pass, Fail # Issue 8219
@@ -284,6 +286,7 @@
 event_test: RuntimeError # Safarimobilesim does not support WheelEvent
 
 [ $runtime == safari ]
+audiobuffersourcenode_test/functional: RuntimeError
 input_element_test/supported_month: RuntimeError
 input_element_test/supported_time: RuntimeError
 input_element_test/supported_week: RuntimeError
@@ -291,49 +294,14 @@
 input_element_test/supported_datetime-local: Fail
 touchevent_test/supported: Fail # Safari does not support TouchEvents
 notification_test/constructors: Fail # Safari doesn't let us access the fields of the Notification to verify them.
-
-[ $runtime == safari ]
 indexeddb_1_test/functional: Skip # Times out. Issue 21433
 indexeddb_2_test: RuntimeError # Issue 21433
 indexeddb_4_test: RuntimeError # Issue 21433
 indexeddb_5_test: RuntimeError # Issue 21433
 
-[ $runtime == opera ]
-blob_constructor_test: Fail
-canvas_test: Fail
-canvas_test: Pass,Fail
-cssstyledeclaration_test: Fail
-document_test/document: Fail # Issue: 7413
-element_add_test: Fail
-element_constructor_1_test: Fail
-element_test/children: Fail # Issue: 7413
-element_test/constructors: Fail
-element_test/elements: Fail
-element_test/eventListening: Crash
-element_test/eventListening: Fail # Issue: 7413
-element_test/queryAll: Fail
-fileapi_test: Skip # Timeout.
-form_data_test: Fail # Issue: 7413
-htmlelement_test: Fail
-isolates_test: Skip # Timeout.
-keyboard_event_test: Fail # Issue: 7413
-serialized_script_value_test: Fail
-typed_arrays_arraybuffer_test: Fail
-url_test: Fail
 
-# Opera Feature support statuses-
-# All changes should be accompanied by platform support annotation changes.
-audiobuffersourcenode_test/supported: Fail
-audiocontext_test/supported: Fail
-crypto_test/supported: Fail
-css_test/supportsPointConversions: Fail
-element_types_test/supported_template: Fail
-indexeddb_1_test/supported: Fail
-indexeddb_1_test/supportsDatabaseNames: Fail
-mutationobserver_test/supported: Fail
-performance_api_test/supported: Fail
-speechrecognition_test/supported: Fail
-websql_test/supported: Fail
+js_test/JsArray: RuntimeError # Fails 10 out of 10.
+indexeddb_3_test: Skip # Times out 1 out of 10.
 
 [  $compiler == dart2js && $runtime == ff ]
 history_test/history: Skip # Issue 22050
diff --git a/tests/html/js_test.dart b/tests/html/js_test.dart
index ae9d9b3..c554081 100644
--- a/tests/html/js_test.dart
+++ b/tests/html/js_test.dart
@@ -194,6 +194,8 @@
   document.body.append(script);
 }
 
+// Function type used by the 'JavaScriptFunction' group below to check that
+// functions wrapped with allowInterop still satisfy Dart `is` checks.
+typedef bool StringToBool(String s);
+
 // Some test are either causing other test to fail in IE9, or they are failing
 // for unknown reasons
 // useHtmlConfiguration+ImageData bug: dartbug.com/14355
@@ -922,6 +924,17 @@
 
     });
 
+    group('JavaScriptFunction', () {
+      test('is check', () {
+        var fn = (String s) => true;
+        var jsFn = allowInterop(fn);
+        expect(fn is StringToBool, isTrue);
+        expect(jsFn is StringToBool, isTrue);
+        expect(jsFn is Function, isTrue);
+        expect(jsFn is List, isFalse);
+      });
+    });
+
     group('Dart->JS', () {
 
       test('Date', () {
diff --git a/tests/isolate/isolate.status b/tests/isolate/isolate.status
index d9976d3..67d2941 100644
--- a/tests/isolate/isolate.status
+++ b/tests/isolate/isolate.status
@@ -132,6 +132,9 @@
 browser/typed_data_message_test: StaticWarning
 mint_maker_test: StaticWarning
 
+[ $compiler == none && $runtime == vm ]
+scenarios/short_package/short_package_test: Fail, OK  # We do not plan to support the tested behavior anyway.
+
 [ $compiler != none || $runtime != vm ]
 package_root_test: SkipByDesign  # Uses Isolate.packageRoot
 package_config_test: SkipByDesign  # Uses Isolate.packageConfig
diff --git a/tests/language/arg_param_trailing_comma_test.dart b/tests/language/arg_param_trailing_comma_test.dart
new file mode 100644
index 0000000..4b87b1a
--- /dev/null
+++ b/tests/language/arg_param_trailing_comma_test.dart
@@ -0,0 +1,401 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+// Dart test program for testing params.
+
+// Convenience values.
+var c = new C();
+var x = 42;
+var y = 42;
+var z = 42;
+
+// Trailing commas in parameter lists.
+
+// Typedefs.
+typedef fx(x, );                                      /// none: ok
+typedef fy([y,]);                                     /// none: continued
+typedef fxy(x, [y, ]);                                /// none: continued
+typedef fz({z,});                                     /// none: continued
+typedef fxz(x, {z, });                                /// none: continued
+
+// As arguments type.
+argfx(void f(x, )) {}                                 /// none: continued
+argfy(void f([y, ])) {}                               /// none: continued
+argfxy(void f(x, [y, ])) {}                           /// none: continued
+argfz(void f({z, })) {}                               /// none: continued
+argfxz(void f(x, {z, })) {}                           /// none: continued
+
+// Top-level functions.
+void topx(x,) {}                                      /// none: continued
+void topy([y, ]) {}                                   /// none: continued
+void topxy(x, [y, ]) {}                               /// none: continued
+void topz({z, }) {}                                   /// none: continued
+void topxz(x, {z, }) {}                               /// none: continued
+
+void set topsetx(x, ) {}                              /// none: continued
+
+// After specific parameter formats.
+void afterDefaultValueY([int y = 42, ]) {}            /// none: continued
+void afterDefaultValueZ({int z : 42, }) {}            /// none: continued
+void afterFunsigX(void f(),) {}                       /// none: continued
+void afterFunsigY([void f(),]) {}                     /// none: continued
+void afterFunsigZ({void f(),}) {}                     /// none: continued
+void afterFunsigDefaultValueY([void f() = topy,]) {}  /// none: continued
+void afterFunsigDefaultValueZ({void f() : topz,}) {}  /// none: continued
+
+class C {
+  C();
+
+  // Constructors.
+  C.x(x, );                                           /// none: continued
+  C.y([y, ]);                                         /// none: continued
+  C.xy(x, [y, ]);                                     /// none: continued
+  C.z({z, });                                         /// none: continued
+  C.xz(x, {z, });                                     /// none: continued
+
+  // Static members
+  static void staticx(x,) {}                          /// none: continued
+  static void staticy([y, ]) {}                       /// none: continued
+  static void staticxy(x, [y, ]) {}                   /// none: continued
+  static void staticz({z, }) {}                       /// none: continued
+  static void staticxz(x, {z, }) {}                   /// none: continued
+
+  static void set staticsetx(x, ) {}                  /// none: continued
+
+  // Instance members
+  void instancex(x,) {}                               /// none: continued
+  void instancey([y, ]) {}                            /// none: continued
+  void instancexy(x, [y, ]) {}                        /// none: continued
+  void instancez({z, }) {}                            /// none: continued
+  void instancexz(x, {z, }) {}                        /// none: continued
+
+  void set instancesetx(x, ) {}                       /// none: continued
+
+  operator +(x, ) => this;                            /// none: continued
+  operator []=(x, y, ) {}                             /// none: continued
+}
+
+main() {
+  testCalls();                                        /// none: continued
+  // Make sure the Bad class is checked.
+  new Bad().method();
+}
+
+void testCalls() {
+  // Check that all functions can be called normally
+  topx(x);                                            /// none: continued
+  topy(y);                                            /// none: continued
+  topxy(x, y);                                        /// none: continued
+  topz();                                             /// none: continued
+  topz(z: z);                                         /// none: continued
+  topxz(x);                                           /// none: continued
+  topxz(x, z: z);                                     /// none: continued
+  topsetx = x;                                        /// none: continued
+  afterDefaultValueY();                               /// none: continued
+  afterDefaultValueY(y);                              /// none: continued
+  afterDefaultValueZ();                               /// none: continued
+  afterDefaultValueZ(z: z);                           /// none: continued
+  new C.x(x);                                         /// none: continued
+  new C.xy(x);                                        /// none: continued
+  new C.xy(x, y);                                     /// none: continued
+  new C.y(y);                                         /// none: continued
+  new C.xz(x);                                        /// none: continued
+  new C.xz(x, z: z);                                  /// none: continued
+  new C.z(z: z);                                      /// none: continued
+  C.staticx(x);                                       /// none: continued
+  C.staticy(y);                                       /// none: continued
+  C.staticxy(x);                                      /// none: continued
+  C.staticxy(x, y);                                   /// none: continued
+  C.staticz();                                        /// none: continued
+  C.staticz(z: z);                                    /// none: continued
+  C.staticxz(x);                                      /// none: continued
+  C.staticxz(x, z: z);                                /// none: continued
+  C.staticsetx = x;                                   /// none: continued
+  c.instancex(x);                                     /// none: continued
+  c.instancey();                                      /// none: continued
+  c.instancey(y);                                     /// none: continued
+  c.instancexy(x);                                    /// none: continued
+  c.instancexy(x, y);                                 /// none: continued
+  c.instancez();                                      /// none: continued
+  c.instancez(z: z);                                  /// none: continued
+  c.instancexz(x);                                    /// none: continued
+  c.instancexz(x, z: z);                              /// none: continued
+  c.instancesetx = x;                                 /// none: continued
+  c + x;                                              /// none: continued
+  c[x] = y;                                           /// none: continued
+
+  // Call with an extra comma (not possible for setters and operators).
+  topx(x, );                                          /// none: continued
+  topy(y, );                                          /// none: continued
+  topxy(x, y, );                                      /// none: continued
+  topxy(x, );                                         /// none: continued
+  topz(z: z, );                                       /// none: continued
+  topxz(x, );                                         /// none: continued
+  topxz(x, z: z, );                                   /// none: continued
+  new C.x(x, );                                       /// none: continued
+  new C.xy(x, y, );                                   /// none: continued
+  new C.xy(x, );                                      /// none: continued
+  new C.y(y, );                                       /// none: continued
+  new C.xz(x, );                                      /// none: continued
+  new C.xz(x, z: z, );                                /// none: continued
+  new C.z(z: z, );                                    /// none: continued
+  C.staticx(x, );                                     /// none: continued
+  C.staticy(y, );                                     /// none: continued
+  C.staticxy(x, y, );                                 /// none: continued
+  C.staticxy(x, );                                    /// none: continued
+  C.staticz(z: z, );                                  /// none: continued
+  C.staticxz(x, );                                    /// none: continued
+  C.staticxz(x, z: z, );                              /// none: continued
+  c.instancex(x, );                                   /// none: continued
+  c.instancey(y, );                                   /// none: continued
+  c.instancexy(x, y, );                               /// none: continued
+  c.instancexy(x, );                                  /// none: continued
+  c.instancez(z: z, );                                /// none: continued
+  c.instancexz(x, );                                  /// none: continued
+  c.instancexz(x, z: z, );                            /// none: continued
+
+  // Typedefs work as expected.
+  if (topx is! fx) throw "Bad type: $fx";             /// none: continued
+  if (topy is! fy) throw "Bad type: $fy";             /// none: continued
+  if (topxy is! fxy) throw "Bad type: $fxy";          /// none: continued
+  if (topz is! fz) throw "Bad type: $fz";             /// none: continued
+  if (topxz is! fxz) throw "Bad type: $fxz";          /// none: continued
+
+  // Parameter types work (checked mode only test).
+  argfx(topx);                                        /// none: continued
+  argfy(topy);                                        /// none: continued
+  argfxy(topxy);                                      /// none: continued
+  argfz(topz);                                        /// none: continued
+  argfxz(topxz);                                      /// none: continued
+}
+
+
+// Invalid syntax. These declarations were invalid before the addition of
+// trailing commas too, and should stay that way.
+void topBadEmpty(,) {}                           /// 1: compile-time error
+void topBadStart(, a) {}                         /// 2: compile-time error
+void topBadEnd(a,,) {}                           /// 3: compile-time error
+void topBadMiddle(a,, b) {}                      /// 4: compile-time error
+void topBadPosEmpty([]) {}                       /// 5: compile-time error
+void topBadPosEmpty(,[]) {}                      /// 6: compile-time error
+void topBadPosEmpty([,]) {}                      /// 7: compile-time error
+void topBadPosEmpty([],) {}                      /// 8: compile-time error
+void topBadPosStart(,[a]) {}                     /// 9: compile-time error
+void topBadPosStart([, a]) {}                    /// 10: compile-time error
+void topBadPosEnd([a,,]) {}                      /// 11: compile-time error
+void topBadPosStart([a],) {}                     /// 12: compile-time error
+void topBadPosMiddle([a,, b]) {}                 /// 13: compile-time error
+void topBadNamEmpty({}) {}                       /// 14: compile-time error
+void topBadNamEmpty(,{}) {}                      /// 15: compile-time error
+void topBadNamEmpty({,}) {}                      /// 16: compile-time error
+void topBadNamEmpty({},) {}                      /// 17: compile-time error
+void topBadNamStart(,{a}) {}                     /// 18: compile-time error
+void topBadNamStart({, a}) {}                    /// 19: compile-time error
+void topBadNamEnd({a,,}) {}                      /// 20: compile-time error
+void topBadNamStart({a},) {}                     /// 21: compile-time error
+void topBadNamMiddle({a,, b}) {}                 /// 22: compile-time error
+void set topSetBadEmpty(,) {}                    /// 23: compile-time error
+void set topSetBadStart(, a) {}                  /// 24: compile-time error
+void set topSetBadEnd(a,,) {}                    /// 25: compile-time error
+void set topSetBadMiddle(a,, b) {}               /// 26: compile-time error
+class Bad {
+  Bad.empty(,) {}                                /// 27: compile-time error
+  Bad.start(, a) {}                              /// 28: compile-time error
+  Bad.end(a,,) {}                                /// 29: compile-time error
+  Bad.middle(a,, b) {}                           /// 30: compile-time error
+  Bad.posEmpty([]) {}                            /// 31: compile-time error
+  Bad.posEmpty(,[]) {}                           /// 32: compile-time error
+  Bad.posEmpty([,]) {}                           /// 33: compile-time error
+  Bad.posEmpty([],) {}                           /// 34: compile-time error
+  Bad.posStart(,[a]) {}                          /// 35: compile-time error
+  Bad.posStart([, a]) {}                         /// 36: compile-time error
+  Bad.posEnd([a,,]) {}                           /// 37: compile-time error
+  Bad.posStart([a],) {}                          /// 38: compile-time error
+  Bad.posMiddle([a,, b]) {}                      /// 39: compile-time error
+  Bad.namEmpty({}) {}                            /// 40: compile-time error
+  Bad.namEmpty(,{}) {}                           /// 41: compile-time error
+  Bad.namEmpty({,}) {}                           /// 42: compile-time error
+  Bad.namEmpty({},) {}                           /// 43: compile-time error
+  Bad.namStart(,{a}) {}                          /// 44: compile-time error
+  Bad.namStart({, a}) {}                         /// 45: compile-time error
+  Bad.namEnd({a,,}) {}                           /// 46: compile-time error
+  Bad.namStart({a},) {}                          /// 47: compile-time error
+  Bad.namMiddle({a,, b}) {}                      /// 48: compile-time error
+  static void staticBadEmpty(,) {}               /// 49: compile-time error
+  static void staticBadStart(, a) {}             /// 50: compile-time error
+  static void staticBadEnd(a,,) {}               /// 51: compile-time error
+  static void staticBadMiddle(a,, b) {}          /// 52: compile-time error
+  static void staticBadPosEmpty([]) {}           /// 53: compile-time error
+  static void staticBadPosEmpty(,[]) {}          /// 54: compile-time error
+  static void staticBadPosEmpty([,]) {}          /// 55: compile-time error
+  static void staticBadPosEmpty([],) {}          /// 56: compile-time error
+  static void staticBadPosStart(,[a]) {}         /// 57: compile-time error
+  static void staticBadPosStart([, a]) {}        /// 58: compile-time error
+  static void staticBadPosEnd([a,,]) {}          /// 59: compile-time error
+  static void staticBadPosStart([a],) {}         /// 60: compile-time error
+  static void staticBadPosMiddle([a,, b]) {}     /// 61: compile-time error
+  static void staticBadNamEmpty({}) {}           /// 62: compile-time error
+  static void staticBadNamEmpty(,{}) {}          /// 63: compile-time error
+  static void staticBadNamEmpty({,}) {}          /// 64: compile-time error
+  static void staticBadNamEmpty({},) {}          /// 65: compile-time error
+  static void staticBadNamStart(,{a}) {}         /// 66: compile-time error
+  static void staticBadNamStart({, a}) {}        /// 67: compile-time error
+  static void staticBadNamEnd({a,,}) {}          /// 68: compile-time error
+  static void staticBadNamStart({a},) {}         /// 69: compile-time error
+  static void staticBadNamMiddle({a,, b}) {}     /// 70: compile-time error
+  static void set staticSetBadEmpty(,) {}        /// 71: compile-time error
+  static void set staticSetBadStart(, a) {}      /// 72: compile-time error
+  static void set staticSetBadEnd(a,,) {}        /// 73: compile-time error
+  static void set staticSetBadMiddle(a,, b) {}   /// 74: compile-time error
+  void instanceBadEmpty(,) {}                    /// 75: compile-time error
+  void instanceBadStart(, a) {}                  /// 76: compile-time error
+  void instanceBadEnd(a,,) {}                    /// 77: compile-time error
+  void instanceBadMiddle(a,, b) {}               /// 78: compile-time error
+  void instanceBadPosEmpty([]) {}                /// 79: compile-time error
+  void instanceBadPosEmpty(,[]) {}               /// 80: compile-time error
+  void instanceBadPosEmpty([,]) {}               /// 81: compile-time error
+  void instanceBadPosEmpty([],) {}               /// 82: compile-time error
+  void instanceBadPosStart(,[a]) {}              /// 83: compile-time error
+  void instanceBadPosStart([, a]) {}             /// 84: compile-time error
+  void instanceBadPosEnd([a,,]) {}               /// 85: compile-time error
+  void instanceBadPosStart([a],) {}              /// 86: compile-time error
+  void instanceBadPosMiddle([a,, b]) {}          /// 87: compile-time error
+  void instanceBadNamEmpty({}) {}                /// 88: compile-time error
+  void instanceBadNamEmpty(,{}) {}               /// 89: compile-time error
+  void instanceBadNamEmpty({,}) {}               /// 90: compile-time error
+  void instanceBadNamEmpty({},) {}               /// 91: compile-time error
+  void instanceBadNamStart(,{a}) {}              /// 92: compile-time error
+  void instanceBadNamStart({, a}) {}             /// 93: compile-time error
+  void instanceBadNamEnd({a,,}) {}               /// 94: compile-time error
+  void instanceBadNamStart({a},) {}              /// 95: compile-time error
+  void instanceBadNamMiddle({a,, b}) {}          /// 96: compile-time error
+  void set instanceSetBadEmpty(,) {}             /// 97: compile-time error
+  void set instanceSetBadStart(, a) {}           /// 98: compile-time error
+  void set instanceSetBadEnd(a,,) {}             /// 99: compile-time error
+  void set instanceSetBadMiddle(a,, b) {}        /// 100: compile-time error
+  void operator *(,);                            /// 101: compile-time error
+  void operator *(, a);                          /// 102: compile-time error
+  void operator *(a,,);                          /// 103: compile-time error
+  void operator []=(, a);                        /// 104: compile-time error
+  void operator []=(a,,);                        /// 105: compile-time error
+  void operator []=(a,, b);                      /// 106: compile-time error
+  void operator []=(a,);                         /// 107: compile-time error
+
+  method() {
+    // Local methods.
+    void localBadEmpty(,) {}                     /// 108: compile-time error
+    void localBadStart(, a) {}                   /// 109: compile-time error
+    void localBadEnd(a,,) {}                     /// 110: compile-time error
+    void localBadMiddle(a,, b) {}                /// 111: compile-time error
+    void localBadPosEmpty([]) {}                 /// 112: compile-time error
+    void localBadPosEmpty(,[]) {}                /// 113: compile-time error
+    void localBadPosEmpty([,]) {}                /// 114: compile-time error
+    void localBadPosEmpty([],) {}                /// 115: compile-time error
+    void localBadPosStart(,[a]) {}               /// 116: compile-time error
+    void localBadPosStart([, a]) {}              /// 117: compile-time error
+    void localBadPosEnd([a,,]) {}                /// 118: compile-time error
+    void localBadPosStart([a],) {}               /// 119: compile-time error
+    void localBadPosMiddle([a,, b]) {}           /// 120: compile-time error
+    void localBadNamEmpty({}) {}                 /// 121: compile-time error
+    void localBadNamEmpty(,{}) {}                /// 122: compile-time error
+    void localBadNamEmpty({,}) {}                /// 123: compile-time error
+    void localBadNamEmpty({},) {}                /// 124: compile-time error
+    void localBadNamStart(,{a}) {}               /// 125: compile-time error
+    void localBadNamStart({, a}) {}              /// 126: compile-time error
+    void localBadNamEnd({a,,}) {}                /// 127: compile-time error
+    void localBadNamStart({a},) {}               /// 128: compile-time error
+    void localBadNamMiddle({a,, b}) {}           /// 129: compile-time error
+
+    // Invalid calls.
+
+    topx(,);                                     /// 130: compile-time error
+    topy(,);                                     /// 131: compile-time error
+    topz(,);                                     /// 132: compile-time error
+    topx(, x);                                   /// 133: compile-time error
+    topz(, z:z);                                 /// 134: compile-time error
+    topxy(x,, y);                                /// 135: compile-time error
+    topxz(x,, z:z);                              /// 136: compile-time error
+    topx(x,,);                                   /// 137: compile-time error
+    topz(z:z,,);                                 /// 138: compile-time error
+
+    new C.x(,);                                  /// 139: compile-time error
+    new C.y(,);                                  /// 140: compile-time error
+    new C.z(,);                                  /// 141: compile-time error
+    new C.x(, x);                                /// 142: compile-time error
+    new C.z(, z:z);                              /// 143: compile-time error
+    new C.xy(x,, y);                             /// 144: compile-time error
+    new C.xz(x,, z:z);                           /// 145: compile-time error
+    new C.x(x,,);                                /// 146: compile-time error
+    new C.z(z:z,,);                              /// 147: compile-time error
+
+    C.staticx(,);                                /// 148: compile-time error
+    C.staticy(,);                                /// 149: compile-time error
+    C.staticz(,);                                /// 150: compile-time error
+    C.staticx(, x);                              /// 151: compile-time error
+    C.staticz(, z:z);                            /// 152: compile-time error
+    C.staticxy(x,, y);                           /// 153: compile-time error
+    C.staticxz(x,, z:z);                         /// 154: compile-time error
+    C.staticx(x,,);                              /// 155: compile-time error
+    C.staticz(z:z,,);                            /// 156: compile-time error
+
+    c.instancex(,);                              /// 157: compile-time error
+    c.instancey(,);                              /// 158: compile-time error
+    c.instancez(,);                              /// 159: compile-time error
+    c.instancex(, x);                            /// 160: compile-time error
+    c.instancez(, z:z);                          /// 161: compile-time error
+    c.instancexy(x,, y);                         /// 162: compile-time error
+    c.instancexz(x,, z:z);                       /// 163: compile-time error
+    c.instancex(x,,);                            /// 164: compile-time error
+    c.instancez(z:z,,);                          /// 165: compile-time error
+
+    c[x,] = y;                                   /// 166: compile-time error
+  }
+
+  // As parameters:
+  void f(void topBadEmpty(,)) {}                 /// 167: compile-time error
+  void f(void topBadStart(, a)) {}               /// 168: compile-time error
+  void f(void topBadEnd(a,,)) {}                 /// 169: compile-time error
+  void f(void topBadMiddle(a,, b)) {}            /// 170: compile-time error
+  void f(void topBadPosEmpty([])) {}             /// 171: compile-time error
+  void f(void topBadPosEmpty(,[])) {}            /// 172: compile-time error
+  void f(void topBadPosEmpty([,])) {}            /// 173: compile-time error
+  void f(void topBadPosEmpty([],)) {}            /// 174: compile-time error
+  void f(void topBadPosStart(,[a])) {}           /// 175: compile-time error
+  void f(void topBadPosStart([, a])) {}          /// 176: compile-time error
+  void f(void topBadPosEnd([a,,])) {}            /// 177: compile-time error
+  void f(void topBadPosStart([a],)) {}           /// 178: compile-time error
+  void f(void topBadPosMiddle([a,, b])) {}       /// 179: compile-time error
+  void f(void topBadNamEmpty({})) {}             /// 180: compile-time error
+  void f(void topBadNamEmpty(,{})) {}            /// 181: compile-time error
+  void f(void topBadNamEmpty({,})) {}            /// 182: compile-time error
+  void f(void topBadNamEmpty({},)) {}            /// 183: compile-time error
+  void f(void topBadNamStart(,{a})) {}           /// 184: compile-time error
+  void f(void topBadNamStart({, a})) {}          /// 185: compile-time error
+  void f(void topBadNamEnd({a,,})) {}            /// 186: compile-time error
+  void f(void topBadNamStart({a},)) {}           /// 187: compile-time error
+  void f(void topBadNamMiddle({a,, b})) {}       /// 188: compile-time error
+}
+
+// As typedefs:
+typedef void BadEmpty(,);                        /// 189: compile-time error
+typedef void BadStart(, a);                      /// 190: compile-time error
+typedef void BadEnd(a,,);                        /// 191: compile-time error
+typedef void BadMiddle(a,, b);                   /// 192: compile-time error
+typedef void BadPosEmpty([]);                    /// 193: compile-time error
+typedef void BadPosEmpty(,[]);                   /// 194: compile-time error
+typedef void BadPosEmpty([,]);                   /// 195: compile-time error
+typedef void BadPosEmpty([],);                   /// 196: compile-time error
+typedef void BadPosStart(,[a]);                  /// 197: compile-time error
+typedef void BadPosStart([, a]);                 /// 198: compile-time error
+typedef void BadPosEnd([a,,]);                   /// 199: compile-time error
+typedef void BadPosStart([a],);                  /// 200: compile-time error
+typedef void BadPosMiddle([a,, b]);              /// 201: compile-time error
+typedef void BadNamEmpty({});                    /// 202: compile-time error
+typedef void BadNamEmpty(,{});                   /// 203: compile-time error
+typedef void BadNamEmpty({,});                   /// 204: compile-time error
+typedef void BadNamEmpty({},);                   /// 205: compile-time error
+typedef void BadNamStart(,{a});                  /// 206: compile-time error
+typedef void BadNamStart({, a});                 /// 207: compile-time error
+typedef void BadNamEnd({a,,});                   /// 208: compile-time error
+typedef void BadNamStart({a},);                  /// 209: compile-time error
+typedef void BadNamMiddle({a,, b});              /// 210: compile-time error
diff --git a/tests/language/const_constructor_super2_test.dart b/tests/language/const_constructor_super2_test.dart
new file mode 100644
index 0000000..8832d24
--- /dev/null
+++ b/tests/language/const_constructor_super2_test.dart
@@ -0,0 +1,29 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import 'package:expect/expect.dart';
+
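+// Checks canonicalization of const objects created through a const super
+// constructor: const B(1, 2) and const B(2, 2) must be distinct objects that
+// differ in `a` but agree on `b`.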
+class A {
+  final a;
+
+  const A(this.a);
+}
+
+class B extends A {
+  final b;
+
+  const B(a, this.b) : super(a);
+}
+
+@NoInline()
+foo() => const B(1, 2);
+
+@NoInline()
+bar() => const B(2, 2);
+
+void main() {
+  Expect.notEquals(foo(), bar());
+  Expect.notEquals(foo().a, bar().a);
+  Expect.equals(foo().b, bar().b);
+}
\ No newline at end of file
diff --git a/tests/language/initializing_formal_access_test.dart b/tests/language/initializing_formal_access_test.dart
new file mode 100644
index 0000000..3d3ceed
--- /dev/null
+++ b/tests/language/initializing_formal_access_test.dart
@@ -0,0 +1,28 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// DartOptions=--initializing-formal-access
+
+import "package:expect/expect.dart";
+
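+// With --initializing-formal-access, the initializing formal `this.x` can be
+// read in the initializer list (`y = x + 1`) and, via the field, in the
+// constructor body.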
+class C {
+  final int x;
+  final int y;
+
+  const C.constant(this.x) : y = x + 1;
+
+  C(this.x) : y = x + 1 {
+    int z = x + 2;
+    assert(z == y + 1);
+  }
+}
+
+main() {
+  C c = new C(2);
+  Expect.equals(c.x, 2);
+  Expect.equals(c.y, 3);
+  const C cc = const C.constant(4);
+  Expect.equals(cc.x, 4);
+  Expect.equals(cc.y, 5);
+}
diff --git a/tests/language/initializing_formal_capture_test.dart b/tests/language/initializing_formal_capture_test.dart
new file mode 100644
index 0000000..7ad3f08
--- /dev/null
+++ b/tests/language/initializing_formal_capture_test.dart
@@ -0,0 +1,19 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// DartOptions=--initializing-formal-access
+
+import "package:expect/expect.dart";
+
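+// The closure stored in `y` captures the initializing formal `x` (the
+// constructor argument), so it still returns 2 after the field `x` has been
+// reassigned to 3.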
+class A {
+  var x, y;
+  A(this.x) : y = (() => x);
+}
+
+main() {
+  A a = new A(2);
+  a.x = 3;
+  Expect.equals(a.x, 3);
+  Expect.equals(a.y(), 2);
+}
diff --git a/tests/language/initializing_formal_final_test.dart b/tests/language/initializing_formal_final_test.dart
new file mode 100644
index 0000000..0832ce0
--- /dev/null
+++ b/tests/language/initializing_formal_final_test.dart
@@ -0,0 +1,20 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// DartOptions=--initializing-formal-access
+
+import "package:expect/expect.dart";
+
+class A {
+  var x, y;
+  // This should cause a warning because `x` is final when
+  // accessed as an initializing formal.
+  A(this.x) : y = (() { x = 3; });
+}
+
+main() {
+  A a = new A(2);
+  Expect.equals(a.x, 2);
+  Expect.throws(() => a.y(), (e) => e is NoSuchMethodError);
+}
diff --git a/tests/language/initializing_formal_promotion_test.dart b/tests/language/initializing_formal_promotion_test.dart
new file mode 100644
index 0000000..b0314e8
--- /dev/null
+++ b/tests/language/initializing_formal_promotion_test.dart
@@ -0,0 +1,30 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// DartOptions=--initializing-formal-access
+
+import "package:expect/expect.dart";
+
+class B {}
+
+class A {
+  B x, y;
+  A(this.x) {
+    // Promote to subtype.
+    if (x is C) y = x.x;
+    // Promotion fails, not a subtype.
+    if (x is A) y = x;
+  }
+}
+
+class C extends A implements B {
+  C(B x) : super(x);
+}
+
+main() {
+  C c2 = new C(null);
+  C cc = new C(c2);
+  Expect.equals(c2.y, null);
+  Expect.equals(cc.y, c2);
+}
diff --git a/tests/language/initializing_formal_scope_test.dart b/tests/language/initializing_formal_scope_test.dart
new file mode 100644
index 0000000..dd85fad
--- /dev/null
+++ b/tests/language/initializing_formal_scope_test.dart
@@ -0,0 +1,25 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// DartOptions=--initializing-formal-access
+
+import "package:expect/expect.dart";
+
+// Duplicate definition checks for `this.x` will check the scopes associated
+// with the constructor, not all enclosing scopes; so this is not a conflict.
+var x;
+
+class A {
+  var x;
+  A(this.x) {
+    // In the body the field is in scope, not the initializing formal;
+    // so we can use the setter.
+    x += 1;
+  }
+}
+
+main() {
+  A a = new A(2);
+  Expect.equals(a.x, 3);
+}
diff --git a/tests/language/initializing_formal_type_test.dart b/tests/language/initializing_formal_type_test.dart
new file mode 100644
index 0000000..352eecf
--- /dev/null
+++ b/tests/language/initializing_formal_type_test.dart
@@ -0,0 +1,19 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// DartOptions=--initializing-formal-access
+
+import "package:expect/expect.dart";
+
+class A {
+  int x;
+  String y;
+  A(this.x) : y = x { y = x; }
+}
+
+main() {
+  A a = new A(null);
+  Expect.equals(a.x, null);
+  Expect.equals(a.y, null);
+}
diff --git a/tests/language/language.status b/tests/language/language.status
index d744e27..03fd476 100644
--- a/tests/language/language.status
+++ b/tests/language/language.status
@@ -5,6 +5,12 @@
 # This directory contains tests that are intended to show the
 # current state of the language.
 
+# Trailing commas are so far supported by:
+# - The VM (vm, dartium, drt, precompiler+dart_precompiled)
+# Remaining targets still fail on arg_param_trailing_comma_test/none.
+[($compiler != none && $compiler != precompiler) || ($runtime != vm && $runtime != dartium && $runtime != drt && $runtime != dart_precompiled)]
+arg_param_trailing_comma_test/none: Fail # Issue 26644
+
 [ ($compiler == none || $compiler == precompiler || $compiler == dart2app || $compiler == dart2appjit) ]
 tearoff_constructor_basic_test: Skip # Crashes in checked mode -- hausner investigating
 
@@ -53,6 +59,12 @@
 generic_methods_function_type_test: CompiletimeError # Issue 25869
 generic_methods_type_expression_test: CompiletimeError # Issue 25869
 
+# Experimental feature: Use initializing formals in initializers and constructor body.
+initializing_formal_access_test: CompiletimeError # Issue 26656
+initializing_formal_capture_test: CompiletimeError # Issue 26656
+initializing_formal_final_test: CompiletimeError # Issue 26656
+initializing_formal_type_test: CompiletimeError # Issue 26656
+
 [ ($compiler == none || $compiler == precompiler || $compiler == dart2app || $compiler == dart2appjit) && ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) ]
 
 class_keyword_test/02: MissingCompileTimeError # Issue 13627
@@ -92,6 +104,7 @@
 main_test/42: Fail # Issue 20028
 mirror_in_static_init_test: Fail # Issue 22071
 vm/debug_break_enabled_vm_test/*: Skip # Issue 14651.
+
 # Experimental feature: Syntactic support for generic methods.
 generic_methods_test: RuntimeError # Issue 25869
 generic_functions_test: RuntimeError # Issue 25869
@@ -100,6 +113,13 @@
 generic_methods_new_test: RuntimeError # Issue 25869
 generic_methods_function_type_test: RuntimeError # Issue 25869
 generic_methods_type_expression_test: RuntimeError # Issue 25869
+
+# Experimental feature: Use initializing formals in initializers and constructor body.
+initializing_formal_access_test: RuntimeError # Issue 26656
+initializing_formal_capture_test: RuntimeError # Issue 26656
+initializing_formal_final_test: RuntimeError # Issue 26656
+initializing_formal_type_test: RuntimeError # Issue 26656
+
 config_import_test: Skip  # Issue 26250
 
 [ $compiler == none && $runtime == dartium && $system == linux && $arch != x64 ]
@@ -230,3 +250,8 @@
 [ $compiler == precompiler && $runtime == dart_precompiled && $system == android ]
 vm/optimized_guarded_field_isolates_test: Skip # Issue #26373
 issue23244_test: Skip # Issue #26373
+
+[ $hot_reload ]
+deferred_load_inval_code_test: RuntimeError
+regress_26453_test: Pass, Fail, Crash
+vm/regress_16873_test: Pass, Crash
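For context, the entries above gate arg_param_trailing_comma_test, which exercises trailing commas in parameter and argument lists. A minimal sketch of that syntax (names are illustrative, not taken from the test itself):

num sum3(num a, num b, num c,) => a + b + c;

main() {
  print(sum3(
    1,
    2,
    3,
  ));
}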
diff --git a/tests/language/language_analyzer2.status b/tests/language/language_analyzer2.status
index fe2f70b..63ac9b5 100644
--- a/tests/language/language_analyzer2.status
+++ b/tests/language/language_analyzer2.status
@@ -511,3 +511,10 @@
 generic_methods_new_test: CompiletimeError # Issue 25868
 generic_methods_function_type_test: CompiletimeError # Issue 25868
 generic_methods_type_expression_test: CompiletimeError # Issue 25868
+
+# Experimental feature: Use initializing formals in initializers and constructor body.
+initializing_formal_access_test: CompiletimeError # Issue 26658
+initializing_formal_capture_test: CompiletimeError # Issue 26658
+initializing_formal_final_test: CompiletimeError # Issue 26658
+initializing_formal_promotion_test: StaticWarning # Issue 26658
+initializing_formal_type_test: CompiletimeError # Issue 26658
diff --git a/tests/language/language_dart2js.status b/tests/language/language_dart2js.status
index b61dd39..aec3d55 100644
--- a/tests/language/language_dart2js.status
+++ b/tests/language/language_dart2js.status
@@ -16,6 +16,8 @@
 deep_nesting1_negative_test: Crash # Issue 25557
 deep_nesting2_negative_test: Crash # Issue 25557
 
+regress_18713_test: Fail # Issue 26743
+
 call_function_apply_test: RuntimeError # Issue 23873
 mixin_supertype_subclass_test: CompileTimeError # Issue 23773
 mixin_supertype_subclass2_test: CompileTimeError # Issue 23773
@@ -54,6 +56,12 @@
 generic_methods_function_type_test: CompiletimeError # DartOptions not passed to compiler.
 generic_methods_type_expression_test: CompiletimeError # DartOptions not passed to compiler.
 
+# Experimental feature: Use initializing formals in initializers and constructor body.
+initializing_formal_access_test: CompiletimeError # DartOptions not passed to compiler.
+initializing_formal_capture_test: CompiletimeError # DartOptions not passed to compiler.
+initializing_formal_final_test: CompiletimeError # DartOptions not passed to compiler.
+initializing_formal_type_test: CompiletimeError # DartOptions not passed to compiler.
+
 [ $compiler == dart2js ]
 invocation_mirror_empty_arguments_test: Fail # Issue 24331
 nan_identical_test: Fail # Issue 11551
@@ -221,6 +229,9 @@
 [ $compiler == dart2js && $runtime == ff ]
 round_test: Pass, Fail, OK # Fixed in ff 35. Common JavaScript engine Math.round bug.
 
+[ $compiler == dart2js && $runtime == chrome && $system == macos ]
+await_future_test: Pass, Timeout # Issue 26735
+
 [ $compiler == dart2js && ($runtime == safari || $runtime == safarimobilesim)]
 # Safari codegen bug, fixed on some versions of Safari 7.1 (Version 7.1 (9537.85.10.17.1))
 call_through_getter_test: Fail, OK
diff --git a/tests/language/regress_18713_test.dart b/tests/language/regress_18713_test.dart
new file mode 100644
index 0000000..b0d8736
--- /dev/null
+++ b/tests/language/regress_18713_test.dart
@@ -0,0 +1,25 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import "package:expect/expect.dart";
+
+class T<X> {
+  final Type tType = X;
+  Type get getTType => X;
+}
+
+class S<Y> {
+  final Type sType = Y;
+  Type get getSType => Y;
+}
+
+class TS<A, B> = T<A> with S<B>;
+
+main() {
+  var ts = new TS<int, String>();
+  Expect.equals("String", ts.sType.toString());
+  Expect.equals("int", ts.tType.toString());
+  Expect.equals("String", ts.getSType.toString());
+  Expect.equals("int", ts.getTType.toString());
+}
diff --git a/tests/language/regress_26453_test.dart b/tests/language/regress_26453_test.dart
new file mode 100644
index 0000000..32d6f59
--- /dev/null
+++ b/tests/language/regress_26453_test.dart
@@ -0,0 +1,33 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// The program crashed with a segfault because, when foo and bar are first
+// compiled, all four variables (a, b, c and d) are allocated to the context.
+// When foo is compiled a second time (with optimizations) only c and d are
+// allocated to the context. This happened because the parser folds away
+// "${a}" and "${b}" as constant expressions when parsing bar on its own,
+// i.e. the expressions were not parsed again and thus a and b were not
+// marked as captured.
+// This caused a mismatch between the context that bar expects and the one
+// that the optimized version of foo produces.
+
+foo() {
+  const a = 1;
+  const b = 2;
+  var c = 3;
+  var d = 4;
+
+  bar() {
+    if ("${a}" != "1") throw "failed";
+    if ("${b}" != "2") throw "failed";
+    if ("${c}" != "3") throw "failed";
+    if ("${d}" != "4") throw "failed";
+  }
+
+  bar();
+}
+
+main() {
+  for (var i = 0; i < 50000; i++) foo();
+}
diff --git a/tests/language/regress_26530_test.dart b/tests/language/regress_26530_test.dart
new file mode 100644
index 0000000..a41761a
--- /dev/null
+++ b/tests/language/regress_26530_test.dart
@@ -0,0 +1,32 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import "package:expect/expect.dart";
+
+var trace = "";
+
+main() {
+  var x = 0;
+  try {
+    try {
+      throw x++;  // 1
+    } on int catch (e) {
+      trace += "$e";
+      trace += "-$x";
+      x++;  // 2
+      try {
+        x++;  // 3
+        rethrow;
+      } finally {
+        trace += "-f";
+        x++;  // 4
+      }
+    }
+  } catch (e) {
+    trace += "-c";
+    trace += "-$e";
+    trace += "-$x";
+  }
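+  // Expected trace: the inner handler records the original value 0 and x == 1,
+  // the finally clause appends "f" (and bumps x to 4) before the rethrow
+  // propagates, and the outer handler then appends "c", the original value 0,
+  // and the final x == 4.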
+  Expect.equals("0-1-f-c-0-4", trace);
+}
diff --git a/tests/language/regress_26543_1_test.dart b/tests/language/regress_26543_1_test.dart
new file mode 100644
index 0000000..8c425de
--- /dev/null
+++ b/tests/language/regress_26543_1_test.dart
@@ -0,0 +1,14 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file. 
+
+// Regression test for issue 26543
+
+class C {
+  var x;
+  C() : x = null ?? <int, int>{} {}
+}
+
+main() {
+  print(new C());
+}
\ No newline at end of file
diff --git a/tests/language/regress_26543_2_test.dart b/tests/language/regress_26543_2_test.dart
new file mode 100644
index 0000000..36c06f0
--- /dev/null
+++ b/tests/language/regress_26543_2_test.dart
@@ -0,0 +1,14 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file. 
+
+// Regression test for issue 26543
+
+class C {
+  var x, y;
+  C() : x = null ?? <int, int>{}, y = 0 {}
+}
+
+main() {
+  print(new C());
+}
\ No newline at end of file
diff --git a/tests/language/regress_26543_3_test.dart b/tests/language/regress_26543_3_test.dart
new file mode 100644
index 0000000..379e7567
--- /dev/null
+++ b/tests/language/regress_26543_3_test.dart
@@ -0,0 +1,14 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file. 
+
+// Regression test for issue 26543
+
+class C {
+  var x, y;
+  C() : x = 0, y = null ?? <int, int>{} {}
+}
+
+main() {
+  print(new C());
+}
\ No newline at end of file
diff --git a/tests/language/regress_26668_test.dart b/tests/language/regress_26668_test.dart
index ffadcc7..1b329aa 100644
--- a/tests/language/regress_26668_test.dart
+++ b/tests/language/regress_26668_test.dart
@@ -1,7 +1,6 @@
 // Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
-// Tests that the VM does not crash on weird corner cases of class Math.
 
 import 'dart:async';
 
diff --git a/tests/language/vm/deopt_smi_check_vm_test.dart b/tests/language/vm/deopt_smi_check_vm_test.dart
new file mode 100644
index 0000000..3c61430
--- /dev/null
+++ b/tests/language/vm/deopt_smi_check_vm_test.dart
@@ -0,0 +1,20 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+// Test deoptimization on a smi check.
+// VMOptions=--optimization-counter-threshold=10  --no-background-compilation
+
+import 'package:expect/expect.dart';
+
+hc(a) {
+  var r = a.hashCode;
+  return r;
+}
+
+main() {
+  for (var i = 0; i < 20; i++) {
+    Expect.equals((1).hashCode, hc(1));
+  }
+  // Passing double causes deoptimization.
+  Expect.equals((1.0).hashCode, hc(1.0));
+}
diff --git a/tests/language/vm/string_polymorphic_test.dart b/tests/language/vm/string_polymorphic_test.dart
index 72eac28..8e0dc22 100644
--- a/tests/language/vm/string_polymorphic_test.dart
+++ b/tests/language/vm/string_polymorphic_test.dart
@@ -1,7 +1,6 @@
 // Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
-// Tests that the VM does not crash on weird corner cases of class Math.
 // VMOptions=--optimization_counter_threshold=10 --no-background_compilation
 
 import 'package:expect/expect.dart';
diff --git a/tests/language/vm/typed_data_polymorphic_view_test.dart b/tests/language/vm/typed_data_polymorphic_view_test.dart
index 694c899..fbbaa7f 100644
--- a/tests/language/vm/typed_data_polymorphic_view_test.dart
+++ b/tests/language/vm/typed_data_polymorphic_view_test.dart
@@ -1,7 +1,6 @@
 // Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
 // for details. All rights reserved. Use of this source code is governed by a
 // BSD-style license that can be found in the LICENSE file.
-// Tests that the VM does not crash on weird corner cases of class Math.
 // VMOptions=--optimization_counter_threshold=10 --no-background_compilation
 
 import 'dart:typed_data';
diff --git a/tests/lib/async/timer_regress22626_test.dart b/tests/lib/async/timer_regress22626_test.dart
index 483b9338..810be7c 100644
--- a/tests/lib/async/timer_regress22626_test.dart
+++ b/tests/lib/async/timer_regress22626_test.dart
@@ -12,7 +12,7 @@
 import 'dart:math';
 import 'package:expect/expect.dart';
 
-int countdown = 10;
+int countdown = 5;
 var rng = new Random(1234);
 
 void test(int delay, int delta) {
@@ -31,5 +31,5 @@
 }
 
 void main() {
-  test(50, 2);
+  test(200, 2);
 }
diff --git a/tests/lib/collection/linked_list_test.dart b/tests/lib/collection/linked_list_test.dart
index 3ed06b7..785ebe7 100644
--- a/tests/lib/collection/linked_list_test.dart
+++ b/tests/lib/collection/linked_list_test.dart
@@ -14,6 +14,55 @@
 }
 
 
+testPreviousNext() {
+  var list = new LinkedList<MyEntry>();
+  Expect.throws(() => list.first);
+  Expect.throws(() => list.last);
+  Expect.equals(0, list.length);
+
+  for (int i = 0; i < 3; i++) {
+    list.add(new MyEntry(i));
+  }
+  Expect.equals(3, list.length);
+
+  var entry = list.first;
+  Expect.isNull(entry.previous);
+  Expect.equals(0, entry.value);
+  entry = entry.next;
+  Expect.equals(1, entry.value);
+  entry = entry.next;
+  Expect.equals(2, entry.value);
+  Expect.isNull(entry.next);
+  entry = entry.previous;
+  Expect.equals(1, entry.value);
+  entry = entry.previous;
+  Expect.equals(0, entry.value);
+  Expect.isNull(entry.previous);
+}
+
+testUnlinked() {
+  var unlinked = new MyEntry(0);
+  Expect.isNull(unlinked.previous);
+  Expect.isNull(unlinked.next);
+  var list = new LinkedList<MyEntry>();
+  list.add(unlinked);
+  Expect.isNull(unlinked.previous);
+  Expect.isNull(unlinked.next);
+  list.remove(unlinked);
+  Expect.isNull(unlinked.previous);
+  Expect.isNull(unlinked.next);
+  list.add(unlinked);
+  list.add(new MyEntry(1));
+  Expect.isNull(unlinked.previous);
+  Expect.equals(1, unlinked.next.value);
+  list.remove(unlinked);
+  Expect.isNull(unlinked.previous);
+  Expect.isNull(unlinked.next);
+  list.add(unlinked);
+  Expect.isNull(unlinked.next);
+  Expect.equals(1, unlinked.previous.value);
+}
+
 testInsert() {
   // Insert last.
   var list = new LinkedList<MyEntry>();
@@ -168,6 +217,8 @@
 }
 
 main() {
+  testPreviousNext();
+  testUnlinked();
   testInsert();
   testRemove();
   testBadAdd();
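The hunk above uses MyEntry, which is defined earlier in linked_list_test.dart and is not shown in this diff. A minimal sketch of what such an entry class could look like, assuming it simply wraps an int value:

import "dart:collection";

class MyEntry extends LinkedListEntry<MyEntry> {
  final int value;
  MyEntry(this.value);
  String toString() => "$value";
}

With an entry class like this, the previous/next getters inherited from LinkedListEntry are exactly what the new testPreviousNext and testUnlinked functions exercise.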
diff --git a/tests/lib/lib.status b/tests/lib/lib.status
index 07ac39b..8d3dcee 100644
--- a/tests/lib/lib.status
+++ b/tests/lib/lib.status
@@ -229,6 +229,9 @@
 # TODO(efortuna): Investigate.
 async/timer_test: Fail, Pass
 
+[ $runtime == vm ]
+convert/streamed_conversion_json_utf8_decode_test: Pass, Slow # Infrequent timeouts.
+
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) ]
 async/timer_not_available_test: Fail, OK
 mirrors/native_class_test: Fail, OK # This test is meant to run in a browser.
@@ -370,3 +373,8 @@
 # TODO(vegorov) LoadField bytecode supports only up to 256 fields. Need a long
 # version.
 mirrors/accessor_cache_overflow_test: Skip
+
+[ $hot_reload ]
+convert/chunked_conversion_utf88_test: Pass, Timeout
+convert/streamed_conversion_json_utf8_decode_test: Fail, Crash
+convert/utf85_test: Fail, Crash
diff --git a/tests/standalone/deferred/alpha.dart b/tests/standalone/deferred/alpha.dart
new file mode 100644
index 0000000..20aac88
--- /dev/null
+++ b/tests/standalone/deferred/alpha.dart
@@ -0,0 +1,2 @@
+// beta.dart does not exist!
+import 'beta.dart';
diff --git a/tests/standalone/deferred/exists.dart b/tests/standalone/deferred/exists.dart
new file mode 100644
index 0000000..26cc193
--- /dev/null
+++ b/tests/standalone/deferred/exists.dart
@@ -0,0 +1 @@
+var x = 99;
diff --git a/tests/standalone/deferred/transitive_error.dart b/tests/standalone/deferred/transitive_error.dart
new file mode 100644
index 0000000..e5a9af7
--- /dev/null
+++ b/tests/standalone/deferred/transitive_error.dart
@@ -0,0 +1 @@
+import 'alpha.dart';
diff --git a/tests/standalone/deferred_transitive_import_error_test.dart b/tests/standalone/deferred_transitive_import_error_test.dart
new file mode 100644
index 0000000..585c9cd
--- /dev/null
+++ b/tests/standalone/deferred_transitive_import_error_test.dart
@@ -0,0 +1,29 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import "package:expect/expect.dart";
+// A deferred library that doesn't exist.
+import 'package:foo/foo.dart' deferred as foo;
+// A deferred library that does exist.
+import 'deferred/exists.dart' deferred as exists;
+// A deferred library that will fail transitively because a file is not found.
+import 'deferred/transitive_error.dart' deferred as te;
+
+main() async {
+  // Attempt to load foo which will fail.
+  var fooError;
+  await foo.loadLibrary().catchError((e) {
+    fooError = e;
+  });
+  Expect.isNotNull(fooError);
+  await exists.loadLibrary();
+  Expect.equals(99, exists.x);
+  /* TODO(johnmccutchan): Implement transitive error reporting.
+  var teError;
+  await te.loadLibrary().catchError((e) {
+    teError = e;
+  });
+  Expect.isNotNull(teError);
+  */
+}
diff --git a/tests/standalone/io/bytes_builder_test.dart b/tests/standalone/io/bytes_builder_test.dart
new file mode 100644
index 0000000..7dd4909
--- /dev/null
+++ b/tests/standalone/io/bytes_builder_test.dart
@@ -0,0 +1,64 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import "dart:io";
+import "dart:typed_data";
+import "package:expect/expect.dart";
+
+main() {
+  for (var copying in [true, false]) {
+    var b;
+    testLength(n) {
+      Expect.equals(n, b.length);
+      if (n == 0) {
+        Expect.isTrue(b.isEmpty, "isEmpty: #${b.length}");
+        Expect.isFalse(b.isNotEmpty, "isNotEmpty: #${b.length}");
+      } else {
+        Expect.isTrue(b.isNotEmpty, "isNotEmpty: #${b.length}");
+        Expect.isFalse(b.isEmpty, "isEmpty: #${b.length}");
+      }
+    }
+
+    b = new BytesBuilder(copy: copying);
+    testLength(0);
+
+    b.addByte(0);
+    testLength(1);
+
+    b.add([1, 2, 3]);
+    testLength(4);
+
+    b.add(<int>[4, 5, 6]);
+    testLength(7);
+
+    b.add(new Uint8List.fromList([7, 8, 9]));
+    testLength(10);
+
+    b.add(new Uint16List.fromList([10, 11, 12]));
+    testLength(13);
+
+    var bytes = b.toBytes();
+    Expect.isTrue(bytes is Uint8List);
+    Expect.listEquals([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], bytes);
+    testLength(13);
+
+    b.add("\x0d\x0e\x0f".codeUnits);
+    testLength(16);
+
+    bytes = b.takeBytes();
+    testLength(0);
+    Expect.isTrue(bytes is Uint8List);
+    Expect.listEquals([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],
+                      bytes);
+
+    b.addByte(0);
+    testLength(1);
+
+    b.clear();
+    testLength(0);
+
+    b.addByte(0);
+    testLength(1);
+  }
+}
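As a usage note, the BytesBuilder API exercised above is typically used to accumulate chunks before turning them into a single byte list. A hedged sketch (collectBytes is an illustrative helper, not part of the test):

import "dart:async";
import "dart:io";

Future<List<int>> collectBytes(Stream<List<int>> chunks) async {
  // With copy: false the builder may hold on to the chunk lists instead of
  // copying their contents.
  var builder = new BytesBuilder(copy: false);
  await for (var chunk in chunks) {
    builder.add(chunk);
  }
  // takeBytes() returns the accumulated bytes and resets the builder.
  return builder.takeBytes();
}

main() async {
  print(await collectBytes(new Stream.fromIterable([[1, 2], [3, 4, 5]])));
}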
diff --git a/tests/standalone/io/file_blocking_lock_script.dart b/tests/standalone/io/file_blocking_lock_script.dart
new file mode 100644
index 0000000..5c955c4
--- /dev/null
+++ b/tests/standalone/io/file_blocking_lock_script.dart
@@ -0,0 +1,50 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+//
+// Script used by the file_lock_test.dart test.
+
+import "dart:async";
+import "dart:io";
+
+Future<int> testLockWholeFile(File file, int len) async {
+  var raf = await file.open(mode: APPEND);
+  await raf.setPosition(0);
+  int nextToWrite = 1;
+  while (nextToWrite <= len) {
+    await raf.lock(FileLock.BLOCKING_EXCLUSIVE, 0, len);
+
+    int at;
+    int p;
+    while (true) {
+      p = await raf.position();
+      at = await raf.readByte();
+      if (at == 0 || at == -1) break;
+      nextToWrite++;
+    }
+    await raf.setPosition(p);
+    await raf.writeByte(nextToWrite);
+    await raf.flush();
+    nextToWrite++;
+    await raf.unlock(0, len);
+  }
+
+  await raf.lock(FileLock.BLOCKING_EXCLUSIVE, 0, len);
+  await raf.setPosition(0);
+  for (int i = 1; i <= len; i++) {
+    if ((await raf.readByte()) != i) {
+      await raf.unlock(0, len);
+      await raf.close();
+      return 1;
+    }
+  }
+  await raf.unlock(0, len);
+  await raf.close();
+  return 0;
+}
+
+main(List<String> args) async {
+  File file = new File(args[0]);
+  int len = int.parse(args[1]);
+  exit(await testLockWholeFile(file, len));
+}
diff --git a/tests/standalone/io/file_blocking_lock_test.dart b/tests/standalone/io/file_blocking_lock_test.dart
new file mode 100644
index 0000000..9cd6689
--- /dev/null
+++ b/tests/standalone/io/file_blocking_lock_test.dart
@@ -0,0 +1,109 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// This test works by spawning a new process running
+// file_blocking_lock_script.dart, trading the file lock back and forth,
+// writing bytes 1 ... 25 to the file in order. There are checks to ensure
+// that the bytes are written in order, that neither process writes all of the
+// bytes, and that a non-blocking lock attempt fails at some point, so that a
+// blocking lock has to be taken (and succeeds).
+
+import 'dart:async';
+import 'dart:convert';
+import 'dart:io';
+
+import "package:async_helper/async_helper.dart";
+import "package:expect/expect.dart";
+import "package:path/path.dart";
+
+// Start the peer process that repeatedly locks, writes to, and unlocks the file.
+runPeer(String path, int len, FileLock mode) {
+  var script = Platform.script.resolve(
+      'file_blocking_lock_script.dart').toFilePath();
+  var arguments = []
+      ..addAll(Platform.executableArguments)
+      ..add(script)
+      ..add(path)
+      ..add(len.toString());
+  return Process.start(Platform.executable, arguments).then((process) {
+    process.stdout
+        .transform(UTF8.decoder)
+        .listen((data) { print(data); });
+    process.stderr
+        .transform(UTF8.decoder)
+        .listen((data) { print(data); });
+    return process;
+  });
+}
+
+testLockWholeFile() async {
+  const int length = 25;
+  Directory directory = await Directory.systemTemp.createTemp('dart_file_lock');
+  File file = new File(join(directory.path, "file"));
+  await file.writeAsBytes(new List.filled(length, 0));
+  var raf = await file.open(mode: APPEND);
+  await raf.setPosition(0);
+  await raf.lock(FileLock.BLOCKING_EXCLUSIVE, 0, length);
+  Process peer = await runPeer(file.path, length, FileLock.BLOCKING_EXCLUSIVE);
+
+  int nextToWrite = 1;
+  int at = 0;
+  List iWrote = new List.filled(length, 0);
+  bool nonBlockingFailed = false;
+  while (nextToWrite <= length) {
+    int p = await raf.position();
+    await raf.writeByte(nextToWrite);
+    await raf.flush();
+    // Record which bytes this process wrote so that we can check that the
+    // other process was able to take the lock and write some bytes.
+    iWrote[nextToWrite-1] = nextToWrite;
+    nextToWrite++;
+    // Let the other process get the lock at least once by spinning until the
+    // non-blocking lock fails.
+    while (!nonBlockingFailed) {
+      await raf.unlock(0, length);
+      try {
+        await raf.lock(FileLock.EXCLUSIVE, 0, length);
+      } catch(e) {
+        // Check that at some point the non-blocking lock fails.
+        nonBlockingFailed = true;
+        await raf.lock(FileLock.BLOCKING_EXCLUSIVE, 0, length);
+      }
+    }
+    while (true) {
+      p = await raf.position();
+      at = await raf.readByte();
+      if (at == 0 || at == -1) break;
+      nextToWrite++;
+    }
+    await raf.setPosition(p);
+  }
+
+  await raf.setPosition(0);
+  for (int i = 1; i <= length; i++) {
+    Expect.equals(i, await raf.readByte());
+  }
+  await raf.unlock(0, length);
+
+  bool wroteAll = true;
+  for (int i = 0; i < length; i++) {
+    // If there's a 0 entry, this process didn't write all of the bytes.
+    wroteAll = wroteAll && (iWrote[i] != 0);
+  }
+  Expect.equals(false, wroteAll);
+
+  Expect.equals(true, nonBlockingFailed);
+
+  await peer.exitCode.then((v) async {
+    Expect.equals(0, v);
+    await raf.close();
+    await directory.delete(recursive: true);
+  });
+}
+
+main() async {
+  asyncStart();
+  await testLockWholeFile();
+  asyncEnd();
+}
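The test above coordinates two processes; as a simpler illustration of the new blocking mode itself, a minimal sketch of taking and releasing a blocking exclusive lock on a file (the file name and byte range are placeholders):

import "dart:io";

main() async {
  var raf = await new File("some.lock").open(mode: FileMode.APPEND);
  // Blocks until no other process holds a conflicting lock on this range.
  await raf.lock(FileLock.BLOCKING_EXCLUSIVE, 0, 1);
  try {
    // ... access the resource protected by the lock ...
  } finally {
    await raf.unlock(0, 1);
    await raf.close();
  }
}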
diff --git a/tests/standalone/io/socket_bind_test.dart b/tests/standalone/io/socket_bind_test.dart
index 084280d..f701f0c 100644
--- a/tests/standalone/io/socket_bind_test.dart
+++ b/tests/standalone/io/socket_bind_test.dart
@@ -66,22 +66,22 @@
                                   bool addr2V6Only) async {
   int freePort = await freeIPv4AndIPv6Port();
 
-  asyncStart();
-  return ServerSocket.bind(
-      addr1, freePort, v6Only: addr1V6Only, shared: false).then((socket) {
+  var socket = await ServerSocket.bind(
+      addr1, freePort, v6Only: addr1V6Only, shared: false);
+
+  try {
     Expect.isTrue(socket.port > 0);
 
-    asyncStart();
-    return ServerSocket.bind(
-        addr2, freePort, v6Only: addr2V6Only, shared: false).then((socket2) {
+    var socket2 = await ServerSocket.bind(
+        addr2, freePort, v6Only: addr2V6Only, shared: false);
+    try {
       Expect.equals(socket.port, socket2.port);
-
-      return Future.wait([
-          socket.close().whenComplete(asyncEnd),
-          socket2.close().whenComplete(asyncEnd),
-      ]);
-    });
-  });
+    } finally {
+      await socket2.close();
+    }
+  } finally {
+    await socket.close();
+  }
 }
 
 testListenCloseListenClose(String host) async {
@@ -120,16 +120,34 @@
   return port;
 }
 
+Future retry(Future fun(), {int maxCount: 10}) async {
+  for (int i = 0; i < maxCount; i++) {
+    try {
+      // If there is no exception this will simply return, otherwise we keep
+      // trying.
+      return await fun();
+    } catch (_) {}
+    print("Failed to execute test closure in attempt $i "
+          "(${maxCount - i} retries left).");
+  }
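+  // All [maxCount] attempts failed; make one final attempt and let any
+  // exception from it propagate to the caller.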
+  return await fun();
+}
+
 main() async {
   asyncStart();
-  await testBindDifferentAddresses(InternetAddress.ANY_IP_V6,
-                                   InternetAddress.ANY_IP_V4,
-                                   true,
-                                   false);
-  await testBindDifferentAddresses(InternetAddress.ANY_IP_V4,
-                                   InternetAddress.ANY_IP_V6,
-                                   false,
-                                   true);
+
+  await retry(() async {
+    await testBindDifferentAddresses(InternetAddress.ANY_IP_V6,
+                                     InternetAddress.ANY_IP_V4,
+                                     true,
+                                     false);
+  });
+  await retry(() async {
+    await testBindDifferentAddresses(InternetAddress.ANY_IP_V4,
+                                     InternetAddress.ANY_IP_V6,
+                                     false,
+                                     true);
+  });
 
   for (var host in ['127.0.0.1', '::1']) {
     testBindShared(host, false);
diff --git a/tests/standalone/io/test_runner_test.dart b/tests/standalone/io/test_runner_test.dart
index e5ec3ae..1eaae3f 100644
--- a/tests/standalone/io/test_runner_test.dart
+++ b/tests/standalone/io/test_runner_test.dart
@@ -85,7 +85,8 @@
 
   TestCase _makeNormalTestCase(name, expectations) {
     var command = CommandBuilder.instance.getProcessCommand(
-        'custom', Platform.executable, [Platform.script.toFilePath(), name],
+        'custom', Platform.executable,
+        ['--package-root=${Platform.packageRoot}', Platform.script.toFilePath(), name],
         {});
     return _makeTestCase(name, DEFAULT_TIMEOUT, command, expectations);
   }
diff --git a/tests/standalone/packages_file_test.dart b/tests/standalone/packages_file_test.dart
new file mode 100644
index 0000000..d41e46b
--- /dev/null
+++ b/tests/standalone/packages_file_test.dart
@@ -0,0 +1,986 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+import "dart:async";
+import "dart:io";
+import "dart:convert" show JSON;
+import "package:path/path.dart" as p;
+import "package:async_helper/async_helper.dart";
+
+/// Root directory of generated files.
+/// Path contains trailing slash.
+/// Each configuration gets its own sub-directory.
+Directory fileRoot;
+/// Shared HTTP server serving the files in [httpFiles].
+/// Each configuration gets its own "sub-dir" entry in `httpFiles`.
+HttpServer httpServer;
+/// Directory structure served by HTTP server.
+Map<String, dynamic> httpFiles = {};
+/// List of configurations.
+List<Configuration> configurations = [];
+/// Collection of failing tests and their failure messages.
+///
+/// Each test may fail in more than one way.
+var failingTests = <String, List<String>>{};
+
+main() async {
+  asyncStart();
+  await setUp();
+
+  await runTests();                          /// 01: ok
+  await runTests([spawn]);                   /// 02: ok
+  await runTests([spawn, spawn]);            /// 03: ok
+  await runTests([spawnUriInherit]);         /// 04: ok
+  await runTests([spawnUriInherit, spawn]);  /// 05: ok
+  await runTests([spawn, spawnUriInherit]);  /// 06: ok
+
+  // Test that spawning a new VM with file paths instead of URIs as arguments
+  // gives the same URIs in the internal values.
+  await runTests([asPath]);                  /// 07: ok
+
+  // Test that spawnUri can reproduce the behavior of VM command line parameters
+  // exactly.
+  // (Don't run all configuration combinations in the same test; instead,
+  // unroll the configurations into multiple groups and run each group
+  // as its own multitest.)
+  {
+    var groupCount = 8;
+    var groups = new List.generate(groupCount, (_) => []);
+    for (int i = 0; i < configurations.length; i++) {
+      groups[i % groupCount].add(configurations[i]);
+    }
+    var group = -1;
+    group = 0;                               /// 10: ok
+    group = 1;                               /// 11: ok
+    group = 2;                               /// 12: ok
+    group = 3;                               /// 13: ok
+    group = 4;                               /// 14: ok
+    group = 5;                               /// 15: ok
+    group = 6;                               /// 16: ok
+    group = 7;                               /// 17: ok
+    if (group >= 0) {
+      for (var other in groups[group]) {
+        await runTests([spawnUriOther(other)]);
+      }
+    }
+  }
+
+
+  await tearDown();
+
+  if (failingTests.isNotEmpty) {
+    print("Errors found in tests:");
+    failingTests.forEach((test, actual) {
+      print("$test:\n  ${actual.join("\n  ")}");
+    });
+    exit(255);
+  }
+
+  asyncEnd();
+}
+
+/// Test running the test of the configuration through [Isolate.spawn].
+///
+/// This should not change the expected results compared to running it
+/// directly.
+Configuration spawn(Configuration conf) {
+  return conf.update(
+    description: conf.description + "/spawn",
+    main: "spawnMain",
+    newArgs: [conf.mainType],
+    expect: null
+  );
+}
+
+/// Tests running a spawnUri on top of the configuration before testing.
+///
+/// The `spawnUri` call has no explicit root or config parameter, and
+/// shouldn't search for one, so it implicitly inherits the current isolate's
+/// actual root or configuration.
+Configuration spawnUriInherit(Configuration conf) {
+  if (conf.expect["iroot"] == null &&
+      conf.expect["iconf"] == null &&
+      conf.expect["pconf"] != null) {
+    // This means that the specified configuration file didn't exist.
+    // Spawning a new URI to "inherit" it would actually do an automatic
+    // package resolution search with unpredictable results.
+    // That behavior will be tested in a setting where we have more control over
+    // the files around the spawned URI.
+    return null;
+  }
+  return conf.update(
+    description: conf.description + "/spawnUri-inherit",
+    main: "spawnUriMain",
+    // Encode null parameters as "-"; Windows fails if an empty string is used.
+    newArgs: [conf.mainFile, "-", "-", "false"],
+    expect: {
+      "proot": conf.expect["iroot"],
+      "pconf": conf.expect["iconf"],
+    }
+  );
+}
+
+/// Tests running a spawnUri with an explicit configuration different
+/// from the original configuration.
+///
+/// Duplicates the explicit parameters as arguments to the spawned isolate.
+ConfigurationTransformer spawnUriOther(Configuration other) {
+  return (Configuration conf) {
+    bool search = (other.config == null) && (other.root == null);
+    return conf.update(
+      description: "${conf.description} -spawnUri-> ${other.description}",
+      main: "spawnUriMain",
+      newArgs: [other.mainFile,
+                other.config ?? "-", other.root ?? "-", "$search"],
+      expect: other.expect
+    );
+  };
+}
+
+
+/// Convert command line parameters to file paths.
+///
+/// This only works on the command line, not with `spawnUri`.
+Configuration asPath(Configuration conf) {
+  bool change = false;
+
+  String toPath(String string) {
+    if (string == null) return null;
+    if (string.startsWith("file:")) {
+      change = true;
+      return new File.fromUri(Uri.parse(string)).path;
+    }
+    return string;
+  }
+
+  var mainFile = toPath(conf.mainFile);
+  var root = toPath(conf.root);
+  var config = toPath(conf.config);
+  if (!change) return null;
+  return conf.update(description: conf.description + "/as path",
+                     mainFile: mainFile, root: root, config: config);
+}
+
+/// --------------------------------------------------------------
+
+
+Future setUp() async {
+  fileRoot = createTempDir();
+  // print("FILES: $fileRoot");
+  httpServer = await startServer(httpFiles);
+  // print("HTTPS: ${httpServer.address.address}:${httpServer.port}");
+  createConfigurations();
+}
+
+Future tearDown() async {
+  fileRoot.deleteSync(recursive: true);
+  await httpServer.close();
+}
+
+typedef Configuration ConfigurationTransformer(Configuration conf);
+
+Future runTests([List<ConfigurationTransformer> transformations]) async {
+  outer: for (var config in configurations) {
+    if (transformations != null) {
+      for (int i = transformations.length - 1; i >= 0; i--) {
+        config = transformations[i](config);
+        if (config == null) {
+          continue outer;  // Can be used to skip some tests.
+        }
+      }
+    }
+    await testConfiguration(config);
+  }
+}
+
+// Creates a combination of configurations for running the Dart VM.
+//
+// The combinations covers most configurations of implicit and explicit
+// package configurations over both file: and http: file sources.
+// It also specifies the expected values of the following for a VM
+// run in that configuration.
+//
+// * `Platform.packageRoot`
+// * `Platform.packageConfig`
+// * `Isolate.packageRoot`
+// * `Isolate.packageConfig`
+// * `Isolate.resolvePackageUri` of various inputs.
+// * A variable defined in a library loaded using a `package:` URI.
+//
+// The configurations all have URIs as `root`, `config` and `mainFile` strings,
+// have empty argument lists, and `mainFile` points to the `main.dart` file.
+void createConfigurations() {
+  add(String description, String mainDir, {String root, String config,
+      Map file, Map http, Map expect}) {
+    var id = freshName("conf");
+
+    file ??= {};
+    http ??= {};
+
+    // Fix-up paths.
+    String fileUri = fileRoot.uri.resolve("$id/").toString();
+    String httpUri =
+        "http://${httpServer.address.address}:${httpServer.port}/$id/";
+
+    String fixPath(String path) {
+      return path?.replaceAllMapped(fileHttpRegexp, (match) {
+        if (path.startsWith("%file/", match.start)) return fileUri;
+        return httpUri;
+      });
+    }
+
+    void fixPaths(Map dirs) {
+      for (var name in dirs.keys) {
+        var value = dirs[name];
+        if (value is Map) {
+          Map subDir = value;
+          fixPaths(subDir);
+        } else {
+          var newValue = fixPath(value);
+          if (newValue != value) dirs[name] = newValue;
+        }
+      }
+    }
+
+    if (!mainDir.endsWith("/")) mainDir += "/";
+    // Insert main files into the main-dir map.
+    Map mainDirMap;
+    {
+      if (mainDir.startsWith("%file/")) {
+        mainDirMap = file;
+      } else {
+        mainDirMap = http;
+
+      }
+      var parts = mainDir.split('/');
+      for (int i = 1; i < parts.length - 1; i++) {
+        var dirName = parts[i];
+        mainDirMap = mainDirMap[dirName] ?? (mainDirMap[dirName] = {});
+      }
+    }
+
+    mainDirMap["main"] = testMain;
+    mainDirMap["spawnMain"] = spawnMain.replaceAll("%mainDir/", mainDir);
+    mainDirMap["spawnUriMain"] = spawnUriMain;
+
+    mainDir = fixPath(mainDir);
+    root = fixPath(root);
+    config = fixPath(config);
+    fixPaths(file);
+    fixPaths(http);
+    // These expectations are default. If not overridden the value will be
+    // expected to be null. That is, you can't avoid testing the actual
+    // value of these, you can only change what value to expect.
+    // For values not included here (commented out), the result is not tested
+    // unless a value (maybe null) is provided.
+    fixPaths(expect);
+
+    expect = {
+      "pconf":    null,
+      "proot":    null,
+      "iconf":    null,
+      "iroot":    null,
+      "foo":      null,
+      "foo/":     null,
+      "foo/bar":  null,
+      "foo.x":    "qux",
+      "bar/bar":  null,
+      "relative": "relative/path",
+      "nonpkg":   "http://example.org/file"
+    }..addAll(expect ?? const {});
+
+    // Add http files to the http server.
+    if (http.isNotEmpty) {
+      httpFiles[id] = http;
+    }
+    // Add file files to the file system.
+    if (file.isNotEmpty) {
+      createFiles(fileRoot, id, file);
+    }
+
+    configurations.add(new Configuration(
+        description: description,
+        root: root,
+        config: config,
+        mainFile: mainDir + "main.dart",
+        args: const [],
+        expect: expect));
+  }
+
+  // The `add` and `addScheme` helpers below can generate file or http resources.
+  // They replace "%file/" with the URI of the root of the generated files and
+  // "%http/" with the URI of the HTTP server's root in appropriate contexts
+  // (all file contents and parameters).
+
+  // Tests that only use one scheme to access files.
+  for (var scheme in ["file", "http"]) {
+
+    /// Run a test in the current scheme.
+    ///
+    /// The files are served either through HTTP or in a local directory.
+    /// Use "%$scheme/" to refer to the root of the served files.
+    addScheme(description, main, {expect, files, args, root, config}) {
+      add("$scheme/$description", main, expect: expect,
+        root: root, config: config,
+        file: (scheme == "file") ? files : null,
+        http: (scheme == "http") ? files : null);
+    }
+
+    {
+      // No parameters, no .packages files or packages/ dir.
+      // A "file:" source realizes there is no configuration and can't resolve
+      // any packages, but an "http:" source assumes a "packages/" directory.
+      addScheme("no resolution",
+        "%$scheme/",
+        files: {},
+        expect: (scheme == "file") ? {
+          "foo.x": null
+        } : {
+          "iroot": "%http/packages/",
+          "foo": "%http/packages/foo",
+          "foo/": "%http/packages/foo/",
+          "foo/bar": "%http/packages/foo/bar",
+          "foo.x": null,
+          "bar/bar": "%http/packages/bar/bar",
+        });
+    }
+
+    {
+      // No parameters, no .packages files,
+      // packages/ dir exists and is detected.
+      var files = {"packages": fooPackage};
+      addScheme("implicit packages dir","%$scheme/",
+        files: files,
+        expect: {
+          "iroot": "%$scheme/packages/",
+          "foo": "%$scheme/packages/foo",
+          "foo/": "%$scheme/packages/foo/",
+          "foo/bar": "%$scheme/packages/foo/bar",
+          "bar/bar": "%$scheme/packages/bar/bar",
+        });
+    }
+
+    {
+      // No parameters, no .packages files in current dir, but one in parent,
+      // packages/ dir exists and is used.
+      //
+      // Should not detect the .packages file in parent directory.
+      // That file is empty, so if it is used, the system cannot resolve "foo".
+      var files = {"sub": {"packages": fooPackage},
+                   ".packages": ""};
+      addScheme("implicit packages dir overrides parent .packages",
+        "%$scheme/sub/",
+        files: files,
+        expect: {
+          "iroot": "%$scheme/sub/packages/",
+          "foo": "%$scheme/sub/packages/foo",
+          "foo/": "%$scheme/sub/packages/foo/",
+          "foo/bar": "%$scheme/sub/packages/foo/bar",
+          // "foo.x": "qux",  // Blocked by issue http://dartbug.com/26482
+          "bar/bar": "%$scheme/sub/packages/bar/bar",
+        });
+    }
+
+    {
+      // No parameters, a .packages file next to entry is found and used.
+      // A packages/ directory is ignored.
+      var files = {".packages": "foo:pkgs/foo/",
+                   "packages": {},
+                   "pkgs": fooPackage};
+      addScheme("implicit .packages file", "%$scheme/",
+        files: files,
+        expect: {
+          "iconf": "%$scheme/.packages",
+          "foo/": "%$scheme/pkgs/foo/",
+          "foo/bar": "%$scheme/pkgs/foo/bar",
+        });
+    }
+
+    {
+      // No parameters, a .packages file in parent dir, no packages/ dir.
+      // With a file: URI, find the .packages file.
+      // With an http: URI, assume a packages/ dir.
+      var files = {"sub": {},
+                   ".packages": "foo:pkgs/foo/",
+                   "pkgs": fooPackage};
+      addScheme(".packages file in parent", "%$scheme/sub/",
+        files: files,
+        expect: (scheme == "file") ? {
+          "iconf": "%file/.packages",
+          "foo/": "%file/pkgs/foo/",
+          "foo/bar": "%file/pkgs/foo/bar",
+        } : {
+          "iroot": "%http/sub/packages/",
+          "foo": "%http/sub/packages/foo",
+          "foo/": "%http/sub/packages/foo/",
+          "foo/bar": "%http/sub/packages/foo/bar",
+          "foo.x": null,
+          "bar/bar": "%http/sub/packages/bar/bar",
+        });
+    }
+
+    {
+      // Specified package root that doesn't exist.
+      // Ignores existing .packages file and packages/ dir.
+      addScheme("explicit root not there",
+        "%$scheme/",
+        files: {"packages": fooPackage,
+                ".packages": "foo:%$scheme/packages/"},
+        root: "%$scheme/notthere/",
+        expect: {
+          "proot": "%$scheme/notthere/",
+          "iroot": "%$scheme/notthere/",
+          "foo": "%$scheme/notthere/foo",
+          "foo/": "%$scheme/notthere/foo/",
+          "foo/bar": "%$scheme/notthere/foo/bar",
+          "foo.x": null,
+          "bar/bar": "%$scheme/notthere/bar/bar",
+        });
+    }
+
+    {
+      // Specified package config that doesn't exist.
+      // Ignores existing .packages file and packages/ dir.
+      addScheme("explicit config not there",
+        "%$scheme/",
+        files: {".packages": "foo:packages/foo/",
+                "packages": fooPackage},
+        config: "%$scheme/.notthere",
+        expect: {
+          "pconf": "%$scheme/.notthere",
+          "iconf": null,   // <- Only there if actually loaded (unspecified).
+          "foo/": null,
+          "foo/bar": null,
+          "foo.x": null,
+        });
+    }
+
+    {
+      // Specified package root with no trailing slash.
+      // The reported Platform.packageRoot and Isolate.packageRoot have a trailing slash.
+      var files = {".packages": "foo:packages/foo/",
+                   "packages": {},
+                   "pkgs": fooPackage};
+      addScheme("explicit package root, no slash", "%$scheme/",
+        files: files,
+        root: "%$scheme/pkgs",
+        expect: {
+          "proot": "%$scheme/pkgs/",
+          "iroot": "%$scheme/pkgs/",
+          "foo": "%$scheme/pkgs/foo",
+          "foo/": "%$scheme/pkgs/foo/",
+          "foo/bar": "%$scheme/pkgs/foo/bar",
+          "bar/bar": "%$scheme/pkgs/bar/bar",
+        });
+    }
+
+    {
+      // Specified package root with trailing slash.
+      var files = {".packages": "foo:packages/foo/",
+                   "packages": {},
+                   "pkgs": fooPackage};
+      addScheme("explicit package root, slash", "%$scheme/",
+        files: files,
+        root: "%$scheme/pkgs",
+        expect: {
+          "proot": "%$scheme/pkgs/",
+          "iroot": "%$scheme/pkgs/",
+          "foo": "%$scheme/pkgs/foo",
+          "foo/": "%$scheme/pkgs/foo/",
+          "foo/bar": "%$scheme/pkgs/foo/bar",
+          "bar/bar": "%$scheme/pkgs/bar/bar",
+        });
+    }
+
+    {
+      // Specified package config.
+      var files = {".packages": "foo:packages/foo/",
+                   "packages": {},
+                   ".pkgs": "foo:pkgs/foo/",
+                   "pkgs": fooPackage};
+      addScheme("explicit package config file", "%$scheme/",
+        files: files,
+        config: "%$scheme/.pkgs",
+        expect: {
+          "pconf": "%$scheme/.pkgs",
+          "iconf": "%$scheme/.pkgs",
+          "foo/": "%$scheme/pkgs/foo/",
+          "foo/bar": "%$scheme/pkgs/foo/bar",
+        });
+    }
+
+    {
+      // Specified package config as data: URI.
+      // The package config can be specified as a data: URI.
+      // (In that case, relative URI references in the config file won't work).
+      var files = {".packages": "foo:packages/foo/",
+                   "packages": {},
+                   "pkgs": fooPackage};
+      var dataUri = "data:,foo:%$scheme/pkgs/foo/\n";
+      addScheme("explicit data: config file", "%$scheme/",
+        files: files,
+        config: dataUri,
+        expect: {
+          "pconf": dataUri,
+          "iconf": dataUri,
+          "foo/": "%$scheme/pkgs/foo/",
+          "foo/bar": "%$scheme/pkgs/foo/bar",
+        });
+    }
+  }
+
+  // Tests where there are files on both http: and file: sources.
+
+  for (var entryScheme in const ["file", "http"]) {
+    for (var pkgScheme in const ["file", "http"]) {
+      // Package root.
+      if (entryScheme != pkgScheme) {
+        // Package dir and entry point on different schemes.
+        var files = {};
+        var https = {};
+        (entryScheme == "file" ? files : https)["main"] = testMain;
+        (pkgScheme == "file" ? files : https)["pkgs"] = fooPackage;
+        add("$pkgScheme pkg/$entryScheme main", "%$entryScheme/",
+          file: files, http: https,
+          root: "%$pkgScheme/pkgs/",
+          expect: {
+            "proot": "%$pkgScheme/pkgs/",
+            "iroot": "%$pkgScheme/pkgs/",
+            "foo": "%$pkgScheme/pkgs/foo",
+            "foo/": "%$pkgScheme/pkgs/foo/",
+            "foo/bar": "%$pkgScheme/pkgs/foo/bar",
+            "bar/bar": "%$pkgScheme/pkgs/bar/bar",
+            "foo.x": "qux",
+          });
+      }
+      // Package config. The configuration file may also be on either source.
+      for (var configScheme in const ["file", "http"]) {
+        // Don't do the boring stuff!
+        if (entryScheme == configScheme && entryScheme == pkgScheme) continue;
+        // Package config, packages and entry point not all on same scheme.
+        var files = {};
+        var https = {};
+        (entryScheme == "file" ? files : https)["main"] = testMain;
+        (configScheme == "file" ? files : https)[".pkgs"] =
+            "foo:%$pkgScheme/pkgs/foo/\n";
+        (pkgScheme == "file" ? files : https)["pkgs"] = fooPackage;
+        add("$pkgScheme pkg/$configScheme config/$entryScheme main",
+          "%$entryScheme/",
+          file: files, http: https,
+          config: "%$configScheme/.pkgs",
+          expect: {
+            "pconf": "%$configScheme/.pkgs",
+            "iconf": "%$configScheme/.pkgs",
+            "foo/": "%$pkgScheme/pkgs/foo/",
+            "foo/bar": "%$pkgScheme/pkgs/foo/bar",
+            "foo.x": "qux",
+          });
+      }
+    }
+  }
+}
+
+
+// ---------------------------------------------------------
+// Helper functionality.
+
+var fileHttpRegexp = new RegExp(r"%(?:file|http)/");
+
+// Executes a test in a configuration.
+//
+// The test must specify which main file to use
+// (`main`, `spawnMain` or `spawnUriMain`)
+// and any arguments which will be used by `spawnMain` and `spawnUriMain`.
+//
+// The [expect] map may be used to override the expectations of the
+// configuration on a value-by-value basis. Passing, e.g., `{"pconf": null}`
+// will override only the `pconf` (`Platform.packageConfig`) expectation.
+Future testConfiguration(Configuration conf) async {
+  print("-- ${conf.description}");
+  var description = conf.description;
+  try {
+    var output = await execDart(conf.mainFile,
+                                root: conf.root,
+                                config: conf.config,
+                                scriptArgs: conf.args);
+    match(JSON.decode(output), conf.expect, description, output);
+  } catch (e, s) {
+    // Unexpected error calling execDart or parsing the result.
+    // Report it and continue.
+    print("ERROR running $description: $e\n$s");
+    failingTests.putIfAbsent(description, () => []).add("$e");
+  }
+}
+
+
+/// Test that the output of running testMain matches the expectations.
+///
+/// The output is a string which is parsed as a JSON literal.
+/// The resulting map always maps strings to strings, or possibly `null`.
+/// The expectations can have non-string values other than null;
+/// they are `toString`'ed before being compared (so the caller can use a URI
+/// or a File/Directory directly as an expectation).
+void match(Map actuals, Map expectations, String desc, String actualJson) {
+  for (var key in expectations.keys) {
+    var expectation = expectations[key]?.toString();
+    var actual = actuals[key];
+    if (expectation != actual) {
+      print("ERROR: $desc: $key: Expected: <$expectation> Found: <$actual>");
+      failingTests.putIfAbsent(desc, ()=>[]).add(
+          "$key: $expectation != $actual");
+    }
+  }
+}
+
+const String improt = "import";  // Avoid multitest import rewriting.
+
+/// Script that prints the current state and the result of resolving
+/// a few package URIs. This script will be invoked in different settings,
+/// and the result will be parsed and compared to the expectations.
+const String testMain = """
+$improt "dart:convert" show JSON;
+$improt "dart:io" show Platform, Directory;
+$improt "dart:isolate" show Isolate;
+$improt "package:foo/foo.dart" deferred as foo;
+main(_) async {
+  String platformRoot = await Platform.packageRoot;
+  String platformConfig = await Platform.packageConfig;
+  Directory cwd = Directory.current;
+  Uri script = Platform.script;
+  Uri isolateRoot = await Isolate.packageRoot;
+  Uri isolateConfig = await Isolate.packageConfig;
+  Uri base = Uri.base;
+  Uri res1 = await Isolate.resolvePackageUri(Uri.parse("package:foo"));
+  Uri res2 = await Isolate.resolvePackageUri(Uri.parse("package:foo/"));
+  Uri res3 = await Isolate.resolvePackageUri(Uri.parse("package:foo/bar"));
+  Uri res4 = await Isolate.resolvePackageUri(Uri.parse("package:bar/bar"));
+  Uri res5 = await Isolate.resolvePackageUri(Uri.parse("relative/path"));
+  Uri res6 = await Isolate.resolvePackageUri(
+      Uri.parse("http://example.org/file"));
+  String fooX = await foo
+    .loadLibrary()
+    .timeout(const Duration(seconds: 1))
+    .then((_) => foo.x, onError: (_) => null);
+  print(JSON.encode({
+    "cwd": cwd.path,
+    "base": base?.toString(),
+    "script": script?.toString(),
+    "proot": platformRoot,
+    "pconf": platformConfig,
+    "iroot" : isolateRoot?.toString(),
+    "iconf" : isolateConfig?.toString(),
+    "foo": res1?.toString(),
+    "foo/": res2?.toString(),
+    "foo/bar": res3?.toString(),
+    "foo.x": fooX?.toString(),
+    "bar/bar": res4?.toString(),
+    "relative": res5?.toString(),
+    "nonpkg": res6?.toString(),
+  }));
+}
+""";
+
+/// Script that spawns a new Isolate using Isolate.spawnUri.
+///
+/// Takes the URI of the target isolate, the package config, the package root,
+/// and an automatic-package-resolution flag as command line arguments.
+/// Any further arguments are forwarded to the spawned isolate.
+const String spawnUriMain = """
+$improt "dart:isolate";
+$improt "dart:async";
+main(args) async {
+  Uri target = Uri.parse(args[0]);
+  Uri config = (args[1] == "-") ? null : Uri.parse(args[1]);
+  Uri root = (args[2] == "-") ? null : Uri.parse(args[2]);
+  bool search = args[3] == "true";
+  var restArgs = args.skip(4).toList();
+  // Port keeps isolate alive until spawned isolate terminates.
+  var port = new RawReceivePort();
+  port.handler = (res) async {
+    port.close();  // Close on exit or first error.
+    if (res != null) {
+      await new Future.error(res[0], new StackTrace.fromString(res[1]));
+    }
+  };
+  Isolate.spawnUri(target, restArgs, null,
+                   packageRoot: root, packageConfig: config,
+                   automaticPackageResolution: search,
+                   onError: port.sendPort, onExit: port.sendPort);
+}
+""";
+
+/// Script that spawns a new Isolate using Isolate.spawn.
+///
+/// Uses the first argument to select which target to spawn.
+/// Should be "main", "spawnUriMain", or anything else to spawn this script's own main.
+const String spawnMain = """
+$improt "dart:async";
+$improt "dart:isolate";
+$improt "%mainDir/main.dart" as test;
+$improt "%mainDir/spawnUriMain.dart" as spawnUri;
+main(List<String> args) async {
+  // Port keeps isolate alive until spawned isolate terminates.
+  var port = new RawReceivePort();
+  port.handler = (res) async {
+    port.close();  // Close on exit or first error.
+    if (res != null) {
+      await new Future.error(res[0], new StackTrace.fromString(res[1]));
+    }
+  };
+  var arg = args.first;
+  var rest = args.skip(1).toList();
+  var target;
+  if (arg == "main") {
+    target = test.main;
+  } else if (arg == "spawnUriMain") {
+    target = spawnUri.main;
+  } else {
+    target = main;
+  }
+  Isolate.spawn(target, rest, onError: port.sendPort, onExit: port.sendPort);
+}
+""";
+
+/// A package directory containing only one package, "foo", with one file.
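+/// Written to disk with [createFiles], this becomes `foo/foo.dart` containing
+/// `var x = 'qux';`.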
+const Map fooPackage = const { "foo": const { "foo": "var x = 'qux';" }};
+
+
+/// Runs the Dart executable with the provided parameters.
+///
+/// Captures and returns the output.
+Future<String> execDart(String script,
+                        {String root, String config,
+                         Iterable<String> scriptArgs}) async {
+  var checked = false;
+  assert((checked = true));
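+  // The assert body only runs when asserts are enabled, so `checked` is true
+  // exactly when this script itself executes in checked mode.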
+  // TODO: Find a way to change CWD before running script.
+  var executable = Platform.executable;
+  var args = [];
+  if (checked) args.add("--checked");
+  if (root != null) args.add("--package-root=$root");
+  if (config != null) args.add("--packages=$config");
+  args.add(script);
+  if (scriptArgs != null) {
+    args.addAll(scriptArgs);
+  }
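+  // Any stderr output or a non-zero exit code is treated as a failure.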
+  return Process.run(executable, args).then((results) {
+    if (results.exitCode != 0 || results.stderr.isNotEmpty) {
+      throw results.stderr;
+    }
+    return results.stdout;
+  });
+}
+
+/// Creates a number of files and subdirectories.
+///
+/// The [content] is the content of the directory itself. The map keys are
+/// names and the values are either strings that represent Dart file contents
+/// or maps that represent subdirectories.
+void createFiles(Directory tempDir, String subDir, Map content) {
+  Directory createDir(Directory base, String name) {
+    Directory newDir = new Directory(p.join(base.path, name));
+    newDir.createSync();
+    return newDir;
+  }
+
+  void createTextFile(Directory base, String name, String content) {
+    File newFile = new File(p.join(base.path, name));
+    newFile.writeAsStringSync(content);
+  }
+
+  void createRecursive(Directory dir, Map map) {
+    for (var name in map.keys) {
+      var content = map[name];
+      if (content is String) {
+        // If the name starts with "." it's a .packages file, otherwise it's
+        // a dart file. Those are the only files we care about in this test.
+        createTextFile(dir,
+                       name.startsWith(".") ? name : name + ".dart",
+                       content);
+      } else {
+        assert(content is Map);
+        var subdir = createDir(dir, name);
+        createRecursive(subdir, content);
+      }
+    }
+  }
+
+  createRecursive(createDir(tempDir, subDir), content);
+}
+
+/// Start an HTTP server which serves a directory/file structure.
+///
+/// The directories and files are described by [files].
+///
+/// Each map key is an entry in a directory. A `Map` value is a sub-directory
+/// and a `String` value is a text file.
+/// The file contents are run through [fixPaths] to allow them to be
+/// self-referential.
+Future<HttpServer> startServer(Map files) async {
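+  // The cascade returns the bound server immediately while `forEach` keeps
+  // handling incoming requests in the background.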
+  return (await HttpServer.bind(InternetAddress.LOOPBACK_IP_V4, 0))
+      ..forEach((request) {
+        var result = files;
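+        // Labeled block: `break onFailure` below skips the success path when a
+        // path segment cannot be resolved.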
+        onFailure: {
+          for (var part in request.uri.pathSegments) {
+            if (part.endsWith(".dart")) {
+              part = part.substring(0, part.length - 5);
+            }
+            if (result is Map) {
+              result = result[part];
+            } else {
+              break onFailure;
+            }
+          }
+          if (result is String) {
+            request.response..write(result)
+                            ..close();
+            return;
+          }
+        }
+        request.response..statusCode = HttpStatus.NOT_FOUND
+                        ..close();
+      });
+}
+
+// Counter used to avoid reusing temporary file or directory names.
+//
+// Used when adding extra files to an existing directory structure,
+// and when creating temporary directories.
+//
+// Some platform temporary-directory implementations are timer-based, and
+// creating two temp dirs within a short duration may cause a collision.
+int tmpNameCounter = 0;
+
+// Fresh file name.
+String freshName([String base = "tmp"]) => "$base${tmpNameCounter++}";
+
+Directory createTempDir() {
+  return Directory.systemTemp.createTempSync(freshName("pftest-"));
+}
+
+typedef void ConfigUpdate(Configuration configuration);
+
+/// The configuration for a single test.
+class Configuration {
+  /// The "description" of the test - a description of the set-up.
+  final String description;
+  /// The package root parameter passed to the Dart isolate.
+  ///
+  /// At most one of [root] and [config] should be supplied. If both are
+  /// omitted, a VM will search for a packages file or dir.
+  final String root;
+  /// The package configuration file location passed to the Dart isolate.
+  final String config;
+  /// Path to the main file to run.
+  final String mainFile;
+  /// List of arguments to pass to the main function.
+  final List<String> args;
+  /// The expected values for `Platform.package{Root,Config}`,
+  /// `Isolate.package{Root,Config}` and resolution of package URIs
+  /// in a `foo` package.
+  ///
+  /// The results are found by running the `main.dart` file inside [mainDir].
+  /// The tests can run this file after doing other `spawn` or `spawnUri` calls.
+  final Map expect;
+
+  Configuration({this.description,
+                 this.root,
+                 this.config,
+                 this.mainFile,
+                 this.args,
+                 this.expect});
+
+  // Gets the type of main file, one of `main`, `spawnMain` or `spawnUriMain`.
+  String get mainType {
+    var lastSlash = mainFile.lastIndexOf("/");
+    if (lastSlash < 0) {
+      // Assume it's a Windows path.
+      lastSlash = mainFile.lastIndexOf(r"\");
+    }
+    var name = mainFile.substring(lastSlash + 1, mainFile.length - 5);
+    assert(name == "main" || name == "spawnMain" || name == "spawnUriMain");
+    return name;
+  }
+
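+  /// The directory part of [mainFile], including the trailing separator.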
+  String get mainPath {
+    var lastSlash = mainFile.lastIndexOf("/");
+    if (lastSlash < 0) {
+      // Assume it's a Windows path.
+      lastSlash = mainFile.lastIndexOf(r"\");
+    }
+    return mainFile.substring(0, lastSlash + 1);
+  }
+
+  /// Create a new configuration from the old one.
+  ///
+  /// [description] is new description.
+  ///
+  /// [main] is one of `main`, `spawnMain` or `spawnUriMain`, and changes
+  /// the [Configuration.mainFile] to a different file in the same directory.
+  ///
+  /// [mainFile] overrides [Configuration.mainFile] completely, and ignores
+  /// [main].
+  ///
+  /// [newArgs] are prepended to the existing [Configuration.args].
+  ///
+  /// [args] overrides [Configuration.args] completely and ignores [newArgs].
+  ///
+  /// [expect] overrides individual expectations.
+  ///
+  /// [root] and [config] override the existing values.
+  Configuration update({
+      String description,
+      String main,
+      String mainFile,
+      String root,
+      String config,
+      List<String> args,
+      List<String> newArgs,
+      Map expect
+    }) {
+    return new Configuration(
+      description: description ?? this.description,
+      root: root ?? this.root,
+      config: config ?? this.config,
+      mainFile: mainFile ??
+          ((main == null) ? this.mainFile : "${this.mainPath}$main.dart"),
+      args:
+          args ?? (<String>[]..addAll(newArgs ?? const <String>[])
+                             ..addAll(this.args)),
+      expect: expect == null
+          ? this.expect
+          : new Map.from(this.expect)..addAll(expect ?? const {}));
+  }
+
+  // For debugging.
+  String toString() {
+    return "Configuration($description\n"
+      "  root  : $root\n"
+      "  config: $config\n"
+      "  main  : $mainFile\n"
+      "  args  : ${args.map((x) => '"$x"').join(" ")}\n"
+      ") : expect {\n${expect.keys.map((k) =>
+           '  "$k"'.padRight(6) + ":${JSON.encode(expect[k])}\n").join()}"
+      "}";
+  }
+}
+
+
+// Inserts the file [name] with [content] into the structure selected by [path].
+//
+// The [path] is a directory where the file is created. It must start with
+// either '%file/' or '%http/' to select the structure to put it into.
+//
+// The [name] should not have a trailing ".dart" for Dart files. Any file
+// not starting with "." is assumed to be a ".dart" file.
+void insertFileAt(Map file, Map http,
+                  String path, String name, String content) {
+  var parts = path.split('/').toList();
+  var dir = (parts[0] == "%file") ? file : http;
+  for (var i = 1; i < parts.length - 1; i++) {
+    var entry = parts[i];
+    dir = dir[entry] ?? (dir[entry] = {});
+  }
+  dir[name] = content;
+}
diff --git a/tests/standalone/standalone.status b/tests/standalone/standalone.status
index 4ab080c..bcad73f 100644
--- a/tests/standalone/standalone.status
+++ b/tests/standalone/standalone.status
@@ -7,6 +7,9 @@
 # listed in tests/lib/analyzer/analyze_tests.status without the "standalone"
 # prefix.
 
+packages_file_test: Skip # Issue 26715
+packages_file_test/none: Skip # Contains no tests.
+
 package/invalid_uri_test: Fail, OK # CompileTimeErrors intentionally
 package/scenarios/packages_file_strange_formatting/empty_package_dir_test: Fail, OK # CompileTimeErrors intentionally
 package/scenarios/empty_packages_file/empty_packages_file_discovery_test: Fail, OK # CompileTimeErrors intentionally
@@ -14,10 +17,12 @@
 package/scenarios/invalid/invalid_package_name_test: RuntimeError, CompileTimeError # Errors intentionally
 package/scenarios/invalid/same_package_twice_test.dart: RuntimeError, CompileTimeError # Errors intentionally
 full_coverage_test: Pass, Slow, Timeout
-verified_mem_test: Pass, Slow, Timeout # Does verify before and after GC.
 
 issue14236_test: Pass # Do not remove this line. It serves as a marker for Issue 14516 comment #4.
 
+[ ($runtime != vm || $compiler != none) && $compiler != dartanalyzer && $compiler != dart2analyzer ]
+packages_file_test: Skip # Uses Platform.executable
+
 [ ($runtime != vm && $runtime != dart_precompiled && $runtime != dart_app) && ($runtime != drt || $compiler != none) ]
 no_assert_test: Fail, OK # This is testing a vm flag.
 
@@ -108,6 +113,7 @@
 unboxed_int_converter_test: Skip
 pair_location_remapping_test: Skip
 regress_25335_test: Skip # Int64List not supported.
+deferred_transitive_import_error_test: Skip # Contains intentional errors.
 
 [ $compiler == dart2js && $cps_ir && $checked ]
 *: Skip # `assert` not implemented
@@ -135,6 +141,9 @@
 out_of_memory_test: Skip # passes on Mac, crashes on Linux
 oom_error_stacktrace_test: Skip # Fails on Linux
 
+[ $arch == simarm && $mode == debug && $checked ]
+io/web_socket_test: Pass, Fail # Issue 26814
+
 [ $arch == mips ]
 io/file_stat_test: Fail # Issue 17440
 io/process_sync_test: Skip # Starts 10 dart subprocesses, uses too much memory.
@@ -164,6 +173,7 @@
 package/scenarios/packages_file_in_parent/sub/packages_file_in_parent_test: StaticWarning
 typed_data_test: StaticWarning
 typed_data_view_test: StaticWarning
+deferred_transitive_import_error_test: Skip # Contains intentional errors.
 
 [ $compiler == dart2analyzer ]
 package/package1_test: CompileTimeError
@@ -181,11 +191,9 @@
 io/skipping_dart2js_compilations_test: Fail # Issue 19551.
 verbose_gc_to_bmu_test: Skip
 io/platform_resolved_executable_test/06: RuntimeError  # Issue 23641
+io/process_sync_test: Pass, Timeout # Issue 24596
 io/sleep_test: Pass, Fail # Issue 25757
 
-[ $arch != ia32 && $arch != x64 && $arch != simarm && $arch != simarmv5te && $mode == debug ]
-verified_mem_test: Skip  # Not yet implemented.
-
 [ ($runtime == vm || $runtime == dart_precompiled || $runtime == dart_app) && $mode == debug && $builder_tag == asan ]
 full_coverage_test: Skip  # Timeout.
 io/file_lock_test: Skip  # Timeout.
@@ -253,7 +261,7 @@
 io/test_runner_test: Skip # Platform.executable
 io/file_lock_test: Skip # Platform.executable
 io/code_collection_test: Skip # Platform.executable
-io/file_lock_test: Skip # Platform.executable
+io/file_blocking_lock_test: Skip # Platform.executable
 io/raw_socket_cross_process_test: Skip # Platform.executable
 io/test_extension_test: Skip # Platform.executable
 io/named_pipe_script_test: Skip # Platform.executable
@@ -310,3 +318,14 @@
 
 oom_error_stacktrace_test: Skip # Issue #26377
 out_of_memory_test: Skip # Issue #26377
+
+[ $runtime == dart_precompiled ]
+deferred_transitive_import_error_test: Skip # Contains intentional errors.
+
+[ $hot_reload ]
+io/bytes_builder_test: RuntimeError
+io/file_input_stream_test: Crash
+io/file_test: Pass, Crash
+io/web_socket_protocol_processor_test: Pass, Crash
+map_insert_remove_oom_test: Crash
+priority_queue_stress_test: Crash
diff --git a/tests/standalone/verified_mem_test.dart b/tests/standalone/verified_mem_test.dart
deleted file mode 100644
index 53da2ca..0000000
--- a/tests/standalone/verified_mem_test.dart
+++ /dev/null
@@ -1,17 +0,0 @@
-// Copyright (c) 2014, the Dart project authors.  Please see the AUTHORS file
-// for details. All rights reserved. Use of this source code is governed by a
-// BSD-style license that can be found in the LICENSE file.
-//
-// Test write barrier verification mode.
-// VMOptions=--verify_before_gc --verify_after_gc --old_gen_growth_rate=1
-
-var a = [];
-
-void main() {
-  for (int i = 0; i < 12; ++i) {
-    a.add(new List(12345));
-  }
-  for (int i = 0; i < 1234; ++i) {
-    a[0] = new List(100000);
-  }
-}
diff --git a/third_party/firefox_jsshell/README.google b/third_party/firefox_jsshell/README.google
index 08b90f8..7868748 100644
--- a/third_party/firefox_jsshell/README.google
+++ b/third_party/firefox_jsshell/README.google
@@ -1,8 +1,8 @@
 Name: Firefox command line javascript shell.
 Short Name: js-shell
-URL: http://ftp.mozilla.org/pub/mozilla.org/firefox/candidates/38-candidates/build2/
-Version: JavaScript-C38
-Date: May 05 2015
+URL: http://ftp.mozilla.org/pub/firefox/candidates/47.0-candidates/build2/
+Version: JavaScript-C47
+Date: June 3, 2016
 License: MPL, http://www.mozilla.org/MPL
 
 Description:
diff --git a/third_party/firefox_jsshell/linux/jsshell.tar.gz.sha1 b/third_party/firefox_jsshell/linux/jsshell.tar.gz.sha1
index 8f8d719..1348fe9 100644
--- a/third_party/firefox_jsshell/linux/jsshell.tar.gz.sha1
+++ b/third_party/firefox_jsshell/linux/jsshell.tar.gz.sha1
@@ -1 +1 @@
-624241d790b53c24ea67997ed658f31f1ed9292d
\ No newline at end of file
+f44ede84757b76c86bc6dc8aee8f24699390623f
\ No newline at end of file
diff --git a/third_party/firefox_jsshell/mac/jsshell.tar.gz.sha1 b/third_party/firefox_jsshell/mac/jsshell.tar.gz.sha1
index a5c645a..952f5cc 100644
--- a/third_party/firefox_jsshell/mac/jsshell.tar.gz.sha1
+++ b/third_party/firefox_jsshell/mac/jsshell.tar.gz.sha1
@@ -1 +1 @@
-16c59a086b713720df0154cd4ceaed2ee8cf0d33
\ No newline at end of file
+3d866ec4efe98698381671b25940b8ed1926ac4a
\ No newline at end of file
diff --git a/third_party/firefox_jsshell/win/jsshell.tar.gz.sha1 b/third_party/firefox_jsshell/win/jsshell.tar.gz.sha1
index d43fcd1..f8c7c1b 100644
--- a/third_party/firefox_jsshell/win/jsshell.tar.gz.sha1
+++ b/third_party/firefox_jsshell/win/jsshell.tar.gz.sha1
@@ -1 +1 @@
-a7ed194f518813a9915f67026ef138dd96adfec1
\ No newline at end of file
+354b952f339d454fb89f2a8288f755d37eae6443
\ No newline at end of file
diff --git a/tools/.packages b/tools/.packages
new file mode 100644
index 0000000..888665a
--- /dev/null
+++ b/tools/.packages
@@ -0,0 +1,9 @@
+# The test runner logic depends on `package:yaml`. Because the test runner is
+# not structured as a pub package, this file was generated manually and lists
+# all of yaml's transitive dependencies.
+charcode:../third_party/pkg/charcode/lib/
+collection:../third_party/pkg/collection/lib/
+path:../third_party/pkg/path/lib/
+source_span:../third_party/pkg/source_span/lib/
+string_scanner:../third_party/pkg/string_scanner/lib/
+yaml:../third_party/pkg/yaml/lib/
diff --git a/tools/VERSION b/tools/VERSION
index b32057e..6318e03 100644
--- a/tools/VERSION
+++ b/tools/VERSION
@@ -25,7 +25,7 @@
 #
 CHANNEL stable
 MAJOR 1
-MINOR 17
-PATCH 1
+MINOR 18
+PATCH 0
 PRERELEASE 0
 PRERELEASE_PATCH 0
diff --git a/tools/bots/compiler.py b/tools/bots/compiler.py
index 581d762..1127f27 100644
--- a/tools/bots/compiler.py
+++ b/tools/bots/compiler.py
@@ -202,7 +202,7 @@
     else:
       cmd.extend(['--progress=buildbot', '-v'])
 
-    cmd.append('--clear_browser_cache')
+    cmd.append('--reset-browser-configuration')
 
     global IsFirstTestStepCall
     if IsFirstTestStepCall:
diff --git a/tools/build.py b/tools/build.py
index 518a015..1f9eb8b 100755
--- a/tools/build.py
+++ b/tools/build.py
@@ -60,7 +60,7 @@
       default=utils.GuessArchitecture())
   result.add_option("--os",
     help='Target OSs (comma-separated).',
-    metavar='[all,host,android]',
+    metavar='[all,host,android,fuchsia]',
     default='host')
   result.add_option("-t", "--toolchain",
     help='Cross-compiler toolchain path',
@@ -109,11 +109,11 @@
       return False
   options.os = [ProcessOsOption(os_name) for os_name in options.os]
   for os_name in options.os:
-    if not os_name in ['android', 'freebsd', 'linux', 'macos', 'win32']:
+    if not os_name in ['android', 'freebsd', 'fuchsia', 'linux', 'macos', 'win32']:
       print "Unknown os %s" % os_name
       return False
     if os_name != HOST_OS:
-      if os_name != 'android':
+      if os_name != 'android' and os_name != 'fuchsia':
         print "Unsupported target os %s" % os_name
         return False
       if not HOST_OS in ['linux']:
@@ -148,6 +148,13 @@
     if arch == 'x64':
       return os.path.join(android_toolchain, 'x86_64-linux-android')
 
+  if target_os == 'fuchsia':
+    fuchsia_toolchain = GetFuchsiaToolchainDir(HOST_OS, arch)
+    if arch == 'arm64':
+      return os.path.join(fuchsia_toolchain, 'aarch64-elf')
+    if arch == 'x64':
+      return os.path.join(fuchsia_toolchain, 'x86_64-elf')
+
   # If no cross compiler is specified, only try to figure one out on Linux.
   if not HOST_OS in ['linux']:
     raise Exception('Unless --toolchain is used cross-building is only '
@@ -175,6 +182,8 @@
   linker = ""
   if target_os == 'android':
     linker = os.path.join(DART_ROOT, 'tools', 'android_link.py')
+  elif target_os == 'fuchsia':
+    linker = os.path.join(DART_ROOT, 'tools', 'fuchsia_link.py')
   elif toolchainprefix:
     linker = toolchainprefix + "-g++"
 
@@ -226,6 +235,28 @@
   return android_toolchain
 
 
+def GetFuchsiaToolchainDir(host_os, target_arch):
+  global THIRD_PARTY_ROOT
+  if host_os not in ['linux']:
+    raise Exception('Unsupported host os %s' % host_os)
+  if target_arch not in ['x64', 'arm64',]:
+    raise Exception('Unsupported target architecture %s' % target_arch)
+
+  # Set up the path to the Fuchsia tools.
+  CheckDirExists(THIRD_PARTY_ROOT, 'third party tools')
+  fuchsia_tools = os.path.join(THIRD_PARTY_ROOT, 'fuchsia_tools')
+  CheckDirExists(fuchsia_tools, 'Fuchsia tools')
+
+  toolchain_arch = 'x86_64-elf-5.3.0-Linux-x86_64'
+  if target_arch == 'arm64':
+    toolchain_arch = 'aarch64-elf-5.3.0-Linux-x86_64'
+  fuchsia_toolchain = os.path.join(
+      fuchsia_tools, 'toolchains', toolchain_arch, 'bin')
+  CheckDirExists(fuchsia_toolchain, 'Fuchsia toolchain')
+
+  return fuchsia_toolchain
+
+
 def Execute(args):
   process = subprocess.Popen(args)
   process.wait()
diff --git a/tools/dartium/buildbot_annotated_steps.py b/tools/dartium/buildbot_annotated_steps.py
index a4d7fc3..c568b8e 100755
--- a/tools/dartium/buildbot_annotated_steps.py
+++ b/tools/dartium/buildbot_annotated_steps.py
@@ -30,9 +30,12 @@
 
 def RunDartTests(mode, component, suite, arch, checked, test_filter=None,
                  is_win_ninja=False):
-  """Runs the Dart WebKit Layout tests.
+  """Runs tests using the Dart test.py or the layout test runner.
   """
-  cmd = [sys.executable]
+  cmd = []
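+  # On Linux, run under a virtual X server (xvfb) so browsers can start
+  # without a physical display.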
+  if sys.platform.startswith('linux'):
+    cmd = ['xvfb-run', '--server-args=-screen 0 1024x768x24','-a']
+  cmd.append(sys.executable)
   script = os.path.join(DART_PATH, 'tools', 'dartium', 'test.py')
   cmd.append(script)
   cmd.append('--buildbot')
diff --git a/tools/dartium/upload_steps.py b/tools/dartium/upload_steps.py
index 2ab3fc8..d0a7cb0 100755
--- a/tools/dartium/upload_steps.py
+++ b/tools/dartium/upload_steps.py
@@ -238,18 +238,21 @@
   dir_name = os.path.dirname(layout_test_results_dir)
   base_name = os.path.basename(layout_test_results_dir)
   cwd = os.getcwd()
-  os.chdir(dir_name)
+  try:
+    os.chdir(dir_name)
 
-  archive_name = 'layout_test_results.zip'
-  archive.ZipDir(archive_name, base_name)
+    archive_name = 'layout_test_results.zip'
+    archive.ZipDir(archive_name, base_name)
 
-  target = '/'.join([GS_DIR, 'layout-test-results', name, component + '-' +
-                     checked + '-' + version + '.zip'])
-  status = OldUploadFile(os.path.abspath(archive_name), GS_SITE + target)
-  os.remove(archive_name)
-  if status == 0:
-    print ('@@@STEP_LINK@download@' + GS_URL + target + '@@@')
-  else:
+    target = '/'.join([GS_DIR, 'layout-test-results', name, component + '-' +
+                       checked + '-' + version + '.zip'])
+    status = OldUploadFile(os.path.abspath(archive_name), GS_SITE + target)
+    os.remove(archive_name)
+    if status == 0:
+      print ('@@@STEP_LINK@download@' + GS_URL + target + '@@@')
+    else:
+      print '@@@STEP_FAILURE@@@'
+  except:
     print '@@@STEP_FAILURE@@@'
   os.chdir(cwd)
 
diff --git a/tools/deps/dartium.deps/DEPS b/tools/deps/dartium.deps/DEPS
index 8c8343c..a12cdc3 100644
--- a/tools/deps/dartium.deps/DEPS
+++ b/tools/deps/dartium.deps/DEPS
@@ -8,8 +8,8 @@
 # Now we need to override some settings and add some new ones.
 
 vars.update({
-  "dartium_chromium_commit": "b6b6b76417ce80120ee48b662a7c7ef257723494",
-  "dartium_webkit_commit": "586bcb7d9e5c46c84a3c9d1f43f26343da78548a",
+  "dartium_chromium_commit": "67a7ba9669f7bb0300ef35085d4e6bb98b1966cc",
+  "dartium_webkit_commit": "d3db7d1b53979ca91cbf8f3117971f49d0fddf13",
   "chromium_base_revision": "338390",
 
   # We use mirrors of all github repos to guarantee reproducibility and
@@ -45,7 +45,7 @@
   "mime_rev": "@75890811d4af5af080351ba8a2853ad4c8df98dd",
   "metatest_rev": "@e5aa8e4e19fc4188ac2f6d38368a47d8f07c3df1",
   "oauth2_rev": "@1bff41f4d54505c36f2d1a001b83b8b745c452f5",
-  "observatory_pub_packages_rev": "@cf90eb9077177d3d6b3fd5e8289477c2385c026a",
+  "observatory_pub_packages_rev": "@e5e1e543bea10d4bed95b22ad3e7aa2b20a23584",
   "package_config_rev": "@0.1.3",
   "path_rev": "@b657c0854d1cf41c014986fa9d2321f1173df805",
   "plugin_tag": "@0.1.0",
@@ -68,7 +68,7 @@
   "web_components_rev": "@6349e09f9118dce7ae1b309af5763745e25a9d61",
   "WebCore_rev": "@a86fe28efadcfc781f836037a80f27e22a5dad17",
 
-  "co19_rev": "@3ed795ea02e022ef19c77cf1b6095b7c8f5584d0",
+  "co19_rev": "@3f0a4bc9a080a792cdf5f093147a900f99ea301f",
 })
 
 deps.update({
diff --git a/tools/dom/scripts/dartmetadata.py b/tools/dom/scripts/dartmetadata.py
index 37877f1..2b2d594 100644
--- a/tools/dom/scripts/dartmetadata.py
+++ b/tools/dom/scripts/dartmetadata.py
@@ -255,12 +255,12 @@
     ],
 
     'MediaStream.getAudioTracks': [
-      "@Creates('JSExtendableArray')",
+      "@Creates('JSExtendableArray|MediaStreamTrack')",
       "@Returns('JSExtendableArray')",
     ],
 
     'MediaStream.getVideoTracks': [
-      "@Creates('JSExtendableArray')",
+      "@Creates('JSExtendableArray|MediaStreamTrack')",
       "@Returns('JSExtendableArray')",
     ],
 
diff --git a/tools/dom/templates/html/impl/impl_AudioNode.darttemplate b/tools/dom/templates/html/impl/impl_AudioNode.darttemplate
index 43db4b3..e5ba37a 100644
--- a/tools/dom/templates/html/impl/impl_AudioNode.darttemplate
+++ b/tools/dom/templates/html/impl/impl_AudioNode.darttemplate
@@ -7,10 +7,12 @@
 $(ANNOTATIONS)$(NATIVESPEC)class $CLASSNAME$EXTENDS$IMPLEMENTS {
 $!MEMBERS
   @DomName('AudioNode.connect')
-  void connectNode(AudioNode destination, [int output = 0, int input = 0]) =>
-      _connect(destination, output, input);
+  void connectNode(AudioNode destination, [int output = 0, int input = 0]) {
+    _connect(destination, output, input);
+  }
 
   @DomName('AudioNode.connect')
-  void connectParam(AudioParam destination, [int output = 0]) =>
-      _connect(destination, output);
+  void connectParam(AudioParam destination, [int output = 0]) {
+    _connect(destination, output);
+  }
 }
diff --git a/tools/fuchsia_link.py b/tools/fuchsia_link.py
new file mode 100755
index 0000000..c3c537b
--- /dev/null
+++ b/tools/fuchsia_link.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2016 The Dart Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script performs the final link step for Fuchsia executables.
+Usage:
+./fuchsia_link {arm64,x64} {executable,library,shared_library}
+               {host,target} [linker args]
+"""
+
+import os
+import subprocess
+import sys
+
+# Figure out where we are.
+SCRIPT_DIR = os.path.dirname(sys.argv[0])
+DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
+THIRD_PARTY_ROOT = os.path.join(DART_ROOT, 'third_party')
+
+
+def CheckDirExists(path, docstring):
+  if not os.path.isdir(path):
+    raise Exception('Could not find %s directory %s'
+          % (docstring, path))
+
+
+def execute(args):
+  process = subprocess.Popen(args)
+  process.wait()
+  return process.returncode
+
+
+def main():
+  if len(sys.argv) < 5:
+    raise Exception(sys.argv[0] + " failed: not enough arguments")
+
+  # gyp puts -shared first in a shared_library link. Remove it.
+  if sys.argv[1] == '-shared':
+    sys.argv.remove('-shared')
+
+  # Grab the command line arguments.
+  target_arch = sys.argv[1]
+  link_type = sys.argv[2]
+  link_target = sys.argv[3]
+  link_args = sys.argv[4:]
+
+  # Check arguments.
+  if target_arch not in ['arm64', 'x64',]:
+    raise Exception(sys.argv[0] +
+        " first argument must be 'arm64', or 'x64'")
+  if link_type not in ['executable', 'library', 'shared_library']:
+    raise Exception(sys.argv[0] +
+      " second argument must be 'executable' or 'library' or 'shared_library'")
+  if link_target not in ['host', 'target']:
+    raise Exception(sys.argv[0] + " third argument must be 'host' or 'target'")
+
+  # TODO(zra): Figure out how to link a shared library with the
+  # cross-compilers. For now, we disable it by generating empty files
+  # for the results. We disable it here to avoid inspecting the OS type in
+  # the gyp files.
+  if link_type == 'shared_library':
+    print "NOT linking shared library for Fuchsia."
+    o_index = link_args.index('-o')
+    output = os.path.join(DART_ROOT, link_args[o_index + 1])
+    open(output, 'a').close()
+    sys.exit(0)
+
+  # Set up the path to the Fuchsia tools.
+  CheckDirExists(THIRD_PARTY_ROOT, 'third party tools')
+  fuchsia_tools = os.path.join(THIRD_PARTY_ROOT, 'fuchsia_tools')
+  CheckDirExists(fuchsia_tools, 'Fuchsia tools')
+
+  # Set up the directory of the Fuchsia cross-compiler toolchain.
+  toolchain_arch = 'x86_64-elf-5.3.0-Linux-x86_64'
+  if target_arch == 'arm64':
+    toolchain_arch = 'aarch64-elf-5.3.0-Linux-x86_64'
+  fuchsia_toolchain = os.path.join(
+      fuchsia_tools, 'toolchains', toolchain_arch, 'bin')
+  CheckDirExists(fuchsia_toolchain, 'Fuchsia toolchain')
+
+  # Set up the path to the linker executable.
+  fuchsia_linker = os.path.join(fuchsia_toolchain, 'x86_64-elf-g++')
+  if target_arch == 'arm64':
+    fuchsia_linker = os.path.join(fuchsia_toolchain, 'aarch64-elf-c++')
+
+  # Grab the path to libgcc.a, which we must explicitly add to the link,
+  # by invoking the cross-compiler with the -print-libgcc-file-name flag.
+  fuchsia_gcc = os.path.join(fuchsia_toolchain, 'x86_64-elf-gcc')
+  if target_arch == 'arm64':
+    fuchsia_gcc = os.path.join(fuchsia_toolchain, 'aarch64-elf-gcc')
+  fuchsia_libgcc = subprocess.check_output(
+      [fuchsia_gcc, '-print-libgcc-file-name']).strip()
+
+  # Set up the path to the system root directory, which is where we'll find the
+  # Fuchsia specific system includes and libraries.
+  fuchsia_sysroot = os.path.join(fuchsia_tools, 'sysroot', 'x86_64')
+  if target_arch == 'arm64':
+    fuchsia_sysroot = os.path.join(fuchsia_tools, 'sysroot', 'arm64')
+  CheckDirExists(fuchsia_sysroot, 'Fuchsia sysroot')
+  fuchsia_lib = os.path.join(fuchsia_sysroot, 'usr', 'lib')
+  crtn_fuchsia = os.path.join(fuchsia_lib, 'crtn.o')
+
+  if link_target == 'target':
+    # Add and remove libraries as listed in configurations_fuchsia.gypi
+    libs_to_rm = ['-lrt', '-lpthread', '-ldl']
+    libs_to_add = [fuchsia_libgcc, '-lc',]
+
+    # Add crtn_fuchsia to end if we are linking an executable.
+    if link_type == 'executable':
+      libs_to_add.extend([crtn_fuchsia])
+
+    link_args = [i for i in link_args if i not in libs_to_rm]
+    link_args.extend(libs_to_add)
+
+    link_args.insert(0, fuchsia_linker)
+  else:
+    link_args.extend(['-ldl', '-lrt'])
+    link_args.insert(0, 'g++')
+
+  print ' '.join(link_args)
+  sys.exit(execute(link_args))
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/gyp/configurations.gypi b/tools/gyp/configurations.gypi
index 29f457e..267be04 100644
--- a/tools/gyp/configurations.gypi
+++ b/tools/gyp/configurations.gypi
@@ -41,6 +41,7 @@
   },
   'includes': [
     'configurations_android.gypi',
+    'configurations_fuchsia.gypi',
     'configurations_make.gypi',
     'configurations_xcode.gypi',
     'configurations_msvs.gypi',
@@ -729,6 +730,21 @@
         ],
       },
 
+      # Fuchsia configurations. The configuration names explicitly include
+      # 'Fuchsia' because we are cross-building from Linux, and, when building
+      # the standalone VM, we cannot inspect the gyp built-in 'OS' variable to
+      # figure out that we are building for Fuchsia. Since we have not re-run
+      # gyp, it will still be 'linux'.
+      'ProductFuchsiaX64': {
+        'inherit_from': [
+          'Dart_Base', 'Dart_x64_Base', 'Dart_Product',
+          'Dart_Fuchsia_Base',
+          'Dart_Fuchsia_x64_Base',
+          'Dart_Fuchsia_Product',
+        ],
+      },
+
       # Android configurations. The configuration names explicitly include
       # 'Android' because we are cross-building from Linux, and, when building
       # the standalone VM, we cannot inspect the gyp built-in 'OS' variable to
diff --git a/tools/gyp/configurations_android.gypi b/tools/gyp/configurations_android.gypi
index 8708d6d..2aa561b 100644
--- a/tools/gyp/configurations_android.gypi
+++ b/tools/gyp/configurations_android.gypi
@@ -19,8 +19,17 @@
       # order.
       'Dart_Android_Base': {
         'abstract': 1,
-        'cflags': [ '-Wno-abi', '-Wall', '-W', '-Wno-unused-parameter',
-                    '-Wnon-virtual-dtor', '-fno-rtti', '-fno-exceptions',],
+        'cflags': [
+          # No -Werror due to warnings in stl.
+          '<@(common_gcc_warning_flags)',
+          '-Wnon-virtual-dtor',
+          '-Wvla',
+          '-Woverloaded-virtual',
+          '-g3',
+          '-ggdb3',
+          '-fno-rtti',
+          '-fno-exceptions',
+        ],
         'target_conditions': [
           ['_toolset=="target"', {
             'defines': [
diff --git a/tools/gyp/configurations_fuchsia.gypi b/tools/gyp/configurations_fuchsia.gypi
new file mode 100644
index 0000000..5b291d3
--- /dev/null
+++ b/tools/gyp/configurations_fuchsia.gypi
@@ -0,0 +1,152 @@
+# Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+# for details. All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+# Definitions for building standalone Dart binaries to run on Fuchsia.
+
+{
+  'variables': {
+    'fuchsia_tools': '<(PRODUCT_DIR)/../../third_party/fuchsia_tools/',
+  },  # variables
+  'target_defaults': {
+    'configurations': {
+      'Dart_Fuchsia_Base': {
+        'abstract': 1,
+        'cflags': [
+          '-Werror',
+          '<@(common_gcc_warning_flags)',
+          '-Wnon-virtual-dtor',
+          '-Wvla',
+          '-Woverloaded-virtual',
+          '-g3',
+          '-ggdb3',
+          '-fno-rtti',
+          '-fno-exceptions',
+          '-fstack-protector',
+          '-Wa,--noexecstack',
+        ],
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'cflags!': [
+              '-pthread',  # Not supported by the Fuchsia toolchain.
+            ],
+          }],
+        ],
+      },
+      'Dart_Fuchsia_Debug': {
+        'abstract': 1,
+        'defines': [
+          'DEBUG',
+        ],
+        'cflags': [
+          '-fno-omit-frame-pointer',
+        ],
+      },
+      'Dart_Fuchsia_Release': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+        ],
+        'cflags!': [
+          '-O2',
+          '-Os',
+        ],
+        'cflags': [
+          '-fno-omit-frame-pointer',
+          '-fdata-sections',
+          '-ffunction-sections',
+          '-O3',
+        ],
+      },
+      'Dart_Fuchsia_Product': {
+        'abstract': 1,
+        'defines': [
+          'NDEBUG',
+          'PRODUCT',
+        ],
+        'cflags!': [
+          '-O2',
+          '-Os',
+        ],
+        'cflags': [
+          '-fdata-sections',
+          '-ffunction-sections',
+          '-O3',
+        ],
+      },
+      'Dart_Fuchsia_x64_Base': {
+        'abstract': 1,
+        'variables': {
+          'fuchsia_sysroot': '<(fuchsia_tools)/sysroot/x86_64',
+          'fuchsia_include': '<(fuchsia_sysroot)/usr/include',
+          'fuchsia_lib': '<(fuchsia_sysroot)/usr/lib',
+        },
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'defines': [
+              'TARGET_OS_FUCHSIA',
+            ],
+            'cflags': [
+              '--sysroot=<(fuchsia_sysroot)',
+              '-I<(fuchsia_include)',
+              '-fno-threadsafe-statics',
+            ],
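+            # The first three ldflags entries below are not real linker flags;
+            # they are the arch/type/target arguments consumed by
+            # tools/fuchsia_link.py.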
+            'ldflags': [
+              'x64', '>(_type)', 'target',
+              '-nostdlib',
+              '-T<(fuchsia_sysroot)/usr/user.ld',
+              '-L<(fuchsia_lib)',
+              '-Wl,-z,noexecstack',
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+              '<(fuchsia_lib)/crt1.o',
+              '<(fuchsia_lib)/crti.o',
+            ],
+            'ldflags!': [
+              '-pthread',
+            ],
+          }],
+          ['_toolset=="host"', {
+            'cflags': [ '-pthread' ],
+            'ldflags': [ '-pthread' ],
+          }],
+        ],
+      },
+      'Dart_Fuchsia_arm64_Base': {
+        'abstract': 1,
+        'variables': {
+          'fuchsia_sysroot': '<(fuchsia_tools)/sysroot/arm64',
+          'fuchsia_include': '<(fuchsia_sysroot)/usr/include',
+          'fuchsia_lib': '<(fuchsia_sysroot)/usr/lib',
+        },
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'defines': [
+              'TARGET_OS_FUCHSIA',
+            ],
+            'cflags': [
+              '--sysroot=<(fuchsia_sysroot)',
+              '-I<(fuchsia_include)',
+              '-fno-threadsafe-statics',
+            ],
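+            # As above, the first three entries are consumed by
+            # tools/fuchsia_link.py.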
+            'ldflags': [
+              'arm64', '>(_type)', 'target',
+              '-nostdlib',
+              '-L<(fuchsia_lib)',
+              '-Wl,-z,noexecstack',
+              '-Wl,-z,now',
+              '-Wl,-z,relro',
+            ],
+            'ldflags!': [
+              '-pthread',
+            ],
+          }],
+          ['_toolset=="host"', {
+            'cflags': [ '-pthread' ],
+            'ldflags': [ '-pthread' ],
+          }],
+        ],
+      },  # Dart_Fuchsia_arm64_Base
+    },  # configurations
+  },  # target_defaults
+}
diff --git a/tools/gyp/configurations_make.gypi b/tools/gyp/configurations_make.gypi
index 05292bf..8b61127 100644
--- a/tools/gyp/configurations_make.gypi
+++ b/tools/gyp/configurations_make.gypi
@@ -18,16 +18,11 @@
           '<@(common_gcc_warning_flags)',
           '-Wnon-virtual-dtor',
           '-Wvla',
-          '-Wno-conversion-null',
           '-Woverloaded-virtual',
           '-g3',
           '-ggdb3',
-          # TODO(iposva): Figure out if we need to pass anything else.
-          #'-ansi',
           '-fno-rtti',
           '-fno-exceptions',
-          # '-fvisibility=hidden',
-          # '-fvisibility-inlines-hidden',
           '-fstack-protector',
           '-Wa,--noexecstack',
         ],
diff --git a/tools/gyp/configurations_msvs.gypi b/tools/gyp/configurations_msvs.gypi
index adcc1d4..68a08c5 100644
--- a/tools/gyp/configurations_msvs.gypi
+++ b/tools/gyp/configurations_msvs.gypi
@@ -10,6 +10,11 @@
     'configurations': {
       'Dart_Win_Base': {
         'abstract': 1,
+        'msvs_configuration_attributes': {
+          'OutputDirectory': '<(DEPTH)\\out\\$(ConfigurationName)',
+          'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)',
+          'CharacterSet': '1',
+        },
         'defines': [
           '_HAS_EXCEPTIONS=0',  # disable C++ exceptions use in C++ std. libs.
         ],
diff --git a/tools/safari_factory_reset.py b/tools/safari_factory_reset.py
new file mode 100755
index 0000000..9276fa5
--- /dev/null
+++ b/tools/safari_factory_reset.py
@@ -0,0 +1,27 @@
+#!/usr/bin/env python
+# Copyright (c) 2011, the Dart project authors.  Please see the AUTHORS file
+# for details. All rights reserved. Use of this source code is governed by a
+# BSD-style license that can be found in the LICENSE file.
+
+import os
+import string
+import subprocess
+import sys
+
+import utils
+
+
+def Main():
+  args = sys.argv[1:]
+  tools_dir = os.path.dirname(os.path.realpath(__file__))
+  dart_script_name = os.path.join(
+    tools_dir, 'testing', 'dart', 'reset_safari.dart');
+  command = [utils.CheckedInSdkExecutable(),
+             '--checked', dart_script_name] + args
+  exit_code = subprocess.call(command)
+  utils.DiagnoseExitCode(exit_code, command)
+  return exit_code
+
+
+if __name__ == '__main__':
+  sys.exit(Main())
diff --git a/tools/testing/dart/browser_controller.dart b/tools/testing/dart/browser_controller.dart
index 2776569..cacdd2a 100644
--- a/tools/testing/dart/browser_controller.dart
+++ b/tools/testing/dart/browser_controller.dart
@@ -4,16 +4,19 @@
 library browser;
 
 import "dart:async";
-import "dart:convert" show LineSplitter, UTF8, JSON;
+import "dart:convert" show UTF8, JSON;
 import "dart:core";
 import "dart:io";
-import "dart:math" show max, min;
+import "dart:math" show min;
 
 import 'android.dart';
 import 'http_server.dart';
 import 'path.dart';
 import 'utils.dart';
 
+import 'reset_safari.dart' show
+    killAndResetSafari;
+
 class BrowserOutput {
   final StringBuffer stdout = new StringBuffer();
   final StringBuffer stderr = new StringBuffer();
@@ -53,10 +56,10 @@
   String id;
 
   /**
-   * Delete the browser specific caches on startup.
+   * Reset the browser to a known configuration on start-up.
    * Browser specific implementations are free to ignore this.
    */
-  static bool deleteCache = false;
+  static bool resetBrowserConfiguration = false;
 
   /** Print everything (stdout, stderr, usageLog) whenever we add to it */
   bool debugPrint = false;
@@ -105,6 +108,15 @@
     'ie10'
   ];
 
+  /// If [browserName] doesn't support Window.open, we use iframes instead.
+  static bool requiresIframe(String browserName) {
+    return !BROWSERS_WITH_WINDOW_SUPPORT.contains(browserName);
+  }
+
+  static bool requiresFocus(String browserName) {
+    return browserName == "safari";
+  }
+
   // TODO(kustermann): add standard support for chrome on android
   static bool supportedBrowser(String name) {
     return SUPPORTED_BROWSERS.contains(name);
@@ -269,6 +281,13 @@
 
   /** Starts the browser loading the given url */
   Future<bool> start(String url);
+
+  /// Called when the driver page is requested, that is, when the browser first
+  /// contacts the test server. At this time, it's safe to assume that the
+  /// browser process has started and opened its first window.
+  ///
+  /// This is used by [Safari] to ensure the browser window has focus.
+  Future<Null> onDriverPageRequested() => new Future<Null>.value();
 }
 
 class Safari extends Browser {
@@ -278,62 +297,48 @@
   static const String versionFile =
       "/Applications/Safari.app/Contents/version.plist";
 
-  /**
-   * Directories where safari stores state. We delete these if the deleteCache
-   * is set
-   */
-  static const List<String> CACHE_DIRECTORIES = const [
-    "Library/Caches/com.apple.Safari",
-    "Library/Safari",
-    "Library/Saved Application State/com.apple.Safari.savedState",
-    "Library/Caches/Metadata/Safari"
-  ];
+  static const String safariBundleLocation = "/Applications/Safari.app/";
 
-  Future<bool> allowPopUps() {
-    var command = "defaults";
-    var args = [
-      "write",
-      "com.apple.safari",
-      "com.apple.Safari.ContentPageGroupIdentifier."
-          "WebKit2JavaScriptCanOpenWindowsAutomatically",
-      "1"
-    ];
-    return Process.run(command, args).then((result) {
-      if (result.exitCode != 0) {
-        _logEvent("Could not disable pop-up blocking for safari");
-        return false;
-      }
-      return true;
-    });
-  }
-
-  Future<bool> deleteIfExists(Iterator<String> paths) {
-    if (!paths.moveNext()) return new Future.value(true);
-    Directory directory = new Directory(paths.current);
-    return directory.exists().then((exists) {
-      if (exists) {
-        _logEvent("Deleting ${paths.current}");
-        return directory
-            .delete(recursive: true)
-            .then((_) => deleteIfExists(paths))
-            .catchError((error) {
-          _logEvent("Failure trying to delete ${paths.current}: $error");
-          return false;
-        });
-      } else {
-        _logEvent("${paths.current} is not present");
-        return deleteIfExists(paths);
-      }
-    });
-  }
-
-  // Clears the cache if the static deleteCache flag is set.
+  // Clears the cache if the static resetBrowserConfiguration flag is set.
   // Returns false if the command to actually clear the cache did not complete.
-  Future<bool> clearCache() {
-    if (!Browser.deleteCache) return new Future.value(true);
-    var home = Platform.environment['HOME'];
-    Iterator iterator = CACHE_DIRECTORIES.map((s) => "$home/$s").iterator;
-    return deleteIfExists(iterator);
+  Future<bool> resetConfiguration() async {
+    if (!Browser.resetBrowserConfiguration) return true;
+
+    Completer completer = new Completer();
+    handleUncaughtError(error, StackTrace stackTrace) {
+      if (!completer.isCompleted) {
+        completer.completeError(error, stackTrace);
+      } else {
+        throw new AsyncError(error, stackTrace);
+      }
+    }
+    Zone parent = Zone.current;
+    ZoneSpecification specification = new ZoneSpecification(
+        print: (Zone self, ZoneDelegate delegate, Zone zone, String line) {
+          delegate.run(parent, () {
+            _logEvent(line);
+          });
+        });
+    Future zoneWrapper() {
+      Uri safariUri = Uri.base.resolve(safariBundleLocation);
+      return new Future(() => killAndResetSafari(bundle: safariUri))
+          .then(completer.complete);
+    }
+
+    // We run killAndResetSafari in a Zone as opposed to running an external
+    // process. The Zone allows us to collect its output, and protect the rest
+    // of the test infrastructure against errors in it.
+    runZoned(
+        zoneWrapper, zoneSpecification: specification,
+        onError: handleUncaughtError);
+
+    try {
+      await completer.future;
+      return true;
+    } catch (error, st) {
+      _logEvent("Unable to reset Safari: $error$st");
+      return false;
+    }
   }
 
   Future<String> getVersion() {
@@ -369,42 +374,58 @@
     });
   }
 
-  void _createLaunchHTML(var path, var url) {
+  Future<Null> _createLaunchHTML(var path, var url) async {
     var file = new File("${path}/launch.html");
-    var randomFile = file.openSync(mode: FileMode.WRITE);
+    var randomFile = await file.open(mode: FileMode.WRITE);
     var content = '<script language="JavaScript">location = "$url"</script>';
-    randomFile.writeStringSync(content);
-    randomFile.close();
+    await randomFile.writeString(content);
+    await randomFile.close();
   }
 
-  Future<bool> start(String url) {
+  Future<bool> start(String url) async {
     _logEvent("Starting Safari browser on: $url");
-    return allowPopUps().then((success) {
-      if (!success) {
-        return false;
-      }
-      return clearCache().then((cleared) {
-        if (!cleared) {
-          _logEvent("Could not clear cache");
-          return false;
-        }
-        // Get the version and log that.
-        return getVersion().then((version) {
-          _logEvent("Got version: $version");
-          return Directory.systemTemp.createTemp().then((userDir) {
-            _cleanup = () {
-              userDir.deleteSync(recursive: true);
-            };
-            _createLaunchHTML(userDir.path, url);
-            var args = ["${userDir.path}/launch.html"];
-            return startBrowserProcess(_binary, args);
-          });
-        }).catchError((error) {
-          _logEvent("Running $_binary --version failed with $error");
-          return false;
-        });
-      });
-    });
+    if (!await resetConfiguration()) {
+      _logEvent("Could not clear cache");
+      return false;
+    }
+    String version;
+    try {
+      version = await getVersion();
+    } catch (error) {
+      _logEvent("Running $_binary --version failed with $error");
+      return false;
+    }
+    _logEvent("Got version: $version");
+    Directory userDir;
+    try {
+      userDir = await Directory.systemTemp.createTemp();
+    } catch (error) {
+      _logEvent("Error creating temporary directory: $error");
+      return false;
+    }
+    _cleanup = () {
+      userDir.deleteSync(recursive: true);
+    };
+    try {
+      await _createLaunchHTML(userDir.path, url);
+    } catch (error) {
+      _logEvent("Error creating launch HTML: $error");
+      return false;
+    }
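+    // Run Safari under caffeinate to keep the system, display, and disk awake
+    // (-d, -i, -m, -s) and to declare the user active (-u) during the test.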
+    var args = [
+        "-d", "-i", "-m", "-s", "-u", _binary,
+        "${userDir.path}/launch.html"];
+    try {
+      return startBrowserProcess("/usr/bin/caffeinate", args);
+    } catch (error) {
+      _logEvent("Error starting browser process: $error");
+      return false;
+    }
+  }
+
+  Future<Null> onDriverPageRequested() async {
+    await Process.run("/usr/bin/osascript",
+        ['-e', 'tell application "Safari" to activate']);
   }
 
   String toString() => "Safari";
@@ -477,24 +498,43 @@
 class SafariMobileSimulator extends Safari {
   /**
    * Directories where safari simulator stores state. We delete these if the
-   * deleteCache is set
+   * resetBrowserConfiguration is set
    */
   static const List<String> CACHE_DIRECTORIES = const [
     "Library/Application Support/iPhone Simulator/7.1/Applications"
   ];
 
-  // Clears the cache if the static deleteCache flag is set.
+  // Helper function to delete many directories recursively.
+  Future<bool> deleteIfExists(Iterable<String> paths) async {
+    for (var path in paths) {
+      Directory directory = new Directory(path);
+      if (await directory.exists()) {
+        _logEvent("Deleting $path");
+        try {
+          await directory.delete(recursive: true);
+        } catch (error) {
+          _logEvent("Failure trying to delete $path: $error");
+          return false;
+        }
+      } else {
+        _logEvent("$path is not present");
+      }
+    }
+    return true;
+  }
+
+  // Clears the cache if the static resetBrowserConfiguration flag is set.
   // Returns false if the command to actually clear the cache did not complete.
-  Future<bool> clearCache() {
-    if (!Browser.deleteCache) return new Future.value(true);
+  Future<bool> resetConfiguration() {
+    if (!Browser.resetBrowserConfiguration) return new Future.value(true);
     var home = Platform.environment['HOME'];
-    Iterator iterator = CACHE_DIRECTORIES.map((s) => "$home/$s").iterator;
-    return deleteIfExists(iterator);
+    var paths = CACHE_DIRECTORIES.map((s) => "$home/$s");
+    return deleteIfExists(paths);
   }
 
   Future<bool> start(String url) {
     _logEvent("Starting safari mobile simulator browser on: $url");
-    return clearCache().then((success) {
+    return resetConfiguration().then((success) {
       if (!success) {
         _logEvent("Could not clear cache, exiting");
         return false;
@@ -561,9 +601,10 @@
     });
   }
 
-  // Clears the recovery cache if the static deleteCache flag is set.
-  Future<bool> clearCache() {
-    if (!Browser.deleteCache) return new Future.value(true);
+  // Clears the recovery cache if the static resetBrowserConfiguration flag is
+  // set.
+  Future<bool> resetConfiguration() {
+    if (!Browser.resetBrowserConfiguration) return new Future.value(true);
     var localAppData = Platform.environment['LOCALAPPDATA'];
 
     Directory dir = new Directory("$localAppData\\Microsoft\\"
@@ -578,7 +619,7 @@
 
   Future<bool> start(String url) {
     _logEvent("Starting ie browser on: $url");
-    return clearCache().then((_) => getVersion()).then((version) {
+    return resetConfiguration().then((_) => getVersion()).then((version) {
       _logEvent("Got version: $version");
       return startBrowserProcess(_binary, [url]);
     });
@@ -876,12 +917,12 @@
   static const Duration MIN_NONEMPTY_QUEUE_TIME = const Duration(seconds: 1);
 
   final Map configuration;
-  BrowserTestingServer testingServer;
+  final BrowserTestingServer testingServer;
 
   final String localIp;
-  String browserName;
+  final String browserName;
   int maxNumBrowsers;
-  bool checkedMode;
+  final bool checkedMode;
   int numBrowsers = 0;
   // Used to send back logs from the browser (start, stop etc)
   Function logger;
@@ -928,27 +969,23 @@
     if (_currentStartingBrowserId == id) _currentStartingBrowserId = null;
   }
 
-  // If [browserName] doesn't support opening new windows, we use new iframes
-  // instead.
-  bool get useIframe =>
-      !Browser.BROWSERS_WITH_WINDOW_SUPPORT.contains(browserName);
-
-  /// The optional testingServer parameter allows callers to pass in
-  /// a testing server with different behavior than the  default
-  /// BrowserTestServer. The url handlers of the testingServer are
-  /// overwritten, so an existing handler can't be shared between instances.
   BrowserTestRunner(
-      this.configuration, this.localIp, this.browserName, this.maxNumBrowsers,
-      {BrowserTestingServer this.testingServer}) {
-    checkedMode = configuration['checked'];
-    if (browserName == 'ff') browserName = 'firefox';
+      Map configuration,
+      String localIp,
+      String browserName,
+      this.maxNumBrowsers)
+      : configuration = configuration,
+        localIp = localIp,
+        browserName = (browserName == 'ff') ? 'firefox' : browserName,
+        checkedMode = configuration['checked'],
+        testingServer = new BrowserTestingServer(
+            configuration, localIp,
+            Browser.requiresIframe(browserName),
+            Browser.requiresFocus(browserName)) {
+    testingServer.testRunner = this;
   }
 
   Future start() async {
-    if (testingServer == null) {
-      testingServer =
-          new BrowserTestingServer(configuration, localIp, useIframe);
-    }
     await testingServer.start();
     testingServer
       ..testDoneCallBack = handleResults
@@ -1285,6 +1322,9 @@
   ///                                   test
 
   final String localIp;
+  final bool useIframe;
+  final bool requiresFocus;
+  BrowserTestRunner testRunner;
 
   static const String driverPath = "/driver";
   static const String nextTestPath = "/next_test";
@@ -1297,14 +1337,14 @@
   var testCount = 0;
   var errorReportingServer;
   bool underTermination = false;
-  bool useIframe = false;
 
   Function testDoneCallBack;
   Function testStatusUpdateCallBack;
   Function testStartedCallBack;
   Function nextTestCallBack;
 
-  BrowserTestingServer(this.configuration, this.localIp, this.useIframe);
+  BrowserTestingServer(
+      this.configuration, this.localIp, this.useIframe, this.requiresFocus);
 
   Future start() {
     var test_driver_error_port = configuration['test_driver_error_port'];
@@ -1366,29 +1406,41 @@
       handleStarted(request, browserId(request, startedPath), testId(request));
     });
 
-    makeSendPageHandler(String prefix) => (HttpRequest request) {
-          noCache(request);
-          var textResponse = "";
-          if (prefix == driverPath) {
-            textResponse = getDriverPage(browserId(request, prefix));
-            request.response.headers.set('Content-Type', 'text/html');
-          }
-          if (prefix == nextTestPath) {
-            textResponse = getNextTest(browserId(request, prefix));
-            request.response.headers.set('Content-Type', 'text/plain');
-          }
-          request.response.write(textResponse);
-          request.listen((_) {}, onDone: request.response.close);
-          request.response.done.catchError((error) {
-            if (!underTermination) {
-              print("URI ${request.uri}");
-              print("Textresponse $textResponse");
-              throw "Error returning content to browser: $error";
-            }
+    void sendPageHandler(HttpRequest request) {
+      // Do NOT make this method async. We need to call catchError below
+      // synchronously to avoid unhandled asynchronous errors.
+      noCache(request);
+      Future<String> textResponse;
+      if (request.uri.path.startsWith(driverPath)) {
+        textResponse = getDriverPage(browserId(request, driverPath));
+        request.response.headers.set('Content-Type', 'text/html');
+      } else if (request.uri.path.startsWith(nextTestPath)) {
+        textResponse = new Future<String>.value(
+            getNextTest(browserId(request, nextTestPath)));
+        request.response.headers.set('Content-Type', 'text/plain');
+      } else {
+        textResponse = new Future<String>.value("");
+      }
+      request.response.done.catchError((error) {
+        if (!underTermination) {
+          return textResponse.then((String text) {
+            print("URI ${request.uri}");
+            print("textResponse $textResponse");
+            throw "Error returning content to browser: $error";
           });
-        };
-    server.addHandler(driverPath, makeSendPageHandler(driverPath));
-    server.addHandler(nextTestPath, makeSendPageHandler(nextTestPath));
+        }
+      });
+      textResponse.then((String text) async {
+        request.response.write(text);
+        await request.listen(null).asFuture();
+        // Ignore the future returned by close(): it returns the 'done' future,
+        // which already has catchError installed above.
+        request.response.close();
+      });
+    }
+
+    server.addHandler(driverPath, sendPageHandler);
+    server.addHandler(nextTestPath, sendPageHandler);
   }
 
   void handleReport(HttpRequest request, String browserId, var testId,
@@ -1447,33 +1499,34 @@
     return "http://$localIp:$port/driver/$browserId";
   }
 
-  String getDriverPage(String browserId) {
+  Future<String> getDriverPage(String browserId) async {
+    await testRunner.browserStatus[browserId].browser.onDriverPageRequested();
     var errorReportingUrl =
         "http://$localIp:${errorReportingServer.port}/$browserId";
     String driverContent = """
 <!DOCTYPE html><html>
 <head>
-  <style>
-    body {
-      margin: 0;
-    }
-    .box {
-      overflow: hidden;
-      overflow-y: auto;
-      position: absolute;
-      left: 0;
-      right: 0;
-    }
-    .controller.box {
-      height: 75px;
-      top: 0;
-    }
-    .test.box {
-      top: 75px;
-      bottom: 0;
-    }
-  </style>
   <title>Driving page</title>
+  <style>
+.big-notice {
+  background-color: red;
+  color: white;
+  font-weight: bold;
+  font-size: xx-large;
+  text-align: center;
+}
+.controller.box {
+  white-space: nowrap;
+  overflow: scroll;
+  height: 6em;
+}
+body {
+  font-family: sans-serif;
+}
+body div {
+  padding-top: 10px;
+}
+  </style>
   <script type='text/javascript'>
     var STATUS_UPDATE_INTERVAL = 10000;
 
@@ -1603,7 +1656,8 @@
           embedded_iframe_div.removeChild(embedded_iframe);
           embedded_iframe = document.createElement('iframe');
           embedded_iframe.id = "embedded_iframe";
-          embedded_iframe.style="width:100%;height:100%";
+          embedded_iframe.width='800px';
+          embedded_iframe.height='600px';
           embedded_iframe_div.appendChild(embedded_iframe);
           embedded_iframe.src = url;
         } else {
@@ -1775,13 +1829,26 @@
   </script>
 </head>
   <body onload="startTesting()">
+
+    <div class='big-notice'>
+      Please keep this window in focus at all times.
+    </div>
+
+    <div>
+      Some browsers, Safari in particular, may pause JavaScript when not
+      visible to conserve power and CPU resources. In addition,
+      some tests of focus events will not work correctly if this window doesn't
+      have focus. It's also advisable to close any other programs that may open
+      modal dialogs, for example, Chrome with Calendar open.
+    </div>
+
     <div class="controller box">
     Dart test driver, number of tests: <span id="number"></span><br>
     Currently executing: <span id="currently_executing"></span><br>
     Unhandled error: <span id="unhandled_error"></span>
     </div>
     <div id="embedded_iframe_div" class="test box">
-      <iframe style="width:100%;height:100%;" id="embedded_iframe"></iframe>
+      <iframe id="embedded_iframe"></iframe>
     </div>
   </body>
 </html>
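
The rewritten sendPageHandler above depends on catchError being attached before the first await. A minimal standalone sketch, with hypothetical names and not taken from the patch, of why that ordering matters:

import 'dart:async';

Future<String> flakyFetch() => new Future<String>.error("connection reset");

main() async {
  Future<String> response = flakyFetch();
  // Attach the error handler synchronously, before any await. If it were
  // attached only after an await, the error could already have been reported
  // to the zone as an unhandled asynchronous error.
  Future<String> safe = response.catchError((error) {
    print("handled: $error");
    return "";
  });
  print(await safe); // Prints "handled: connection reset", then a blank line.
}
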
diff --git a/tools/testing/dart/compiler_configuration.dart b/tools/testing/dart/compiler_configuration.dart
index 398986a..76a0e55 100644
--- a/tools/testing/dart/compiler_configuration.dart
+++ b/tools/testing/dart/compiler_configuration.dart
@@ -53,6 +53,7 @@
     bool isCsp = configuration['csp'];
     bool useCps = configuration['cps_ir'];
     bool useBlobs = configuration['use_blobs'];
+    bool hotReload = configuration['hot_reload'];
 
     switch (compiler) {
       case 'dart2analyzer':
@@ -89,7 +90,8 @@
             isDebug: isDebug,
             isChecked: isChecked,
             isHostChecked: isHostChecked,
-            useSdk: useSdk);
+            useSdk: useSdk,
+            hotReload: hotReload);
       default:
         throw "Unknown compiler '$compiler'";
     }
@@ -149,13 +151,17 @@
 
 /// The "none" compiler.
 class NoneCompilerConfiguration extends CompilerConfiguration {
+  final bool hotReload;
+
   NoneCompilerConfiguration(
-      {bool isDebug, bool isChecked, bool isHostChecked, bool useSdk})
+      {bool isDebug, bool isChecked, bool isHostChecked, bool useSdk,
+       bool hotReload})
       : super._subclass(
             isDebug: isDebug,
             isChecked: isChecked,
             isHostChecked: isHostChecked,
-            useSdk: useSdk);
+            useSdk: useSdk),
+        this.hotReload = hotReload;
 
   bool get hasCompiler => false;
 
@@ -172,6 +178,13 @@
       args.add('--enable_asserts');
       args.add('--enable_type_checks');
     }
+    if (hotReload) {
+      args.add('--hot-reload-test-mode');
+      // Remove the following once known bugs with background compilation
+      // and OSR are fixed.
+      args.add('--no-background-compilation');
+      args.add('--no-osr');
+    }
     return args
       ..addAll(vmOptions)
       ..addAll(sharedOptions)
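
For reference, a minimal sketch (the helper name is hypothetical) of the extra VM flags that NoneCompilerConfiguration appends when test.py is invoked with --hot-reload:

List<String> hotReloadVmFlags(bool hotReload) {
  if (!hotReload) return const <String>[];
  return const <String>[
    '--hot-reload-test-mode',
    // Background compilation and OSR stay disabled until known bugs are fixed.
    '--no-background-compilation',
    '--no-osr',
  ];
}

main() {
  print(hotReloadVmFlags(true));
}
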
diff --git a/tools/testing/dart/reset_safari.dart b/tools/testing/dart/reset_safari.dart
new file mode 100644
index 0000000..e1886a5
--- /dev/null
+++ b/tools/testing/dart/reset_safari.dart
@@ -0,0 +1,227 @@
+// Copyright (c) 2016, the Dart project authors.  Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+
+/// Helper program for killing and resetting all Safari settings to a known
+/// state that works well for testing dart2js output in Safari.
+///
+/// Warning: this will delete all your Safari settings and bookmarks.
+library testing.reset_safari;
+
+import 'dart:async' show
+    Future,
+    Timer;
+
+import 'dart:io' show
+    Directory,
+    File,
+    Platform,
+    Process,
+    ProcessResult;
+
+const String defaultSafariBundleLocation = "/Applications/Safari.app/";
+
+const String relativeSafariLocation = "Contents/MacOS/Safari";
+
+const String lsofLocation = "/usr/sbin/lsof";
+
+const String killLocation = "/bin/kill";
+
+const String pkillLocation = "/usr/bin/pkill";
+
+const String safari = "com.apple.Safari";
+
+const String defaultsLocation = "/usr/bin/defaults";
+
+final List<String> safariSettings = <String>[
+    "Library/Caches/$safari",
+    "Library/Safari",
+    "Library/Saved Application State/$safari.savedState",
+    "Library/Caches/Metadata/Safari",
+    "Library/Preferences/$safari.plist",
+];
+
+const Duration defaultPollDelay = const Duration(milliseconds: 1);
+
+final String cpgi = "$safari.ContentPageGroupIdentifier";
+
+final String knownSafariPreference = '''
+{
+    DefaultBrowserPromptingState2 = 2;
+    StartPageViewControllerMode = 0;
+    TestDriveOriginBrowser = 1;
+    TestDriveUserDecision = 2;
+    TestDriveState = 3;
+    AlwaysRestoreSessionAtLaunch = 0;
+    NewTabBehavior = 1;
+    NewWindowBehavior = 1;
+    LastSafariVersionWithWelcomePage = "9.0";
+    OpenNewTabsInFront = 0;
+    TabCreationPolicy = 0;
+
+    IncludeDevelopMenu = 1;
+    WebKitDeveloperExtrasEnabledPreferenceKey = 1;
+    "$cpgi.WebKit2DeveloperExtrasEnabled" = 1;
+
+    AutoFillCreditCardData = 0;
+    AutoFillMiscellaneousForms = 0;
+    AutoFillPasswords = 0;
+
+    SuppressSearchSuggestions = 1;
+
+    PreloadTopHit = 0;
+    ShowFavoritesUnderSmartSearchField = 0;
+    WebsiteSpecificSearchEnabled = 0;
+
+    WarnAboutFraudulentWebsites = 0;
+
+
+    WebKitJavaScriptEnabled = 1;
+    "$cpgi.WebKit2JavaScriptEnabled" = 1;
+
+    WebKitJavaScriptCanOpenWindowsAutomatically = 1;
+    "$cpgi.WebKit2JavaScriptCanOpenWindowsAutomatically" = 1;
+
+    "$cpgi.WebKit2WebGLEnabled" = 1;
+    WebGLDefaultLoadPolicy = WebGLPolicyAllowNoSecurityRestrictions;
+
+    "$cpgi.WebKit2PluginsEnabled" = 0;
+
+    BlockStoragePolicy = 1;
+    WebKitStorageBlockingPolicy = 0;
+    "$cpgi.WebKit2StorageBlockingPolicy" = 0;
+
+
+    SafariGeolocationPermissionPolicy = 0;
+
+    CanPromptForPushNotifications = 0;
+
+    InstallExtensionUpdatesAutomatically = 0;
+
+    ShowFullURLInSmartSearchField = 1;
+
+    "$cpgi.WebKit2PlugInSnapshottingEnabled" = 0;
+}
+''';
+
+Future<Null> get pollDelay => new Future.delayed(defaultPollDelay);
+
+String signalArgument(
+    String defaultSignal,
+    {bool force: false,
+     bool testOnly: false}) {
+  if (force && testOnly) {
+    throw new ArgumentError("[force] and [testOnly] can't both be true.");
+  }
+  if (force) return "-KILL";
+  if (testOnly) return "-0";
+  return defaultSignal;
+}
+
+Future<int> kill(
+    List<String> pids,
+    {bool force: false,
+     bool testOnly: false}) async {
+  List<String> arguments =
+      <String>[signalArgument("-TERM", force: force, testOnly: testOnly)]
+      ..addAll(pids);
+  ProcessResult result = await Process.run(killLocation, arguments);
+  return result.exitCode;
+}
+
+Future<int> pkill(
+    String pattern,
+    {bool force: false,
+     bool testOnly: false}) async {
+  List<String> arguments = <String>[
+      signalArgument("-HUP", force: force, testOnly: testOnly),
+      pattern];
+  ProcessResult result = await Process.run(pkillLocation, arguments);
+  return result.exitCode;
+}
+
+Uri validatedBundleName(Uri bundle) {
+  if (bundle == null) return Uri.base.resolve(defaultSafariBundleLocation);
+  if (!bundle.path.endsWith("/")) {
+    throw new ArgumentError(
+        "Bundle ('$bundle') must end with a slash ('/').");
+  }
+  return bundle;
+}
+
+Future<Null> killSafari({Uri bundle}) async {
+  bundle = validatedBundleName(bundle);
+  Uri safariBinary = bundle.resolve(relativeSafariLocation);
+  ProcessResult result = await Process.run(
+      lsofLocation, ["-t", safariBinary.toFilePath()]);
+  if (result.exitCode == 0) {
+    String stdout = result.stdout;
+    List<String> pids = new List<String>.from(
+        stdout.split("\n").where((String line) => line.isNotEmpty));
+    Timer timer = new Timer(const Duration(seconds: 10), () {
+      print("Kill -9 Safari $pids");
+      kill(pids, force: true);
+    });
+    int exitCode = await kill(pids);
+    while (exitCode == 0) {
+      await pollDelay;
+      print("Polling Safari $pids");
+      exitCode = await kill(pids, testOnly: true);
+    }
+    timer.cancel();
+  }
+  Timer timer = new Timer(const Duration(seconds: 10), () {
+    print("Kill -9 $safari");
+    pkill(safari, force: true);
+  });
+  int exitCode = await pkill(safari);
+  while (exitCode == 0) {
+    await pollDelay;
+    print("Polling $safari");
+    exitCode = await pkill(safari, testOnly: true);
+  }
+  timer.cancel();
+}
+
+Future<Null> deleteIfExists(Uri uri) async {
+  Directory directory = new Directory.fromUri(uri);
+  if (await directory.exists()) {
+    print("Deleting directory '$uri'.");
+    await directory.delete(recursive: true);
+  } else {
+    File file = new File.fromUri(uri);
+    if (await file.exists()) {
+      print("Deleting file '$uri'.");
+      await file.delete();
+    } else {
+      print("File '$uri' not found.");
+    }
+  }
+}
+
+Future<Null> resetSafariSettings() async {
+  String home = Platform.environment["HOME"];
+  if (!home.endsWith("/")) {
+    home = "$home/";
+  }
+  Uri homeDirectory = Uri.base.resolve(home);
+  for (String setting in safariSettings) {
+    await deleteIfExists(homeDirectory.resolve(setting));
+  }
+  ProcessResult result = await Process.run(
+      defaultsLocation, <String>["write", safari, knownSafariPreference]);
+  if (result.exitCode != 0) {
+    throw "Unable to reset Safari settings: ${result.stdout}${result.stderr}";
+  }
+}
+
+Future<Null> killAndResetSafari({Uri bundle}) async {
+  bundle = validatedBundleName(bundle);
+  await killSafari(bundle: bundle);
+  await resetSafariSettings();
+}
+
+Future<Null> main() async {
+  await killAndResetSafari();
+}
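
A usage sketch for the new helper; the import path below is an assumption about where the caller lives. The file can also be run directly with "dart tools/testing/dart/reset_safari.dart", since it defines its own main.

import 'reset_safari.dart' show killAndResetSafari;

main() async {
  // validatedBundleName requires the bundle URI to end with a slash.
  await killAndResetSafari(bundle: Uri.parse("/Applications/Safari.app/"));
}
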
diff --git a/tools/testing/dart/status_file_parser.dart b/tools/testing/dart/status_file_parser.dart
index 8fa9bb7..c4be801 100644
--- a/tools/testing/dart/status_file_parser.dart
+++ b/tools/testing/dart/status_file_parser.dart
@@ -35,6 +35,11 @@
   static Expectation SKIP_SLOW = byName('SkipSlow');
   static Expectation SKIP_BY_DESIGN = byName('SkipByDesign');
 
+  // Can be returned by the test runner to say the result should be ignored,
+  // and assumed to meet the expectations, due to an infrastructure failure.
+  // Do not place in status files.
+  static Expectation IGNORE = byName('Ignore');
+
   static Expectation byName(String name) {
     _initialize();
     name = name.toLowerCase();
@@ -77,6 +82,7 @@
       build("SkipSlow", group: skip, isMetaExpectation: true);
       build("Ok", isMetaExpectation: true);
       build("Slow", isMetaExpectation: true);
+      build("Ignore");
     }
   }
 
@@ -94,6 +100,7 @@
 
   bool canBeOutcomeOf(Expectation expectation) {
     Expectation outcome = this;
+    if (outcome == IGNORE) return true;
     while (outcome != null) {
       if (outcome == expectation) {
         return true;
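
A simplified, string-based sketch of the short-circuit added to canBeOutcomeOf (this is not the real Expectation class, and it drops the expectation-group walk): an Ignore outcome satisfies any expectation, so ignored results never surface as failures.

bool canBeOutcomeOf(String outcome, String expectation) {
  // Mirrors the added check: Ignore is accepted against every expectation.
  if (outcome == 'Ignore') return true;
  return outcome == expectation;
}

main() {
  print(canBeOutcomeOf('Ignore', 'Pass')); // true: the result is ignored.
  print(canBeOutcomeOf('Fail', 'Pass'));   // false: a real failure.
}
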
diff --git a/tools/testing/dart/test_configurations.dart b/tools/testing/dart/test_configurations.dart
index f8cd209..ca3bfbe 100644
--- a/tools/testing/dart/test_configurations.dart
+++ b/tools/testing/dart/test_configurations.dart
@@ -31,6 +31,7 @@
   new Path('third_party/pkg_tested'),
   new Path('runtime/tests/vm'),
   new Path('runtime/observatory/tests/service'),
+  new Path('runtime/observatory/tests/observatory_ui'),
   new Path('samples'),
   new Path('samples-dev'),
   new Path('tests/benchmark_smoke'),
@@ -56,9 +57,6 @@
   var firstConf = configurations[0];
   var maxProcesses = firstConf['tasks'];
   var progressIndicator = firstConf['progress'];
-  // TODO(kustermann): Remove this option once the buildbots don't use it
-  // anymore.
-  var failureSummary = firstConf['failure-summary'];
   BuildbotProgressIndicator.stepName = firstConf['step_name'];
   var verbose = firstConf['verbose'];
   var printTiming = firstConf['time'];
@@ -69,7 +67,7 @@
   var recordingPath = firstConf['record_to_file'];
   var recordingOutputPath = firstConf['replay_from_file'];
 
-  Browser.deleteCache = firstConf['clear_browser_cache'];
+  Browser.resetBrowserConfiguration = firstConf['reset_browser_configuration'];
 
   if (recordingPath != null && recordingOutputPath != null) {
     print("Fatal: Can't have the '--record_to_file' and '--replay_from_file'"
@@ -183,6 +181,13 @@
       var suite_path = new Path(conf['suite_dir']);
       testSuites.add(new PKGTestSuite(conf, suite_path));
     } else {
+      for (final testSuiteDir in TEST_SUITE_DIRECTORIES) {
+        final name = testSuiteDir.filename;
+        if (selectors.containsKey(name)) {
+          testSuites
+              .add(new StandardTestSuite.forDirectory(conf, testSuiteDir));
+        }
+      }
       for (String key in selectors.keys) {
         if (key == 'co19') {
           testSuites.add(new Co19TestSuite(conf));
@@ -214,14 +219,6 @@
               new PkgBuildTestSuite(conf, 'pkgbuild', 'pkg/pkgbuild.status'));
         }
       }
-
-      for (final testSuiteDir in TEST_SUITE_DIRECTORIES) {
-        final name = testSuiteDir.filename;
-        if (selectors.containsKey(name)) {
-          testSuites
-              .add(new StandardTestSuite.forDirectory(conf, testSuiteDir));
-        }
-      }
     }
   }
 
@@ -280,6 +277,7 @@
     eventListener.add(new SummaryPrinter(jsonOnly: reportInJson));
   } else {
     eventListener.add(new ExitCodeSetter());
+    eventListener.add(new IgnoredTestMonitor());
   }
 
   // If any of the configurations need to access android devices we'll first
diff --git a/tools/testing/dart/test_options.dart b/tools/testing/dart/test_options.dart
index c5c6e34..da2c5d0 100644
--- a/tools/testing/dart/test_options.dart
+++ b/tools/testing/dart/test_options.dart
@@ -25,7 +25,8 @@
   'lib',
   'pkg',
   'analyze_library',
-  'service'
+  'service',
+  'observatory_ui'
 ];
 
 /**
@@ -186,8 +187,12 @@
           'noopt', 'Run an in-place precompilation', ['--noopt'], [], false,
           type: 'bool'),
       new _TestOptionSpecification(
-          'use_blobs', 'Use mmap instead of shared libraries for precompilation', ['--use-blobs'], [], false,
-          type: 'bool'),
+          'hot_reload', 'Run hot reload stress tests', ['--hot-reload'], [],
+          false, type: 'bool'),
+      new _TestOptionSpecification(
+          'use_blobs',
+          'Use mmap instead of shared libraries for precompilation',
+          ['--use-blobs'], [], false, type: 'bool'),
       new _TestOptionSpecification(
           'timeout', 'Timeout in seconds', ['-t', '--timeout'], [], -1,
           type: 'int'),
@@ -338,9 +343,11 @@
           false,
           type: 'bool'),
       new _TestOptionSpecification(
-          'clear_browser_cache',
-          'Browser specific clearing of caches(i.e., delete it).',
-          ['--clear_browser_cache'],
+          'reset_browser_configuration',
+          'Browser-specific reset of configuration. '
+          'WARNING: Using this option may remove your bookmarks and '
+          'other settings.',
+          ['--reset-browser-configuration'],
           [],
           false,
           type: 'bool'),
diff --git a/tools/testing/dart/test_progress.dart b/tools/testing/dart/test_progress.dart
index 3fdfbf3..b93294b 100644
--- a/tools/testing/dart/test_progress.dart
+++ b/tools/testing/dart/test_progress.dart
@@ -174,6 +174,31 @@
   }
 }
 
+class IgnoredTestMonitor extends EventListener {
+  static final int maxIgnored = 5;
+
+  int countIgnored = 0;
+
+  void done(TestCase test) {
+    if (test.lastCommandOutput.result(test) == Expectation.IGNORE) {
+      countIgnored++;
+      if (countIgnored > maxIgnored) {
+        print("\nMore than $maxIgnored tests were ignored due to flakes in");
+        print("the test infrastructure. Notify whesse@google.com.");
+        print("Output of the last ignored test was:");
+        print(_buildFailureOutput(test));
+        exit(1);
+      }
+    }
+  }
+
+  void allDone() {
+    if (countIgnored > 0) {
+      print("Ignored $countIgnored tests due to flaky infrastructure");
+    }
+  }
+}
+
 class FlakyLogWriter extends EventListener {
   void done(TestCase test) {
     if (test.isFlaky && test.result != Expectation.PASS) {
diff --git a/tools/testing/dart/test_runner.dart b/tools/testing/dart/test_runner.dart
index e9fed49..132d0c6 100644
--- a/tools/testing/dart/test_runner.dart
+++ b/tools/testing/dart/test_runner.dart
@@ -18,6 +18,9 @@
 // CommandOutput.exitCode in subclasses of CommandOutput.
 import "dart:io" as io;
 import "dart:math" as math;
+
+import 'package:yaml/yaml.dart';
+
 import 'android.dart';
 import 'dependency_graph.dart' as dgraph;
 import "browser_controller.dart";
@@ -352,9 +355,7 @@
   final bool useBlobs;
 
   AdbPrecompilationCommand._(this.precompiledRunnerFilename,
-                             this.precompiledTestDirectory,
-                             this.arguments,
-                             this.useBlobs)
+      this.precompiledTestDirectory, this.arguments, this.useBlobs)
       : super._("adb_precompilation");
 
   void _buildHashCode(HashCodeBuilder builder) {
@@ -373,7 +374,7 @@
       precompiledTestDirectory == other.precompiledTestDirectory;
 
   String toString() => 'Steps to push precompiled runner and precompiled code '
-                       'to an attached device. Uses (and requires) adb.';
+      'to an attached device. Uses (and requires) adb.';
 }
 
 class JSCommandlineCommand extends ProcessCommand {
@@ -470,6 +471,26 @@
     assert(_destinationFile.endsWith("pubspec.yaml"));
   }
 
+  static Map<String, Map> _filterOverrides(
+      String pubspec, Map<String, Map> overrides) {
+    if (overrides.isEmpty) return overrides;
+    var yaml = loadYaml(pubspec);
+    var deps = yaml['dependencies'];
+    var filteredOverrides = <String, Map>{};
+    if (deps != null) {
+      for (var d in deps.keys) {
+        if (!overrides.containsKey(d)) {
+          // pub depends on compiler_unsupported instead of compiler
+          // The dependency is so hackish that we currently ignore it here.
+          if (d == 'compiler_unsupported') continue;
+          throw "Repo doesn't have package $d used in $pubspec";
+        }
+        filteredOverrides[d] = overrides[d];
+      }
+    }
+    return filteredOverrides;
+  }
+
   String get reproductionCommand =>
       "Adding necessary dependency overrides to '$_pubspecYamlFile' "
       "(destination = $_destinationFile).";
@@ -485,12 +506,13 @@
     var destinationFile = new io.File(_destinationFile);
     var lockfile = new io.File(pubspecLockFile);
     return file.readAsString().then((String yamlString) {
+      var overrides = _filterOverrides(yamlString, _dependencyOverrides);
       var dependencyOverrideSection = new StringBuffer();
       if (_dependencyOverrides.isNotEmpty) {
         dependencyOverrideSection.write("\n"
             "# This section was autogenerated by test.py!\n"
             "dependency_overrides:\n");
-        _dependencyOverrides.forEach((String packageName, Map override) {
+        overrides.forEach((String packageName, Map override) {
           dependencyOverrideSection.write("  $packageName:\n");
           override.forEach((overrideKey, overrideValue) {
             dependencyOverrideSection
@@ -643,9 +665,7 @@
   }
 
   AdbPrecompilationCommand getAdbPrecompiledCommand(String precompiledRunner,
-                                                    String testDirectory,
-                                                    List<String> arguments,
-                                                    bool useBlobs) {
+      String testDirectory, List<String> arguments, bool useBlobs) {
     var command = new AdbPrecompilationCommand._(
         precompiledRunner, testDirectory, arguments, useBlobs);
     return _getUniqueCommand(command);
@@ -897,8 +917,9 @@
 /**
  * CommandOutput records the output of a completed command: the process's exit
  * code, the standard output and standard error, whether the process timed out,
- * and the time the process took to run.  It also contains a pointer to the
- * [TestCase] this is the output of.
+ * and the time the process took to run.  It does not contain a pointer to the
+ * [TestCase] this is the output of, so some functions require the test case
+ * to be passed as an argument.
  */
 abstract class CommandOutput {
   Command get command;
@@ -950,7 +971,6 @@
    */
   bool alreadyPrintedWarning = false;
 
-  // TODO(kustermann): Remove testCase from this class.
   CommandOutputImpl(
       Command this.command,
       int this.exitCode,
@@ -1016,7 +1036,7 @@
   Expectation _negateOutcomeIfNegativeTest(
       Expectation outcome, bool isNegative) {
     if (!isNegative) return outcome;
-
+    if (outcome == Expectation.IGNORE) return outcome;
     if (outcome.canBeOutcomeOf(Expectation.FAIL)) {
       return Expectation.PASS;
     }
@@ -1030,26 +1050,48 @@
   // See: http://dartbug.com/15139.
   static int WHITELISTED_CONTENTSHELL_EXITCODE = -1073740022;
   static bool isWindows = io.Platform.operatingSystem == 'windows';
+  static bool _failedBecauseOfFlakyInfrastructure(List<int> stderrBytes) {
+    // If the browser test failed, it may have been because content shell
+    // and the virtual framebuffer X server didn't hook up, or it crashed with
+    // a core dump. Sometimes content shell crashes after it has set the stdout
+    // to PASS, so we have to do this check first.
+    // Content shell also fails with a broken pipe message: Issue 26739
+    var zygoteCrash =
+        new RegExp(r"ERROR:zygote_linux\.cc\(\d+\)] write: Broken pipe");
+    var stderr = decodeUtf8(stderrBytes);
+    // TODO(whesse): Issue: 7564
+    // This may not be happening anymore.  Test by removing this suppression.
+    if (stderr.contains(MESSAGE_CANNOT_OPEN_DISPLAY) ||
+        stderr.contains(MESSAGE_FAILED_TO_RUN_COMMAND)) {
+      DebugLogger.warning(
+          "Warning: Failure because of missing XDisplay. Test ignored");
+      return true;
+    }
+    // Issue 26739
+    if (zygoteCrash.hasMatch(stderr)) {
+      DebugLogger.warning("Warning: Failure because of content_shell "
+          "zygote crash. Test ignored");
+      return true;
+    }
+    return false;
+  }
 
-  bool _failedBecauseOfMissingXDisplay;
+  bool _infraFailure;
 
   BrowserCommandOutputImpl(
       command, exitCode, timedOut, stdout, stderr, time, compilationSkipped)
       : super(command, exitCode, timedOut, stdout, stderr, time,
-            compilationSkipped, 0) {
-    _failedBecauseOfMissingXDisplay = _didFailBecauseOfMissingXDisplay();
-    if (_failedBecauseOfMissingXDisplay) {
-      DebugLogger.warning("Warning: Test failure because of missing XDisplay");
-      // If we get the X server error, or DRT crashes with a core dump, retry
-      // the test.
-    }
-  }
+            compilationSkipped, 0) {
+    _infraFailure = _failedBecauseOfFlakyInfrastructure(stderr);
+  }
 
   Expectation result(TestCase testCase) {
     // Handle crashes and timeouts first
     if (hasCrashed) return Expectation.CRASH;
     if (hasTimedOut) return Expectation.TIMEOUT;
 
+    if (_infraFailure) {
+      return Expectation.IGNORE;
+    }
     var outcome = _getOutcome();
 
     if (testCase.hasRuntimeError) {
@@ -1067,8 +1109,8 @@
   bool get successful => canRunDependendCommands;
 
   bool get canRunDependendCommands {
-    // We cannot rely on the exit code of content_shell as a method to determine
-    // if we were successful or not.
+    // We cannot rely on the exit code of content_shell as a method to
+    // determine if we were successful or not.
     return super.canRunDependendCommands && !didFail(null);
   }
 
@@ -1077,34 +1119,12 @@
   }
 
   Expectation _getOutcome() {
-    if (_failedBecauseOfMissingXDisplay) {
-      return Expectation.FAIL;
-    }
-
     if (_browserTestFailure) {
       return Expectation.RUNTIME_ERROR;
     }
     return Expectation.PASS;
   }
 
-  bool _didFailBecauseOfMissingXDisplay() {
-    // Browser case:
-    // If the browser test failed, it may have been because content shell
-    // and the virtual framebuffer X server didn't hook up, or it crashed with
-    // a core dump. Sometimes content shell crashes after it has set the stdout
-    // to PASS, so we have to do this check first.
-    var stderrLines = decodeUtf8(super.stderr).split("\n");
-    for (String line in stderrLines) {
-      // TODO(kustermann,ricow): Issue: 7564
-      // This seems to happen quite frequently, we need to figure out why.
-      if (line.contains(MESSAGE_CANNOT_OPEN_DISPLAY) ||
-          line.contains(MESSAGE_FAILED_TO_RUN_COMMAND)) {
-        return true;
-      }
-    }
-    return false;
-  }
-
   bool get _rendererCrashed =>
       decodeUtf8(super.stdout).contains("#CRASHED - rendere");
 
@@ -1310,13 +1330,6 @@
 
   factory BrowserControllerTestOutcome(
       Command command, BrowserTestOutput result) {
-    void validate(String assertion, bool value) {
-      if (!value) {
-        throw "InvalidFormat sent from browser driving page: $assertion:\n\n"
-            "${result.lastKnownMessage}";
-      }
-    }
-
     String indent(String string, int numSpaces) {
       var spaces = new List.filled(numSpaces, ' ').join('');
       return string
@@ -1350,11 +1363,7 @@
         stderr = "This test timed out. The delay until the test actually "
             "started was: ${result.delayUntilTestStarted}.";
       } else {
-        // TODO(ricow/kustermann) as soon as we record the state periodically,
-        // we will have more information and can remove this warning.
-        stderr = "This test has not notified test.py that it started running. "
-            "This could be a bug in test.py! "
-            "Please contact ricow/whesse";
+        stderr = "This test has not notified test.py that it started running.";
       }
     }
 
@@ -1559,7 +1568,15 @@
   Expectation result(TestCase testCase) {
     // Handle general crash/timeout detection.
     if (hasCrashed) return Expectation.CRASH;
-    if (hasTimedOut) return Expectation.TIMEOUT;
+    if (hasTimedOut) {
+      bool isWindows = io.Platform.operatingSystem == 'windows';
+      bool isBrowserTestCase =
+          testCase.commands.any((command) => command is BrowserTestCommand);
+      // TODO(26060) Dart2js batch mode hangs on Windows under heavy load.
+      return (isWindows && isBrowserTestCase)
+          ? Expectation.IGNORE
+          : Expectation.TIMEOUT;
+    }
 
     // Handle dart2js specific crash detection
     if (exitCode == DART2JS_EXITCODE_CRASH ||
@@ -2509,8 +2526,8 @@
     } else if (command is AdbPrecompilationCommand) {
       assert(adbDevicePool != null);
       return adbDevicePool.acquireDevice().then((AdbDevice device) {
-        return _runAdbPrecompilationCommand(
-            device, command, timeout).whenComplete(() {
+        return _runAdbPrecompilationCommand(device, command, timeout)
+            .whenComplete(() {
           adbDevicePool.releaseDevice(device);
         });
       });
@@ -2541,35 +2558,36 @@
     // All closures are of type "Future<AdbCommandResult> run()"
     List<Function> steps = [];
 
-    steps.add(() => device.runAdbShellCommand(
-          ['rm', '-Rf', deviceTestDir]));
-    steps.add(() => device.runAdbShellCommand(
-          ['mkdir', '-p', deviceTestDir]));
+    steps.add(() => device.runAdbShellCommand(['rm', '-Rf', deviceTestDir]));
+    steps.add(() => device.runAdbShellCommand(['mkdir', '-p', deviceTestDir]));
     // TODO: We should find a way for us to cache the runner binary and avoid
     // pushhing it for every single test (this is bad for SSD cycle time, test
     // timing).
     steps.add(() => device.runAdbCommand(
-          ['push', runner, '$devicedir/dart_precompiled_runtime']));
+        ['push', runner, '$devicedir/dart_precompiled_runtime']));
     steps.add(() => device.runAdbShellCommand(
-          ['chmod', '777', '$devicedir/dart_precompiled_runtime']));
+        ['chmod', '777', '$devicedir/dart_precompiled_runtime']));
 
     for (var file in files) {
-      steps.add(() => device.runAdbCommand(
-            ['push', '$testdir/$file', '$deviceTestDir/$file']));
+      steps.add(() => device
+          .runAdbCommand(['push', '$testdir/$file', '$deviceTestDir/$file']));
     }
 
     if (command.useBlobs) {
       steps.add(() => device.runAdbShellCommand(
-            ['$devicedir/dart_precompiled_runtime',
-             '--run-app-snapshot=$deviceTestDir',
-             '--use-blobs']..addAll(arguments),
-            timeout: timeoutDuration));
+          [
+            '$devicedir/dart_precompiled_runtime',
+            '--run-app-snapshot=$deviceTestDir',
+            '--use-blobs'
+          ]..addAll(arguments),
+          timeout: timeoutDuration));
     } else {
       steps.add(() => device.runAdbShellCommand(
-            ['$devicedir/dart_precompiled_runtime',
-             '--run-app-snapshot=$deviceTestDir'
-             ]..addAll(arguments),
-            timeout: timeoutDuration));
+          [
+            '$devicedir/dart_precompiled_runtime',
+            '--run-app-snapshot=$deviceTestDir'
+          ]..addAll(arguments),
+          timeout: timeoutDuration));
     }
 
     var stopwatch = new Stopwatch()..start();
@@ -2599,9 +2617,8 @@
       // immediately.
       if (result.exitCode != 0) break;
     }
-    return createCommandOutput(
-        command, result.exitCode, result.timedOut, UTF8.encode('$writer'),
-        [], stopwatch.elapsed, false);
+    return createCommandOutput(command, result.exitCode, result.timedOut,
+        UTF8.encode('$writer'), [], stopwatch.elapsed, false);
   }
 
   BatchRunnerProcess _getBatchRunner(String identifier) {
@@ -2957,8 +2974,8 @@
         executor = new ReplayingCommandExecutor(new Path(recordedInputFile));
       } else {
         executor = new CommandExecutorImpl(
-            _globalConfiguration, maxProcesses,
-            maxBrowserProcesses, adbDevicePool: adbDevicePool);
+            _globalConfiguration, maxProcesses, maxBrowserProcesses,
+            adbDevicePool: adbDevicePool);
       }
 
       // Run "runnable commands" using [executor] subject to
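
A small standalone sketch of how the zygote-crash pattern introduced in _failedBecauseOfFlakyInfrastructure classifies stderr; the sample strings below are made up for illustration.

main() {
  var zygoteCrash =
      new RegExp(r"ERROR:zygote_linux\.cc\(\d+\)] write: Broken pipe");
  var flakyStderr = "ERROR:zygote_linux.cc(584)] write: Broken pipe";
  var realFailure = "Unhandled exception: Expect.equals failed";
  print(zygoteCrash.hasMatch(flakyStderr)); // true  -> Expectation.IGNORE
  print(zygoteCrash.hasMatch(realFailure)); // false -> normal outcome handling
}
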
diff --git a/tools/testing/dart/test_suite.dart b/tools/testing/dart/test_suite.dart
index 8bc8e9a..c73dc7c 100644
--- a/tools/testing/dart/test_suite.dart
+++ b/tools/testing/dart/test_suite.dart
@@ -437,6 +437,7 @@
      *  dart/
      *      pkg/PACKAGE_NAME
      *      third_party/pkg/PACKAGE_NAME
+     *      third_party/pkg_tested/PACKAGE_NAME
      *      runtime/observatory/PACKAGE_NAME
      *      sdk/lib/_internal/PACKAGE_NAME
      */
@@ -450,6 +451,7 @@
     var futures = [
       listDir(dartDir.append('pkg'), isValid),
       listDir(dartDir.append('third_party').append('pkg'), isValid),
+      listDir(dartDir.append('third_party').append('pkg_tested'), isValid),
       listDir(dartDir.append('runtime').append('observatory'), isValid),
       listDir(dartDir.append('sdk').append('lib').append('_internal'), isValid),
     ];
@@ -1336,6 +1338,14 @@
         contentShellOptions.add('--no-timeout');
         contentShellOptions.add('--dump-render-tree');
 
+        // Disable the GPU under Linux and Dartium. If the GPU is enabled,
+        // Chrome may send a termination signal to a test: the test will be
+        // terminated if the machine (bot) doesn't have a GPU, or if the test
+        // is still running after a certain period of time.
+        if (configuration['system'] == 'linux' &&
+            configuration['runtime'] == 'drt') {
+          contentShellOptions.add('--disable-gpu');
+        }
         if (compiler == 'none') {
           dartFlags.add('--ignore-unrecognized-flags');
           if (configuration["checked"]) {
@@ -2235,12 +2245,10 @@
   static String outputDir(Map configuration) {
     var result = '';
     var system = configuration['system'];
-    if (system == 'linux' || system == 'android') {
+    if (system == 'linux' || system == 'android' || system == 'windows') {
       result = 'out/';
     } else if (system == 'macos') {
       result = 'xcodebuild/';
-    } else if (system == 'windows') {
-      result = 'build/';
     } else {
       throw new Exception('Unknown operating system: "$system"');
     }
@@ -2386,6 +2394,8 @@
   static List<String> getExtraVmOptions(Map configuration) =>
       getExtraOptions(configuration, 'vm_options');
 
+  static int shortNameCounter = 0;  // Make unique short file names on Windows.
+
   static String getShortName(String path) {
     final PATH_REPLACEMENTS = const {
       "pkg_polymer_e2e_test_bad_import_test": "polymer_bi",
@@ -2449,6 +2459,7 @@
     }
     path = path.replaceAll('/', '_');
     final int WINDOWS_SHORTEN_PATH_LIMIT = 58;
+    final int WINDOWS_PATH_END_LENGTH = 30;
     if (Platform.operatingSystem == 'windows' &&
         path.length > WINDOWS_SHORTEN_PATH_LIMIT) {
       for (var key in PATH_REPLACEMENTS.keys) {
@@ -2457,6 +2468,11 @@
           break;
         }
       }
+      if (path.length > WINDOWS_SHORTEN_PATH_LIMIT) {
+        ++shortNameCounter;
+        var pathEnd = path.substring(path.length - WINDOWS_PATH_END_LENGTH);
+        path = "short${shortNameCounter}_$pathEnd";
+      }
     }
     return path;
   }
diff --git a/tools/utils.py b/tools/utils.py
index c4be5af..496b499 100644
--- a/tools/utils.py
+++ b/tools/utils.py
@@ -221,7 +221,7 @@
 
 # Mapping table between OS and build output location.
 BUILD_ROOT = {
-  'win32': os.path.join('build'),
+  'win32': os.path.join('out'),
   'linux': os.path.join('out'),
   'freebsd': os.path.join('out'),
   'macos': os.path.join('xcodebuild'),
@@ -268,7 +268,7 @@
           (target_os != GuessOS()))
 
 def GetBuildConf(mode, arch, conf_os=None):
-  if conf_os == 'android':
+  if conf_os == 'android' or conf_os == 'fuchsia':
     return '%s%s%s' % (GetBuildMode(mode), conf_os.title(), arch.upper())
   else:
     # Ask for a cross build if the host and target architectures don't match.